Commit b6641db6 authored by asargent@chromium.org

Fix several problems with the content verification code

-While we're doing the initial scan of files to build up
 computed_hashes.json, if we notice a mismatch, report it right
 away. This also fixes the problem that if you modified or deleted
 computed_hashes.json, enforcement would never kick in. (A condensed
 sketch of this scan appears below.)
-Avoid an infinite loop in ContentVerifyJob by making sure we stop
 checking newly read bytes after we've already reported a failure.
 (We were stuck failing to advance to the next block after reporting
 a failure; the second sketch below shows the fix.)
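
For illustration, a minimal standalone sketch of the first fix, with
hypothetical names (the real logic is in ContentHashFetcherJob::CreateHashes
in the diff below, where the roots come from verified_contents.json and the
hash is a SHA-256 block tree, not the stand-in used here): while building up
the computed hashes, compare each file against its signed root and record
mismatches immediately.

    #include <functional>
    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    // Stand-in for the signed tree-hash roots parsed from verified_contents.json.
    using ExpectedRoots = std::map<std::string, std::string>;

    // Stand-in for ComputeTreeHashRoot(); the real code hashes block-sized
    // chunks with SHA-256 and folds them into a tree root.
    std::string ComputeRootStandIn(const std::string& contents) {
      return std::to_string(std::hash<std::string>{}(contents));
    }

    // Returns the relative paths whose contents don't match their signed
    // roots. Files with no signed entry are skipped, as in the real scan.
    std::set<std::string> ScanForMismatches(
        const std::map<std::string, std::string>& files,  // path -> contents
        const ExpectedRoots& expected) {
      std::set<std::string> mismatches;
      for (const auto& entry : files) {
        auto it = expected.find(entry.first);
        if (it == expected.end())
          continue;  // No signed hash for this file; nothing to enforce.
        if (ComputeRootStandIn(entry.second) != it->second)
          mismatches.insert(entry.first);  // Report during the scan itself.
      }
      return mismatches;
    }

    int main() {
      ExpectedRoots expected = {{"background.js", ComputeRootStandIn("original")}};
      std::map<std::string, std::string> files = {{"background.js", "tampered"}};
      for (const auto& path : ScanForMismatches(files, expected))
        std::cout << "mismatch: " << path << "\n";
      return 0;
    }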
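
And a minimal standalone sketch of the infinite-loop fix (hypothetical class;
the real one is ContentVerifyJob, and the stand-in below skips the actual
SHA-256): a failed_ flag makes BytesRead() a no-op once a failure has been
reported, and FinishBlock() returns bool so callers stop instead of
re-checking a block that never advances.

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <string>
    #include <utility>
    #include <vector>

    class VerifyJobSketch {
     public:
      VerifyJobSketch(std::vector<std::string> expected_blocks, size_t block_size)
          : expected_(std::move(expected_blocks)), block_size_(block_size) {}

      void BytesRead(const std::string& data) {
        if (failed_)
          return;  // The fix: ignore further bytes once a failure was reported.
        for (size_t offset = 0; offset < data.size();) {
          size_t take =
              std::min(block_size_ - buffer_.size(), data.size() - offset);
          buffer_.append(data, offset, take);
          offset += take;
          if (buffer_.size() == block_size_ && !FinishBlock()) {
            ReportFailure();  // Previously we kept spinning here, never
            return;           // advancing past the mismatched block.
          }
        }
      }

      void DoneReading() {
        if (!failed_ && !FinishBlock())  // Flush the final partial block.
          ReportFailure();
      }

     private:
      // Returns false on mismatch instead of reporting directly, mirroring
      // the change of FinishBlock() from void to bool.
      bool FinishBlock() {
        if (buffer_.empty())
          return true;
        std::string actual = buffer_;  // Stand-in for the block's SHA-256.
        buffer_.clear();
        if (block_ >= expected_.size() || expected_[block_] != actual)
          return false;
        ++block_;
        return true;
      }

      void ReportFailure() {
        failed_ = true;  // Makes every later BytesRead()/DoneReading() a no-op.
        std::cout << "HASH_MISMATCH at block " << block_ << "\n";
      }

      std::vector<std::string> expected_;
      size_t block_size_;
      std::string buffer_;
      size_t block_ = 0;
      bool failed_ = false;
    };

    int main() {
      VerifyJobSketch job({"abcd", "efgh"}, 4);
      job.BytesRead("abcdXYZW");    // Second block mismatches; reported once.
      job.BytesRead("more bytes");  // Ignored: the job has already failed.
      job.DoneReading();
      return 0;
    }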

BUG=375992,375953

Review URL: https://codereview.chromium.org/329303007

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@278071 0039d316-1c4b-4281-b951-d872f2087c98
parent 6dc5125b
@@ -20,7 +20,9 @@
 #include "crypto/secure_hash.h"
 #include "crypto/sha2.h"
 #include "extensions/browser/computed_hashes.h"
+#include "extensions/browser/content_hash_tree.h"
 #include "extensions/browser/extension_registry.h"
+#include "extensions/browser/verified_contents.h"
 #include "extensions/common/constants.h"
 #include "extensions/common/extension.h"
 #include "extensions/common/file_util.h"
@@ -47,9 +49,11 @@ class ContentHashFetcherJob
  public:
   typedef base::Callback<void(ContentHashFetcherJob*)> CompletionCallback;
   ContentHashFetcherJob(net::URLRequestContextGetter* request_context,
+                        ContentVerifierKey key,
                         const std::string& extension_id,
                         const base::FilePath& extension_path,
                         const GURL& fetch_url,
+                        bool force,
                         const CompletionCallback& callback);

   void Start();
@@ -58,20 +62,28 @@ class ContentHashFetcherJob
   // just waiting for the entire job to complete. Safe to call from any thread.
   void Cancel();

-  // Returns whether this job was completely successful (we have both verified
-  // contents and computed hashes).
+  // Checks whether this job has been cancelled. Safe to call from any thread.
+  bool IsCancelled();
+
+  // Returns whether this job was successful (we have both verified contents
+  // and computed hashes). Even if the job was a success, there might have been
+  // files that were found to have contents not matching expectations; these
+  // are available by calling hash_mismatch_paths().
   bool success() { return success_; }

-  // Do we have a verified_contents.json file?
-  bool have_verified_contents() { return have_verified_contents_; }
+  bool force() { return force_; }
+
+  const std::string& extension_id() { return extension_id_; }
+
+  // Returns the set of paths that had a hash mismatch.
+  const std::set<base::FilePath>& hash_mismatch_paths() {
+    return hash_mismatch_paths_;
+  }

  private:
   friend class base::RefCountedThreadSafe<ContentHashFetcherJob>;
   virtual ~ContentHashFetcherJob();

-  // Checks whether this job has been cancelled. Safe to call from any thread.
-  bool IsCancelled();
-
   // Callback for when we're done doing file I/O to see if we already have
   // a verified contents file. If we don't, this will kick off a network
   // request to get one.
@@ -110,18 +122,22 @@ class ContentHashFetcherJob
   // The url we'll need to use to fetch a verified_contents.json file.
   GURL fetch_url_;

+  bool force_;
+
   CompletionCallback callback_;

   content::BrowserThread::ID creation_thread_;

   // Used for fetching content signatures.
   scoped_ptr<net::URLFetcher> url_fetcher_;

+  // The key used to validate verified_contents.json.
+  ContentVerifierKey key_;
+
   // Whether this job succeeded.
   bool success_;

-  // Whether we either found a verified contents file, or were successful in
-  // fetching one and saving it to disk.
-  bool have_verified_contents_;
+  // Paths that were found to have a mismatching hash.
+  std::set<base::FilePath> hash_mismatch_paths_;

   // The block size to use for hashing.
   int block_size_;
@@ -132,21 +148,26 @@ class ContentHashFetcherJob
   // A lock for synchronizing access to |cancelled_|.
   base::Lock cancelled_lock_;

+  DISALLOW_COPY_AND_ASSIGN(ContentHashFetcherJob);
 };

 ContentHashFetcherJob::ContentHashFetcherJob(
     net::URLRequestContextGetter* request_context,
+    ContentVerifierKey key,
     const std::string& extension_id,
     const base::FilePath& extension_path,
     const GURL& fetch_url,
+    bool force,
     const CompletionCallback& callback)
     : request_context_(request_context),
       extension_id_(extension_id),
       extension_path_(extension_path),
       fetch_url_(fetch_url),
+      force_(force),
       callback_(callback),
+      key_(key),
       success_(false),
-      have_verified_contents_(false),
       // TODO(asargent) - use the value from verified_contents.json for each
       // file, instead of using a constant.
       block_size_(4096),
@@ -172,21 +193,24 @@ void ContentHashFetcherJob::Cancel() {
   cancelled_ = true;
 }

-ContentHashFetcherJob::~ContentHashFetcherJob() {
-}
-
 bool ContentHashFetcherJob::IsCancelled() {
   base::AutoLock autolock(cancelled_lock_);
   bool result = cancelled_;
   return result;
 }

+ContentHashFetcherJob::~ContentHashFetcherJob() {
+}
+
 void ContentHashFetcherJob::DoneCheckingForVerifiedContents(bool found) {
   if (IsCancelled())
     return;
   if (found) {
+    VLOG(1) << "Found verified contents for " << extension_id_;
     DoneFetchingVerifiedContents(true);
   } else {
+    VLOG(1) << "Missing verified contents for " << extension_id_
+            << ", fetching...";
     url_fetcher_.reset(
         net::URLFetcher::Create(fetch_url_, net::URLFetcher::GET, this));
     url_fetcher_->SetRequestContext(request_context_);
@@ -209,6 +233,9 @@ static int WriteFileHelper(const base::FilePath& path,
 }

 void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) {
+  VLOG(1) << "URLFetchComplete for " << extension_id_
+          << " is_success:" << url_fetcher_->GetStatus().is_success() << " "
+          << fetch_url_.possibly_invalid_spec();
   if (IsCancelled())
     return;
   scoped_ptr<std::string> response(new std::string);
@@ -224,6 +251,8 @@ void ContentHashFetcherJob::OnURLFetchComplete(const net::URLFetcher* source) {
   // move to parsing this in a sandboxed helper (crbug.com/372878).
   scoped_ptr<base::Value> parsed(base::JSONReader::Read(*response));
   if (parsed) {
+    VLOG(1) << "JSON parsed ok for " << extension_id_;
+
     parsed.reset();  // no longer needed
     base::FilePath destination =
         file_util::GetVerifiedContentsPath(extension_path_);
@@ -247,14 +276,13 @@ void ContentHashFetcherJob::OnVerifiedContentsWritten(size_t expected_size,
 }

 void ContentHashFetcherJob::DoneFetchingVerifiedContents(bool success) {
-  have_verified_contents_ = success;
-
   if (IsCancelled())
     return;

-  // TODO(asargent) - eventually we should abort here on !success, but for
-  // testing purposes it's actually still helpful to continue on to create the
-  // computed hashes.
+  if (!success) {
+    DispatchCallback();
+    return;
+  }

   content::BrowserThread::PostBlockingPoolSequencedTask(
       "ContentHashFetcher",
@@ -268,10 +296,13 @@ void ContentHashFetcherJob::MaybeCreateHashes() {
   base::FilePath hashes_file =
       file_util::GetComputedHashesPath(extension_path_);

-  if (base::PathExists(hashes_file))
+  if (!force_ && base::PathExists(hashes_file)) {
     success_ = true;
-  else
+  } else {
+    if (force_)
+      base::DeleteFile(hashes_file, false /* recursive */);
     success_ = CreateHashes(hashes_file);
+  }

   content::BrowserThread::PostTask(
       creation_thread_,
@@ -286,6 +317,12 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
   if (!base::CreateDirectoryAndGetError(hashes_file.DirName(), NULL))
     return false;

+  base::FilePath verified_contents_path =
+      file_util::GetVerifiedContentsPath(extension_path_);
+  VerifiedContents verified_contents(key_.data, key_.size);
+  if (!verified_contents.InitFrom(verified_contents_path, false))
+    return false;
+
   base::FileEnumerator enumerator(extension_path_,
                                   true, /* recursive */
                                   base::FileEnumerator::FILES);
@@ -310,6 +347,12 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
     const base::FilePath& full_path = *i;
     base::FilePath relative_path;
     extension_path_.AppendRelativePath(full_path, &relative_path);
+
+    const std::string* expected_root =
+        verified_contents.GetTreeHashRoot(relative_path);
+    if (!expected_root)
+      continue;
+
     std::string contents;
     if (!base::ReadFileToString(full_path, &contents)) {
       LOG(ERROR) << "Could not read " << full_path.MaybeAsASCII();
@@ -339,6 +382,14 @@ bool ContentHashFetcherJob::CreateHashes(const base::FilePath& hashes_file) {
       // Get ready for next iteration.
       offset += bytes_to_read;
     }
+
+    std::string root =
+        ComputeTreeHashRoot(hashes, block_size_ / crypto::kSHA256Length);
+    if (expected_root && *expected_root != root) {
+      VLOG(1) << "content mismatch for " << relative_path.AsUTF8Unsafe();
+      hash_mismatch_paths_.insert(relative_path);
+      continue;
+    }
+
     writer.AddHashes(relative_path, block_size_, hashes);
   }
   return writer.WriteToFile(hashes_file);
@@ -356,9 +407,11 @@ void ContentHashFetcherJob::DispatchCallback() {
 // ----

 ContentHashFetcher::ContentHashFetcher(content::BrowserContext* context,
-                                       ContentVerifierDelegate* delegate)
+                                       ContentVerifierDelegate* delegate,
+                                       const FetchCallback& callback)
     : context_(context),
       delegate_(delegate),
+      fetch_callback_(callback),
       observer_(this),
       weak_ptr_factory_(this) {
 }
@@ -374,13 +427,22 @@ void ContentHashFetcher::Start() {
   observer_.Add(registry);
 }

-void ContentHashFetcher::DoFetch(const Extension* extension) {
+void ContentHashFetcher::DoFetch(const Extension* extension, bool force) {
   if (!extension || !delegate_->ShouldBeVerified(*extension))
     return;

   IdAndVersion key(extension->id(), extension->version()->GetString());
-  if (ContainsKey(jobs_, key))
-    return;
+  JobMap::iterator found = jobs_.find(key);
+  if (found != jobs_.end()) {
+    if (!force || found->second->force()) {
+      // Just let the existing job keep running.
+      return;
+    } else {
+      // Kill the existing non-force job, so we can start a new one below.
+      found->second->Cancel();
+      jobs_.erase(found);
+    }
+  }

   // TODO(asargent) - we should do something here to remember recent attempts
   // to fetch signatures by extension id, and use exponential backoff to avoid
@@ -392,9 +454,11 @@ void ContentHashFetcher::DoFetch(const Extension* extension) {
       delegate_->GetSignatureFetchUrl(extension->id(), *extension->version());
   ContentHashFetcherJob* job =
       new ContentHashFetcherJob(context_->GetRequestContext(),
+                                delegate_->PublicKey(),
                                 extension->id(),
                                 extension->path(),
                                 url,
+                                force,
                                 base::Bind(&ContentHashFetcher::JobFinished,
                                            weak_ptr_factory_.GetWeakPtr()));
   jobs_.insert(std::make_pair(key, job));
@@ -405,7 +469,7 @@ void ContentHashFetcher::OnExtensionLoaded(
     content::BrowserContext* browser_context,
     const Extension* extension) {
   CHECK(extension);
-  DoFetch(extension);
+  DoFetch(extension, false);
 }

 void ContentHashFetcher::OnExtensionUnloaded(
@@ -415,11 +479,20 @@ void ContentHashFetcher::OnExtensionUnloaded(
   CHECK(extension);
   IdAndVersion key(extension->id(), extension->version()->GetString());
   JobMap::iterator found = jobs_.find(key);
-  if (found != jobs_.end())
+  if (found != jobs_.end()) {
+    found->second->Cancel();
     jobs_.erase(found);
+  }
 }

 void ContentHashFetcher::JobFinished(ContentHashFetcherJob* job) {
+  if (!job->IsCancelled()) {
+    fetch_callback_.Run(job->extension_id(),
+                        job->success(),
+                        job->force(),
+                        job->hash_mismatch_paths());
+  }
+
   for (JobMap::iterator i = jobs_.begin(); i != jobs_.end(); ++i) {
     if (i->second.get() == job) {
       jobs_.erase(i);
...
@@ -5,6 +5,11 @@
 #ifndef EXTENSIONS_BROWSER_CONTENT_HASH_FETCHER_H_
 #define EXTENSIONS_BROWSER_CONTENT_HASH_FETCHER_H_

+#include <set>
+#include <string>
+
+#include "base/callback.h"
+#include "base/files/file_path.h"
 #include "base/memory/weak_ptr.h"
 #include "base/scoped_observer.h"
 #include "extensions/browser/content_verifier_delegate.h"
@@ -28,18 +33,31 @@ class ContentHashFetcherJob;
 // sure they match the signed treehash root hash).
 class ContentHashFetcher : public ExtensionRegistryObserver {
  public:
+  // A callback for when a fetch is complete. This reports back:
+  // -extension id
+  // -whether we were successful or not (have verified_contents.json and
+  //  computed_hashes.json files)
+  // -was it a forced check?
+  // -a set of paths whose contents didn't match expected values
+  typedef base::Callback<
+      void(const std::string&, bool, bool, const std::set<base::FilePath>&)>
+      FetchCallback;
+
   // The consumer of this class needs to ensure that context and delegate
   // outlive this object.
   ContentHashFetcher(content::BrowserContext* context,
-                     ContentVerifierDelegate* delegate);
+                     ContentVerifierDelegate* delegate,
+                     const FetchCallback& callback);
   virtual ~ContentHashFetcher();

   // Begins the process of trying to fetch any needed verified contents, and
   // listening for extension load/unload.
   void Start();

-  // Explicitly ask to fetch hashes for |extension|.
-  void DoFetch(const Extension* extension);
+  // Explicitly ask to fetch hashes for |extension|. If |force| is true,
+  // we will always check the validity of the verified_contents.json and
+  // re-check the contents of the files in the filesystem.
+  void DoFetch(const Extension* extension, bool force);

   // ExtensionRegistryObserver interface
   virtual void OnExtensionLoaded(content::BrowserContext* browser_context,
@@ -55,6 +73,7 @@ class ContentHashFetcher : public ExtensionRegistryObserver {
   content::BrowserContext* context_;
   ContentVerifierDelegate* delegate_;
+  FetchCallback fetch_callback_;

   // We keep around pointers to in-progress jobs, both so we can avoid
   // scheduling duplicate work if fetching is already in progress, and so that
...
@@ -33,6 +33,8 @@ ContentHashReader::ContentHashReader(const std::string& extension_id,
       relative_path_(relative_path),
       key_(key),
       status_(NOT_INITIALIZED),
+      have_verified_contents_(false),
+      have_computed_hashes_(false),
       block_size_(0) {
 }
@@ -52,10 +54,10 @@ bool ContentHashReader::Init() {
   if (!verified_contents_->InitFrom(verified_contents_path, false) ||
       !verified_contents_->valid_signature() ||
       !verified_contents_->version().Equals(extension_version_) ||
-      verified_contents_->extension_id() != extension_id_) {
-    base::DeleteFile(verified_contents_path, false /* recursive */);
+      verified_contents_->extension_id() != extension_id_)
     return false;
-  }
+
+  have_verified_contents_ = true;

   base::FilePath computed_hashes_path =
       file_util::GetComputedHashesPath(extension_root_);
@@ -63,21 +65,24 @@ bool ContentHashReader::Init() {
     return false;

   ComputedHashes::Reader reader;
-  if (!reader.InitFromFile(computed_hashes_path) ||
-      !reader.GetHashes(relative_path_, &block_size_, &hashes_) ||
-      block_size_ % crypto::kSHA256Length != 0) {
-    base::DeleteFile(computed_hashes_path, false /* recursive */);
+  if (!reader.InitFromFile(computed_hashes_path))
+    return false;
+
+  have_computed_hashes_ = true;
+
+  if (!reader.GetHashes(relative_path_, &block_size_, &hashes_) ||
+      block_size_ % crypto::kSHA256Length != 0)
+    return false;
+
+  const std::string* expected_root =
+      verified_contents_->GetTreeHashRoot(relative_path_);
+  if (!expected_root)
     return false;
-  }

   std::string root =
       ComputeTreeHashRoot(hashes_, block_size_ / crypto::kSHA256Length);
-  const std::string* expected_root = NULL;
-  expected_root = verified_contents_->GetTreeHashRoot(relative_path_);
-  if (expected_root && *expected_root != root) {
-    base::DeleteFile(computed_hashes_path, false /* recursive */);
+  if (*expected_root != root)
     return false;
-  }

   status_ = SUCCESS;
   return true;
...
@@ -40,6 +40,12 @@ class ContentHashReader : public base::RefCountedThreadSafe<ContentHashReader> {
   // should likely be discarded.
   bool Init();

+  // These return whether we found valid verified_contents.json /
+  // computed_hashes.json files respectively. Note that both of these can be
+  // true but we still didn't find an entry for |relative_path_| in them.
+  bool have_verified_contents() { return have_verified_contents_; }
+  bool have_computed_hashes() { return have_computed_hashes_; }
+
   // Return the number of blocks and block size, respectively. Only valid after
   // calling Init().
   int block_count() const;
@@ -63,6 +69,9 @@ class ContentHashReader : public base::RefCountedThreadSafe<ContentHashReader> {
   InitStatus status_;

+  bool have_verified_contents_;
+  bool have_computed_hashes_;
+
   // The blocksize used for generating the hashes.
   int block_size_;
...
@@ -32,7 +32,10 @@ ContentVerifier::ContentVerifier(content::BrowserContext* context,
     : mode_(GetMode()),
       context_(context),
       delegate_(delegate),
-      fetcher_(new ContentHashFetcher(context, delegate)) {
+      fetcher_(new ContentHashFetcher(
+          context,
+          delegate,
+          base::Bind(&ContentVerifier::OnFetchComplete, this))) {
 }

 ContentVerifier::~ContentVerifier() {
@@ -59,33 +62,11 @@ ContentVerifyJob* ContentVerifier::CreateJobFor(
   const Extension* extension =
       registry->GetExtensionById(extension_id, ExtensionRegistry::EVERYTHING);

-  if (!extension || !extension->version() ||
-      !delegate_->ShouldBeVerified(*extension))
-    return NULL;
-
-  // Images used in the browser get transcoded during install, so skip checking
-  // them for now. TODO(asargent) - see if we can cache this list for a given
-  // extension id/version pair.
-  std::set<base::FilePath> browser_images =
-      delegate_->GetBrowserImagePaths(extension);
-  if (ContainsKey(browser_images, relative_path))
+  std::set<base::FilePath> paths;
+  paths.insert(relative_path);
+  if (!ShouldVerifyAnyPaths(extension, paths))
     return NULL;

-  base::FilePath locales_dir = extension_root.Append(kLocaleFolder);
-  base::FilePath full_path = extension_root.Append(relative_path);
-  if (locales_dir.IsParent(full_path)) {
-    // TODO(asargent) - see if we can cache this list to avoid having to fetch
-    // it every time. Maybe it can never change at runtime? (Or if it can,
-    // maybe there is an event we can listen for to know to drop our cache).
-    std::set<std::string> all_locales;
-    extension_l10n_util::GetAllLocales(&all_locales);
-    // Since message catalogs get transcoded during installation, we want to
-    // ignore only those paths that the localization transcoding *did* ignore.
-    if (!extension_l10n_util::ShouldSkipValidation(
-            locales_dir, full_path, all_locales))
-      return NULL;
-  }
-
   // TODO(asargent) - we can probably get some good performance wins by having
   // a cache of ContentHashReader's that we hold onto past the end of each job.
   return new ContentVerifyJob(
@@ -107,21 +88,96 @@ void ContentVerifier::VerifyFailed(const std::string& extension_id,
     return;
   }

-  if (!delegate_ || mode_ < ENFORCE)
+  VLOG(1) << "VerifyFailed " << extension_id << " reason:" << reason;
+
+  if (!delegate_ || !fetcher_.get() || mode_ < ENFORCE)
     return;

-  if (reason == ContentVerifyJob::NO_HASHES && mode_ < ENFORCE_STRICT &&
-      fetcher_.get()) {
+  if (reason == ContentVerifyJob::MISSING_ALL_HASHES) {
     // If we failed because there were no hashes yet for this extension, just
     // request some.
     ExtensionRegistry* registry = ExtensionRegistry::Get(context_);
     const Extension* extension =
         registry->GetExtensionById(extension_id, ExtensionRegistry::EVERYTHING);
     if (extension)
-      fetcher_->DoFetch(extension);
-    return;
+      fetcher_->DoFetch(extension, true /* force */);
+  } else {
+    delegate_->VerifyFailed(extension_id);
   }
-  delegate_->VerifyFailed(extension_id);
+}
+
+void ContentVerifier::OnFetchComplete(
+    const std::string& extension_id,
+    bool success,
+    bool was_force_check,
+    const std::set<base::FilePath>& hash_mismatch_paths) {
+  VLOG(1) << "OnFetchComplete " << extension_id << " success:" << success;
+
+  if (!delegate_ || mode_ < ENFORCE)
+    return;
+
+  if (!success && mode_ < ENFORCE_STRICT)
+    return;
+
+  ExtensionRegistry* registry = ExtensionRegistry::Get(context_);
+  const Extension* extension =
+      registry->GetExtensionById(extension_id, ExtensionRegistry::EVERYTHING);
+  if (!extension)
+    return;
+
+  if ((was_force_check && !success) ||
+      ShouldVerifyAnyPaths(extension, hash_mismatch_paths))
+    delegate_->VerifyFailed(extension_id);
+}
+
+bool ContentVerifier::ShouldVerifyAnyPaths(
+    const Extension* extension,
+    const std::set<base::FilePath>& relative_paths) {
+  if (!extension || !extension->version() ||
+      !delegate_->ShouldBeVerified(*extension))
+    return false;
+
+  // Images used in the browser get transcoded during install, so skip
+  // checking them for now. TODO(asargent) - see if we can cache this list
+  // for a given extension id/version pair.
+  std::set<base::FilePath> browser_images =
+      delegate_->GetBrowserImagePaths(extension);
+
+  base::FilePath locales_dir = extension->path().Append(kLocaleFolder);
+  scoped_ptr<std::set<std::string> > all_locales;
+
+  for (std::set<base::FilePath>::const_iterator i = relative_paths.begin();
+       i != relative_paths.end();
+       ++i) {
+    const base::FilePath& relative_path = *i;
+
+    if (relative_path == base::FilePath(kManifestFilename))
+      continue;
+
+    if (ContainsKey(browser_images, relative_path))
+      continue;
+
+    base::FilePath full_path = extension->path().Append(relative_path);
+    if (locales_dir.IsParent(full_path)) {
+      if (!all_locales) {
+        // TODO(asargent) - see if we can cache this list longer to avoid
+        // having to fetch it more than once for a given run of the
+        // browser. Maybe it can never change at runtime? (Or if it can, maybe
+        // there is an event we can listen for to know to drop our cache).
+        all_locales.reset(new std::set<std::string>);
+        extension_l10n_util::GetAllLocales(all_locales.get());
+      }
+
+      // Since message catalogs get transcoded during installation, we want
+      // to skip those paths.
+      if (full_path.DirName().DirName() == locales_dir &&
+          !extension_l10n_util::ShouldSkipValidation(
+              locales_dir, full_path.DirName(), *all_locales))
+        continue;
+    }
+    return true;
+  }
+  return false;
 }

 // static
...
@@ -5,6 +5,9 @@
 #ifndef EXTENSIONS_BROWSER_CONTENT_VERIFIER_H_
 #define EXTENSIONS_BROWSER_CONTENT_VERIFIER_H_

+#include <set>
+#include <string>
+
 #include "base/macros.h"
 #include "base/memory/ref_counted.h"
 #include "base/memory/scoped_ptr.h"
@@ -46,12 +49,24 @@ class ContentVerifier : public base::RefCountedThreadSafe<ContentVerifier> {
   void VerifyFailed(const std::string& extension_id,
                     ContentVerifyJob::FailureReason reason);

+  void OnFetchComplete(const std::string& extension_id,
+                       bool success,
+                       bool was_force_check,
+                       const std::set<base::FilePath>& hash_mismatch_paths);
+
  private:
   DISALLOW_COPY_AND_ASSIGN(ContentVerifier);

   friend class base::RefCountedThreadSafe<ContentVerifier>;
   virtual ~ContentVerifier();

+  // Returns true if any of the paths in |relative_paths| *should* have their
+  // contents verified. (Some files get transcoded during the install process,
+  // so we don't want to verify their contents because they are expected not
+  // to match).
+  bool ShouldVerifyAnyPaths(const Extension* extension,
+                            const std::set<base::FilePath>& relative_paths);
+
   // Note that it is important for these to appear in increasing "severity"
   // order, because we use this to let command line flags increase, but not
   // decrease, the mode you're running in compared to the experiment group.
...
@@ -23,6 +23,8 @@ struct ContentVerifierKey {
   const uint8* data;
   int size;

+  ContentVerifierKey() : data(NULL), size(0) {}
+
   ContentVerifierKey(const uint8* data, int size) {
     this->data = data;
     this->size = size;
...
@@ -29,7 +29,8 @@ ContentVerifyJob::ContentVerifyJob(ContentHashReader* hash_reader,
       current_block_(0),
       current_hash_byte_count_(0),
       hash_reader_(hash_reader),
-      failure_callback_(failure_callback) {
+      failure_callback_(failure_callback),
+      failed_(false) {
   // It's ok for this object to be constructed on a different thread from where
   // it's used.
   thread_checker_.DetachFromThread();
@@ -49,6 +50,8 @@ void ContentVerifyJob::Start() {

 void ContentVerifyJob::BytesRead(int count, const char* data) {
   DCHECK(thread_checker_.CalledOnValidThread());
+  if (failed_)
+    return;
   if (g_test_delegate) {
     FailureReason reason =
         g_test_delegate->BytesRead(hash_reader_->extension_id(), count, data);
@@ -75,6 +78,7 @@ void ContentVerifyJob::BytesRead(int count, const char* data) {
     int bytes_to_hash =
         std::min(hash_reader_->block_size() - current_hash_byte_count_,
                  count - bytes_added);
+    DCHECK(bytes_to_hash > 0);
     current_hash_->Update(data + bytes_added, bytes_to_hash);
     bytes_added += bytes_to_hash;
     current_hash_byte_count_ += bytes_to_hash;
@@ -82,13 +86,18 @@ void ContentVerifyJob::BytesRead(int count, const char* data) {

     // If we finished reading a block worth of data, finish computing the hash
     // for it and make sure the expected hash matches.
-    if (current_hash_byte_count_ == hash_reader_->block_size())
-      FinishBlock();
+    if (current_hash_byte_count_ == hash_reader_->block_size() &&
+        !FinishBlock()) {
+      DispatchFailureCallback(HASH_MISMATCH);
+      return;
+    }
   }
 }

 void ContentVerifyJob::DoneReading() {
   DCHECK(thread_checker_.CalledOnValidThread());
+  if (failed_)
+    return;
   if (g_test_delegate) {
     FailureReason reason =
         g_test_delegate->DoneReading(hash_reader_->extension_id());
@@ -98,31 +107,36 @@ void ContentVerifyJob::DoneReading() {
     }
   }
   done_reading_ = true;
-  if (hashes_ready_)
-    FinishBlock();
+  if (hashes_ready_ && !FinishBlock())
+    DispatchFailureCallback(HASH_MISMATCH);
 }

-void ContentVerifyJob::FinishBlock() {
+bool ContentVerifyJob::FinishBlock() {
   if (current_hash_byte_count_ <= 0)
-    return;
+    return true;
   std::string final(crypto::kSHA256Length, 0);
   current_hash_->Finish(string_as_array(&final), final.size());

   const std::string* expected_hash = NULL;
-  if (!hash_reader_->GetHashForBlock(current_block_, &expected_hash))
-    return DispatchFailureCallback(HASH_MISMATCH);
-
-  if (*expected_hash != final)
-    return DispatchFailureCallback(HASH_MISMATCH);
+  if (!hash_reader_->GetHashForBlock(current_block_, &expected_hash) ||
+      *expected_hash != final)
+    return false;

   current_hash_.reset();
   current_hash_byte_count_ = 0;
   current_block_++;
+  return true;
 }

 void ContentVerifyJob::OnHashesReady(bool success) {
-  if (!success && !g_test_delegate)
-    return DispatchFailureCallback(NO_HASHES);
+  if (!success && !g_test_delegate) {
+    if (hash_reader_->have_verified_contents() &&
+        hash_reader_->have_computed_hashes())
+      DispatchFailureCallback(NO_HASHES_FOR_FILE);
+    else
+      DispatchFailureCallback(MISSING_ALL_HASHES);
+    return;
+  }

   hashes_ready_ = true;
   if (!queue_.empty()) {
@@ -130,8 +144,8 @@ void ContentVerifyJob::OnHashesReady(bool success) {
     queue_.swap(tmp);
     BytesRead(tmp.size(), string_as_array(&tmp));
   }
-  if (done_reading_)
-    FinishBlock();
+  if (done_reading_ && !FinishBlock())
+    DispatchFailureCallback(HASH_MISMATCH);
 }

 // static
@@ -140,7 +154,12 @@ void ContentVerifyJob::SetDelegateForTests(TestDelegate* delegate) {
 }

 void ContentVerifyJob::DispatchFailureCallback(FailureReason reason) {
+  DCHECK(!failed_);
+  failed_ = true;
   if (!failure_callback_.is_null()) {
+    VLOG(1) << "job failed for " << hash_reader_->extension_id() << " "
+            << hash_reader_->relative_path().MaybeAsASCII()
+            << " reason:" << reason;
     failure_callback_.Run(reason);
     failure_callback_.Reset();
   }
...
@@ -34,8 +34,12 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
     // No failure.
     NONE,

-    // Failed because there were no expected hashes.
-    NO_HASHES,
+    // Failed because there were no expected hashes at all (eg they haven't
+    // been fetched yet).
+    MISSING_ALL_HASHES,
+
+    // Failed because this file wasn't found in the list of expected hashes.
+    NO_HASHES_FOR_FILE,

     // Some of the content read did not match the expected hash.
     HASH_MISMATCH
@@ -81,9 +85,10 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
   friend class base::RefCountedThreadSafe<ContentVerifyJob>;

   // Called each time we're done adding bytes for the current block, and are
-  // ready to finish the hash operation for those bytes and make sure it matches
-  // what was expected for that block.
-  void FinishBlock();
+  // ready to finish the hash operation for those bytes and make sure it
+  // matches what was expected for that block. Returns true if everything is
+  // still ok so far, or false if a mismatch was detected.
+  bool FinishBlock();

   // Dispatches the failure callback with the given reason.
   void DispatchFailureCallback(FailureReason reason);
@@ -118,6 +123,9 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
   // Called once if verification fails.
   FailureCallback failure_callback_;

+  // Set to true if we detected a mismatch and called the failure callback.
+  bool failed_;
+
   // For ensuring methods on called on the right thread.
   base::ThreadChecker thread_checker_;
 };
...