Commit 3641f523 authored by Istiaque Ahmed, committed by Commit Bot

Always pass ContentHash creation through IO thread.

This will allow ContentHash caching to be (trivially)
implemented in ContentVerifier::GetContentHash() in the future.
This CL also makes extension load at startup queue all in-flight
requests to read verified_contents and computed_hashes from disk,
so that those in-flight requests can be served by a single file
read.

Summary of changes:
The primary method is ContentVerifier::GetContentHash(). This
method is responsible for retrieving a ContentHash on the IO
thread. It uses ContentVerifier::HashHelper to manage the thread
hops. HashHelper is not thread safe; it uses static methods and
weak pointers to safely call ContentHash methods on a blocking
thread.
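
For reference, the caller side on the IO thread now looks roughly
like the following (a condensed sketch of the ContentVerifyJob::Start()
change in this CL; locking and test-observer notifications omitted):

  void ContentVerifyJob::Start(ContentVerifier* verifier) {
    DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
    // GetContentHash() must be called on the IO thread; the callback is
    // also invoked on the IO thread once HashHelper finishes its hops.
    verifier->GetContentHash(
        extension_id_, extension_root_, extension_version_,
        true /* force_missing_computed_hashes_creation */,
        base::BindOnce(&ContentVerifyJob::DidGetContentHashOnIO, this));
  }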

The ContentHash class is responsible for fetching
verified_contents.json from the network, using ContentHashFetcher.
ContentHashFetcher manages its own lifetime and is no longer
ref-counted. Also, ContentHashFetcherJob is no longer required.
ContentHashFetcher is currently an implementation detail of
ContentHash, and could be further restricted so that it cannot be
instantiated outside of ContentHash.
Because the network fetch is inherently asynchronous,
ContentHash::Create is now async.
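
The new asynchronous factory is exercised by the unit tests below;
in condensed form (with a lambda standing in for a real result
handler):

  void CreateContentHash(const ContentHash::ExtensionKey& key,
                         const ContentHash::FetchParams& fetch_params) {
    // Must run on a sequence that allows blocking I/O.
    ContentHash::Create(
        key, fetch_params, ContentHash::IsCancelledCallback(),
        base::BindOnce([](const scoped_refptr<ContentHash>& hash,
                          bool was_cancelled) {
          // |hash| is never null; callers check hash->succeeded() to see
          // whether verified_contents/computed_hashes were retrieved.
        }));
  }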

Since ContentHash instances can travel between sequences, they are
now RefCountedThreadSafe.

ContentVerifier::VerifyFailed also used to be called when we failed
to retrieve hashes from disk, and it would fall back to
ContentHashFetcher to fetch the hashes. Since GetContentHash now
takes care of this through ContentHash::Create, ContentVerifier is
only made aware of failures when a hash mismatch occurs,
simplifying things further.

Bug: 796395
Change-Id: I7eb6bf93daba908258ae604a8cd45ea643b4e00d
Reviewed-on: https://chromium-review.googlesource.com/933863
Commit-Queue: Istiaque Ahmed <lazyboy@chromium.org>
Reviewed-by: Devlin <rdevlin.cronin@chromium.org>
Cr-Commit-Position: refs/heads/master@{#545623}
parent 325503ce
......@@ -264,7 +264,7 @@ IN_PROC_BROWSER_TEST_F(ContentVerifierTest, DotSlashPaths) {
job_observer.ExpectJobResult(id, base::FilePath(FILE_PATH_LITERAL("cs2.js")),
Result::SUCCESS);
VerifierObserver verifier_observer;
auto verifier_observer = std::make_unique<VerifierObserver>();
// Install a test extension we copied from the webstore that has actual
// signatures, and contains paths with a leading "./" in various places.
......@@ -277,8 +277,14 @@ IN_PROC_BROWSER_TEST_F(ContentVerifierTest, DotSlashPaths) {
// The content scripts might fail verification the first time since the
// one-time processing might not be finished yet - if that's the case then
// we want to wait until that work is done.
if (!base::ContainsKey(verifier_observer.completed_fetches(), id))
verifier_observer.WaitForFetchComplete(id);
if (!base::ContainsKey(verifier_observer->completed_fetches(), id))
verifier_observer->WaitForFetchComplete(id);
// It is important to destroy |verifier_observer| here so that it doesn't see
// any fetch from EnableExtension call below (the observer pointer in
// content_verifier.cc isn't thread safe, so it might asynchronously call
// OnFetchComplete after this test's body executes).
verifier_observer.reset();
// Now disable/re-enable the extension to cause the content scripts to be
// read again.
......
This diff is collapsed.
......@@ -14,72 +14,73 @@
#include "base/files/file_path.h"
#include "base/macros.h"
#include "base/memory/weak_ptr.h"
#include "extensions/browser/content_verifier/content_hash.h"
#include "extensions/common/extension_id.h"
#include "net/url_request/url_fetcher_delegate.h"
namespace base {
class SequencedTaskRunner;
}
namespace net {
class URLRequestContextGetter;
class URLFetcher;
}
namespace extensions {
class Extension;
class ContentHashFetcherJob;
class ContentVerifierDelegate;
namespace internals {
// This class is responsible for getting signed expected hashes for use in
// extension content verification. As extensions are loaded it will fetch and
// parse/validate/cache this data as needed, including calculating expected
// hashes for each block of each file within an extension. (These unsigned leaf
// node block level hashes will always be checked at time of use use to make
// sure they match the signed treehash root hash).
class ContentHashFetcher {
// extension content verification.
//
// This class takes care of doing the network I/O work to ensure we
// have the contents of verified_contents.json files from the webstore.
//
// Note: This class manages its own lifetime. It deletes itself when
// Start() completes at OnURLFetchComplete().
//
// Note: This class is an internal implementation detail of ContentHash and is
// not to be used independently.
// TODO(lazyboy): Consider changing BUILD rules to enforce the above, yet
// keeping the class unit testable.
class ContentHashFetcher : public net::URLFetcherDelegate {
public:
// A callback for when a fetch is complete. This reports back:
// -extension id
// -whether we were successful or not (have verified_contents.json and
// -computed_hashes.json files)
// -was it a forced check?
// -a set of unix style paths whose contents didn't match expected values
typedef base::Callback<
void(const std::string&, bool, bool, const std::set<base::FilePath>&)>
FetchCallback;
// The consumer of this class needs to ensure that context and delegate
// outlive this object.
ContentHashFetcher(net::URLRequestContextGetter* context_getter,
ContentVerifierDelegate* delegate,
const FetchCallback& callback);
virtual ~ContentHashFetcher();
// Explicitly ask to fetch hashes for |extension|. If |force| is true,
// we will always check the validity of the verified_contents.json and
// re-check the contents of the files in the filesystem.
void DoFetch(const Extension* extension, bool force);
// These should be called when an extension is loaded or unloaded.
virtual void ExtensionLoaded(const Extension* extension);
virtual void ExtensionUnloaded(const Extension* extension);
// A callback for when the fetch is complete.
// The response contents are passed via std::unique_ptr<std::string>.
using HashFetcherCallback =
base::OnceCallback<void(const ContentHash::ExtensionKey&,
const ContentHash::FetchParams&,
std::unique_ptr<std::string>)>;
ContentHashFetcher(const ContentHash::ExtensionKey& extension_key,
const ContentHash::FetchParams& fetch_params);
// net::URLFetcherDelegate:
void OnURLFetchComplete(const net::URLFetcher* source) override;
// Note: |this| is deleted once OnURLFetchComplete() completes.
void Start(HashFetcherCallback hash_fetcher_callback);
private:
// Callback for when a job getting content hashes has completed.
void JobFinished(scoped_refptr<ContentHashFetcherJob> job);
friend class base::RefCounted<ContentHashFetcher>;
~ContentHashFetcher() override;
ContentHash::ExtensionKey extension_key_;
ContentHash::FetchParams fetch_params_;
HashFetcherCallback hash_fetcher_callback_;
net::URLRequestContextGetter* context_getter_;
ContentVerifierDelegate* delegate_;
FetchCallback fetch_callback_;
scoped_refptr<base::SequencedTaskRunner> response_task_runner_;
// We keep around pointers to in-progress jobs, both so we can avoid
// scheduling duplicate work if fetching is already in progress, and so that
// we can cancel in-progress work at shutdown time.
typedef std::pair<ExtensionId, std::string> IdAndVersion;
typedef std::map<IdAndVersion, scoped_refptr<ContentHashFetcherJob> > JobMap;
JobMap jobs_;
// Alive when url fetch is ongoing.
std::unique_ptr<net::URLFetcher> url_fetcher_;
// Used for binding callbacks passed to jobs.
base::WeakPtrFactory<ContentHashFetcher> weak_ptr_factory_;
SEQUENCE_CHECKER(sequence_checker_);
DISALLOW_COPY_AND_ASSIGN(ContentHashFetcher);
};
} // namespace internals
} // namespace extensions
#endif // EXTENSIONS_BROWSER_CONTENT_HASH_FETCHER_H_
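
(For orientation, the fetcher is intended to be driven from inside
ContentHash roughly as follows. This is a hypothetical sketch: the
ContentHash implementation diff is collapsed above, so the exact call
site is not shown here; the names mirror the declarations in
content_hash.h.)

  // Hypothetical call site inside ContentHash. The fetcher owns itself
  // and deletes itself after invoking the callback from
  // OnURLFetchComplete().
  void ContentHash::FetchVerifiedContents(
      const ExtensionKey& extension_key,
      const FetchParams& fetch_params,
      const IsCancelledCallback& is_cancelled,
      CreatedCallback created_callback) {
    auto* fetcher =
        new internals::ContentHashFetcher(extension_key, fetch_params);
    fetcher->Start(base::BindOnce(&ContentHash::DidFetchVerifiedContents,
                                  std::move(created_callback),
                                  is_cancelled));
  }
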
......@@ -19,6 +19,7 @@
#include "content/public/test/test_browser_thread_bundle.h"
#include "extensions/browser/content_hash_fetcher.h"
#include "extensions/browser/content_verifier/test_utils.h"
#include "extensions/browser/extension_file_task_runner.h"
#include "extensions/browser/extensions_test.h"
#include "extensions/common/constants.h"
#include "extensions/common/extension_paths.h"
......@@ -35,51 +36,63 @@ namespace extensions {
struct ContentHashFetcherResult {
std::string extension_id;
bool success;
bool force;
bool was_cancelled;
std::set<base::FilePath> mismatch_paths;
};
// Allows waiting for the callback from a ContentHashFetcher, returning the
// Allows waiting for the callback from a ContentHash, returning the
// data that was passed to that callback.
class ContentHashFetcherWaiter {
class ContentHashWaiter {
public:
ContentHashFetcherWaiter() : weak_factory_(this) {}
ContentHashFetcher::FetchCallback GetCallback() {
return base::Bind(&ContentHashFetcherWaiter::Callback,
weak_factory_.GetWeakPtr());
}
std::unique_ptr<ContentHashFetcherResult> WaitForCallback() {
if (!result_) {
base::RunLoop run_loop;
run_loop_quit_ = run_loop.QuitClosure();
run_loop.Run();
}
ContentHashWaiter()
: reply_task_runner_(base::SequencedTaskRunnerHandle::Get()) {}
std::unique_ptr<ContentHashFetcherResult> CreateAndWaitForCallback(
const ContentHash::ExtensionKey& key,
const ContentHash::FetchParams& fetch_params) {
GetExtensionFileTaskRunner()->PostTask(
FROM_HERE, base::BindOnce(&ContentHashWaiter::CreateContentHash,
base::Unretained(this), key, fetch_params));
run_loop_.Run();
DCHECK(result_);
return std::move(result_);
}
private:
// Matches signature of ContentHashFetcher::FetchCallback.
void Callback(const std::string& extension_id,
bool success,
bool force,
const std::set<base::FilePath>& mismatch_paths) {
void CreatedCallback(const scoped_refptr<ContentHash>& content_hash,
bool was_cancelled) {
if (!reply_task_runner_->RunsTasksInCurrentSequence()) {
reply_task_runner_->PostTask(
FROM_HERE,
base::BindOnce(&ContentHashWaiter::CreatedCallback,
base::Unretained(this), content_hash, was_cancelled));
return;
}
result_ = std::make_unique<ContentHashFetcherResult>();
result_->extension_id = extension_id;
result_->success = success;
result_->force = force;
result_->mismatch_paths = mismatch_paths;
if (run_loop_quit_)
std::move(run_loop_quit_).Run();
result_->extension_id = content_hash->extension_key().extension_id;
result_->success = content_hash->succeeded();
result_->was_cancelled = was_cancelled;
result_->mismatch_paths = content_hash->hash_mismatch_unix_paths();
run_loop_.QuitWhenIdle();
}
void CreateContentHash(const ContentHash::ExtensionKey& key,
const ContentHash::FetchParams& fetch_params) {
ContentHash::Create(key, fetch_params, ContentHash::IsCancelledCallback(),
base::BindOnce(&ContentHashWaiter::CreatedCallback,
base::Unretained(this)));
}
base::OnceClosure run_loop_quit_;
scoped_refptr<base::SequencedTaskRunner> reply_task_runner_;
base::RunLoop run_loop_;
std::unique_ptr<ContentHashFetcherResult> result_;
base::WeakPtrFactory<ContentHashFetcherWaiter> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(ContentHashFetcherWaiter);
DISALLOW_COPY_AND_ASSIGN(ContentHashWaiter);
};
// Installs and tests various functionality of an extension loaded without
// verified_contents.json file.
class ContentHashFetcherTest : public ExtensionsTest {
public:
ContentHashFetcherTest()
......@@ -120,12 +133,18 @@ class ContentHashFetcherTest : public ExtensionsTest {
return nullptr;
}
ContentHashFetcherWaiter waiter;
ContentHashFetcher fetcher(request_context(), delegate_.get(),
waiter.GetCallback());
fetcher.DoFetch(extension_.get(), true /* force */);
return waiter.WaitForCallback();
std::unique_ptr<ContentHashFetcherResult> result =
ContentHashWaiter().CreateAndWaitForCallback(
ContentHash::ExtensionKey(extension_->id(), extension_->path(),
extension_->version(),
delegate_->GetPublicKey()),
ContentHash::FetchParams(request_context(), fetch_url_));
delegate_.reset();
return result;
}
const GURL& fetch_url() { return fetch_url_; }
const base::FilePath& extension_root() { return extension_->path(); }
......@@ -149,8 +168,7 @@ class ContentHashFetcherTest : public ExtensionsTest {
url.scheme(), url.host(),
content::BrowserThread::GetTaskRunnerForThread(
content::BrowserThread::IO),
base::CreateTaskRunnerWithTraits(
{base::MayBlock(), base::TaskPriority::BACKGROUND}));
GetExtensionFileTaskRunner());
interceptor_->SetResponse(url, response_path);
}
......@@ -206,7 +224,7 @@ TEST_F(ContentHashFetcherTest, MissingVerifiedContents) {
std::unique_ptr<ContentHashFetcherResult> result = DoHashFetch();
ASSERT_TRUE(result.get());
EXPECT_TRUE(result->success);
EXPECT_TRUE(result->force);
EXPECT_FALSE(result->was_cancelled);
EXPECT_TRUE(result->mismatch_paths.empty());
// Make sure the verified_contents.json file was written into the extension's
......@@ -229,7 +247,7 @@ TEST_F(ContentHashFetcherTest, FetchInvalidVerifiedContents) {
std::unique_ptr<ContentHashFetcherResult> result = DoHashFetch();
ASSERT_TRUE(result.get());
EXPECT_FALSE(result->success);
EXPECT_TRUE(result->force);
EXPECT_FALSE(result->was_cancelled);
EXPECT_TRUE(result->mismatch_paths.empty());
// TODO(lazyboy): This should be EXPECT_FALSE, we shouldn't be writing
......@@ -251,7 +269,7 @@ TEST_F(ContentHashFetcherTest, Fetch404VerifiedContents) {
std::unique_ptr<ContentHashFetcherResult> result = DoHashFetch();
ASSERT_TRUE(result.get());
EXPECT_FALSE(result->success);
EXPECT_TRUE(result->force);
EXPECT_FALSE(result->was_cancelled);
EXPECT_TRUE(result->mismatch_paths.empty());
// Make sure the verified_contents.json file was *not* written into the
......@@ -276,7 +294,7 @@ TEST_F(ContentHashFetcherTest, MissingVerifiedContentsAndCorrupt) {
std::unique_ptr<ContentHashFetcherResult> result = DoHashFetch();
ASSERT_NE(nullptr, result.get());
EXPECT_TRUE(result->success);
EXPECT_TRUE(result->force);
EXPECT_FALSE(result->was_cancelled);
EXPECT_TRUE(
base::ContainsKey(result->mismatch_paths, script_path.BaseName()));
......
......@@ -8,6 +8,7 @@
#include "base/memory/ptr_util.h"
#include "base/metrics/histogram_macros.h"
#include "base/strings/string_util.h"
#include "base/threading/thread_restrictions.h"
#include "base/timer/elapsed_timer.h"
#include "base/values.h"
#include "crypto/sha2.h"
......@@ -24,16 +25,13 @@ ContentHashReader::~ContentHashReader() {}
// static.
std::unique_ptr<const ContentHashReader> ContentHashReader::Create(
const ExtensionId& extension_id,
const base::Version& extension_version,
const base::FilePath& extension_root,
const base::FilePath& relative_path,
const ContentVerifierKey& key) {
const scoped_refptr<const ContentHash>& content_hash) {
base::AssertBlockingAllowed();
base::ElapsedTimer timer;
std::unique_ptr<ContentHash> content_hash =
ContentHash::Create(ContentHash::ExtensionKey(
extension_id, extension_root, extension_version, key));
const ContentHash::ExtensionKey& extension_key =
content_hash->extension_key();
auto hash_reader = base::WrapUnique(new ContentHashReader);
if (!content_hash->succeeded())
......@@ -47,7 +45,8 @@ std::unique_ptr<const ContentHashReader> ContentHashReader::Create(
// verified_contents.json. This can happen when an extension sends an XHR to a
// resource.
if (!verified_contents.HasTreeHashRoot(relative_path)) {
base::FilePath full_path = extension_root.Append(relative_path);
base::FilePath full_path =
extension_key.extension_root.Append(relative_path);
// Making a request to a non-existent file or to a directory should not
// result in content verification failure.
// TODO(proberge): This logic could be simplified if |content_verify_job|
......
......@@ -18,6 +18,8 @@
namespace extensions {
class ContentHash;
// This class creates an object that will read expected hashes that may have
// been fetched/calculated by the ContentHashFetcher, and vends them out for
// use in ContentVerifyJob's.
......@@ -31,11 +33,8 @@ class ContentHashReader {
// instance whose success/failure can be determined by calling succeeded()
// method. On failure, this object should likely be discarded.
static std::unique_ptr<const ContentHashReader> Create(
const ExtensionId& extension_id,
const base::Version& extension_version,
const base::FilePath& extension_root,
const base::FilePath& relative_path,
const ContentVerifierKey& key);
const scoped_refptr<const ContentHash>& content_hash);
bool succeeded() const { return status_ == SUCCESS; }
......
This diff is collapsed.
......@@ -13,7 +13,10 @@
#include "base/memory/ref_counted.h"
#include "base/scoped_observer.h"
#include "base/version.h"
#include "content/public/browser/browser_thread.h"
#include "extensions/browser/content_verifier/content_hash.h"
#include "extensions/browser/content_verifier_delegate.h"
#include "extensions/browser/content_verifier_io_data.h"
#include "extensions/browser/content_verify_job.h"
#include "extensions/browser/extension_registry_observer.h"
......@@ -25,11 +28,13 @@ namespace content {
class BrowserContext;
}
namespace net {
class URLRequestContextGetter;
}
namespace extensions {
class Extension;
class ContentHashFetcher;
class ContentVerifierIOData;
class ManagementPolicy;
// Used for managing overall content verification - both fetching content
......@@ -64,7 +69,7 @@ class ContentVerifier : public base::RefCountedThreadSafe<ContentVerifier>,
// Called (typically by a verification job) to indicate that verification
// failed while reading some file in |extension_id|.
void VerifyFailed(const std::string& extension_id,
void VerifyFailed(const ExtensionId& extension_id,
ContentVerifyJob::FailureReason reason);
// ExtensionRegistryObserver interface
......@@ -74,6 +79,25 @@ class ContentVerifier : public base::RefCountedThreadSafe<ContentVerifier>,
const Extension* extension,
UnloadedExtensionReason reason) override;
using ContentHashCallback =
base::OnceCallback<void(const scoped_refptr<const ContentHash>&)>;
// Retrieves ContentHash for an extension through |callback|.
// Must be called on IO thread.
// |callback| is called on IO thread.
// |force_missing_computed_hashes_creation| should be true if
// computed_hashes.json is required to be created if that file is missing or
// unreadable.
// TODO(lazyboy): |force_missing_computed_hashes_creation| should always be
// true; handling its behavior adds extra complexity in HashHelper and this
// param should be removed when we can unify/fix computed_hashes.json
// treatment, see https://crbug.com/819832 for details.
void GetContentHash(const ExtensionId& extension_id,
const base::FilePath& extension_root,
const base::Version& extension_version,
bool force_missing_computed_hashes_creation,
ContentHashCallback callback);
GURL GetSignatureFetchUrlForTest(const ExtensionId& extension_id,
const base::Version& extension_version);
......@@ -81,16 +105,25 @@ class ContentVerifier : public base::RefCountedThreadSafe<ContentVerifier>,
DISALLOW_COPY_AND_ASSIGN(ContentVerifier);
friend class base::RefCountedThreadSafe<ContentVerifier>;
friend class HashHelper;
~ContentVerifier() override;
void OnFetchComplete(
const std::string& extension_id,
bool success,
bool was_force_check,
const std::set<base::FilePath>& hash_mismatch_unix_paths);
class HashHelper;
void OnFetchCompleteHelper(const std::string& extension_id,
bool should_verify_any_paths_result);
void OnFetchComplete(const scoped_refptr<const ContentHash>& content_hash);
ContentHash::FetchParams GetFetchParams(
const ExtensionId& extension_id,
const base::Version& extension_version);
// Performs IO thread operations after extension load.
void OnExtensionLoadedOnIO(
const ExtensionId& extension_id,
const base::FilePath& extension_root,
const base::Version& extension_version,
std::unique_ptr<ContentVerifierIOData::ExtensionData> data);
// Performs IO thread operations after extension unload.
void OnExtensionUnloadedOnIO(const ExtensionId& extension_id,
const base::Version& extension_version);
// Returns true if any of the paths in |relative_unix_paths| *should* have
// their contents verified. (Some files get transcoded during the install
......@@ -101,15 +134,26 @@ class ContentVerifier : public base::RefCountedThreadSafe<ContentVerifier>,
const base::FilePath& extension_root,
const std::set<base::FilePath>& relative_unix_paths);
// Returns the HashHelper instance, making sure we create it at most once.
HashHelper* GetOrCreateHashHelper();
// Set to true once we've begun shutting down.
bool shutdown_;
content::BrowserContext* context_;
// Guards creation of |hash_helper_|, ensuring it is created at most once.
// Accessed only on IO thread.
bool hash_helper_created_ = false;
// Created and used on IO thread.
std::unique_ptr<HashHelper, content::BrowserThread::DeleteOnIOThread>
hash_helper_;
std::unique_ptr<ContentVerifierDelegate> delegate_;
// For fetching content hash signatures.
std::unique_ptr<ContentHashFetcher> fetcher_;
// Used by ContentHashFetcher.
net::URLRequestContextGetter* request_context_getter_;
// For observing the ExtensionRegistry.
ScopedObserver<ExtensionRegistry, ExtensionRegistryObserver> observer_;
......
......@@ -15,12 +15,17 @@
#include "extensions/browser/verified_contents.h"
#include "extensions/common/constants.h"
#include "extensions/common/extension_id.h"
#include "url/gurl.h"
namespace net {
class URLRequestContextGetter;
}
namespace extensions {
// Represents content verification hashes for an extension.
//
// Instances can be created using Create() factory methods on sequences with
// Instances can be created using Create() factory method on sequences with
// blocking IO access. If hash retrieval succeeds then ContentHash::succeeded()
// will return true and
// a. ContentHash::verified_contents() will return structured representation of
......@@ -28,11 +33,23 @@ namespace extensions {
// b. ContentHash::computed_hashes() will return structured representation of
// computed_hashes.json.
//
// Additionally if computed_hashes.json was required to be written to disk and
// If verified_contents.json was missing on disk (e.g. because of disk
// corruption or such), this class will fetch the file from the network.
// After fetching, the class will parse/validate this data as needed,
// including calculating expected hashes for each block of each file within
// an extension. (These unsigned leaf node block level hashes will always be
// checked at time of use to make sure they match the signed treehash root
// hash).
//
// computed_hashes.json is computed over the files in an extension's directory.
// If computed_hashes.json was required to be written to disk and
// it was successful, ContentHash::hash_mismatch_unix_paths() will return all
// FilePaths from the extension directory that had content verification
// mismatch.
class ContentHash {
//
// Clients of this class can cancel the disk write operation of
// computed_hashes.json while it is ongoing, since it can potentially take a
// long time. This cancellation can be performed through |is_cancelled|.
class ContentHash : public base::RefCountedThreadSafe<ContentHash> {
public:
// Key to identify an extension.
struct ExtensionKey {
......@@ -51,30 +68,39 @@ class ContentHash {
ExtensionKey& operator=(const ExtensionKey& other);
};
// Specifiable modes for ContentHash instantiation.
enum Mode {
// Deletes verified_contents.json if the file on disk is invalid.
kDeleteInvalidVerifiedContents = 1 << 0,
// Parameters to fetch verified_contents.json.
struct FetchParams {
net::URLRequestContextGetter* request_context;
GURL fetch_url;
FetchParams(net::URLRequestContextGetter* request_context,
const GURL& fetch_url);
// Always creates computed_hashes.json (as opposed to only when the file is
// non-existent).
kForceRecreateExistingComputedHashesFile = 1 << 1,
FetchParams(const FetchParams& other);
FetchParams& operator=(const FetchParams& other);
};
using IsCancelledCallback = base::RepeatingCallback<bool(void)>;
// Factories:
// These will never return nullptr, but verified_contents or computed_hashes
// may be empty if something fails.
// Reads existing hashes from disk.
static std::unique_ptr<ContentHash> Create(const ExtensionKey& key);
// Reads existing hashes from disk, with specifying flags from |Mode|.
static std::unique_ptr<ContentHash> Create(
const ExtensionKey& key,
int mode,
const IsCancelledCallback& is_cancelled);
~ContentHash();
// Factory:
// Returns ContentHash through |created_callback|; the returned values are:
// - |hash| The content hash. This will never be nullptr, but
// verified_contents or computed_hashes may be empty if something fails.
// - |was_cancelled| Indicates whether or not the request was cancelled
// through |is_cancelled|, while it was being processed.
using CreatedCallback =
base::OnceCallback<void(const scoped_refptr<ContentHash>& hash,
bool was_cancelled)>;
static void Create(const ExtensionKey& key,
const FetchParams& fetch_params,
const IsCancelledCallback& is_cancelled,
CreatedCallback created_callback);
// Forces creation of computed_hashes.json. Must be called after
// |verified_contents| has been successfully set.
// TODO(lazyboy): Remove this once https://crbug.com/819832 is fixed.
void ForceBuildComputedHashes(const IsCancelledCallback& is_cancelled,
CreatedCallback created_callback);
const VerifiedContents& verified_contents() const;
const ComputedHashes::Reader& computed_hashes() const;
......@@ -86,11 +112,22 @@ class ContentHash {
// If ContentHash creation writes computed_hashes.json, then this returns the
// FilePaths whose content hash didn't match expected hashes.
const std::set<base::FilePath>& hash_mismatch_unix_paths() {
const std::set<base::FilePath>& hash_mismatch_unix_paths() const {
return hash_mismatch_unix_paths_;
}
const ExtensionKey extension_key() const { return key_; }
// Returns whether or not computed_hashes.json re-creation might be required
// for |this| to succeed.
// TODO(lazyboy): Remove this once https://crbug.com/819832 is fixed.
bool might_require_computed_hashes_force_creation() const {
return !succeeded() && has_verified_contents() &&
!did_attempt_creating_computed_hashes_;
}
private:
friend class base::RefCountedThreadSafe<ContentHash>;
enum class Status {
// Retrieving hashes failed.
kInvalid,
......@@ -105,12 +142,22 @@ class ContentHash {
ContentHash(const ExtensionKey& key,
std::unique_ptr<VerifiedContents> verified_contents,
std::unique_ptr<ComputedHashes::Reader> computed_hashes);
~ContentHash();
static std::unique_ptr<ContentHash> CreateImpl(
static void FetchVerifiedContents(const ExtensionKey& extension_key,
const FetchParams& fetch_params,
const IsCancelledCallback& is_cancelled,
CreatedCallback created_callback);
static void DidFetchVerifiedContents(
CreatedCallback created_callback,
const IsCancelledCallback& is_cancelled,
const ExtensionKey& key,
int mode,
bool create_computed_hashes_file,
const IsCancelledCallback& is_cancelled);
const FetchParams& fetch_params,
std::unique_ptr<std::string> fetched_contents);
static void DispatchFetchFailure(const ExtensionKey& key,
CreatedCallback created_callback,
const IsCancelledCallback& is_cancelled);
// Computes hashes for all files in |key_.extension_root|, and uses
// a ComputedHashes::Writer to write that information into |hashes_file|.
......@@ -124,9 +171,17 @@ class ContentHash {
bool CreateHashes(const base::FilePath& hashes_file,
const IsCancelledCallback& is_cancelled);
// Builds computed_hashes, possibly after creating the computed_hashes.json
// file if necessary.
void BuildComputedHashes(bool attempted_fetching_verified_contents,
bool force_build,
const IsCancelledCallback& is_cancelled);
ExtensionKey key_;
Status status_;
Status status_ = Status::kInvalid;
bool did_attempt_creating_computed_hashes_ = false;
// TODO(lazyboy): Avoid dynamic allocations here, |this| already supports
// move.
......
......@@ -166,6 +166,8 @@ void MockContentVerifierDelegate::Shutdown() {}
// VerifierObserver -----------------------------------------------------------
VerifierObserver::VerifierObserver() {
EXPECT_TRUE(
content::BrowserThread::GetCurrentThreadIdentifier(&creation_thread_));
ContentVerifier::SetObserverForTests(this);
}
......@@ -174,6 +176,7 @@ VerifierObserver::~VerifierObserver() {
}
void VerifierObserver::WaitForFetchComplete(const ExtensionId& extension_id) {
EXPECT_TRUE(content::BrowserThread::CurrentlyOn(creation_thread_));
EXPECT_TRUE(id_to_wait_for_.empty());
EXPECT_EQ(loop_runner_.get(), nullptr);
id_to_wait_for_ = extension_id;
......@@ -185,6 +188,13 @@ void VerifierObserver::WaitForFetchComplete(const ExtensionId& extension_id) {
void VerifierObserver::OnFetchComplete(const ExtensionId& extension_id,
bool success) {
if (!content::BrowserThread::CurrentlyOn(creation_thread_)) {
content::BrowserThread::PostTask(
creation_thread_, FROM_HERE,
base::BindOnce(&VerifierObserver::OnFetchComplete,
base::Unretained(this), extension_id, success));
return;
}
completed_fetches_.insert(extension_id);
if (extension_id == id_to_wait_for_)
loop_runner_->Quit();
......
......@@ -145,7 +145,12 @@ class VerifierObserver : public ContentVerifier::TestObserver {
private:
std::set<ExtensionId> completed_fetches_;
ExtensionId id_to_wait_for_;
// Created and accessed on |creation_thread_|.
scoped_refptr<content::MessageLoopRunner> loop_runner_;
content::BrowserThread::ID creation_thread_;
DISALLOW_COPY_AND_ASSIGN(VerifierObserver);
};
namespace content_verifier_test_utils {
......
......@@ -10,9 +10,12 @@
#include "base/stl_util.h"
#include "base/task_scheduler/post_task.h"
#include "base/timer/elapsed_timer.h"
#include "content/public/browser/browser_thread.h"
#include "crypto/secure_hash.h"
#include "crypto/sha2.h"
#include "extensions/browser/content_hash_reader.h"
#include "extensions/browser/content_verifier.h"
#include "extensions/browser/content_verifier/content_hash.h"
namespace extensions {
......@@ -44,7 +47,6 @@ ContentVerifyJob::ContentVerifyJob(const ExtensionId& extension_id,
const base::Version& extension_version,
const base::FilePath& extension_root,
const base::FilePath& relative_path,
const ContentVerifierKey& key,
FailureCallback failure_callback)
: done_reading_(false),
hashes_ready_(false),
......@@ -55,7 +57,6 @@ ContentVerifyJob::ContentVerifyJob(const ExtensionId& extension_id,
extension_version_(extension_version),
extension_root_(extension_root),
relative_path_(relative_path),
content_verifier_key_(key),
failure_callback_(std::move(failure_callback)),
failed_(false) {}
......@@ -64,16 +65,26 @@ ContentVerifyJob::~ContentVerifyJob() {
time_spent_.InMicroseconds());
}
void ContentVerifyJob::Start() {
void ContentVerifyJob::Start(ContentVerifier* verifier) {
DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
base::AutoLock auto_lock(lock_);
verifier->GetContentHash(
extension_id_, extension_root_, extension_version_,
true /* force_missing_computed_hashes_creation */,
base::BindOnce(&ContentVerifyJob::DidGetContentHashOnIO, this));
}
void ContentVerifyJob::DidGetContentHashOnIO(
const scoped_refptr<const ContentHash>& content_hash) {
DCHECK_CURRENTLY_ON(content::BrowserThread::IO);
base::AutoLock auto_lock(lock_);
if (g_content_verify_job_test_observer)
g_content_verify_job_test_observer->JobStarted(extension_id_,
relative_path_);
// Build |hash_reader_|.
base::PostTaskWithTraitsAndReplyWithResult(
FROM_HERE, {base::MayBlock(), base::TaskPriority::USER_VISIBLE},
base::BindOnce(&ContentHashReader::Create, extension_id_,
extension_version_, extension_root_, relative_path_,
content_verifier_key_),
base::BindOnce(&ContentHashReader::Create, relative_path_, content_hash),
base::BindOnce(&ContentVerifyJob::OnHashesReady, this));
}
......
......@@ -29,7 +29,9 @@ class SecureHash;
namespace extensions {
class ContentHash;
class ContentHashReader;
class ContentVerifier;
// Objects of this class are responsible for verifying that the actual content
// read from an extension file matches an expected set of hashes. This class
......@@ -59,12 +61,11 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
const base::Version& extension_version,
const base::FilePath& extension_root,
const base::FilePath& relative_path,
const ContentVerifierKey& content_verifier_key,
FailureCallback failure_callback);
// This begins the process of getting expected hashes, so it should be called
// as early as possible.
void Start();
void Start(ContentVerifier* verifier);
// Call this to add more bytes to verify. If at any point the read bytes
// don't match the expected hashes, this will dispatch the failure
......@@ -100,6 +101,8 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
virtual ~ContentVerifyJob();
friend class base::RefCountedThreadSafe<ContentVerifyJob>;
void DidGetContentHashOnIO(const scoped_refptr<const ContentHash>& hash);
// Same as BytesRead, but is run without acquiring lock.
void BytesReadImpl(int count, const char* data);
......@@ -146,8 +149,6 @@ class ContentVerifyJob : public base::RefCountedThreadSafe<ContentVerifyJob> {
const base::FilePath extension_root_;
const base::FilePath relative_path_;
const ContentVerifierKey content_verifier_key_;
base::TimeDelta time_spent_;
// Called once if verification fails.
......
......@@ -9,12 +9,18 @@
#include "base/path_service.h"
#include "base/version.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/test/mock_resource_context.h"
#include "content/public/test/test_browser_context.h"
#include "content/public/test/test_browser_thread_bundle.h"
#include "extensions/browser/content_verifier.h"
#include "extensions/browser/content_verifier/test_utils.h"
#include "extensions/browser/extensions_test.h"
#include "extensions/browser/info_map.h"
#include "extensions/common/constants.h"
#include "extensions/common/extension_paths.h"
#include "extensions/common/file_util.h"
#include "net/url_request/url_request_job_factory_impl.h"
#include "net/url_request/url_request_test_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/zlib/google/zip.h"
......@@ -42,7 +48,7 @@ class ContentVerifyJobUnittest : public ExtensionsTest {
public:
ContentVerifyJobUnittest()
// The TestBrowserThreadBundle is needed for ContentVerifyJob::Start().
: ExtensionsTest(content::TestBrowserThreadBundle::REAL_IO_THREAD) {}
: resource_context_(&test_url_request_context_) {}
~ContentVerifyJobUnittest() override {}
// Helper to get files from our subdirectory in the general extensions test
......@@ -54,6 +60,29 @@ class ContentVerifyJobUnittest : public ExtensionsTest {
.AppendASCII(relative_path);
}
void SetUp() override {
ExtensionsTest::SetUp();
extension_info_map_ = new InfoMap();
net::URLRequestContext* request_context =
resource_context_.GetRequestContext();
old_factory_ = request_context->job_factory();
content_verifier_ = new ContentVerifier(
&testing_context_, std::make_unique<MockContentVerifierDelegate>());
extension_info_map_->SetContentVerifier(content_verifier_.get());
}
void TearDown() override {
net::URLRequestContext* request_context =
resource_context_.GetRequestContext();
request_context->set_job_factory(old_factory_);
content_verifier_->Shutdown();
ExtensionsTest::TearDown();
}
ContentVerifier* content_verifier() { return content_verifier_.get(); }
protected:
ContentVerifyJob::FailureReason RunContentVerifyJob(
const Extension& extension,
......@@ -63,8 +92,6 @@ class ContentVerifyJobUnittest : public ExtensionsTest {
TestContentVerifySingleJobObserver observer(extension.id(), resource_path);
scoped_refptr<ContentVerifyJob> verify_job = new ContentVerifyJob(
extension.id(), extension.version(), extension.path(), resource_path,
ContentVerifierKey(kWebstoreSignaturesPublicKey,
kWebstoreSignaturesPublicKeySize),
base::DoNothing());
auto run_content_read_step = [](ContentVerifyJob* verify_job,
......@@ -77,15 +104,15 @@ class ContentVerifyJobUnittest : public ExtensionsTest {
switch (run_mode) {
case kNone:
verify_job->Start(); // Read hashes asynchronously.
StartJob(verify_job); // Read hashes asynchronously.
run_content_read_step(verify_job.get(), &resource_contents);
break;
case kContentReadBeforeHashesReady:
run_content_read_step(verify_job.get(), &resource_contents);
verify_job->Start(); // Read hashes asynchronously.
StartJob(verify_job); // Read hashes asynchronously.
break;
case kHashesReadyBeforeContentRead:
verify_job->Start();
StartJob(verify_job);
// Wait for hashes to become ready.
observer.WaitForOnHashesReady();
run_content_read_step(verify_job.get(), &resource_contents);
......@@ -128,6 +155,21 @@ class ContentVerifyJobUnittest : public ExtensionsTest {
}
private:
void StartJob(scoped_refptr<ContentVerifyJob> job) {
content::BrowserThread::PostTask(
content::BrowserThread::IO, FROM_HERE,
base::BindOnce(&ContentVerifyJob::Start, job,
base::Unretained(content_verifier_.get())));
}
scoped_refptr<InfoMap> extension_info_map_;
net::URLRequestJobFactoryImpl job_factory_;
const net::URLRequestJobFactory* old_factory_;
net::TestURLRequestContext test_url_request_context_;
content::MockResourceContext resource_context_;
scoped_refptr<ContentVerifier> content_verifier_;
content::TestBrowserContext testing_context_;
DISALLOW_COPY_AND_ASSIGN(ContentVerifyJobUnittest);
};
......
......@@ -650,7 +650,7 @@ ExtensionProtocolHandler::MaybeCreateJob(
verify_job =
verifier->CreateJobFor(extension_id, directory_path, relative_path);
if (verify_job)
verify_job->Start();
verify_job->Start(verifier);
}
return new URLRequestExtensionJob(request,
......@@ -964,7 +964,7 @@ class ExtensionURLLoaderFactory : public network::mojom::URLLoaderFactory {
resource.extension_root(),
resource.relative_path());
if (verify_job)
verify_job->Start();
verify_job->Start(content_verifier.get());
}
content::CreateFileURLLoader(
......
......@@ -52,7 +52,7 @@ void VerifyContent(const scoped_refptr<ContentVerifier>& verifier,
scoped_refptr<ContentVerifyJob> job(
verifier->CreateJobFor(extension_id, extension_root, relative_path));
if (job.get()) {
job->Start();
job->Start(verifier.get());
job->BytesRead(content.size(), content.data());
job->DoneReading();
}
......