Commit 0fad62b7 authored by eroman, committed by Commit bot

Initial implementation for CertNetFetcher.

CertNetFetcher is a class for issuing and cancelling network fetches that will be used by CertVerifier. It fetches http:// AIA and CRL URLs.

This initial implementation has some remaining TODOs:
  - Add POST parameters
  - Add cache bypass controls
  - Add maximum request thresholds
  - Add more tests for cancellation/de-duplication

BUG=455366
NOPRESUBMIT=true

Review URL: https://codereview.chromium.org/908863004

Cr-Commit-Position: refs/heads/master@{#324261}
parent 3523bf88
@@ -1564,6 +1564,7 @@ if (!is_android && !is_win && !is_mac) {
"spdy/fuzzing/hpack_fuzz_util_test.cc",
# Need TestServer.
"cert_net/cert_net_fetcher_impl_unittest.cc",
"proxy/proxy_script_fetcher_impl_unittest.cc",
"socket/ssl_client_socket_unittest.cc",
"url_request/url_fetcher_impl_unittest.cc",
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NET_CERT_CERT_NET_FETCHER_H_
#define NET_CERT_CERT_NET_FETCHER_H_
#include <stdint.h>
#include <vector>
#include "base/callback.h"
#include "base/compiler_specific.h"
#include "base/macros.h"
#include "base/memory/scoped_ptr.h"
#include "net/base/net_errors.h"
#include "net/base/net_export.h"
class GURL;
namespace net {
class URLRequestContext;
// CertNetFetcher is an asynchronous interface for fetching AIA URLs and CRL
// URLs.
//
// -------------------------
// Cancellation of requests
// -------------------------
//
// * Network requests started by the CertNetFetcher can be cancelled by
// deleting the Request object. Cancellation means the request's callback
// will no longer be invoked.
//
// * If the CertNetFetcher is deleted then any outstanding
// requests are automatically cancelled.
//
// * Cancelling a request within the execution of a callback is allowed.
//
// * Deleting the CertNetFetcher from within the execution of a callback is
// allowed.
//
// -------------------------
// Threading
// -------------------------
//
// The CertNetFetcher is expected to be operated from a single thread, which has
// an IO message loop. The URLRequestContext will be accessed from this same
// thread, and callbacks will be posted to this message loop.
//
// For more details see the design document:
// https://docs.google.com/a/chromium.org/document/d/1CdS9YOnPdAyVZBJqHY7ZJ6tUlU71OCvX8kHnaVhf144/edit
class NET_EXPORT CertNetFetcher {
public:
class Request {
public:
virtual ~Request() {}
};
// Callback invoked on request completion. If the Error is OK, then the
// vector contains the response bytes.
using FetchCallback =
base::Callback<void(Error, const std::vector<uint8_t>&)>;
// This value can be used in place of timeout or max size limits.
enum { DEFAULT = -1 };
CertNetFetcher() {}
// Deletion implicitly cancels any outstanding requests.
virtual ~CertNetFetcher() {}
// The Fetch*() methods start an asynchronous request which can be cancelled
// by deleting the returned Request. Here is the meaning of the common
// parameters:
//
// * url -- The http:// URL to fetch.
// * timeout_milliseconds -- The maximum allowed duration for the fetch job. If
// this delay is exceeded then the request will fail. To use a default
// timeout pass DEFAULT.
// * max_response_bytes -- The maximum size of the response body. If this
// size is exceeded then the request will fail. To use a default maximum
// size pass DEFAULT.
// * callback -- The callback that will be invoked on completion of the job.
virtual WARN_UNUSED_RESULT scoped_ptr<Request> FetchCaIssuers(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) = 0;
virtual WARN_UNUSED_RESULT scoped_ptr<Request> FetchCrl(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) = 0;
virtual WARN_UNUSED_RESULT scoped_ptr<Request> FetchOcsp(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) = 0;
private:
DISALLOW_COPY_AND_ASSIGN(CertNetFetcher);
};
} // namespace net
#endif // NET_CERT_CERT_NET_FETCHER_H_
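A minimal caller-side sketch of the interface above (for illustration only; OnAiaFetched and StartAiaFetch are hypothetical names, and in real use the fetcher and the returned Request must be owned by something that lives until the callback runs):

// Hypothetical completion handler for an AIA fetch.
void OnAiaFetched(net::Error error, const std::vector<uint8_t>& bytes) {
  if (error == net::OK) {
    // |bytes| holds the raw response body (e.g. a DER-encoded certificate).
  }
}

// |fetcher| is assumed to be a long-lived CertNetFetcher (e.g. a
// CertNetFetcherImpl created on the network thread with a URLRequestContext
// that outlives it). The returned Request must be kept alive until the
// callback runs; deleting it (or deleting the fetcher) cancels the fetch and
// the callback is never invoked.
scoped_ptr<net::CertNetFetcher::Request> StartAiaFetch(
    net::CertNetFetcher* fetcher, const GURL& aia_url) {
  return fetcher->FetchCaIssuers(aia_url, net::CertNetFetcher::DEFAULT,
                                 net::CertNetFetcher::DEFAULT,
                                 base::Bind(&OnAiaFetched));
}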
cert_net/ contains certificate functionality that depends on network loading (OCSP, CRL, and AIA fetching).
Conceptually, certificates (net/cert/) are a separable concept from the rest of
net/ and may end up becoming their own build target. This file organization
encourages not adding network-loading dependencies to cert/; such code should
be placed here instead.
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/cert_net/cert_net_fetcher_impl.h"
#include <algorithm>
#include <deque>
#include "base/callback_helpers.h"
#include "base/logging.h"
#include "base/numerics/safe_math.h"
#include "base/stl_util.h"
#include "base/timer/timer.h"
#include "net/base/load_flags.h"
#include "net/url_request/redirect_info.h"
#include "net/url_request/url_request_context.h"
// TODO(eroman): Add support for POST parameters.
// TODO(eroman): Add controls for bypassing the cache.
// TODO(eroman): Add a maximum number of in-flight jobs/requests.
// TODO(eroman): Add NetLog integration.
namespace net {
namespace {
// The size of the buffer used for reading the response body of the URLRequest.
const int kReadBufferSizeInBytes = 4096;
// The maximum size in bytes for the response body when fetching a CRL.
const int kMaxResponseSizeInBytesForCrl = 5 * 1024 * 1024;
// The maximum size in bytes for the response body when fetching an AIA URL
// (caIssuers/OCSP).
const int kMaxResponseSizeInBytesForAia = 64 * 1024;
// The default timeout in seconds for fetch requests.
const int kTimeoutSeconds = 15;
// Policy for which URLs are allowed to be fetched. This is called both for the
// initial URL and for each redirect. Returns OK on success or a net error
// code on failure.
Error CanFetchUrl(const GURL& url) {
if (!url.SchemeIs("http"))
return ERR_DISALLOWED_URL_SCHEME;
return OK;
}
base::TimeDelta GetTimeout(int timeout_milliseconds) {
if (timeout_milliseconds == CertNetFetcher::DEFAULT)
return base::TimeDelta::FromSeconds(kTimeoutSeconds);
return base::TimeDelta::FromMilliseconds(timeout_milliseconds);
}
size_t GetMaxResponseBytes(int max_response_bytes,
size_t default_max_response_bytes) {
if (max_response_bytes == CertNetFetcher::DEFAULT)
return default_max_response_bytes;
// Ensure that the specified limit is not negative, and cannot result in an
// overflow while reading.
base::CheckedNumeric<size_t> check(max_response_bytes);
check += kReadBufferSizeInBytes;
DCHECK(check.IsValid());
return max_response_bytes;
}
enum HttpMethod {
HTTP_METHOD_GET,
HTTP_METHOD_POST,
};
} // namespace
// CertNetFetcherImpl::RequestImpl tracks an outstanding call to Fetch().
class CertNetFetcherImpl::RequestImpl : public CertNetFetcher::Request {
public:
RequestImpl(Job* job, const FetchCallback& callback)
: callback_(callback), job_(job) {
DCHECK(!callback.is_null());
}
// Deletion cancels the outstanding request.
~RequestImpl() override;
void OnJobCancelled(Job* job) {
DCHECK_EQ(job_, job);
job_ = nullptr;
callback_.Reset();
}
void OnJobCompleted(Job* job,
Error error,
const std::vector<uint8_t>& response_body) {
DCHECK_EQ(job_, job);
job_ = nullptr;
base::ResetAndReturn(&callback_).Run(error, response_body);
}
private:
// The callback to invoke when the request has completed.
FetchCallback callback_;
// A non-owned pointer to the job that is executing the request.
Job* job_;
DISALLOW_COPY_AND_ASSIGN(RequestImpl);
};
struct CertNetFetcherImpl::RequestParams {
RequestParams();
bool operator<(const RequestParams& other) const;
GURL url;
HttpMethod http_method;
size_t max_response_bytes;
// A value of zero or less means "no timeout".
base::TimeDelta timeout;
// IMPORTANT: When adding fields to this structure, update operator<().
private:
DISALLOW_COPY_AND_ASSIGN(RequestParams);
};
CertNetFetcherImpl::RequestParams::RequestParams()
: http_method(HTTP_METHOD_GET), max_response_bytes(0) {
}
bool CertNetFetcherImpl::RequestParams::operator<(
const RequestParams& other) const {
if (url != other.url)
return url < other.url;
if (http_method != other.http_method)
return http_method < other.http_method;
if (max_response_bytes != other.max_response_bytes)
return max_response_bytes < other.max_response_bytes;
return timeout < other.timeout;
}
// CertNetFetcherImpl::Job tracks an outstanding URLRequest as well as all of
// the pending requests for it.
class CertNetFetcherImpl::Job : public URLRequest::Delegate {
public:
Job(scoped_ptr<RequestParams> request_params, CertNetFetcherImpl* parent);
~Job() override;
// Cancels the job and all requests attached to it. No callbacks will be
// invoked following cancellation.
void Cancel();
const RequestParams& request_params() const { return *request_params_; }
// Creates a request and attaches it to the job. When the job completes it will
// notify the request of completion through OnJobCompleted(). Note that the Job
// does NOT own the request.
scoped_ptr<Request> CreateRequest(const FetchCallback& callback);
// Removes |request| from the job.
void DetachRequest(RequestImpl* request);
// Creates and starts a URLRequest for the job. After the request has
// completed, OnJobCompleted() will be invoked and all the registered requests
// notified of completion.
void StartURLRequest(URLRequestContext* context);
private:
// The pointers in RequestList are not owned by the Job.
using RequestList = std::deque<RequestImpl*>;
// Implementation of URLRequest::Delegate
void OnReceivedRedirect(URLRequest* request,
const RedirectInfo& redirect_info,
bool* defer_redirect) override;
void OnResponseStarted(URLRequest* request) override;
void OnReadCompleted(URLRequest* request, int bytes_read) override;
// Clears the URLRequest and timer. Helper for doing work common to
// cancellation and job completion.
void Stop();
// Reads as much data as available from |request|.
void ReadBody(URLRequest* request);
// Helper to copy the partial bytes read from the read IOBuffer to an
// aggregated buffer.
bool ConsumeBytesRead(URLRequest* request, int num_bytes);
// Called once the job has exceeded its deadline.
void OnTimeout();
// Called when the URLRequest has completed (either success or failure).
void OnUrlRequestCompleted(URLRequest* request);
// Called when the Job has completed. The job may finish in response to a
// timeout, an invalid URL, or the URLRequest completing. By the time this
// method is called, the response variables have been assigned
// (result_net_error_ and response_body_).
void OnJobCompleted();
// The requests attached to this job.
RequestList requests_;
// The input parameters for starting a URLRequest.
scoped_ptr<RequestParams> request_params_;
// The URLRequest response information.
std::vector<uint8_t> response_body_;
Error result_net_error_;
scoped_ptr<URLRequest> url_request_;
scoped_refptr<IOBuffer> read_buffer_;
// Used to time out the job when the URLRequest takes too long. This timer is
// also used to signal a failure to start the URLRequest.
base::OneShotTimer<Job> timer_;
// Non-owned pointer to the CertNetFetcherImpl that created this job.
CertNetFetcherImpl* parent_;
DISALLOW_COPY_AND_ASSIGN(Job);
};
CertNetFetcherImpl::RequestImpl::~RequestImpl() {
if (job_)
job_->DetachRequest(this);
}
CertNetFetcherImpl::Job::Job(scoped_ptr<RequestParams> request_params,
CertNetFetcherImpl* parent)
: request_params_(request_params.Pass()),
result_net_error_(ERR_IO_PENDING),
parent_(parent) {
}
CertNetFetcherImpl::Job::~Job() {
Cancel();
}
void CertNetFetcherImpl::Job::Cancel() {
parent_ = nullptr;
for (RequestImpl* request : requests_)
request->OnJobCancelled(this);
requests_.clear();
Stop();
}
scoped_ptr<CertNetFetcher::Request> CertNetFetcherImpl::Job::CreateRequest(
const FetchCallback& callback) {
scoped_ptr<RequestImpl> request(new RequestImpl(this, callback));
requests_.push_back(request.get());
return request.Pass();
}
void CertNetFetcherImpl::Job::DetachRequest(RequestImpl* request) {
scoped_ptr<Job> delete_this;
// TODO(eroman): If a lot of requests are cancelled this is not efficient.
RequestList::iterator it =
std::find(requests_.begin(), requests_.end(), request);
CHECK(it != requests_.end());
requests_.erase(it);
// If there are no longer any requests attached to the job then
// cancel and delete it.
if (requests_.empty())
delete_this = parent_->RemoveJob(this);
}
void CertNetFetcherImpl::Job::StartURLRequest(URLRequestContext* context) {
Error error = CanFetchUrl(request_params_->url);
if (error != OK) {
result_net_error_ = error;
// The CertNetFetcher's API contract is that requests always complete
// asynchronously. Use the timer class so the task is easily cancelled.
timer_.Start(FROM_HERE, base::TimeDelta(), this, &Job::OnJobCompleted);
return;
}
// Start the URLRequest.
read_buffer_ = new IOBuffer(kReadBufferSizeInBytes);
url_request_ =
context->CreateRequest(request_params_->url, DEFAULT_PRIORITY, this);
if (request_params_->http_method == HTTP_METHOD_POST)
url_request_->set_method("POST");
url_request_->SetLoadFlags(LOAD_DO_NOT_SAVE_COOKIES |
LOAD_DO_NOT_SEND_COOKIES);
url_request_->Start();
// Start a timer to limit how long the job runs for.
if (request_params_->timeout > base::TimeDelta())
timer_.Start(FROM_HERE, request_params_->timeout, this, &Job::OnTimeout);
}
void CertNetFetcherImpl::Job::OnReceivedRedirect(
URLRequest* request,
const RedirectInfo& redirect_info,
bool* defer_redirect) {
DCHECK_EQ(url_request_.get(), request);
// Ensure that the new URL matches the policy.
Error error = CanFetchUrl(redirect_info.new_url);
if (error != OK) {
request->CancelWithError(error);
OnUrlRequestCompleted(request);
return;
}
}
void CertNetFetcherImpl::Job::OnResponseStarted(URLRequest* request) {
DCHECK_EQ(url_request_.get(), request);
if (!request->status().is_success()) {
OnUrlRequestCompleted(request);
return;
}
if (request->GetResponseCode() != 200) {
// TODO(eroman): Use a more specific error code.
request->CancelWithError(ERR_FAILED);
OnUrlRequestCompleted(request);
return;
}
ReadBody(request);
}
void CertNetFetcherImpl::Job::OnReadCompleted(URLRequest* request,
int bytes_read) {
DCHECK_EQ(url_request_.get(), request);
// Keep reading the response body.
if (ConsumeBytesRead(request, bytes_read))
ReadBody(request);
}
void CertNetFetcherImpl::Job::Stop() {
timer_.Stop();
url_request_.reset();
}
void CertNetFetcherImpl::Job::ReadBody(URLRequest* request) {
// Read as many bytes as are available synchronously.
int num_bytes;
while (
request->Read(read_buffer_.get(), kReadBufferSizeInBytes, &num_bytes)) {
if (!ConsumeBytesRead(request, num_bytes))
return;
}
// Check whether the read failed synchronously.
if (!request->status().is_io_pending())
OnUrlRequestCompleted(request);
return;
}
bool CertNetFetcherImpl::Job::ConsumeBytesRead(URLRequest* request,
int num_bytes) {
if (num_bytes <= 0) {
// Error while reading, or EOF.
OnUrlRequestCompleted(request);
return false;
}
// Enforce maximum size bound.
if (num_bytes + response_body_.size() > request_params_->max_response_bytes) {
request->CancelWithError(ERR_FILE_TOO_BIG);
OnUrlRequestCompleted(request);
return false;
}
// Append the data to |response_body_|.
response_body_.reserve(response_body_.size() + num_bytes);
response_body_.insert(response_body_.end(), read_buffer_->data(),
read_buffer_->data() + num_bytes);
return true;
}
void CertNetFetcherImpl::Job::OnTimeout() {
result_net_error_ = ERR_TIMED_OUT;
url_request_->CancelWithError(result_net_error_);
OnJobCompleted();
}
void CertNetFetcherImpl::Job::OnUrlRequestCompleted(URLRequest* request) {
DCHECK_EQ(request, url_request_.get());
if (request->status().is_success())
result_net_error_ = OK;
else
result_net_error_ = static_cast<Error>(request->status().error());
OnJobCompleted();
}
void CertNetFetcherImpl::Job::OnJobCompleted() {
// Stop the timer and clear the URLRequest.
Stop();
// Invoking the callbacks is subtle as state may be mutated while iterating
// through the callbacks:
//
// * The parent CertNetFetcherImpl may be deleted
// * Requests in this job may be cancelled
scoped_ptr<Job> delete_this = parent_->RemoveJob(this);
parent_->SetCurrentlyCompletingJob(this);
while (!requests_.empty()) {
RequestImpl* request = requests_.front();
requests_.pop_front();
request->OnJobCompleted(this, result_net_error_, response_body_);
}
if (parent_)
parent_->ClearCurrentlyCompletingJob(this);
}
CertNetFetcherImpl::CertNetFetcherImpl(URLRequestContext* context)
: currently_completing_job_(nullptr), context_(context) {
}
CertNetFetcherImpl::~CertNetFetcherImpl() {
STLDeleteElements(&jobs_);
// If the CertNetFetcherImpl was destroyed from within a FetchCallback, detach
// all remaining requests from the currently completing job so no further
// callbacks are invoked.
if (currently_completing_job_)
currently_completing_job_->Cancel();
}
scoped_ptr<CertNetFetcher::Request> CertNetFetcherImpl::FetchCaIssuers(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) {
scoped_ptr<RequestParams> request_params(new RequestParams);
request_params->url = url;
request_params->http_method = HTTP_METHOD_GET;
request_params->timeout = GetTimeout(timeout_milliseconds);
request_params->max_response_bytes =
GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia);
return Fetch(request_params.Pass(), callback);
}
scoped_ptr<CertNetFetcher::Request> CertNetFetcherImpl::FetchCrl(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) {
scoped_ptr<RequestParams> request_params(new RequestParams);
request_params->url = url;
request_params->http_method = HTTP_METHOD_GET;
request_params->timeout = GetTimeout(timeout_milliseconds);
request_params->max_response_bytes =
GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForCrl);
return Fetch(request_params.Pass(), callback);
}
scoped_ptr<CertNetFetcher::Request> CertNetFetcherImpl::FetchOcsp(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) {
scoped_ptr<RequestParams> request_params(new RequestParams);
request_params->url = url;
request_params->http_method = HTTP_METHOD_GET;
request_params->timeout = GetTimeout(timeout_milliseconds);
request_params->max_response_bytes =
GetMaxResponseBytes(max_response_bytes, kMaxResponseSizeInBytesForAia);
return Fetch(request_params.Pass(), callback);
}
bool CertNetFetcherImpl::JobComparator::operator()(const Job* job1,
const Job* job2) const {
return job1->request_params() < job2->request_params();
}
scoped_ptr<CertNetFetcher::Request> CertNetFetcherImpl::Fetch(
scoped_ptr<RequestParams> request_params,
const FetchCallback& callback) {
DCHECK(thread_checker_.CalledOnValidThread());
// If there is an in-progress job that matches the request parameters use it.
// Otherwise start a new job.
Job* job = FindJob(*request_params);
if (!job) {
job = new Job(request_params.Pass(), this);
jobs_.insert(job);
job->StartURLRequest(context_);
}
return job->CreateRequest(callback);
}
struct CertNetFetcherImpl::JobToRequestParamsComparator {
bool operator()(const Job* job,
const CertNetFetcherImpl::RequestParams& value) const {
return job->request_params() < value;
}
};
CertNetFetcherImpl::Job* CertNetFetcherImpl::FindJob(
const RequestParams& params) {
DCHECK(thread_checker_.CalledOnValidThread());
// The JobSet is kept in sorted order so items can be found using binary
// search.
JobSet::iterator it = std::lower_bound(jobs_.begin(), jobs_.end(), params,
JobToRequestParamsComparator());
if (it != jobs_.end() && !(params < (*it)->request_params()))
return *it;
return nullptr;
}
scoped_ptr<CertNetFetcherImpl::Job> CertNetFetcherImpl::RemoveJob(Job* job) {
DCHECK(thread_checker_.CalledOnValidThread());
bool erased_job = jobs_.erase(job) == 1;
DCHECK(erased_job);
return make_scoped_ptr(job);
}
void CertNetFetcherImpl::SetCurrentlyCompletingJob(Job* job) {
DCHECK(!currently_completing_job_);
DCHECK(job);
currently_completing_job_ = job;
}
void CertNetFetcherImpl::ClearCurrentlyCompletingJob(Job* job) {
DCHECK_EQ(currently_completing_job_, job);
currently_completing_job_ = nullptr;
}
} // namespace net
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NET_CERT_NET_CERT_NET_FETCHER_IMPL_H_
#define NET_CERT_NET_CERT_NET_FETCHER_IMPL_H_
#include <set>
#include "base/callback.h"
#include "base/memory/scoped_ptr.h"
#include "base/threading/thread_checker.h"
#include "net/base/net_errors.h"
#include "net/base/net_export.h"
#include "net/cert/cert_net_fetcher.h"
namespace net {
class URLRequestContext;
// CertNetFetcherImpl is an implementation of CertNetFetcher that uses the
// network stack.
//
// For more details refer to the documentation for the interface.
class NET_EXPORT CertNetFetcherImpl : public CertNetFetcher {
public:
// Initializes CertNetFetcherImpl using the specified URLRequestContext for
// issuing requests. |context| must remain valid for the entire lifetime of
// the CertNetFetcherImpl.
explicit CertNetFetcherImpl(URLRequestContext* context);
// Deletion implicitly cancels any outstanding requests.
~CertNetFetcherImpl() override;
WARN_UNUSED_RESULT scoped_ptr<Request> FetchCaIssuers(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) override;
WARN_UNUSED_RESULT scoped_ptr<Request> FetchCrl(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) override;
WARN_UNUSED_RESULT scoped_ptr<Request> FetchOcsp(
const GURL& url,
int timeout_milliseconds,
int max_response_bytes,
const FetchCallback& callback) override;
private:
class RequestImpl;
class Job;
struct JobToRequestParamsComparator;
struct RequestParams;
struct JobComparator {
bool operator()(const Job* job1, const Job* job2) const;
};
// Owns the jobs.
using JobSet = std::set<Job*, JobComparator>;
// Starts an asynchronous request to fetch the given URL. On completion
// |callback| will be invoked.
//
// Completion of the request will never occur synchronously. In other words it
// is guaranteed that |callback| will only be invoked once the Fetch*() method
// has returned.
WARN_UNUSED_RESULT scoped_ptr<Request> Fetch(
scoped_ptr<RequestParams> request_params,
const FetchCallback& callback);
// Finds a job with a matching RequestParams or returns nullptr if there was
// no match.
Job* FindJob(const RequestParams& params);
// Removes |job| from the in progress jobs and transfers ownership to the
// caller.
scoped_ptr<Job> RemoveJob(Job* job);
// Indicates which Job is currently executing inside of OnJobCompleted().
void SetCurrentlyCompletingJob(Job* job);
void ClearCurrentlyCompletingJob(Job* job);
// The in-progress jobs. This set does not contain the job which is actively
// invoking callbacks (OnJobCompleted). Instead that is tracked by
// |currently_completing_job_|.
JobSet jobs_;
// The Job that is currently executing OnJobCompleted(). There can be at most
// one such job. This pointer is not owned.
Job* currently_completing_job_;
// Not owned. The URLRequestContext must outlive the CertNetFetcherImpl.
URLRequestContext* context_;
base::ThreadChecker thread_checker_;
DISALLOW_COPY_AND_ASSIGN(CertNetFetcherImpl);
};
} // namespace net
#endif // NET_CERT_NET_CERT_NET_FETCHER_IMPL_H_
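As a rough illustration of the de-duplication performed by CertNetFetcherImpl::Fetch() (a sketch only; FetchCrlTwice is a hypothetical helper, and |fetcher|, |crl_url|, |callback1| and |callback2| are assumed to exist), two fetches issued with identical parameters share a single Job, and therefore a single URLRequest:

// Both calls below build identical RequestParams (same URL, HTTP method,
// timeout and size limit), so the second fetch attaches to the Job created by
// the first one and only a single URLRequest is started.
void FetchCrlTwice(net::CertNetFetcher* fetcher,
                   const GURL& crl_url,
                   const net::CertNetFetcher::FetchCallback& callback1,
                   const net::CertNetFetcher::FetchCallback& callback2) {
  scoped_ptr<net::CertNetFetcher::Request> req1 = fetcher->FetchCrl(
      crl_url, net::CertNetFetcher::DEFAULT, net::CertNetFetcher::DEFAULT,
      callback1);
  scoped_ptr<net::CertNetFetcher::Request> req2 = fetcher->FetchCrl(
      crl_url, net::CertNetFetcher::DEFAULT, net::CertNetFetcher::DEFAULT,
      callback2);
  // Deleting one request only detaches it from the shared Job; the Job (and
  // its URLRequest) is cancelled once the last attached request is gone.
  req1.reset();
  // (Here both requests go out of scope, which cancels the remaining one.)
}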
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/cert_net/cert_net_fetcher_impl.h"
#include <string>
#include "base/compiler_specific.h"
#include "base/run_loop.h"
#include "net/cert/mock_cert_verifier.h"
#include "net/dns/mock_host_resolver.h"
#include "net/http/http_server_properties_impl.h"
#include "net/test/spawned_test_server/spawned_test_server.h"
#include "net/url_request/url_request_job_factory_impl.h"
#include "net/url_request/url_request_test_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/platform_test.h"
// TODO(eroman): Test that cookies aren't sent.
using base::ASCIIToUTF16;
namespace net {
namespace {
const base::FilePath::CharType kDocRoot[] =
FILE_PATH_LITERAL("net/data/cert_net_fetcher_impl_unittest");
// A non-mock URLRequestContext which can access http:// URLs.
class RequestContext : public URLRequestContext {
public:
RequestContext() : storage_(this) {
ProxyConfig no_proxy;
storage_.set_host_resolver(scoped_ptr<HostResolver>(new MockHostResolver));
storage_.set_cert_verifier(new MockCertVerifier);
storage_.set_transport_security_state(new TransportSecurityState);
storage_.set_proxy_service(ProxyService::CreateFixed(no_proxy));
storage_.set_ssl_config_service(new SSLConfigServiceDefaults);
storage_.set_http_server_properties(
scoped_ptr<HttpServerProperties>(new HttpServerPropertiesImpl()));
HttpNetworkSession::Params params;
params.host_resolver = host_resolver();
params.cert_verifier = cert_verifier();
params.transport_security_state = transport_security_state();
params.proxy_service = proxy_service();
params.ssl_config_service = ssl_config_service();
params.http_server_properties = http_server_properties();
scoped_refptr<HttpNetworkSession> network_session(
new HttpNetworkSession(params));
storage_.set_http_transaction_factory(new HttpCache(
network_session.get(), HttpCache::DefaultBackend::InMemory(0)));
URLRequestJobFactoryImpl* job_factory = new URLRequestJobFactoryImpl();
storage_.set_job_factory(job_factory);
}
~RequestContext() override { AssertNoURLRequests(); }
private:
URLRequestContextStorage storage_;
};
class FetchResult {
public:
FetchResult(Error net_error, const std::vector<uint8_t>& response_body)
: net_error_(net_error), response_body_(response_body) {}
void VerifySuccess(const std::string& expected_body) {
EXPECT_EQ(OK, net_error_);
EXPECT_EQ(expected_body,
std::string(response_body_.begin(), response_body_.end()));
}
void VerifyFailure(Error expected_error) {
EXPECT_EQ(expected_error, net_error_);
EXPECT_EQ(0u, response_body_.size());
}
private:
const Error net_error_;
const std::vector<uint8_t> response_body_;
};
// Helper to synchronously wait for the fetch completion. This is similar to
// net's TestCompletionCallback, but built around FetchCallback.
class TestFetchCallback {
public:
TestFetchCallback()
: callback_(base::Bind(&TestFetchCallback::OnCallback,
base::Unretained(this))) {}
const CertNetFetcher::FetchCallback& callback() const { return callback_; }
scoped_ptr<FetchResult> WaitForResult() {
DCHECK(quit_closure_.is_null());
while (!HasResult()) {
base::RunLoop run_loop;
quit_closure_ = run_loop.QuitClosure();
run_loop.Run();
quit_closure_.Reset();
}
return result_.Pass();
}
bool HasResult() const { return result_.get(); }
// Sets an extra action (in addition to recording the result) that is run when
// the FetchCallback is invoked.
void set_extra_closure(const base::Closure& closure) {
extra_closure_ = closure;
}
private:
void OnCallback(Error net_error, const std::vector<uint8_t>& response_body) {
DCHECK(!HasResult());
result_.reset(new FetchResult(net_error, response_body));
if (!extra_closure_.is_null())
extra_closure_.Run();
if (!quit_closure_.is_null())
quit_closure_.Run();
}
CertNetFetcher::FetchCallback callback_;
scoped_ptr<FetchResult> result_;
base::Closure quit_closure_;
base::Closure extra_closure_;
};
} // namespace
class CertNetFetcherImplTest : public PlatformTest {
public:
CertNetFetcherImplTest()
: test_server_(SpawnedTestServer::TYPE_HTTP,
net::SpawnedTestServer::kLocalhost,
base::FilePath(kDocRoot)) {
context_.set_network_delegate(&network_delegate_);
}
protected:
SpawnedTestServer test_server_;
TestNetworkDelegate network_delegate_;
RequestContext context_;
};
// Helper to start an AIA fetch using default parameters.
WARN_UNUSED_RESULT scoped_ptr<CertNetFetcher::Request> StartRequest(
CertNetFetcher* fetcher,
const GURL& url,
const TestFetchCallback& callback) {
return fetcher->FetchCaIssuers(url, CertNetFetcher::DEFAULT,
CertNetFetcher::DEFAULT, callback.callback());
}
// Fetch a few unique URLs using GET in parallel. Each URL has a different body
// and Content-Type.
TEST_F(CertNetFetcherImplTest, ParallelFetchNoDuplicates) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback1;
TestFetchCallback callback2;
TestFetchCallback callback3;
// Request a URL with Content-Type "application/pkix-cert"
GURL url1 = test_server_.GetURL("files/cert.crt");
scoped_ptr<CertNetFetcher::Request> request1 =
StartRequest(&fetcher, url1, callback1);
// Request a URL with Content-Type "application/pkix-crl"
GURL url2 = test_server_.GetURL("files/root.crl");
scoped_ptr<CertNetFetcher::Request> request2 =
StartRequest(&fetcher, url2, callback2);
// Request a URL with Content-Type "application/pkcs7-mime"
GURL url3 = test_server_.GetURL("files/certs.p7c");
scoped_ptr<CertNetFetcher::Request> request3 =
StartRequest(&fetcher, url3, callback3);
// Wait for all of the requests to complete.
scoped_ptr<FetchResult> result1 = callback1.WaitForResult();
scoped_ptr<FetchResult> result2 = callback2.WaitForResult();
scoped_ptr<FetchResult> result3 = callback3.WaitForResult();
// Verify the fetch results.
result1->VerifySuccess("-cert.crt-\n");
result2->VerifySuccess("-root.crl-\n");
result3->VerifySuccess("-certs.p7c-\n");
EXPECT_EQ(3, network_delegate_.created_requests());
}
// Fetch a caIssuers URL which has an unexpected extension and Content-Type.
// The extension is .txt and the Content-Type is text/plain. Despite being
// unusual this succeeds as the extension and Content-Type are not required to
// be meaningful.
TEST_F(CertNetFetcherImplTest, ContentTypeDoesntMatter) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback;
GURL url = test_server_.GetURL("files/foo.txt");
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifySuccess("-foo.txt-\n");
}
// Fetch URLs whose HTTP response code is not 200. These are considered
// failures.
TEST_F(CertNetFetcherImplTest, HttpStatusCode) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
// Response was HTTP status 404.
{
TestFetchCallback callback;
GURL url = test_server_.GetURL("files/404.html");
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_FAILED);
}
// Response was HTTP status 500.
{
TestFetchCallback callback;
GURL url = test_server_.GetURL("files/500.html");
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_FAILED);
}
}
// Fetching a URL with a Content-Disposition header should have no effect.
TEST_F(CertNetFetcherImplTest, ContentDisposition) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback;
GURL url = test_server_.GetURL("files/downloadable.js");
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifySuccess("-downloadable.js-\n");
}
// Verifies that a cacheable request will be served from the HTTP cache the
// second time it is requested.
TEST_F(CertNetFetcherImplTest, Cache) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
// Fetch a URL whose HTTP headers make it cacheable for 1 hour.
GURL url(test_server_.GetURL("files/cacheable_1hr.crt"));
{
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifySuccess("-cacheable_1hr.crt-\n");
}
EXPECT_EQ(1, network_delegate_.created_requests());
// Kill the HTTP server.
ASSERT_TRUE(test_server_.Stop());
// Fetch again -- will fail unless served from cache.
{
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifySuccess("-cacheable_1hr.crt-\n");
}
EXPECT_EQ(2, network_delegate_.created_requests());
}
// Verify that the maximum response body constraints are enforced by fetching a
// resource that is larger than the limit.
TEST_F(CertNetFetcherImplTest, TooLarge) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
// This file has a response body 12 bytes long. So setting the maximum to 11
// bytes will cause it to fail.
GURL url(test_server_.GetURL("files/certs.p7c"));
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request = fetcher.FetchCaIssuers(
url, CertNetFetcher::DEFAULT, 11, callback.callback());
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_FILE_TOO_BIG);
}
// Set the timeout to 10 milliseconds, and try fetching a URL that takes 5
// seconds to complete. It should fail due to a timeout.
TEST_F(CertNetFetcherImplTest, Hang) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url(test_server_.GetURL("slow/certs.p7c?5"));
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request = fetcher.FetchCaIssuers(
url, 10, CertNetFetcher::DEFAULT, callback.callback());
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_TIMED_OUT);
}
// Verify that if a response is gzip-encoded it gets inflated before being
// returned to the caller.
TEST_F(CertNetFetcherImplTest, Gzip) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url(test_server_.GetURL("files/gzipped_crl"));
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifySuccess("-gzipped_crl-\n");
}
// Try fetching an unsupported URL scheme (https).
TEST_F(CertNetFetcherImplTest, HttpsNotAllowed) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url("https://foopy/foo.crt");
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
// Should NOT complete synchronously despite being a test that could be done
// immediately.
EXPECT_FALSE(callback.HasResult());
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_DISALLOWED_URL_SCHEME);
// No request was created because the URL scheme was unsupported.
EXPECT_EQ(0, network_delegate_.created_requests());
}
// Try fetching a URL which redirects to https.
TEST_F(CertNetFetcherImplTest, RedirectToHttpsNotAllowed) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url(test_server_.GetURL("files/redirect_https"));
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
scoped_ptr<FetchResult> result = callback.WaitForResult();
result->VerifyFailure(ERR_DISALLOWED_URL_SCHEME);
EXPECT_EQ(1, network_delegate_.created_requests());
}
// Try fetching an unsupported URL scheme (https) and then immediately
// cancelling. This is a bit special because this codepath needs to post a task.
TEST_F(CertNetFetcherImplTest, CancelHttpsNotAllowed) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url("https://foopy/foo.crt");
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(&fetcher, url, callback);
// Cancel the request.
request.reset();
// Spin the message loop to increase chance of catching a bug.
base::RunLoop().RunUntilIdle();
// Should NOT complete synchronously despite being a test that could be done
// immediately.
EXPECT_FALSE(callback.HasResult());
EXPECT_EQ(0, network_delegate_.created_requests());
}
// Start a few requests, and cancel one of them before running the message loop
// again.
TEST_F(CertNetFetcherImplTest, CancelBeforeRunningMessageLoop) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback1;
TestFetchCallback callback2;
TestFetchCallback callback3;
GURL url1 = test_server_.GetURL("files/cert.crt");
scoped_ptr<CertNetFetcher::Request> request1 =
StartRequest(&fetcher, url1, callback1);
GURL url2 = test_server_.GetURL("files/root.crl");
scoped_ptr<CertNetFetcher::Request> request2 =
StartRequest(&fetcher, url2, callback2);
GURL url3 = test_server_.GetURL("files/certs.p7c");
scoped_ptr<CertNetFetcher::Request> request3 =
StartRequest(&fetcher, url3, callback3);
EXPECT_EQ(3, network_delegate_.created_requests());
EXPECT_FALSE(callback1.HasResult());
EXPECT_FALSE(callback2.HasResult());
EXPECT_FALSE(callback3.HasResult());
// Cancel the second request.
request2.reset();
// Wait for the non-cancelled requests to complete.
scoped_ptr<FetchResult> result1 = callback1.WaitForResult();
scoped_ptr<FetchResult> result3 = callback3.WaitForResult();
// Verify the fetch results.
result1->VerifySuccess("-cert.crt-\n");
result3->VerifySuccess("-certs.p7c-\n");
EXPECT_FALSE(callback2.HasResult());
}
// Start several requests, and cancel one of them after the first has completed.
// NOTE: The python test server is single threaded and can only service one
// request at a time. After a socket is opened by the server, it waits for the
// request to be completed, and any subsequent request will hang until the
// first socket is closed.
// Cancelling the first request can therefore be problematic: if cancellation
// happens after the socket is opened but before reading/writing, the socket is
// recycled and things will stall until the cleanup timer (10 seconds) closes
// it.
// To work around this, the last request is cancelled, in the hope that the
// requests are given open sockets in FIFO order.
// TODO(eroman): Make this more robust.
TEST_F(CertNetFetcherImplTest, CancelAfterRunningMessageLoop) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback1;
TestFetchCallback callback2;
TestFetchCallback callback3;
GURL url1 = test_server_.GetURL("files/cert.crt");
scoped_ptr<CertNetFetcher::Request> request1 =
StartRequest(&fetcher, url1, callback1);
GURL url2 = test_server_.GetURL("files/certs.p7c");
scoped_ptr<CertNetFetcher::Request> request2 =
StartRequest(&fetcher, url2, callback2);
GURL url3("ftp://www.not.supported.com/foo");
scoped_ptr<CertNetFetcher::Request> request3 =
StartRequest(&fetcher, url3, callback3);
EXPECT_FALSE(callback1.HasResult());
EXPECT_FALSE(callback2.HasResult());
EXPECT_FALSE(callback3.HasResult());
// Wait for the ftp request to complete (it should complete right away since
// it doesn't even try to connect to the server).
scoped_ptr<FetchResult> result3 = callback3.WaitForResult();
result3->VerifyFailure(ERR_DISALLOWED_URL_SCHEME);
// Cancel the second outstanding request.
request2.reset();
// Wait for the first request to complete.
scoped_ptr<FetchResult> result1 = callback1.WaitForResult();
// Verify the fetch results.
result1->VerifySuccess("-cert.crt-\n");
}
// Delete a CertNetFetcherImpl with outstanding requests on it.
TEST_F(CertNetFetcherImplTest, DeleteCancels) {
ASSERT_TRUE(test_server_.Start());
scoped_ptr<CertNetFetcherImpl> fetcher(new CertNetFetcherImpl(&context_));
GURL url(test_server_.GetURL("slow/certs.p7c?20"));
TestFetchCallback callback;
scoped_ptr<CertNetFetcher::Request> request =
StartRequest(fetcher.get(), url, callback);
// Destroy the fetcher before the outstanding request.
fetcher.reset();
}
// Fetch the same URLs in parallel and verify that only 1 request is made per
// URL.
TEST_F(CertNetFetcherImplTest, ParallelFetchDuplicates) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url1 = test_server_.GetURL("files/cert.crt");
GURL url2 = test_server_.GetURL("files/root.crl");
// Issue 3 requests for url1, and 3 requests for url2
TestFetchCallback callback1;
scoped_ptr<CertNetFetcher::Request> request1 =
StartRequest(&fetcher, url1, callback1);
TestFetchCallback callback2;
scoped_ptr<CertNetFetcher::Request> request2 =
StartRequest(&fetcher, url2, callback2);
TestFetchCallback callback3;
scoped_ptr<CertNetFetcher::Request> request3 =
StartRequest(&fetcher, url1, callback3);
TestFetchCallback callback4;
scoped_ptr<CertNetFetcher::Request> request4 =
StartRequest(&fetcher, url2, callback4);
TestFetchCallback callback5;
scoped_ptr<CertNetFetcher::Request> request5 =
StartRequest(&fetcher, url2, callback5);
TestFetchCallback callback6;
scoped_ptr<CertNetFetcher::Request> request6 =
StartRequest(&fetcher, url1, callback6);
// Cancel all but one of the requests for url1.
request1.reset();
request3.reset();
// Wait for the remaining requests to finish.
scoped_ptr<FetchResult> result2 = callback2.WaitForResult();
scoped_ptr<FetchResult> result4 = callback4.WaitForResult();
scoped_ptr<FetchResult> result5 = callback5.WaitForResult();
scoped_ptr<FetchResult> result6 = callback6.WaitForResult();
// Verify that none of the cancelled requests for url1 completed (since they
// were cancelled).
EXPECT_FALSE(callback1.HasResult());
EXPECT_FALSE(callback3.HasResult());
// Verify the fetch results.
result2->VerifySuccess("-root.crl-\n");
result4->VerifySuccess("-root.crl-\n");
result5->VerifySuccess("-root.crl-\n");
result6->VerifySuccess("-cert.crt-\n");
// Verify that only 2 URLRequests were started even though 6 requests were
// issued.
EXPECT_EQ(2, network_delegate_.created_requests());
}
// Cancel a request and then start another one for the same URL.
TEST_F(CertNetFetcherImplTest, CancelThenStart) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback1;
TestFetchCallback callback2;
TestFetchCallback callback3;
GURL url = test_server_.GetURL("files/cert.crt");
scoped_ptr<CertNetFetcher::Request> request1 =
StartRequest(&fetcher, url, callback1);
request1.reset();
scoped_ptr<CertNetFetcher::Request> request2 =
StartRequest(&fetcher, url, callback2);
scoped_ptr<CertNetFetcher::Request> request3 =
StartRequest(&fetcher, url, callback3);
request3.reset();
// All but |request2| were canceled.
scoped_ptr<FetchResult> result = callback2.WaitForResult();
result->VerifySuccess("-cert.crt-\n");
EXPECT_FALSE(callback1.HasResult());
EXPECT_FALSE(callback3.HasResult());
// One URLRequest was created for the cancelled request, and another was
// created right afterwards.
EXPECT_EQ(2, network_delegate_.created_requests());
}
// Start duplicate requests and then cancel all of them.
TEST_F(CertNetFetcherImplTest, CancelAll) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
TestFetchCallback callback[3];
scoped_ptr<CertNetFetcher::Request> request[3];
GURL url = test_server_.GetURL("files/cert.crt");
for (size_t i = 0; i < arraysize(callback); ++i) {
request[i] = StartRequest(&fetcher, url, callback[i]);
}
// Cancel all the requests.
for (size_t i = 0; i < arraysize(request); ++i)
request[i].reset();
EXPECT_EQ(1, network_delegate_.created_requests());
for (size_t i = 0; i < arraysize(request); ++i)
EXPECT_FALSE(callback[i].HasResult());
}
void DeleteCertNetFetcher(CertNetFetcher* fetcher) {
delete fetcher;
}
// Delete the CertNetFetcherImpl within a request callback.
TEST_F(CertNetFetcherImplTest, DeleteWithinCallback) {
ASSERT_TRUE(test_server_.Start());
// Deleted by the extra closure attached to callback[1].
CertNetFetcher* fetcher = new CertNetFetcherImpl(&context_);
GURL url = test_server_.GetURL("files/cert.crt");
TestFetchCallback callback[4];
scoped_ptr<CertNetFetcher::Request> reqs[4];
callback[1].set_extra_closure(base::Bind(DeleteCertNetFetcher, fetcher));
for (size_t i = 0; i < arraysize(callback); ++i)
reqs[i] = StartRequest(fetcher, url, callback[i]);
EXPECT_EQ(1, network_delegate_.created_requests());
callback[1].WaitForResult();
// Assume requests for the same URL are executed in FIFO order.
EXPECT_TRUE(callback[0].HasResult());
EXPECT_FALSE(callback[2].HasResult());
EXPECT_FALSE(callback[3].HasResult());
}
void FetchRequest(CertNetFetcher* fetcher,
const GURL& url,
TestFetchCallback* callback,
scoped_ptr<CertNetFetcher::Request>* request) {
*request = StartRequest(fetcher, url, *callback);
}
// Make a request during callback for the same URL.
TEST_F(CertNetFetcherImplTest, FetchWithinCallback) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url = test_server_.GetURL("files/cert.crt");
TestFetchCallback callback[5];
scoped_ptr<CertNetFetcher::Request> req[5];
callback[1].set_extra_closure(
base::Bind(FetchRequest, &fetcher, url, &callback[4], &req[4]));
for (size_t i = 0; i < arraysize(callback) - 1; ++i)
req[i] = StartRequest(&fetcher, url, callback[i]);
EXPECT_EQ(1, network_delegate_.created_requests());
for (size_t i = 0; i < arraysize(callback); ++i) {
scoped_ptr<FetchResult> result = callback[i].WaitForResult();
result->VerifySuccess("-cert.crt-\n");
}
// The fetch started within a callback should have started a new request
// rather than attaching to the current job.
EXPECT_EQ(2, network_delegate_.created_requests());
}
void CancelRequest(scoped_ptr<CertNetFetcher::Request>* request) {
request->reset();
}
// Cancel a request while executing a callback for the same job.
TEST_F(CertNetFetcherImplTest, CancelWithinCallback) {
ASSERT_TRUE(test_server_.Start());
CertNetFetcherImpl fetcher(&context_);
GURL url = test_server_.GetURL("files/cert.crt");
TestFetchCallback callback[4];
scoped_ptr<CertNetFetcher::Request> request[4];
for (size_t i = 0; i < arraysize(callback); ++i)
request[i] = StartRequest(&fetcher, url, callback[i]);
// Cancel request[2] when the callback for request[1] runs.
callback[1].set_extra_closure(base::Bind(CancelRequest, &request[2]));
EXPECT_EQ(1, network_delegate_.created_requests());
for (size_t i = 0; i < arraysize(request); ++i) {
if (i == 2)
continue;
scoped_ptr<FetchResult> result = callback[i].WaitForResult();
result->VerifySuccess("-cert.crt-\n");
}
// request[2] was cancelled.
EXPECT_FALSE(callback[2].HasResult());
}
} // namespace net
HTTP/1.1 200 OK
Content-Type: application/pkix-cert
Cache-Control: public, max-age=3600
HTTP/1.1 200 OK
Content-Type: application/pkix-cert
HTTP/1.1 200 OK
Content-Type: application/pkcs7-mime
HTTP/1.1 200 OK
Content-Type: text/javascript
Content-Disposition: attachment; filename="download-me.exe"
HTTP/1.1 200 OK
Content-Type: application/pkix-crl
Content-Encoding: gzip
HTTP/1.1 200 OK
Content-Type: application/pkix-crl
@@ -380,6 +380,7 @@
# Need to read input data files.
'filter/gzip_filter_unittest.cc',
# Need TestServer.
"cert_net/cert_net_fetcher_impl_unittest.cc",
'proxy/proxy_script_fetcher_impl_unittest.cc',
'socket/ssl_client_socket_unittest.cc',
'socket/ssl_server_socket_unittest.cc',
@@ -303,6 +303,7 @@
'cert/cert_database_mac.cc',
'cert/cert_database_nss.cc',
'cert/cert_database_win.cc',
'cert/cert_net_fetcher.h',
'cert/cert_trust_anchor_provider.h',
'cert/cert_verify_proc.cc',
'cert/cert_verify_proc.h',
@@ -375,6 +376,8 @@
'cert/x509_util_mac.h',
'cert/x509_util_nss.cc',
'cert/x509_util_nss.h',
'cert_net/cert_net_fetcher_impl.cc',
'cert_net/cert_net_fetcher_impl.h',
'cookies/canonical_cookie.cc',
'cookies/canonical_cookie.h',
'cookies/cookie_constants.cc',
@@ -1314,6 +1317,7 @@
'cert/x509_util_nss_unittest.cc',
'cert/x509_util_openssl_unittest.cc',
'cert/x509_util_unittest.cc',
'cert_net/cert_net_fetcher_impl_unittest.cc',
'cookies/canonical_cookie_unittest.cc',
'cookies/cookie_constants_unittest.cc',
'cookies/cookie_monster_unittest.cc',