Commit 728f3c83 authored by Maks Orlovich, committed by Commit Bot

Fix crash in URLLoader::Read when pending_write_ null during mime/corb detection

Bug: 900608
Change-Id: Ic1647bbcc227f8ac497b01a05a4ca4fcefe3b9ae
Reviewed-on: https://chromium-review.googlesource.com/c/1340829
Commit-Queue: Maks Orlovich <morlovich@chromium.org>
Reviewed-by: Matt Menke <mmenke@chromium.org>
Reviewed-by: Łukasz Anforowicz <lukasza@chromium.org>
Cr-Commit-Position: refs/heads/master@{#609523}
parent 1dc06d62
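For context, below is a minimal standalone sketch of the failure mode this change guards against (simplified, not the actual Chromium classes; SketchLoader, AbortOnSuspend, and the member names are illustrative only): a suspend can abort the job and release the pending write buffer while the sniffing state is still active, so the read-completion path must check for a null buffer before touching it.

#include <iostream>
#include <memory>
#include <string>

struct PendingWrite {
  char buffer[64] = "partial response body";
};

class SketchLoader {
 public:
  // Read-completion path: sniffing is still wanted, but the pending write
  // may already have been released by an abort.
  void DidRead() {
    if (sniffing_needed_) {
      // The guard corresponding to the fix: skip sniffing when there is no
      // pending write instead of dereferencing a null pointer.
      if (pending_write_) {
        std::string data(pending_write_->buffer);
        std::cout << "sniffed " << data.size() << " bytes\n";
      }
    }
  }

  // Suspend aborts the job and drops the write buffer, but leaves the
  // sniffing state untouched.
  void AbortOnSuspend() { pending_write_.reset(); }

 private:
  std::unique_ptr<PendingWrite> pending_write_ =
      std::make_unique<PendingWrite>();
  bool sniffing_needed_ = true;
};

int main() {
  SketchLoader loader;
  loader.AbortOnSuspend();
  loader.DidRead();  // crashes here without the null check
  return 0;
}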
@@ -877,35 +877,40 @@ void URLLoader::DidRead(int num_bytes, bool completed_synchronously) {
   bool complete_read = true;
   if (consumer_handle_.is_valid()) {
-    // Limit sniffing to the first net::kMaxBytesToSniff.
-    size_t data_length = pending_write_buffer_offset_;
-    if (data_length > net::kMaxBytesToSniff)
-      data_length = net::kMaxBytesToSniff;
-    base::StringPiece data(pending_write_->buffer(), data_length);
-
-    if (is_more_mime_sniffing_needed_) {
-      const std::string& type_hint = response_->head.mime_type;
-      std::string new_type;
-      is_more_mime_sniffing_needed_ = !net::SniffMimeType(
-          data.data(), data.size(), url_request_->url(), type_hint,
-          net::ForceSniffFileUrlsForHtml::kDisabled, &new_type);
-      // SniffMimeType() returns false if there is not enough data to determine
-      // the mime type. However, even if it returns false, it returns a new type
-      // that is probably better than the current one.
-      response_->head.mime_type.assign(new_type);
-      response_->head.did_mime_sniff = true;
-    }
-
-    if (is_more_corb_sniffing_needed_) {
-      corb_analyzer_->SniffResponseBody(data, new_data_offset);
-      if (corb_analyzer_->ShouldBlock()) {
-        corb_analyzer_->LogBlockedResponse();
-        is_more_corb_sniffing_needed_ = false;
-        if (BlockResponseForCorb() == kWillCancelRequest)
-          return;
-      } else if (corb_analyzer_->ShouldAllow()) {
-        corb_analyzer_->LogAllowedResponse();
-        is_more_corb_sniffing_needed_ = false;
-      }
-    }
+    // |pending_write_| may be null if the job self-aborts due to a suspend;
+    // this will have |consumer_handle_| valid when the loader is paused.
+    if (pending_write_) {
+      // Limit sniffing to the first net::kMaxBytesToSniff.
+      size_t data_length = pending_write_buffer_offset_;
+      if (data_length > net::kMaxBytesToSniff)
+        data_length = net::kMaxBytesToSniff;
+
+      base::StringPiece data(pending_write_->buffer(), data_length);
+
+      if (is_more_mime_sniffing_needed_) {
+        const std::string& type_hint = response_->head.mime_type;
+        std::string new_type;
+        is_more_mime_sniffing_needed_ = !net::SniffMimeType(
+            data.data(), data.size(), url_request_->url(), type_hint,
+            net::ForceSniffFileUrlsForHtml::kDisabled, &new_type);
+        // SniffMimeType() returns false if there is not enough data to
+        // determine the mime type. However, even if it returns false, it
+        // returns a new type that is probably better than the current one.
+        response_->head.mime_type.assign(new_type);
+        response_->head.did_mime_sniff = true;
+      }
+
+      if (is_more_corb_sniffing_needed_) {
+        corb_analyzer_->SniffResponseBody(data, new_data_offset);
+        if (corb_analyzer_->ShouldBlock()) {
+          corb_analyzer_->LogBlockedResponse();
+          is_more_corb_sniffing_needed_ = false;
+          if (BlockResponseForCorb() == kWillCancelRequest)
+            return;
+        } else if (corb_analyzer_->ShouldAllow()) {
+          corb_analyzer_->LogAllowedResponse();
+          is_more_corb_sniffing_needed_ = false;
+        }
+      }
+    }
   }
@@ -277,9 +277,11 @@ class URLRequestSimulatedCacheJob : public net::URLRequestJob {
   URLRequestSimulatedCacheJob(
       net::URLRequest* request,
       net::NetworkDelegate* network_delegate,
-      scoped_refptr<net::IOBuffer>* simulated_cache_dest)
+      scoped_refptr<net::IOBuffer>* simulated_cache_dest,
+      bool use_text_plain)
       : URLRequestJob(request, network_delegate),
         simulated_cache_dest_(simulated_cache_dest),
+        use_text_plain_(use_text_plain),
         weak_factory_(this) {}
 
   // net::URLRequestJob implementation:
@@ -289,6 +291,15 @@ class URLRequestSimulatedCacheJob : public net::URLRequestJob {
                               weak_factory_.GetWeakPtr()));
   }
 
+  void GetResponseInfo(net::HttpResponseInfo* info) override {
+    if (!use_text_plain_)
+      return URLRequestJob::GetResponseInfo(info);
+    if (!info->headers) {
+      info->headers = net::HttpResponseHeaders::TryToCreate(
+          "HTTP/1.1 200 OK\r\nContent-Type: text/plain");
+    }
+  }
+
   int ReadRawData(net::IOBuffer* buf, int buf_size) override {
     DCHECK_GT(buf_size, 0);
@@ -307,6 +318,7 @@ class URLRequestSimulatedCacheJob : public net::URLRequestJob {
   void StartAsync() { NotifyHeadersComplete(); }
 
   scoped_refptr<net::IOBuffer>* simulated_cache_dest_;
+  bool use_text_plain_;
   base::WeakPtrFactory<URLRequestSimulatedCacheJob> weak_factory_;
 
   DISALLOW_COPY_AND_ASSIGN(URLRequestSimulatedCacheJob);
@@ -315,18 +327,21 @@ class URLRequestSimulatedCacheJob : public net::URLRequestJob {
 class SimulatedCacheInterceptor : public net::URLRequestInterceptor {
  public:
   explicit SimulatedCacheInterceptor(
-      scoped_refptr<net::IOBuffer>* simulated_cache_dest)
-      : simulated_cache_dest_(simulated_cache_dest) {}
+      scoped_refptr<net::IOBuffer>* simulated_cache_dest,
+      bool use_text_plain)
+      : simulated_cache_dest_(simulated_cache_dest),
+        use_text_plain_(use_text_plain) {}
 
   net::URLRequestJob* MaybeInterceptRequest(
       net::URLRequest* request,
       net::NetworkDelegate* network_delegate) const override {
-    return new URLRequestSimulatedCacheJob(request, network_delegate,
-                                           simulated_cache_dest_);
+    return new URLRequestSimulatedCacheJob(
+        request, network_delegate, simulated_cache_dest_, use_text_plain_);
   }
 
  private:
   scoped_refptr<net::IOBuffer>* simulated_cache_dest_;
+  bool use_text_plain_;
   DISALLOW_COPY_AND_ASSIGN(SimulatedCacheInterceptor);
 };
@@ -2019,6 +2034,52 @@ TEST_F(URLLoaderTest, EnterSuspendModeWhileNoPendingRead) {
   unowned_power_monitor_source->Resume();
 }
 
+// This tests the case where suspend mode is entered when a job is trying to do
+// mime detection, but is paused and therefore does not have a pending read to
+// provide partial data.
+TEST_F(URLLoaderTest, EnterSuspendModePaused) {
+  GURL url("http://www.example.com");
+  scoped_refptr<net::IOBuffer> simulated_cache_dest;
+  // Using SimulatedCacheInterceptor here since it marks the read pending,
+  // which avoids races between various events.
+  net::URLRequestFilter::GetInstance()->AddUrlInterceptor(
+      url, std::make_unique<SimulatedCacheInterceptor>(
+               &simulated_cache_dest, true /* use_text_plain */));
+
+  std::unique_ptr<TestPowerMonitorSource> power_monitor_source =
+      std::make_unique<TestPowerMonitorSource>();
+  TestPowerMonitorSource* unowned_power_monitor_source =
+      power_monitor_source.get();
+  base::PowerMonitor power_monitor(std::move(power_monitor_source));
+
+  ResourceRequest request = CreateResourceRequest("GET", url);
+
+  base::RunLoop delete_run_loop;
+  mojom::URLLoaderPtr loader;
+  std::unique_ptr<URLLoader> url_loader;
+  mojom::URLLoaderFactoryParams params;
+  params.process_id = mojom::kBrowserProcessId;
+  params.is_corb_enabled = false;
+  url_loader = std::make_unique<URLLoader>(
+      context(), nullptr /* network_service_client */,
+      DeleteLoaderCallback(&delete_run_loop, &url_loader),
+      mojo::MakeRequest(&loader), mojom::kURLLoadOptionSniffMimeType, request,
+      false, client()->CreateInterfacePtr(), TRAFFIC_ANNOTATION_FOR_TESTS,
+      &params, 0 /* request_id */, resource_scheduler_client(), nullptr,
+      nullptr /* network_usage_accumulator */);
+
+  url_loader->PauseReadingBodyFromNet();
+  base::RunLoop().RunUntilIdle();
+
+  unowned_power_monitor_source->Suspend();
+
+  client()->RunUntilComplete();
+  EXPECT_EQ(net::ERR_ABORTED, client()->completion_status().error_code);
+
+  delete_run_loop.Run();
+  unowned_power_monitor_source->Resume();
+}
+
 TEST_F(URLLoaderTest, EnterSuspendDiskCacheWriteQueued) {
   // Test to make sure that fetch abort on suspend doesn't yank out the backing
   // for IOBuffer for an issued disk_cache Write.
@@ -2026,7 +2087,8 @@ TEST_F(URLLoaderTest, EnterSuspendDiskCacheWriteQueued) {
   GURL url("http://www.example.com");
   scoped_refptr<net::IOBuffer> simulated_cache_dest;
   net::URLRequestFilter::GetInstance()->AddUrlInterceptor(
-      url, std::make_unique<SimulatedCacheInterceptor>(&simulated_cache_dest));
+      url, std::make_unique<SimulatedCacheInterceptor>(
+               &simulated_cache_dest, false /* use_text_plain */));
 
   std::unique_ptr<TestPowerMonitorSource> power_monitor_source =
       std::make_unique<TestPowerMonitorSource>();