Commit cc9f081b authored by bnc's avatar bnc Committed by Commit bot

Remove HpackHuffmanAggregator.

Remove HpackHuffmanAggregator header, implementation, test, and build rules.
Obsolete Net.SpdyHpackEncodedCharacterFrequency histogram.  As the HTTP/2
standard is finalized, there is no longer a need to gather character
distribution statistics.

BUG=414758

Review URL: https://codereview.chromium.org/1017853004

Cr-Commit-Position: refs/heads/master@{#322467}
parent cca236a1
...@@ -28,7 +28,6 @@ ...@@ -28,7 +28,6 @@
#include "net/socket/client_socket_pool_manager_impl.h" #include "net/socket/client_socket_pool_manager_impl.h"
#include "net/socket/next_proto.h" #include "net/socket/next_proto.h"
#include "net/socket/ssl_client_socket.h" #include "net/socket/ssl_client_socket.h"
#include "net/spdy/hpack_huffman_aggregator.h"
#include "net/spdy/spdy_session_pool.h" #include "net/spdy/spdy_session_pool.h"
namespace { namespace {
...@@ -191,10 +190,6 @@ HttpNetworkSession::HttpNetworkSession(const Params& params) ...@@ -191,10 +190,6 @@ HttpNetworkSession::HttpNetworkSession(const Params& params)
} }
} }
if (HpackHuffmanAggregator::UseAggregator()) {
huffman_aggregator_.reset(new HpackHuffmanAggregator());
}
http_server_properties_->SetAlternateProtocolProbabilityThreshold( http_server_properties_->SetAlternateProtocolProbabilityThreshold(
params.alternate_protocol_probability_threshold); params.alternate_protocol_probability_threshold);
} }
......
...@@ -36,7 +36,6 @@ class ClientSocketFactory; ...@@ -36,7 +36,6 @@ class ClientSocketFactory;
class ClientSocketPoolManager; class ClientSocketPoolManager;
class CTVerifier; class CTVerifier;
class HostResolver; class HostResolver;
class HpackHuffmanAggregator;
class HttpAuthHandlerFactory; class HttpAuthHandlerFactory;
class HttpNetworkSessionPeer; class HttpNetworkSessionPeer;
class HttpProxyClientSocketPool; class HttpProxyClientSocketPool;
...@@ -185,9 +184,6 @@ class NET_EXPORT HttpNetworkSession ...@@ -185,9 +184,6 @@ class NET_EXPORT HttpNetworkSession
NetLog* net_log() { NetLog* net_log() {
return net_log_; return net_log_;
} }
HpackHuffmanAggregator* huffman_aggregator() {
return huffman_aggregator_.get();
}
// Creates a Value summary of the state of the socket pools. The caller is // Creates a Value summary of the state of the socket pools. The caller is
// responsible for deleting the returned value. // responsible for deleting the returned value.
...@@ -244,9 +240,6 @@ class NET_EXPORT HttpNetworkSession ...@@ -244,9 +240,6 @@ class NET_EXPORT HttpNetworkSession
scoped_ptr<HttpStreamFactory> http_stream_factory_for_websocket_; scoped_ptr<HttpStreamFactory> http_stream_factory_for_websocket_;
std::set<HttpResponseBodyDrainer*> response_drainers_; std::set<HttpResponseBodyDrainer*> response_drainers_;
// TODO(jgraettinger): Remove when Huffman collection is complete.
scoped_ptr<HpackHuffmanAggregator> huffman_aggregator_;
NextProtoVector next_protos_; NextProtoVector next_protos_;
bool enabled_protocols_[NUM_VALID_ALTERNATE_PROTOCOLS]; bool enabled_protocols_[NUM_VALID_ALTERNATE_PROTOCOLS];
......
...@@ -54,7 +54,6 @@ ...@@ -54,7 +54,6 @@
#include "net/socket/ssl_client_socket.h" #include "net/socket/ssl_client_socket.h"
#include "net/socket/ssl_client_socket_pool.h" #include "net/socket/ssl_client_socket_pool.h"
#include "net/socket/transport_client_socket_pool.h" #include "net/socket/transport_client_socket_pool.h"
#include "net/spdy/hpack_huffman_aggregator.h"
#include "net/spdy/spdy_http_stream.h" #include "net/spdy/spdy_http_stream.h"
#include "net/spdy/spdy_session.h" #include "net/spdy/spdy_session.h"
#include "net/spdy/spdy_session_pool.h" #include "net/spdy/spdy_session_pool.h"
...@@ -1058,14 +1057,6 @@ int HttpNetworkTransaction::DoReadHeadersComplete(int result) { ...@@ -1058,14 +1057,6 @@ int HttpNetworkTransaction::DoReadHeadersComplete(int result) {
stream_->GetSSLInfo(&response_.ssl_info); stream_->GetSSLInfo(&response_.ssl_info);
headers_valid_ = true; headers_valid_ = true;
if (session_->huffman_aggregator()) {
session_->huffman_aggregator()->AggregateTransactionCharacterCounts(
*request_,
request_headers_,
proxy_info_.proxy_server(),
*response_.headers.get());
}
return OK; return OK;
} }
......
...@@ -1040,8 +1040,6 @@ ...@@ -1040,8 +1040,6 @@
'spdy/hpack_entry.h', 'spdy/hpack_entry.h',
'spdy/hpack_header_table.cc', 'spdy/hpack_header_table.cc',
'spdy/hpack_header_table.h', 'spdy/hpack_header_table.h',
'spdy/hpack_huffman_aggregator.cc',
'spdy/hpack_huffman_aggregator.h',
'spdy/hpack_huffman_table.cc', 'spdy/hpack_huffman_table.cc',
'spdy/hpack_huffman_table.h', 'spdy/hpack_huffman_table.h',
'spdy/hpack_input_stream.cc', 'spdy/hpack_input_stream.cc',
...@@ -1628,7 +1626,6 @@ ...@@ -1628,7 +1626,6 @@
'spdy/hpack_encoder_test.cc', 'spdy/hpack_encoder_test.cc',
'spdy/hpack_entry_test.cc', 'spdy/hpack_entry_test.cc',
'spdy/hpack_header_table_test.cc', 'spdy/hpack_header_table_test.cc',
'spdy/hpack_huffman_aggregator_test.cc',
'spdy/hpack_huffman_table_test.cc', 'spdy/hpack_huffman_table_test.cc',
'spdy/hpack_input_stream_test.cc', 'spdy/hpack_input_stream_test.cc',
'spdy/hpack_output_stream_test.cc', 'spdy/hpack_output_stream_test.cc',
......
...@@ -45,7 +45,6 @@ class NET_EXPORT_PRIVATE HpackEncoder { ...@@ -45,7 +45,6 @@ class NET_EXPORT_PRIVATE HpackEncoder {
// Encodes the given header set into the given string. Only non-indexed // Encodes the given header set into the given string. Only non-indexed
// literal representations are emitted, bypassing the header table. Huffman // literal representations are emitted, bypassing the header table. Huffman
// coding is also not used. Returns whether the encoding was successful. // coding is also not used. Returns whether the encoding was successful.
// TODO(jgraettinger): Enable Huffman coding once the table as stablized.
bool EncodeHeaderSetWithoutCompression( bool EncodeHeaderSetWithoutCompression(
const std::map<std::string, std::string>& header_set, const std::map<std::string, std::string>& header_set,
std::string* output); std::string* output);
......
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/spdy/hpack_huffman_aggregator.h"
#include "base/metrics/bucket_ranges.h"
#include "base/metrics/field_trial.h"
#include "base/metrics/histogram.h"
#include "base/metrics/sample_vector.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "net/base/load_flags.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_request_info.h"
#include "net/http/http_response_headers.h"
#include "net/spdy/hpack_encoder.h"
#include "net/spdy/spdy_http_utils.h"
namespace net {
namespace {
const char kHistogramName[] = "Net.SpdyHpackEncodedCharacterFrequency";
const size_t kTotalCountsPublishThreshold = 50000;
// Each encoder uses the default dynamic table size of 4096 total bytes.
const size_t kMaxEncoders = 20;
} // namespace
// Starts with one zeroed count per octet value (256 ASCII codes), an empty
// running total, and the default cap on concurrently-tracked origin encoders.
HpackHuffmanAggregator::HpackHuffmanAggregator()
    : counts_(256, 0),
      total_counts_(0),
      max_encoders_(kMaxEncoders) {
}
// Deletes every per-origin encoder owned through |encoders_|.
HpackHuffmanAggregator::~HpackHuffmanAggregator() {
  for (OriginEncoders::iterator it = encoders_.begin(); it != encoders_.end();
       ++it) {
    delete it->second;
  }
  encoders_.clear();
}
// Feeds one transaction's request and response headers through an HPACK
// encoder keyed on the transaction's origin; the encoder's literal emissions
// accumulate into |counts_| (wired up in ObtainEncoder()). Cross-origin
// requests are skipped so the sample reflects same-origin traffic only.
// Once enough characters have accumulated, the counts are flushed to UMA.
void HpackHuffmanAggregator::AggregateTransactionCharacterCounts(
    const HttpRequestInfo& request,
    const HttpRequestHeaders& request_headers,
    const ProxyServer& proxy,
    const HttpResponseHeaders& response_headers) {
  if (IsCrossOrigin(request)) {
    return;
  }
  HpackEncoder* encoder = ObtainEncoder(SpdySessionKey(
      HostPortPair::FromURL(request.url), proxy, request.privacy_mode));
  // Convert and encode the request and response header sets. The encoded
  // output itself is discarded; only the character counts are of interest.
  {
    SpdyHeaderBlock headers;
    CreateSpdyHeadersFromHttpRequest(
        request, request_headers, SPDY4, false, &headers);
    std::string tmp_out;
    encoder->EncodeHeaderSet(headers, &tmp_out);
  }
  {
    SpdyHeaderBlock headers;
    CreateSpdyHeadersFromHttpResponse(response_headers, &headers);
    std::string tmp_out;
    encoder->EncodeHeaderSet(headers, &tmp_out);
  }
  // Publish (and reset) aggregated counts once the threshold is reached.
  if (total_counts_ >= kTotalCountsPublishThreshold) {
    PublishCounts();
  }
}
// static
bool HpackHuffmanAggregator::UseAggregator() {
  // Aggregation is opt-in via the "HpackHuffmanAggregator" field trial:
  // only clients assigned to the "Enabled" group participate.
  return base::FieldTrialList::FindFullName("HpackHuffmanAggregator") ==
         "Enabled";
}
// static
// Converts parsed HTTP response headers into a SPDY header block:
// header names are lower-cased, repeated headers are coalesced into a
// single value with '\0' delimiters, and the fixed ":status" pseudo-header
// is added from the response code.
void HpackHuffmanAggregator::CreateSpdyHeadersFromHttpResponse(
    const HttpResponseHeaders& headers,
    SpdyHeaderBlock* headers_out) {
  // Lower-case header names, and coalesce multiple values delimited by \0.
  // Also add the fixed status header.
  std::string name, value;
  void* it = NULL;
  while (headers.EnumerateHeaderLines(&it, &name, &value)) {
    base::StringToLowerASCII(&name);
    if (headers_out->find(name) == headers_out->end()) {
      (*headers_out)[name] = value;
    } else {
      // Repeated header: append a NUL separator then the next value.
      (*headers_out)[name] += std::string(1, '\0') + value;
    }
  }
  (*headers_out)[":status"] = base::IntToString(headers.response_code());
}
// static
bool HpackHuffmanAggregator::IsCrossOrigin(const HttpRequestInfo& request) {
// Require that the request is top-level, or that it shares
// an origin with its referer.
if ((request.load_flags & LOAD_MAIN_FRAME) == 0) {
std::string referer_str;
if (!request.extra_headers.GetHeader(HttpRequestHeaders::kReferer,
&referer_str)) {
// Require a referer.
return true;
}
GURL referer(referer_str);
if (!HostPortPair::FromURL(request.url).Equals(
HostPortPair::FromURL(referer))) {
// Cross-origin request.
return true;
}
}
return false;
}
// Returns the encoder for |key|, maintaining |encoders_| as an LRU list
// (most-recently-used at the front). Creates a new encoder on a miss,
// evicting and deleting the least-recently-used one when the list exceeds
// |max_encoders_|.
HpackEncoder* HpackHuffmanAggregator::ObtainEncoder(const SpdySessionKey& key) {
  for (OriginEncoders::iterator it = encoders_.begin();
       it != encoders_.end(); ++it) {
    if (key.Equals(it->first)) {
      // Move to head of list and return.
      OriginEncoder origin_encoder = *it;
      encoders_.erase(it);
      encoders_.push_front(origin_encoder);
      return origin_encoder.second;
    }
  }
  // Not found. Create a new encoder, evicting one if needed.
  encoders_.push_front(std::make_pair(
      key, new HpackEncoder(ObtainHpackHuffmanTable())));
  if (encoders_.size() > max_encoders_) {
    delete encoders_.back().second;
    encoders_.pop_back();
  }
  // Route the new encoder's literal character counts into this aggregator.
  encoders_.front().second->SetCharCountsStorage(&counts_, &total_counts_);
  return encoders_.front().second;
}
// Flushes the accumulated character counts to the UMA histogram named by
// |kHistogramName|, then zeroes the internal counters. Octet value i is
// recorded as histogram sample i+1, since base::Histogram buckets are
// 1-indexed.
void HpackHuffmanAggregator::PublishCounts() {
  // base::Histogram requires that values be 1-indexed.
  const size_t kRangeMin = 1;
  const size_t kRangeMax = counts_.size() + 1;
  const size_t kBucketCount = kRangeMax + 1;
  // A BucketRanges of size N describes N-1 buckets; identity ranges give
  // one bucket per sample value.
  base::BucketRanges ranges(kBucketCount + 1);
  for (size_t i = 0; i != ranges.size(); ++i) {
    ranges.set_range(i, i);
  }
  ranges.ResetChecksum();
  // Copy |counts_| into a SampleVector.
  base::SampleVector samples(&ranges);
  for (size_t i = 0; i != counts_.size(); ++i) {
    samples.Accumulate(i + 1, counts_[i]);
  }
  STATIC_HISTOGRAM_POINTER_BLOCK(
      kHistogramName,
      AddSamples(samples),
      base::LinearHistogram::FactoryGet(
          kHistogramName, kRangeMin, kRangeMax, kBucketCount,
          base::HistogramBase::kUmaTargetedHistogramFlag));
  // Clear counts.
  counts_.assign(counts_.size(), 0);
  total_counts_ = 0;
}
} // namespace net
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <list>
#include <vector>
#include "base/macros.h"
#include "net/base/net_export.h"
#include "net/spdy/spdy_header_block.h"
#include "net/spdy/spdy_protocol.h"
#include "net/spdy/spdy_session_key.h"
namespace net {
class HpackEncoder;
class HttpRequestHeaders;
struct HttpRequestInfo;
class HttpResponseHeaders;
class ProxyServer;
namespace test {
class HpackHuffmanAggregatorPeer;
} // namespace test
// Aggregates the frequencies of characters that per-origin HPACK encoders
// would emit as literals for observed HTTP transactions, and periodically
// publishes the distribution to a UMA histogram
// (Net.SpdyHpackEncodedCharacterFrequency).
class NET_EXPORT_PRIVATE HpackHuffmanAggregator {
 public:
  friend class test::HpackHuffmanAggregatorPeer;
  HpackHuffmanAggregator();
  ~HpackHuffmanAggregator();
  // Encodes the request and response headers of the transaction with an
  // HpackEncoder keyed on the transaction's SpdySessionKey. Literal headers
  // emitted by that encoder are aggregated into internal character counts,
  // which are periodically published to a UMA histogram.
  void AggregateTransactionCharacterCounts(
      const HttpRequestInfo& request,
      const HttpRequestHeaders& request_headers,
      const ProxyServer& proxy,
      const HttpResponseHeaders& response_headers);
  // Returns whether the aggregator is enabled for the session by a field trial.
  static bool UseAggregator();
 private:
  typedef std::pair<SpdySessionKey, HpackEncoder*> OriginEncoder;
  typedef std::list<OriginEncoder> OriginEncoders;
  // Returns true if the request is considered cross-origin,
  // and should not be aggregated.
  static bool IsCrossOrigin(const HttpRequestInfo& request);
  // Converts |headers| into SPDY headers block |headers_out|.
  static void CreateSpdyHeadersFromHttpResponse(
      const HttpResponseHeaders& headers,
      SpdyHeaderBlock* headers_out);
  // Creates or returns an encoder for the origin key.
  HpackEncoder* ObtainEncoder(const SpdySessionKey& key);
  // Publishes aggregated counts to a UMA histogram.
  void PublishCounts();
  // Per-octet character counts (one entry per ASCII code, 256 total).
  std::vector<size_t> counts_;
  // Running sum of |counts_|; compared against the publish threshold.
  size_t total_counts_;
  // Owned per-origin encoders, kept in LRU order (front = most recent).
  OriginEncoders encoders_;
  // Maximum size of |encoders_| before the least-recently-used is evicted.
  size_t max_encoders_;
  DISALLOW_COPY_AND_ASSIGN(HpackHuffmanAggregator);
};
} // namespace net
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/spdy/hpack_huffman_aggregator.h"
#include "base/metrics/histogram.h"
#include "base/metrics/statistics_recorder.h"
#include "net/base/load_flags.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_request_info.h"
#include "net/http/http_response_headers.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace net {
using ::testing::Each;
using ::testing::ElementsAre;
using ::testing::Eq;
using ::testing::Pair;
namespace {
const char kHistogramName[] = "Net.SpdyHpackEncodedCharacterFrequency";
} // namespace
namespace test {
// Test peer granting unit tests access to HpackHuffmanAggregator's private
// state and private static helpers. Does not own the aggregator.
class HpackHuffmanAggregatorPeer {
 public:
  explicit HpackHuffmanAggregatorPeer(HpackHuffmanAggregator* agg)
      : agg_(agg) {}
  // Mutable view of the per-character counts.
  std::vector<size_t>* counts() {
    return &agg_->counts_;
  }
  // Mutable view of the LRU encoder list.
  HpackHuffmanAggregator::OriginEncoders* encoders() {
    return &agg_->encoders_;
  }
  size_t total_counts() {
    return agg_->total_counts_;
  }
  void set_total_counts(size_t total_counts) {
    agg_->total_counts_ = total_counts;
  }
  void set_max_encoders(size_t max_encoders) {
    agg_->max_encoders_ = max_encoders;
  }
  // Forwarders for the private static helpers under test.
  static bool IsCrossOrigin(const HttpRequestInfo& request) {
    return HpackHuffmanAggregator::IsCrossOrigin(request);
  }
  static void CreateSpdyHeadersFromHttpResponse(
      const HttpResponseHeaders& headers,
      SpdyHeaderBlock* headers_out) {
    HpackHuffmanAggregator::CreateSpdyHeadersFromHttpResponse(
        headers, headers_out);
  }
  HpackEncoder* ObtainEncoder(const SpdySessionKey& key) {
    return agg_->ObtainEncoder(key);
  }
  void PublishCounts() {
    agg_->PublishCounts();
  }
 private:
  HpackHuffmanAggregator* agg_;  // Not owned.
};
} // namespace test
// Fixture owning the aggregator under test and a peer exposing its
// private state to the test bodies.
class HpackHuffmanAggregatorTest : public ::testing::Test {
 protected:
  HpackHuffmanAggregatorTest()
      : peer_(&agg_) {}
  HpackHuffmanAggregator agg_;
  test::HpackHuffmanAggregatorPeer peer_;
};
// Exercises IsCrossOrigin() across combinations of main-frame flag and
// referer presence/origin. Note the scheme case relies on host-and-port
// comparison: http vs https differ in effective port.
TEST_F(HpackHuffmanAggregatorTest, CrossOriginDetermination) {
  HttpRequestInfo request;
  request.url = GURL("https://www.foo.com/a/page");
  // Main load without referer.
  request.load_flags = LOAD_MAIN_FRAME;
  EXPECT_FALSE(peer_.IsCrossOrigin(request));
  // Non-main load without referer. Treated as cross-origin.
  request.load_flags = 0;
  EXPECT_TRUE(peer_.IsCrossOrigin(request));
  // Main load with different referer origin.
  request.load_flags = LOAD_MAIN_FRAME;
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "https://www.bar.com/other/page");
  EXPECT_FALSE(peer_.IsCrossOrigin(request));
  // Non-main load with different referer origin.
  request.load_flags = 0;
  EXPECT_TRUE(peer_.IsCrossOrigin(request));
  // Non-main load with same referer origin.
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "https://www.foo.com/other/page");
  EXPECT_FALSE(peer_.IsCrossOrigin(request));
  // Non-main load with same referer host but different schemes.
  request.extra_headers.SetHeader(HttpRequestHeaders::kReferer,
                                  "http://www.foo.com/other/page");
  EXPECT_TRUE(peer_.IsCrossOrigin(request));
}
// Verifies ObtainEncoder()'s LRU behavior with a capacity of two: lookups
// move entries to the front, and creating past capacity evicts the
// least-recently-used encoder.
TEST_F(HpackHuffmanAggregatorTest, EncoderLRUQueue) {
  peer_.set_max_encoders(2);
  SpdySessionKey key1(HostPortPair("one.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);
  SpdySessionKey key2(HostPortPair("two.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);
  SpdySessionKey key3(HostPortPair("three.com", 443), ProxyServer::Direct(),
                      PRIVACY_MODE_ENABLED);
  // Creates one.com.
  HpackEncoder* one = peer_.ObtainEncoder(key1);
  EXPECT_EQ(1u, peer_.encoders()->size());
  // Creates two.com. No evictions.
  HpackEncoder* two = peer_.ObtainEncoder(key2);
  EXPECT_EQ(2u, peer_.encoders()->size());
  EXPECT_NE(one, two);
  // Touch one.com.
  EXPECT_EQ(one, peer_.ObtainEncoder(key1));
  // Creates three.com. Evicts two.com, as it's least-recently used.
  HpackEncoder* three = peer_.ObtainEncoder(key3);
  EXPECT_EQ(one, peer_.ObtainEncoder(key1));
  EXPECT_NE(one, three);
  EXPECT_EQ(2u, peer_.encoders()->size());
}
// Verifies PublishCounts(): internal counters reset after publishing,
// octet i maps to histogram sample i+1, and successive publishes are
// aggregated into the same histogram rather than replacing it.
TEST_F(HpackHuffmanAggregatorTest, PublishCounts) {
  (*peer_.counts())[0] = 1;
  (*peer_.counts())[255] = 10;
  (*peer_.counts())[128] = 101;
  peer_.set_total_counts(112);
  peer_.PublishCounts();
  // Internal counts were reset after being published.
  EXPECT_THAT(*peer_.counts(), Each(Eq(0u)));
  EXPECT_EQ(0u, peer_.total_counts());
  // Verify histogram counts match the expectation.
  scoped_ptr<base::HistogramSamples> samples =
      base::StatisticsRecorder::FindHistogram(kHistogramName)
          ->SnapshotSamples();
  EXPECT_EQ(0, samples->GetCount(0));
  EXPECT_EQ(1, samples->GetCount(1));
  EXPECT_EQ(101, samples->GetCount(129));
  EXPECT_EQ(10, samples->GetCount(256));
  EXPECT_EQ(112, samples->TotalCount());
  // Publish a second round of counts.
  (*peer_.counts())[1] = 32;
  (*peer_.counts())[128] = 5;
  peer_.set_total_counts(37);
  peer_.PublishCounts();
  // Verify they've been aggregated into the previous counts.
  samples = base::StatisticsRecorder::FindHistogram(kHistogramName)
      ->SnapshotSamples();
  EXPECT_EQ(0, samples->GetCount(0));
  EXPECT_EQ(1, samples->GetCount(1));
  EXPECT_EQ(32, samples->GetCount(2));
  EXPECT_EQ(106, samples->GetCount(129));
  EXPECT_EQ(10, samples->GetCount(256));
  EXPECT_EQ(149, samples->TotalCount());
}
// Verifies CreateSpdyHeadersFromHttpResponse(): names are lower-cased,
// repeated headers coalesce with embedded '\0' separators, values are
// trimmed, and ":status" is synthesized from the response code. The raw
// header fixture uses '\0' line delimiters as HttpResponseHeaders expects.
TEST_F(HpackHuffmanAggregatorTest, CreateSpdyResponseHeaders) {
  char kRawHeaders[] =
      "HTTP/1.1    202   Accepted  \0"
      "Content-TYPE  : text/html; charset=utf-8  \0"
      "Set-Cookie: foo=bar \0"
      "Set-Cookie:   baz=bing \0"
      "Cache-Control: pragma=no-cache \0"
      "Cache-CONTROL: expires=12345 \0\0";
  scoped_refptr<HttpResponseHeaders> parsed_headers(new HttpResponseHeaders(
      std::string(kRawHeaders, arraysize(kRawHeaders) - 1)));
  SpdyHeaderBlock headers;
  peer_.CreateSpdyHeadersFromHttpResponse(*parsed_headers, &headers);
  // Explicit lengths account for the embedded NUL separators.
  EXPECT_THAT(headers, ElementsAre(
      Pair(":status", "202"),
      Pair("cache-control", std::string("pragma=no-cache\0expires=12345", 29)),
      Pair("content-type", "text/html; charset=utf-8"),
      Pair("set-cookie", std::string("foo=bar\0baz=bing", 16))));
}
} // namespace net
...@@ -20185,6 +20185,9 @@ Therefore, the affected-histogram name has to have at least one dot in it. ...@@ -20185,6 +20185,9 @@ Therefore, the affected-histogram name has to have at least one dot in it.
</histogram> </histogram>
<histogram name="Net.SpdyHpackEncodedCharacterFrequency" units="ASCII codes"> <histogram name="Net.SpdyHpackEncodedCharacterFrequency" units="ASCII codes">
<obsolete>
Obsolete as HTTP/2 standard is finalized.
</obsolete>
<owner>bnc@chromium.org</owner> <owner>bnc@chromium.org</owner>
<summary> <summary>
Frequencies of characters observed in request and response headers. Frequencies of characters observed in request and response headers.
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment