Commit 0e5e459a authored by Scott Violet's avatar Scott Violet Committed by Commit Bot

history: use more inclusive terms in history

There are a couple of things still remaining, but those will be a bit
more fiddly.

TEST=covered by tests
BUG=1097644

Change-Id: If9c62187088b1fce2c432607c808b4e5b6426d9a
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2300623
Reviewed-by: Clark DuVall <cduvall@chromium.org>
Commit-Queue: Scott Violet <sky@chromium.org>
Cr-Commit-Position: refs/heads/master@{#789112}
parent 68dd21b4
......@@ -58,23 +58,23 @@ class TopSites : public RefcountedKeyedService {
// nothing. Should be called from the UI thread.
virtual void SyncWithHistory() = 0;
// Blacklisted URLs
// Blocked Urls.
// Returns true if there is at least one item in the blacklist.
virtual bool HasBlacklistedItems() const = 0;
// Returns true if there is at least one blocked url.
virtual bool HasBlockedUrls() const = 0;
// Add a URL to the blacklist. Should be called from the UI thread.
virtual void AddBlacklistedURL(const GURL& url) = 0;
// Add a URL to the set of urls that will not be shown. Should be called from
// the UI thread.
virtual void AddBlockedUrl(const GURL& url) = 0;
// Removes a URL from the blacklist. Should be called from the UI thread.
virtual void RemoveBlacklistedURL(const GURL& url) = 0;
// Removes a previously blocked url. Should be called from the UI thread.
virtual void RemoveBlockedUrl(const GURL& url) = 0;
// Returns true if the URL is blacklisted. Should be called from the UI
// thread.
virtual bool IsBlacklisted(const GURL& url) = 0;
// Returns true if the URL is blocked. Should be called from the UI thread.
virtual bool IsBlocked(const GURL& url) = 0;
// Clear the blacklist. Should be called from the UI thread.
virtual void ClearBlacklistedURLs() = 0;
// Removes all blocked urls. Should be called from the UI thread.
virtual void ClearBlockedUrls() = 0;
// Returns true if the top sites list is full (i.e. we already have the
// maximum number of top sites). This function also returns false if TopSites
......
......@@ -77,7 +77,8 @@ constexpr base::TimeDelta kDelayForUpdates = base::TimeDelta::FromMinutes(60);
// Key for preference listing the URLs that should not be shown as most visited
// tiles.
const char kMostVisitedURLsBlacklist[] = "ntp.most_visited_blacklist";
// TODO(sky): rename actual value to 'most_visited_blocked_urls.'
const char kBlockedUrlsPrefsKey[] = "ntp.most_visited_blacklist";
} // namespace
......@@ -141,53 +142,53 @@ void TopSitesImpl::SyncWithHistory() {
StartQueryForMostVisited();
}
bool TopSitesImpl::HasBlacklistedItems() const {
const base::DictionaryValue* blacklist =
pref_service_->GetDictionary(kMostVisitedURLsBlacklist);
return blacklist && !blacklist->empty();
bool TopSitesImpl::HasBlockedUrls() const {
const base::DictionaryValue* blocked_urls =
pref_service_->GetDictionary(kBlockedUrlsPrefsKey);
return blocked_urls && !blocked_urls->empty();
}
void TopSitesImpl::AddBlacklistedURL(const GURL& url) {
void TopSitesImpl::AddBlockedUrl(const GURL& url) {
DCHECK(thread_checker_.CalledOnValidThread());
auto dummy = std::make_unique<base::Value>();
{
DictionaryPrefUpdate update(pref_service_, kMostVisitedURLsBlacklist);
base::DictionaryValue* blacklist = update.Get();
blacklist->SetWithoutPathExpansion(GetURLHash(url), std::move(dummy));
DictionaryPrefUpdate update(pref_service_, kBlockedUrlsPrefsKey);
base::DictionaryValue* blocked_urls = update.Get();
blocked_urls->SetWithoutPathExpansion(GetURLHash(url), std::move(dummy));
}
ResetThreadSafeCache();
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLACKLIST);
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLOCKED_URLS);
}
void TopSitesImpl::RemoveBlacklistedURL(const GURL& url) {
void TopSitesImpl::RemoveBlockedUrl(const GURL& url) {
DCHECK(thread_checker_.CalledOnValidThread());
{
DictionaryPrefUpdate update(pref_service_, kMostVisitedURLsBlacklist);
base::DictionaryValue* blacklist = update.Get();
blacklist->RemoveKey(GetURLHash(url));
DictionaryPrefUpdate update(pref_service_, kBlockedUrlsPrefsKey);
base::DictionaryValue* blocked_urls = update.Get();
blocked_urls->RemoveKey(GetURLHash(url));
}
ResetThreadSafeCache();
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLACKLIST);
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLOCKED_URLS);
}
bool TopSitesImpl::IsBlacklisted(const GURL& url) {
bool TopSitesImpl::IsBlocked(const GURL& url) {
DCHECK(thread_checker_.CalledOnValidThread());
const base::DictionaryValue* blacklist =
pref_service_->GetDictionary(kMostVisitedURLsBlacklist);
return blacklist && blacklist->HasKey(GetURLHash(url));
const base::DictionaryValue* blocked_urls =
pref_service_->GetDictionary(kBlockedUrlsPrefsKey);
return blocked_urls && blocked_urls->HasKey(GetURLHash(url));
}
void TopSitesImpl::ClearBlacklistedURLs() {
void TopSitesImpl::ClearBlockedUrls() {
DCHECK(thread_checker_.CalledOnValidThread());
{
DictionaryPrefUpdate update(pref_service_, kMostVisitedURLsBlacklist);
base::DictionaryValue* blacklist = update.Get();
blacklist->Clear();
DictionaryPrefUpdate update(pref_service_, kBlockedUrlsPrefsKey);
base::DictionaryValue* blocked_urls = update.Get();
blocked_urls->Clear();
}
ResetThreadSafeCache();
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLACKLIST);
NotifyTopSitesChanged(TopSitesObserver::ChangeReason::BLOCKED_URLS);
}
bool TopSitesImpl::IsFull() {
......@@ -224,7 +225,7 @@ void TopSitesImpl::ShutdownOnUIThread() {
// static
void TopSitesImpl::RegisterPrefs(PrefRegistrySimple* registry) {
registry->RegisterDictionaryPref(kMostVisitedURLsBlacklist);
registry->RegisterDictionaryPref(kBlockedUrlsPrefsKey);
}
TopSitesImpl::~TopSitesImpl() = default;
......@@ -297,18 +298,18 @@ bool TopSitesImpl::AddPrepopulatedPages(MostVisitedURLList* urls) const {
return added;
}
MostVisitedURLList TopSitesImpl::ApplyBlacklist(
MostVisitedURLList TopSitesImpl::ApplyBlockedUrls(
const MostVisitedURLList& urls) {
// Log the number of times ApplyBlacklist is called so we can compute the
// average number of blacklisted items per user.
const base::DictionaryValue* blacklist =
pref_service_->GetDictionary(kMostVisitedURLsBlacklist);
// Log the number of times ApplyBlockedUrls is called so we can compute the
// average number of blocked urls per user.
const base::DictionaryValue* blocked_urls =
pref_service_->GetDictionary(kBlockedUrlsPrefsKey);
UMA_HISTOGRAM_BOOLEAN("TopSites.NumberOfApplyBlacklist", true);
UMA_HISTOGRAM_COUNTS_100("TopSites.NumberOfBlacklistedItems",
(blacklist ? blacklist->size() : 0));
(blocked_urls ? blocked_urls->size() : 0));
MostVisitedURLList result;
for (const auto& url : urls) {
if (IsBlacklisted(url.url))
if (IsBlocked(url.url))
continue;
if (result.size() >= kTopSitesNumber)
break;
......@@ -319,8 +320,8 @@ MostVisitedURLList TopSitesImpl::ApplyBlacklist(
// static
std::string TopSitesImpl::GetURLHash(const GURL& url) {
// We don't use canonical URLs here to be able to blacklist only one of
// the two 'duplicate' sites, e.g. 'gmail.com' and 'mail.google.com'.
// We don't use canonical URLs here to be able to block only one of the two
// 'duplicate' sites, e.g. 'gmail.com' and 'mail.google.com'.
return base::MD5String(url.spec());
}
......@@ -375,9 +376,9 @@ void TopSitesImpl::SetTopSites(MostVisitedURLList top_sites,
int TopSitesImpl::num_results_to_request_from_history() const {
DCHECK(thread_checker_.CalledOnValidThread());
const base::DictionaryValue* blacklist =
pref_service_->GetDictionary(kMostVisitedURLsBlacklist);
return kTopSitesNumber + (blacklist ? blacklist->size() : 0);
const base::DictionaryValue* blocked_urls =
pref_service_->GetDictionary(kBlockedUrlsPrefsKey);
return kTopSitesNumber + (blocked_urls ? blocked_urls->size() : 0);
}
void TopSitesImpl::MoveStateToLoaded() {
......@@ -411,7 +412,7 @@ void TopSitesImpl::MoveStateToLoaded() {
void TopSitesImpl::ResetThreadSafeCache() {
base::AutoLock lock(lock_);
thread_safe_cache_ = ApplyBlacklist(top_sites_);
thread_safe_cache_ = ApplyBlockedUrls(top_sites_);
}
void TopSitesImpl::ScheduleUpdateTimer() {
......
......@@ -61,11 +61,11 @@ class TopSitesImpl : public TopSites, public HistoryServiceObserver {
// TopSites implementation.
void GetMostVisitedURLs(GetMostVisitedURLsCallback callback) override;
void SyncWithHistory() override;
bool HasBlacklistedItems() const override;
void AddBlacklistedURL(const GURL& url) override;
void RemoveBlacklistedURL(const GURL& url) override;
bool IsBlacklisted(const GURL& url) override;
void ClearBlacklistedURLs() override;
bool HasBlockedUrls() const override;
void AddBlockedUrl(const GURL& url) override;
void RemoveBlockedUrl(const GURL& url) override;
bool IsBlocked(const GURL& url) override;
void ClearBlockedUrls() override;
bool IsFull() override;
PrepopulatedPageList GetPrepopulatedPages() override;
bool loaded() const override;
......@@ -123,11 +123,11 @@ class TopSitesImpl : public TopSites, public HistoryServiceObserver {
// Adds prepopulated pages to TopSites. Returns true if any pages were added.
bool AddPrepopulatedPages(MostVisitedURLList* urls) const;
// Takes |urls|, produces it's copy in |out| after removing blacklisted URLs.
// Takes |urls|, produces its copy in |out| after removing blocked urls.
// Also ensures we respect the maximum number TopSites URLs.
MostVisitedURLList ApplyBlacklist(const MostVisitedURLList& urls);
MostVisitedURLList ApplyBlockedUrls(const MostVisitedURLList& urls);
// Returns an MD5 hash of the URL. Hashing is required for blacklisted URLs.
// Returns an MD5 hash of the URL. Hashing is required for blocking urls.
static std::string GetURLHash(const GURL& url);
// Updates URLs in |cache_| and the db (in the background). The URLs in
......@@ -137,8 +137,8 @@ class TopSitesImpl : public TopSites, public HistoryServiceObserver {
const CallLocation location);
// Returns the number of most visited results to request from history. This
// changes depending upon how many urls have been blacklisted. Should be
// called from the UI thread.
// changes depending upon how many urls have been blocked. Should be called
// from the UI thread.
int num_results_to_request_from_history() const;
// Invoked when transitioning to LOADED. Notifies any queued up callbacks.
......@@ -174,7 +174,7 @@ class TopSitesImpl : public TopSites, public HistoryServiceObserver {
MostVisitedURLList top_sites_;
// Copy of the top sites data that may be accessed on any thread (assuming
// you hold |lock_|). The data in |thread_safe_cache_| has blacklisted urls
// you hold |lock_|). The data in |thread_safe_cache_| has blocked urls
// applied (|top_sites_| does not).
MostVisitedURLList thread_safe_cache_ GUARDED_BY(lock_);
......@@ -193,8 +193,7 @@ class TopSitesImpl : public TopSites, public HistoryServiceObserver {
// URL List of prepopulated page.
const PrepopulatedPageList prepopulated_pages_;
// PrefService holding the NTP URL blacklist dictionary. Must outlive
// TopSitesImpl.
// PrefService holding the set of blocked urls. Must outlive TopSitesImpl.
PrefService* pref_service_;
// HistoryService that TopSitesImpl can query. May be null, but if defined it
......
......@@ -712,9 +712,8 @@ TEST_F(TopSitesImplTest, CancelingRequestsForTopSites) {
EXPECT_EQ(0, querier2.number_of_callbacks());
}
// Tests variations of blacklisting without testing prepopulated page
// blacklisting.
TEST_F(TopSitesImplTest, BlacklistingWithoutPrepopulated) {
// Tests variations of blocked urls.
TEST_F(TopSitesImplTest, BlockedUrlsWithoutPrepopulated) {
MostVisitedURLList pages;
MostVisitedURL url, url1;
url.url = GURL("http://bbc.com/");
......@@ -723,23 +722,23 @@ TEST_F(TopSitesImplTest, BlacklistingWithoutPrepopulated) {
pages.push_back(url1);
SetTopSites(pages);
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://bbc.com/")));
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://bbc.com/")));
// Blacklist google.com.
top_sites()->AddBlacklistedURL(GURL("http://google.com/"));
// Block google.com.
top_sites()->AddBlockedUrl(GURL("http://google.com/"));
EXPECT_TRUE(top_sites()->HasBlacklistedItems());
EXPECT_TRUE(top_sites()->IsBlacklisted(GURL("http://google.com/")));
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://bbc.com/")));
EXPECT_TRUE(top_sites()->HasBlockedUrls());
EXPECT_TRUE(top_sites()->IsBlocked(GURL("http://google.com/")));
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://bbc.com/")));
// Make sure the blacklisted site isn't returned in the results.
// Make sure the blocked site isn't returned in the results.
{
TopSitesQuerier q;
q.QueryTopSites(top_sites(), true);
EXPECT_EQ("http://bbc.com/", q.urls()[0].url.spec());
}
// Recreate top sites and make sure blacklisted url was correctly read.
// Recreate top sites and make sure the blocked url was correctly read.
RecreateTopSitesAndBlock();
{
TopSitesQuerier q;
......@@ -747,10 +746,10 @@ TEST_F(TopSitesImplTest, BlacklistingWithoutPrepopulated) {
EXPECT_EQ("http://bbc.com/", q.urls()[0].url.spec());
}
// Mark google as no longer blacklisted.
top_sites()->RemoveBlacklistedURL(GURL("http://google.com/"));
EXPECT_FALSE(top_sites()->HasBlacklistedItems());
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://google.com/")));
// Mark google as no longer blocked.
top_sites()->RemoveBlockedUrl(GURL("http://google.com/"));
EXPECT_FALSE(top_sites()->HasBlockedUrls());
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://google.com/")));
// Make sure google is returned now.
{
......@@ -760,9 +759,9 @@ TEST_F(TopSitesImplTest, BlacklistingWithoutPrepopulated) {
EXPECT_EQ("http://google.com/", q.urls()[1].url.spec());
}
// Remove all blacklisted sites.
top_sites()->ClearBlacklistedURLs();
EXPECT_FALSE(top_sites()->HasBlacklistedItems());
// Remove all blocked urls.
top_sites()->ClearBlockedUrls();
EXPECT_FALSE(top_sites()->HasBlockedUrls());
{
TopSitesQuerier q;
......@@ -773,10 +772,8 @@ TEST_F(TopSitesImplTest, BlacklistingWithoutPrepopulated) {
}
}
// Tests variations of blacklisting including blacklisting prepopulated pages.
// This test is disable for Android because Android does not have any
// prepopulated pages.
TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
// Tests variations of blocking including blocking prepopulated pages.
TEST_F(TopSitesImplTest, BlockingPrepopulated) {
MostVisitedURLList pages;
MostVisitedURL url, url1;
url.url = GURL("http://bbc.com/");
......@@ -785,20 +782,20 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
pages.push_back(url1);
SetTopSites(pages);
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://bbc.com/")));
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://bbc.com/")));
// Blacklist google.com.
top_sites()->AddBlacklistedURL(GURL("http://google.com/"));
// Block google.com.
top_sites()->AddBlockedUrl(GURL("http://google.com/"));
DCHECK_GE(GetPrepopulatedPages().size(), 1u);
GURL prepopulate_url = GetPrepopulatedPages()[0].most_visited.url;
EXPECT_TRUE(top_sites()->HasBlacklistedItems());
EXPECT_TRUE(top_sites()->IsBlacklisted(GURL("http://google.com/")));
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://bbc.com/")));
EXPECT_FALSE(top_sites()->IsBlacklisted(prepopulate_url));
EXPECT_TRUE(top_sites()->HasBlockedUrls());
EXPECT_TRUE(top_sites()->IsBlocked(GURL("http://google.com/")));
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://bbc.com/")));
EXPECT_FALSE(top_sites()->IsBlocked(prepopulate_url));
// Make sure the blacklisted site isn't returned in the results.
// Make sure the blocked site isn't returned in the results.
{
TopSitesQuerier q;
q.QueryTopSites(top_sites(), true);
......@@ -807,7 +804,7 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
ASSERT_NO_FATAL_FAILURE(ContainsPrepopulatePages(q, 1));
}
// Recreate top sites and make sure blacklisted url was correctly read.
// Recreate top sites and make sure blocked url was correctly read.
RecreateTopSitesAndBlock();
{
TopSitesQuerier q;
......@@ -817,11 +814,11 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
ASSERT_NO_FATAL_FAILURE(ContainsPrepopulatePages(q, 1));
}
// Blacklist one of the prepopulate urls.
top_sites()->AddBlacklistedURL(prepopulate_url);
EXPECT_TRUE(top_sites()->HasBlacklistedItems());
// Block one of the prepopulate urls.
top_sites()->AddBlockedUrl(prepopulate_url);
EXPECT_TRUE(top_sites()->HasBlockedUrls());
// Make sure the blacklisted prepopulate url isn't returned.
// Make sure the blocked prepopulate url isn't returned.
{
TopSitesQuerier q;
q.QueryTopSites(top_sites(), true);
......@@ -831,10 +828,10 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
EXPECT_NE(prepopulate_url.spec(), q.urls()[i].url.spec());
}
// Mark google as no longer blacklisted.
top_sites()->RemoveBlacklistedURL(GURL("http://google.com/"));
EXPECT_TRUE(top_sites()->HasBlacklistedItems());
EXPECT_FALSE(top_sites()->IsBlacklisted(GURL("http://google.com/")));
// Mark google as no longer blocked.
top_sites()->RemoveBlockedUrl(GURL("http://google.com/"));
EXPECT_TRUE(top_sites()->HasBlockedUrls());
EXPECT_FALSE(top_sites()->IsBlocked(GURL("http://google.com/")));
// Make sure google is returned now.
{
......@@ -843,7 +840,7 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
ASSERT_EQ(2u + GetPrepopulatedPages().size() - 1, q.urls().size());
EXPECT_EQ("http://bbc.com/", q.urls()[0].url.spec());
EXPECT_EQ("http://google.com/", q.urls()[1].url.spec());
// Android has only one prepopulated page which has been blacklisted, so
// Android has only one prepopulated page which has been blocked, so
// only 2 urls are returned.
if (q.urls().size() > 2)
EXPECT_NE(prepopulate_url.spec(), q.urls()[2].url.spec());
......@@ -851,9 +848,9 @@ TEST_F(TopSitesImplTest, BlacklistingWithPrepopulated) {
EXPECT_EQ(1u, GetPrepopulatedPages().size());
}
// Remove all blacklisted sites.
top_sites()->ClearBlacklistedURLs();
EXPECT_FALSE(top_sites()->HasBlacklistedItems());
// Remove all blocked urls.
top_sites()->ClearBlockedUrls();
EXPECT_FALSE(top_sites()->HasBlockedUrls());
{
TopSitesQuerier q;
......
......@@ -18,8 +18,8 @@ class TopSitesObserver {
enum class ChangeReason {
// TopSites was changed by most visited.
MOST_VISITED,
// TopSites was changed by add/remove/clear blacklist.
BLACKLIST,
// The set of blocked urls has changed.
BLOCKED_URLS,
// TopSites was changed by AddForcedURLs.
FORCED_URL,
};
......
......@@ -389,11 +389,10 @@ void MostVisitedSites::AddOrRemoveBlacklistedUrl(const GURL& url,
}
if (top_sites_) {
// Always blacklist in the local TopSites.
if (add_url)
top_sites_->AddBlacklistedURL(url);
top_sites_->AddBlockedUrl(url);
else
top_sites_->RemoveBlacklistedURL(url);
top_sites_->RemoveBlockedUrl(url);
}
// Only blacklist in the server-side suggestions service if it's active.
......@@ -406,10 +405,8 @@ void MostVisitedSites::AddOrRemoveBlacklistedUrl(const GURL& url,
}
void MostVisitedSites::ClearBlacklistedUrls() {
if (top_sites_) {
// Always update the blacklist in the local TopSites.
top_sites_->ClearBlacklistedURLs();
}
if (top_sites_)
top_sites_->ClearBlockedUrls();
// Only update the server-side blacklist if it's active.
if (mv_source_ == TileSource::SUGGESTIONS_SERVICE) {
......@@ -567,8 +564,8 @@ NTPTilesVector MostVisitedSites::CreateWhitelistEntryPointTiles(
if (whitelist_tiles.size() + num_actual_tiles >= max_num_sites_)
break;
// Skip blacklisted sites.
if (top_sites_ && top_sites_->IsBlacklisted(whitelist.entry_point))
// Skip blocked sites.
if (top_sites_ && top_sites_->IsBlocked(whitelist.entry_point))
continue;
// Skip tiles already present.
......@@ -639,8 +636,8 @@ NTPTilesVector MostVisitedSites::CreatePopularSitesTiles(
break;
}
// Skip blacklisted sites.
if (top_sites_ && top_sites_->IsBlacklisted(popular_site.url))
// Skip blocked sites.
if (top_sites_ && top_sites_->IsBlocked(popular_site.url))
continue;
const std::string& host = popular_site.url.host();
......@@ -893,7 +890,7 @@ bool MostVisitedSites::ShouldAddHomeTile() const {
homepage_client_->IsHomepageTileEnabled() &&
!homepage_client_->GetHomepageUrl().is_empty() &&
!(top_sites_ &&
top_sites_->IsBlacklisted(homepage_client_->GetHomepageUrl()));
top_sites_->IsBlocked(homepage_client_->GetHomepageUrl()));
}
void MostVisitedSites::AddToHostsAndTotalCount(const NTPTilesVector& new_tiles,
......
......@@ -43,15 +43,11 @@ class FakeEmptyTopSites : public history::TopSites {
// history::TopSites:
void GetMostVisitedURLs(GetMostVisitedURLsCallback callback) override;
void SyncWithHistory() override {}
bool HasBlacklistedItems() const override {
return false;
}
void AddBlacklistedURL(const GURL& url) override {}
void RemoveBlacklistedURL(const GURL& url) override {}
bool IsBlacklisted(const GURL& url) override {
return false;
}
void ClearBlacklistedURLs() override {}
bool HasBlockedUrls() const override { return false; }
void AddBlockedUrl(const GURL& url) override {}
void RemoveBlockedUrl(const GURL& url) override {}
bool IsBlocked(const GURL& url) override { return false; }
void ClearBlockedUrls() override {}
bool IsFull() override { return false; }
bool loaded() const override {
return false;
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment