Commit d5260d7b authored by Robert Ma's avatar Robert Ma Committed by Commit Bot

Roll in new version of WPT tools

Especially in order to get the new lint for 'web-platform.test'
(https://github.com/w3c/web-platform-tests/pull/6868)

Chromium's custom patch for lint.py is modified to accommodate the
upstream changes.

Bug: 749879
Change-Id: Ic2fb4655441f1d0e3bccbb7742565cb55645cf8f
Reviewed-on: https://chromium-review.googlesource.com/809745
Commit-Queue: Robert Ma <robertma@chromium.org>
Reviewed-by: default avatarQuinten Yearsley <qyearsley@chromium.org>
Cr-Commit-Position: refs/heads/master@{#521913}
parent 8390d652
......@@ -51,7 +51,7 @@ Local Modifications: None
Name: web-platform-tests - Test Suites for Web Platform specifications
Short Name: wpt
URL: https://github.com/w3c/web-platform-tests/
Version: 53008fece70040f1886d2ba5ff92a97eed1e4037
Version: 60a9eb2ca37127deaeb7a979f27a9d0c30970786
License: LICENSES FOR W3C TEST SUITES (http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html)
License File: wpt/wpt/LICENSE.md
Security Critical: no
......
......@@ -68,6 +68,7 @@
./tools/manifest/catalog
./tools/manifest/catalog/xhtml.dtd
./tools/manifest/commands.json
./tools/manifest/download.py
./tools/manifest/item.py
./tools/manifest/log.py
./tools/manifest/manifest.py
......
......@@ -9,7 +9,7 @@ cd $DIR
TARGET_DIR=$DIR/wpt
REMOTE_REPO="https://chromium.googlesource.com/external/w3c/web-platform-tests.git"
WPT_HEAD=6439a72ba71f99e2f01abb9f69edc779698ba9bc
WPT_HEAD=60a9eb2ca37127deaeb7a979f27a9d0c30970786
function clone {
# Remove existing repo if already exists.
......
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
--- a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
@@ -725,6 +725,7 @@ def create_parser():
@@ -752,6 +752,7 @@ def create_parser():
help="Run CSS testsuite specific lints")
parser.add_argument("--repo-root", help="The WPT directory. Use this"
"option if the lint script exists outside the repository")
......@@ -9,18 +9,18 @@ diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/
parser.add_argument("--all", action="store_true", help="If no paths are passed, try to lint the whole "
"working directory, not just files that changed")
return parser
@@ -746,16 +747,21 @@ def main(**kwargs):
@@ -773,16 +774,21 @@ def main(**kwargs):
paths = lint_paths(kwargs, repo_root)
- return lint(repo_root, paths, output_format, kwargs.get("css_mode", False))
- return lint(repo_root, paths, output_format)
+ ignore_glob = kwargs.get("ignore_glob")
+ return lint(repo_root, paths, output_format, kwargs.get("css_mode", False), ignore_glob)
+ return lint(repo_root, paths, output_format, ignore_glob)
-def lint(repo_root, paths, output_format, css_mode):
-def lint(repo_root, paths, output_format):
+
+def lint(repo_root, paths, output_format, css_mode, ignore_glob):
+def lint(repo_root, paths, output_format, ignore_glob):
error_count = defaultdict(int)
last = None
......
......@@ -66,7 +66,7 @@ def fnmatch_translate(pat, path_name=False):
parts.append("$")
try:
return re.compile("".join(parts))
except:
except Exception:
raise
......
......@@ -56,7 +56,7 @@ For example, to make the lint tool ignore all '%s'
errors in the %s file,
you could add the following line to the lint.whitelist file.
%s:%s"""
%s: %s"""
def all_filesystem_paths(repo_root):
path_filter = PathFilter(repo_root, extras=[".git/*"])
......@@ -108,13 +108,13 @@ def _all_files_equal(paths):
return True
def check_path_length(repo_root, path, css_mode):
def check_path_length(repo_root, path):
if len(path) + 1 > 150:
return [("PATH LENGTH", "/%s longer than maximum path length (%d > 150)" % (path, len(path) + 1), path, None)]
return []
def check_worker_collision(repo_root, path, css_mode):
def check_worker_collision(repo_root, path):
endings = [(".any.html", ".any.js"),
(".any.worker.html", ".any.js"),
(".worker.html", ".worker.js")]
......@@ -127,7 +127,7 @@ def check_worker_collision(repo_root, path, css_mode):
return []
def check_ahem_copy(repo_root, path, css_mode):
def check_ahem_copy(repo_root, path):
lpath = path.lower()
if "ahem" in lpath and lpath.endswith(".ttf"):
return [("AHEM COPY", "Don't add extra copies of Ahem, use /fonts/Ahem.ttf", path, None)]
......@@ -139,7 +139,7 @@ w3c_tr_re = re.compile(r"https?\:\/\/www\.w3c?\.org\/TR\/([^/?#]+)")
w3c_dev_re = re.compile(r"https?\:\/\/dev\.w3c?\.org\/[^/?#]+\/([^/?#]+)")
def check_css_globally_unique(repo_root, paths, css_mode):
def check_css_globally_unique(repo_root, paths):
"""
Checks that CSS filenames are sufficiently unique
......@@ -154,7 +154,6 @@ def check_css_globally_unique(repo_root, paths, css_mode):
:param repo_root: the repository root
:param paths: list of all paths
:param css_mode: whether we're in CSS testsuite mode
:returns: a list of errors found in ``paths``
"""
......@@ -166,11 +165,9 @@ def check_css_globally_unique(repo_root, paths, css_mode):
if os.name == "nt":
path = path.replace("\\", "/")
if not css_mode:
if not path.startswith("css/"):
continue
if not path.startswith("css/"):
continue
# we're within css or in css_mode after all that
source_file = SourceFile(repo_root, path, "/")
if source_file.name_is_non_test:
# If we're name_is_non_test for a reason apart from support, ignore it.
......@@ -332,6 +329,11 @@ class W3CTestOrgRegexp(Regexp):
error = "W3C-TEST.ORG"
description = "External w3c-test.org domain used"
class WebPlatformTestRegexp(Regexp):
pattern = b"web\-platform\.test"
error = "WEB-PLATFORM.TEST"
description = "Internal web-platform.test domain used"
class Webidl2Regexp(Regexp):
pattern = b"webidl2\.js"
error = "WEBIDL2.JS"
......@@ -361,19 +363,28 @@ class LayoutTestsRegexp(Regexp):
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "eventSender/testRunner/window.internals used; these are LayoutTests-specific APIs (WebKit/Blink)"
class SpecialPowersRegexp(Regexp):
pattern = b"SpecialPowers"
error = "SPECIALPOWERS API"
file_extensions = [".html", ".htm", ".js", ".xht", ".xhtml", ".svg"]
description = "SpecialPowers used; this is gecko-specific and not supported in wpt"
regexps = [item() for item in
[TrailingWhitespaceRegexp,
TabsRegexp,
CRRegexp,
SetTimeoutRegexp,
W3CTestOrgRegexp,
WebPlatformTestRegexp,
Webidl2Regexp,
ConsoleRegexp,
GenerateTestsRegexp,
PrintRegexp,
LayoutTestsRegexp]]
LayoutTestsRegexp,
SpecialPowersRegexp]]
def check_regexp_line(repo_root, path, f, css_mode):
def check_regexp_line(repo_root, path, f):
errors = []
applicable_regexps = [regexp for regexp in regexps if regexp.applies(path)]
......@@ -385,12 +396,12 @@ def check_regexp_line(repo_root, path, f, css_mode):
return errors
def check_parsed(repo_root, path, f, css_mode):
def check_parsed(repo_root, path, f):
source_file = SourceFile(repo_root, path, "/", contents=f.read())
errors = []
if css_mode or path.startswith("css/"):
if path.startswith("css/"):
if (source_file.type == "support" and
not source_file.name_is_non_test and
not source_file.name_is_reference):
......@@ -507,14 +518,26 @@ def check_parsed(repo_root, path, f, css_mode):
if all(seen_elements[name] for name in required_elements):
break
if source_file.testdriver_nodes:
if len(source_file.testdriver_nodes) > 1:
errors.append(("MULTIPLE-TESTDRIVER",
"More than one <script src='/resources/testdriver.js'>", path, None))
testdriver_vendor_nodes = source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver-vendor.js']")
if not testdriver_vendor_nodes:
errors.append(("MISSING-TESTDRIVER-VENDOR",
"Missing <script src='/resources/testdriver-vendor.js'>", path, None))
else:
if len(testdriver_vendor_nodes) > 1:
errors.append(("MULTIPLE-TESTDRIVER-VENDOR",
"More than one <script src='/resources/testdriver-vendor.js'>", path, None))
for element in source_file.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src]"):
src = element.attrib["src"]
for name in ["testharness", "testharnessreport"]:
for name in ["testharness", "testharnessreport", "testdriver", "testdriver-vendor"]:
if "%s.js" % name == src or ("/%s.js" % name in src and src != "/resources/%s.js" % name):
errors.append(("%s-PATH" % name.upper(), "%s.js script seen with incorrect path" % name, path, None))
return errors
class ASTCheck(object):
......@@ -542,7 +565,7 @@ class OpenModeCheck(ASTCheck):
ast_checkers = [item() for item in [OpenModeCheck]]
def check_python_ast(repo_root, path, f, css_mode):
def check_python_ast(repo_root, path, f):
if not path.endswith(".py"):
return []
......@@ -562,7 +585,7 @@ broken_js_metadata = re.compile(b"//\s*META:")
broken_python_metadata = re.compile(b"#\s*META:")
def check_script_metadata(repo_root, path, f, css_mode):
def check_script_metadata(repo_root, path, f):
if path.endswith((".worker.js", ".any.js")):
meta_re = js_meta_re
broken_metadata = broken_js_metadata
......@@ -601,52 +624,49 @@ def check_script_metadata(repo_root, path, f, css_mode):
return errors
def check_path(repo_root, path, css_mode):
def check_path(repo_root, path):
"""
Runs lints that check the file path.
:param repo_root: the repository root
:param path: the path of the file within the repository
:param css_mode: whether we're in CSS testsuite mode
:returns: a list of errors found in ``path``
"""
errors = []
for path_fn in path_lints:
errors.extend(path_fn(repo_root, path, css_mode))
errors.extend(path_fn(repo_root, path))
return errors
def check_all_paths(repo_root, paths, css_mode):
def check_all_paths(repo_root, paths):
"""
Runs lints that check all paths globally.
:param repo_root: the repository root
:param paths: a list of all the paths within the repository
:param css_mode: whether we're in CSS testsuite mode
:returns: a list of errors found in ``f``
"""
errors = []
for paths_fn in all_paths_lints:
errors.extend(paths_fn(repo_root, paths, css_mode))
errors.extend(paths_fn(repo_root, paths))
return errors
def check_file_contents(repo_root, path, f, css_mode):
def check_file_contents(repo_root, path, f):
"""
Runs lints that check the file contents.
:param repo_root: the repository root
:param path: the path of the file within the repository
:param f: a file-like object with the file contents
:param css_mode: whether we're in CSS testsuite mode
:returns: a list of errors found in ``f``
"""
errors = []
for file_fn in file_lints:
errors.extend(file_fn(repo_root, path, f, css_mode))
errors.extend(file_fn(repo_root, path, f))
f.seek(0)
return errors
......@@ -756,10 +776,10 @@ def main(**kwargs):
ignore_glob = kwargs.get("ignore_glob")
return lint(repo_root, paths, output_format, kwargs.get("css_mode", False), ignore_glob)
return lint(repo_root, paths, output_format, ignore_glob)
def lint(repo_root, paths, output_format, css_mode, ignore_glob):
def lint(repo_root, paths, output_format, ignore_glob):
error_count = defaultdict(int)
last = None
......@@ -803,15 +823,15 @@ def lint(repo_root, paths, output_format, css_mode, ignore_glob):
paths.remove(path)
continue
errors = check_path(repo_root, path, css_mode)
errors = check_path(repo_root, path)
last = process_errors(errors) or last
if not os.path.isdir(abs_path):
with open(abs_path, 'rb') as f:
errors = check_file_contents(repo_root, path, f, css_mode)
errors = check_file_contents(repo_root, path, f)
last = process_errors(errors) or last
errors = check_all_paths(repo_root, paths, css_mode)
errors = check_all_paths(repo_root, paths)
last = process_errors(errors) or last
if output_format in ("normal", "markdown"):
......
{"manifest":
{"path": "update.py", "script": "run", "parser": "create_parser", "help": "Update the MANIFEST.json file",
"virtualenv": false},
"manifest-download":
{"path": "download.py", "script": "run", "parser": "create_parser", "help": "Download recent pregenerated MANIFEST.json file",
"virtualenv": false}}
from __future__ import absolute_import
import argparse
import gzip
import json
import io
import os
from datetime import datetime, timedelta
from six.moves.urllib.request import urlopen
from .vcs import Git
from . import log
here = os.path.dirname(__file__)
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
logger = log.get_logger()
def abs_path(path):
    """Return *path* with a leading ``~`` expanded and made absolute."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def should_download(manifest_path, rebuild_time=timedelta(days=5)):
    """Decide whether the manifest at *manifest_path* needs re-downloading.

    Returns True when the file is missing or older than *rebuild_time*,
    False (after logging) when a sufficiently recent copy already exists.
    """
    if not os.path.exists(manifest_path):
        return True

    age_cutoff = datetime.now() - rebuild_time
    if datetime.fromtimestamp(os.path.getmtime(manifest_path)) < age_cutoff:
        return True

    logger.info("Skipping manifest download because existing file is recent")
    return False
def git_commits(repo_root):
    """Return up to 50 recent commit hashes for the repository at *repo_root*."""
    git = Git.get_func(repo_root)
    log_output = git("log", "--format=%H", "-n50")
    return [sha for sha in log_output.split("\n") if sha]
def github_url(commits):
    """Look up the download URL of a pregenerated manifest on GitHub.

    Scans the w3c/web-platform-tests release assets for a manifest built
    at one of *commits*; if none matches, falls back to the newest plain
    MANIFEST.json.gz asset. Returns None when the release listing cannot
    be fetched or parsed, or when no usable asset exists.
    """
    try:
        resp = urlopen("https://api.github.com/repos/w3c/web-platform-tests/releases")
    except Exception:
        return None

    if resp.code != 200:
        return None

    try:
        releases = json.load(resp.fp)
    except ValueError:
        logger.warning("Response was not valid JSON")
        return None

    fallback = None
    for release in releases:
        for commit in commits:
            for asset in release["assets"]:
                asset_name = asset["name"]
                if asset_name == "MANIFEST-%s.json.gz" % commit:
                    # Exact match for one of our commits wins immediately.
                    return asset["browser_download_url"]
                elif asset_name == "MANIFEST.json.gz" and not fallback:
                    # Remember the first (most recent) generic manifest.
                    fallback = asset["browser_download_url"]

    if fallback:
        logger.info("Can't find a commit-specific manifest so just using the most recent one")
    return fallback
def download_manifest(manifest_path, commits_func, url_func, force=False):
    """Download a pregenerated MANIFEST.json to *manifest_path*.

    :param manifest_path: destination path for the manifest file
    :param commits_func: zero-argument callable returning candidate commits
    :param url_func: callable mapping those commits to a download URL (or None)
    :param force: download even when the existing manifest is recent
    :returns: True on success, False otherwise (failures are logged, not raised)
    """
    if not force and not should_download(manifest_path):
        return False

    commits = commits_func()

    url = url_func(commits)
    if not url:
        logger.warning("No generated manifest found")
        return False

    logger.info("Downloading manifest from %s" % url)
    try:
        resp = urlopen(url)
    except Exception:
        logger.warning("Downloading pregenerated manifest failed")
        return False

    if resp.code != 200:
        logger.warning("Downloading pregenerated manifest failed; got HTTP status %d" %
                       resp.code)
        return False

    gzf = gzip.GzipFile(fileobj=io.BytesIO(resp.read()))

    try:
        decompressed = gzf.read()
    except IOError:
        logger.warning("Failed to decompress downloaded file")
        return False

    try:
        # Binary mode is required: gzf.read() yields bytes, and writing
        # bytes to a text-mode file raises TypeError on Python 3 (which
        # the except below would silently turn into a download failure).
        with open(manifest_path, "wb") as f:
            f.write(decompressed)
    except Exception:
        logger.warning("Failed to write manifest")
        return False

    logger.info("Manifest downloaded")
    return True
def create_parser():
    """Build the command-line parser for the manifest download command.

    :returns: an argparse.ArgumentParser accepting -p/--path, --tests-root
        and --force.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-p", "--path", type=abs_path, help="Path to manifest file.")
    parser.add_argument(
        "--tests-root", type=abs_path, default=wpt_root, help="Path to root of tests.")
    parser.add_argument(
        "--force", action="store_true",
        help="Always download, even if the existing manifest is recent")
    return parser
def download_from_github(path, tests_root, force=False):
    """Fetch a pregenerated manifest for *tests_root* from GitHub releases.

    Thin wrapper around download_manifest that supplies the git-based
    commit source and the GitHub URL resolver.
    """
    commits_func = lambda: git_commits(tests_root)
    return download_manifest(path, commits_func, github_url, force=force)
def run(**kwargs):
    """Entry point for the manifest-download command.

    :returns: process exit code — 0 on successful download, 1 otherwise.
    """
    path = kwargs["path"]
    if path is None:
        # Default to MANIFEST.json at the tests root when no path was given.
        path = os.path.join(kwargs["tests_root"], "MANIFEST.json")

    success = download_from_github(path, kwargs["tests_root"], kwargs["force"])
    return 0 if success else 1
......@@ -102,17 +102,20 @@ class URLManifestItem(ManifestItem):
class TestharnessTest(URLManifestItem):
item_type = "testharness"
def __init__(self, source_file, url, url_base="/", timeout=None, manifest=None):
def __init__(self, source_file, url, url_base="/", timeout=None, testdriver=False, manifest=None):
URLManifestItem.__init__(self, source_file, url, url_base=url_base, manifest=manifest)
self.timeout = timeout
self.testdriver = testdriver
def meta_key(self):
return (self.timeout,)
return (self.timeout, self.testdriver)
def to_json(self):
rv = URLManifestItem.to_json(self)
if self.timeout is not None:
rv[-1]["timeout"] = self.timeout
if self.testdriver:
rv[-1]["testdriver"] = self.testdriver
return rv
@classmethod
......@@ -124,6 +127,7 @@ class TestharnessTest(URLManifestItem):
url,
url_base=manifest.url_base,
timeout=extras.get("timeout"),
testdriver=bool(extras.get("testdriver")),
manifest=manifest)
......
......@@ -54,8 +54,7 @@ class SourceFile(object):
"xhtml":lambda x:ElementTree.parse(x, XMLParser.XMLParser()),
"svg":lambda x:ElementTree.parse(x, XMLParser.XMLParser())}
root_dir_non_test = set(["common",
"work-in-progress"])
root_dir_non_test = set(["common"])
dir_non_test = set(["resources",
"support",
......@@ -63,8 +62,7 @@ class SourceFile(object):
dir_path_non_test = {("css21", "archive"),
("css", "CSS2", "archive"),
("css", "common"),
("css", "work-in-progress")}
("css", "common")}
def __init__(self, tests_root, rel_path, url_base, contents=None):
"""Object representing a file in a source tree.
......@@ -382,6 +380,20 @@ class SourceFile(object):
return rv
@cached_property
def testdriver_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
testdriver.js script"""
return self.root.findall(".//{http://www.w3.org/1999/xhtml}script[@src='/resources/testdriver.js']")
@cached_property
def has_testdriver(self):
"""Boolean indicating whether the file content represents a
testharness.js test"""
if self.root is None:
return None
return bool(self.testdriver_nodes)
@cached_property
def reftest_nodes(self):
"""List of ElementTree Elements corresponding to nodes representing a
......@@ -523,9 +535,10 @@ class SourceFile(object):
elif self.content_is_testharness:
rv = TestharnessTest.item_type, []
testdriver = self.has_testdriver
for variant in self.test_variants:
url = self.url + variant
rv[1].append(TestharnessTest(self, url, timeout=self.timeout))
rv[1].append(TestharnessTest(self, url, timeout=self.timeout, testdriver=testdriver))
elif self.content_is_ref_node:
rv = (RefTestNode.item_type,
......
......@@ -7,13 +7,16 @@ import sys
import manifest
from . import vcs
from .log import get_logger
from .download import download_from_github
here = os.path.dirname(__file__)
wpt_root = os.path.abspath(os.path.join(here, os.pardir, os.pardir))
logger = get_logger()
def update(tests_root, manifest, working_copy=False):
logger.info("Updating manifest")
tree = None
if not working_copy:
tree = vcs.Git.for_path(tests_root, manifest.url_base)
......@@ -29,7 +32,9 @@ def update_from_cli(**kwargs):
assert tests_root is not None
m = None
logger = get_logger()
if kwargs["download"]:
download_from_github(path, tests_root)
if not kwargs.get("rebuild", False):
try:
......@@ -37,8 +42,6 @@ def update_from_cli(**kwargs):
except manifest.ManifestVersionMismatch:
logger.info("Manifest version changed, rebuilding")
m = None
else:
logger.info("Updating manifest")
if m is None:
m = manifest.Manifest(kwargs["url_base"])
......@@ -69,6 +72,9 @@ def create_parser():
parser.add_argument(
"--url-base", action="store", default="/",
help="Base url to use as the mount point for tests in this manifest.")
parser.add_argument(
"--no-download", dest="download", action="store_false", default=True,
help="Never attempt to download the manifest.")
return parser
......
......@@ -2,6 +2,7 @@ import logging
import os
import platform
import re
import shutil
import stat
from abc import ABCMeta, abstractmethod
from ConfigParser import RawConfigParser
......@@ -70,9 +71,9 @@ class Firefox(Browser):
raise ValueError("Unable to construct a valid Firefox package name for current platform")
if platform == "linux":
bits = "-%s" % uname[-1]
bits = "-%s" % uname[4]
elif platform == "win":
bits = "64" if uname[-1] == "x86_64" else "32"
bits = "64" if uname[4] == "x86_64" else "32"
else:
bits = ""
......@@ -89,7 +90,7 @@ class Firefox(Browser):
raise ValueError("Unable to construct a valid Geckodriver package name for current platform")
if platform in ("linux", "win"):
bits = "64" if uname[-1] == "x86_64" else "32"
bits = "64" if uname[4] == "x86_64" else "32"
else:
bits = ""
......@@ -227,7 +228,7 @@ class Chrome(Browser):
raise ValueError("Unable to construct a valid Chrome package name for current platform")
if platform == "linux":
bits = "64" if uname[-1] == "x86_64" else "32"
bits = "64" if uname[4] == "x86_64" else "32"
elif platform == "mac":
bits = "64"
elif platform == "win":
......@@ -278,6 +279,85 @@ class Chrome(Browser):
sys.exit(1)
class Opera(Browser):
    """Opera-specific interface.

    Includes installation, webdriver installation, and wptrunner setup methods.
    """

    product = "opera"
    binary = "/usr/bin/opera"
    requirements = "requirements_opera.txt"

    def install(self, dest=None):
        # Installing the browser itself is not supported; only the driver
        # can be fetched (see install_webdriver).
        raise NotImplementedError

    def platform_string(self):
        """Return the platform+bits suffix used in operadriver archive names."""
        platform = {
            "Linux": "linux",
            "Windows": "win",
            "Darwin": "mac"
        }.get(uname[0])

        if platform is None:
            raise ValueError("Unable to construct a valid Opera package name for current platform")

        if platform == "linux":
            bits = "64" if uname[4] == "x86_64" else "32"
        elif platform == "mac":
            bits = "64"
        elif platform == "win":
            bits = "32"

        return "%s%s" % (platform, bits)

    def find_webdriver(self):
        return find_executable("operadriver")

    def install_webdriver(self, dest=None):
        """Install latest Webdriver."""
        if dest is None:
            # BUG FIX: the original used os.pwd, which does not exist and
            # raises AttributeError; use the current working directory.
            dest = os.getcwd()
        latest = get("https://api.github.com/repos/operasoftware/operachromiumdriver/releases/latest").json()["tag_name"]
        url = "https://github.com/operasoftware/operachromiumdriver/releases/download/%s/operadriver_%s.zip" % (latest,
                                                                                                               self.platform_string())
        unzip(get(url).raw, dest)

        operadriver_dir = os.path.join(dest, "operadriver_%s" % self.platform_string())
        shutil.move(os.path.join(operadriver_dir, "operadriver"), dest)
        shutil.rmtree(operadriver_dir)

        path = find_executable("operadriver")
        st = os.stat(path)
        # Ensure the extracted driver is executable.
        os.chmod(path, st.st_mode | stat.S_IEXEC)
        return path

    def version(self, root):
        """Retrieve the release version of the installed browser."""
        output = call(self.binary, "--version")
        return re.search(r"[0-9\.]+( [a-z]+)?$", output.strip()).group(0)

    def prepare_environment(self):
        # https://bugs.chromium.org/p/chromium/issues/detail?id=713947
        logger.debug("DBUS_SESSION_BUS_ADDRESS %s" % os.environ.get("DBUS_SESSION_BUS_ADDRESS"))
        if "DBUS_SESSION_BUS_ADDRESS" not in os.environ:
            if find_executable("dbus-launch"):
                logger.debug("Attempting to start dbus")
                dbus_conf = subprocess.check_output(["dbus-launch"])
                logger.debug(dbus_conf)

                # From dbus-launch(1):
                #
                # > When dbus-launch prints bus information to standard output,
                # > by default it is in a simple key-value pairs format.
                for line in dbus_conf.strip().split("\n"):
                    key, _, value = line.partition("=")
                    os.environ[key] = value
            else:
                logger.critical("dbus not running and can't be started")
                sys.exit(1)
class Edge(Browser):
"""Edge-specific interface.
......
......@@ -5,5 +5,6 @@
"help": "Get a list of files that have changed", "virtualenv": false},
"tests-affected": {"path": "testfiles.py", "script": "run_tests_affected", "parser": "get_parser_affected",
"help": "Get a list of tests affected by changes", "virtualenv": false},
"install": {"path": "install.py", "script": "run", "parser": "get_parser", "help": "Install browser components"}
"install": {"path": "install.py", "script": "run", "parser": "get_parser", "help": "Install browser components"},
"branch-point": {"path": "testfiles.py", "script": "display_branch_point", "parser": null, "help": "Print branch point from master", "virtualenv": false}
}
......@@ -64,6 +64,7 @@ def args_general(kwargs):
kwargs.set_if_none("tests_root", wpt_root)
kwargs.set_if_none("metadata_root", wpt_root)
kwargs.set_if_none("manifest_update", True)
kwargs.set_if_none("manifest_download", True)
if kwargs["ssl_type"] in (None, "pregenerated"):
cert_root = os.path.join(wpt_root, "tools", "certs")
......@@ -109,8 +110,8 @@ def check_environ(product):
for line in f:
line = line.split("#", 1)[0].strip()
parts = line.split()
if len(parts) == 2:
host = parts[1]
hosts = parts[1:]
for host in hosts:
missing_hosts.discard(host)
if missing_hosts:
raise WptrunError("""Missing hosts file configuration. Expected entries like:
......@@ -169,32 +170,12 @@ Install Firefox or use --binary to set the binary path""")
if certutil is None:
# Can't download this for now because it's missing the libnss3 library
raise WptrunError("""Can't find certutil.
This must be installed using your OS package manager or directly e.g.
Debian/Ubuntu:
sudo apt install libnss3-tools
macOS/Homebrew:
brew install nss
Others:
Download the firefox archive and common.tests.zip archive for your platform
from https://archive.mozilla.org/pub/firefox/nightly/latest-mozilla-central/
Then extract certutil[.exe] from the tests.zip package and
libnss3[.so|.dll|.dynlib] and but the former on your path and the latter on
your library path.
""")
logger.info("""Can't find certutil, certificates will not be checked.
Consider installing certutil via your OS package manager or directly.""")
else:
print("Using certutil %s" % certutil)
if certutil is not None:
kwargs["certutil_binary"] = certutil
else:
print("Unable to find or install certutil, setting ssl-type to none")
kwargs["ssl_type"] = "none"
kwargs["certutil_binary"] = certutil
if kwargs["webdriver_binary"] is None and "wdspec" in kwargs["test_types"]:
webdriver_binary = self.browser.find_webdriver()
......@@ -243,6 +224,29 @@ class Chrome(BrowserSetup):
raise WptrunError("Unable to locate or install chromedriver binary")
class Opera(BrowserSetup):
    """Setup logic for running wpt against Opera."""
    name = "opera"
    browser_cls = browser.Opera

    def setup_kwargs(self, kwargs):
        # Nothing to do when the caller already supplied an operadriver path.
        if kwargs["webdriver_binary"] is not None:
            return

        found = self.browser.find_webdriver()
        if found is not None:
            print("Using webdriver binary %s" % found)
        elif self.prompt_install("operadriver"):
            print("Downloading operadriver")
            found = self.browser.install_webdriver(dest=self.venv.bin_path)

        if found:
            kwargs["webdriver_binary"] = found
        else:
            raise WptrunError("Unable to locate or install operadriver binary")
class Edge(BrowserSetup):
name = "edge"
browser_cls = browser.Edge
......@@ -321,6 +325,7 @@ product_setup = {
"ie": InternetExplorer,
"servo": Servo,
"sauce": Sauce,
"opera": Opera,
}
......@@ -415,5 +420,5 @@ if __name__ == "__main__":
from tools import localpaths
try:
main()
except:
except Exception:
pdb.post_mortem()
......@@ -2,6 +2,7 @@ import argparse
import itertools
import logging
import os
import re
import subprocess
import sys
......@@ -27,9 +28,17 @@ def get_git_cmd(repo_path):
return git
def display_branch_point():
    # CLI entry point for the "branch-point" command: prints the commit
    # returned by branch_point() to stdout.
    print(branch_point())
def branch_point():
git = get_git_cmd(wpt_root)
if os.environ.get("TRAVIS_PULL_REQUEST", "false") != "false":
if (os.environ.get("TRAVIS_PULL_REQUEST", "false") == "false" and
os.environ.get("TRAVIS_BRANCH") == "master"):
# For builds on the master branch just return the HEAD commit
return git("rev-parse", "HEAD")
elif os.environ.get("TRAVIS_PULL_REQUEST", "false") != "false":
# This is a PR, so the base branch is in TRAVIS_BRANCH
travis_branch = os.environ.get("TRAVIS_BRANCH")
assert travis_branch, "TRAVIS_BRANCH environment variable is defined"
......@@ -71,13 +80,21 @@ def branch_point():
return branch_point
def files_changed(revish, ignore_dirs=None, include_uncommitted=False, include_new=False):
"""Get and return files changed since current branch diverged from master,
excluding those that are located within any directory specifed by
`ignore_changes`."""
if ignore_dirs is None:
ignore_dirs = []
def compile_ignore_rule(rule):
    """Compile an ignore rule into a regexp over '/'-separated paths.

    Within each path segment a trailing "*" matches anything except a
    path separator, and a trailing "**" matches anything including
    separators; all other text is matched literally.
    """
    def translate(segment):
        if segment.endswith("**"):
            return re.escape(segment[:-2]) + ".*"
        if segment.endswith("*"):
            return re.escape(segment[:-1]) + "[^/]*"
        return re.escape(segment)

    segments = rule.replace(os.path.sep, "/").split("/")
    return re.compile("^%s$" % "/".join(translate(s) for s in segments))
def repo_files_changed(revish, include_uncommitted=False, include_new=False):
git = get_git_cmd(wpt_root)
files = git("diff", "--name-only", "-z", revish).split("\0")
assert not files[-1]
......@@ -99,27 +116,48 @@ def files_changed(revish, ignore_dirs=None, include_uncommitted=False, include_n
for filename in filenames:
files.add(os.path.join(dirpath, filename))
if not files:
return [], []
return files
def exclude_ignored(files, ignore_rules):
    """Partition *files* into (changed, ignored) lists of absolute paths.

    A file is ignored when its '/'-separated repo-relative path matches
    any rule in *ignore_rules* (see compile_ignore_rule); a None rule set
    is treated as empty. Output is sorted by relative path.
    """
    compiled = [compile_ignore_rule(rule) for rule in (ignore_rules or [])]

    changed = []
    ignored = []
    for rel_path in sorted(files):
        full_path = os.path.join(wpt_root, rel_path)
        as_url = rel_path.replace(os.path.sep, "/")
        if any(rule.match(as_url) for rule in compiled):
            ignored.append(full_path)
        else:
            changed.append(full_path)

    return changed, ignored
def files_changed(revish, ignore_rules=None, include_uncommitted=False, include_new=False):
    """Get and return files changed since current branch diverged from master,
    excluding those that are located within any path matched by
    `ignore_rules`.

    :param revish: commit range to diff, passed through to repo_files_changed
    :param ignore_rules: iterable of rule strings (see compile_ignore_rule)
    :param include_uncommitted: also include modified/staged worktree files
    :param include_new: also include files not under version control
    :returns: tuple of (changed, ignored) lists of paths
    """
    files = repo_files_changed(revish,
                               include_uncommitted=include_uncommitted,
                               include_new=include_new)
    if not files:
        return [], []

    return exclude_ignored(files, ignore_rules)
def _in_repo_root(full_path):
    """Return True when *full_path* sits directly in the repository root."""
    relative = os.path.relpath(full_path, wpt_root)
    # Fewer than two components means there is no subdirectory prefix.
    return len(relative.split(os.sep)) < 2
def _init_manifest_cache():
c = {}
......@@ -138,6 +176,7 @@ def _init_manifest_cache():
return c[manifest_path]
return load
load_manifest = _init_manifest_cache()
......@@ -163,6 +202,7 @@ def affected_testfiles(files_changed, skip_tests, manifest_path=None):
tests_changed = set(item for item in files_changed if item in test_files)
nontest_changed_paths = set()
rewrites = {"/resources/webidl2/lib/webidl2.js": "/resources/WebIDLParser.js"}
for full_path in nontests_changed:
rel_path = os.path.relpath(full_path, wpt_root)
path_components = rel_path.split(os.sep)
......@@ -170,6 +210,9 @@ def affected_testfiles(files_changed, skip_tests, manifest_path=None):
if top_level_subdir in skip_tests:
continue
repo_path = "/" + os.path.relpath(full_path, wpt_root).replace(os.path.sep, "/")
if repo_path in rewrites:
repo_path = rewrites[repo_path]
full_path = os.path.join(wpt_root, repo_path[1:].replace("/", os.path.sep))
nontest_changed_paths.add((full_path, repo_path))
def affected_by_wdspec(test):
......@@ -221,11 +264,17 @@ def affected_testfiles(files_changed, skip_tests, manifest_path=None):
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument("revish", default=None, help="Commits to consider. Defaults to the commits on the current branch", nargs="?")
parser.add_argument("--ignore-dirs", nargs="*", type=set, default=set(["resources"]),
help="Directories to exclude from the list of changes")
parser.add_argument("revish", default=None, help="Commits to consider. Defaults to the "
"commits on the current branch", nargs="?")
parser.add_argument("--ignore-rules", nargs="*", type=set,
default=set(["resources/testharness*"]),
help="Rules for paths to exclude from lists of changes. Rules are paths "
"relative to the test root, with * before a separator or the end matching "
"anything other than a path separator and ** in that position matching "
"anything")
parser.add_argument("--modified", action="store_true",
help="Include files under version control that have been modified or staged")
help="Include files under version control that have been "
"modified or staged")
parser.add_argument("--new", action="store_true",
help="Include files in the worktree that are not in version control")
parser.add_argument("--show-type", action="store_true",
......@@ -242,6 +291,7 @@ def get_parser_affected():
help="Directory that will contain MANIFEST.json")
return parser
def get_revish(**kwargs):
revish = kwargs["revish"]
if kwargs["revish"] is None:
......@@ -251,7 +301,7 @@ def get_revish(**kwargs):
def run_changed_files(**kwargs):
revish = get_revish(**kwargs)
changed, _ = files_changed(revish, kwargs["ignore_dirs"],
changed, _ = files_changed(revish, kwargs["ignore_rules"],
include_uncommitted=kwargs["modified"],
include_new=kwargs["new"])
for item in sorted(changed):
......@@ -260,7 +310,7 @@ def run_changed_files(**kwargs):
def run_tests_affected(**kwargs):
revish = get_revish(**kwargs)
changed, _ = files_changed(revish, kwargs["ignore_dirs"],
changed, _ = files_changed(revish, kwargs["ignore_rules"],
include_uncommitted=kwargs["modified"],
include_new=kwargs["new"])
manifest_path = os.path.join(kwargs["metadata_root"], "MANIFEST.json")
......
......@@ -10,11 +10,17 @@ logger = logging.getLogger(__name__)
class Kwargs(dict):
def set_if_none(self, name, value, err_fn=None, desc=None, extra_cond=None):
def set_if_none(self,
name, # type: str
value, # type: Any
err_fn=None, # type: (Kwargs, str) -> Any
desc=None, # type: str
extra_cond=None # type: (Kwargs) -> bool
):
if desc is None:
desc = name
if self[name] is None:
if name not in self or self[name] is None:
if extra_cond is not None and not extra_cond(self):
return
if callable(value):
......
import argparse
import os
import json
import logging
import os
import sys
from tools import localpaths
......@@ -89,6 +90,8 @@ def setup_virtualenv(path, props):
def main(prog=None, argv=None):
logging.basicConfig(level=logging.INFO)
if prog is None:
prog = sys.argv[0]
if argv is None:
......
......@@ -351,17 +351,15 @@ class ErrorHandler(object):
response.set_error(self.status)
class StaticHandler(object):
def __init__(self, path, format_args, content_type, **headers):
class StringHandler(object):
def __init__(self, data, content_type, **headers):
"""Hander that reads a file from a path and substitutes some fixed data
:param path: Path to the template file to use
:param format_args: Dictionary of values to substitute into the template file
:param data: String to use
:param content_type: Content type header to server the response with
:param headers: List of headers to send with responses"""
with open(path) as f:
self.data = f.read() % format_args
self.data = data
self.resp_headers = [("Content-Type", content_type)]
for k, v in headers.iteritems():
......@@ -375,3 +373,18 @@ class StaticHandler(object):
def __call__(self, request, response):
rv = self.handler(request, response)
return rv
class StaticHandler(StringHandler):
    def __init__(self, path, format_args, content_type, **headers):
        """Handler that reads a file from a path and substitutes some fixed data

        :param path: Path to the template file to use
        :param format_args: Dictionary of values to substitute into the template file
        :param content_type: Content type header to serve the response with
        :param headers: List of headers to send with responses"""
        with open(path) as f:
            data = f.read() % format_args

        # Don't "return" the result of the base __init__: __init__ must
        # produce None, and the explicit return obscured that.
        super(StaticHandler, self).__init__(data, content_type, **headers)
......@@ -231,6 +231,13 @@ def trickle(request, response, delays):
content = resolve_content(response)
offset = [0]
if not ("Cache-Control" in response.headers or
"Pragma" in response.headers or
"Expires" in response.headers):
response.headers.set("Cache-Control", "no-cache, no-store, must-revalidate")
response.headers.set("Pragma", "no-cache")
response.headers.set("Expires", "0")
def add_content(delays, repeat=False):
for i, (item_type, value) in enumerate(delays):
if item_type == "bytes":
......
......@@ -418,7 +418,8 @@ class WebTestHttpd(object):
_host, self.port = self.httpd.socket.getsockname()
except Exception:
self.logger.error('Init failed! You may need to modify your hosts file. Refer to README.md.')
self.logger.error("Failed to start HTTP server. "
"You may need to edit /etc/hosts or similar, see README.md.")
raise
def start(self, block=False):
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment