Commit 79310a72 authored by Robert Ma, committed by Commit Bot

Roll in new version of WPT tools

Most notably we'd like to include
https://github.com/w3c/web-platform-tests/pull/10599,
which added support for test variants in .any.js tests.
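
For example (illustrative only; not a file touched by this CL), a test
such as foo.any.js can now declare

  // META: variant=
  // META: variant=?wss

and the manifest will generate foo.any.html, foo.any.html?wss,
foo.any.worker.html and foo.any.worker.html?wss.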

Other changes in this roll:
* Fixed paths in chromium.patch after the Blink directory move.
* Added the new wptserve/config.py to the whitelist.

This roll also exposed an upstream issue; filed issue 836377 to
track it (a fix is under way upstream).

Bug: 836276, 829697, 836377
Change-Id: I5fc0af49c0149871222eaacd1bf5473b187c38a5
Reviewed-on: https://chromium-review.googlesource.com/1026165
Commit-Queue: Robert Ma <robertma@chromium.org>
Reviewed-by: Quinten Yearsley <qyearsley@chromium.org>
Cr-Commit-Position: refs/heads/master@{#553324}
parent 933b81ec
@@ -2302,6 +2302,10 @@ crbug.com/832071 virtual/navigation-mojo-response/external/wpt/service-workers/s
crbug.com/833215 [ Mac10.12 Mac10.13 Retina ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Timeout ]
crbug.com/833215 [ Linux ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Failure ]
# TODO(robertma): Remove these once https://github.com/w3c/web-platform-tests/pull/10613 is imported.
crbug.com/836377 external/wpt/service-workers/service-worker/worker-interception-redirect.https.html [ Timeout ]
crbug.com/836377 virtual/outofblink-cors/external/wpt/service-workers/service-worker/worker-interception-redirect.https.html [ Timeout ]
# ====== New tests from wpt-importer added here ======
crbug.com/626703 [ Mac10.12 ] external/wpt/html/syntax/parsing/html5lib_template.html?run_type=uri [ Timeout ]
crbug.com/626703 external/wpt/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-classic.sub.html [ Skip ]
...
@@ -32,7 +32,7 @@ Local Modifications: None
Name: web-platform-tests - Test Suites for Web Platform specifications
Short Name: wpt
URL: https://github.com/w3c/web-platform-tests/
-Version: d00f7bab7aecadbc2fc9a47172d090b4ba7e2ef2
+Version: 507af0c03617714bfd4134c54da4d534906ee52b
License: LICENSES FOR W3C TEST SUITES (http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html)
License File: wpt/wpt/LICENSE.md
Security Critical: no
...
@@ -109,6 +109,7 @@
./tools/wptserve/README.md
./tools/wptserve/wptserve
./tools/wptserve/wptserve/__init__.py
./tools/wptserve/wptserve/config.py
./tools/wptserve/wptserve/constants.py
./tools/wptserve/wptserve/handlers.py
./tools/wptserve/wptserve/logger.py
...
@@ -9,7 +9,7 @@ cd $DIR
TARGET_DIR=$DIR/wpt
REMOTE_REPO="https://chromium.googlesource.com/external/w3c/web-platform-tests.git"
-WPT_HEAD=d00f7bab7aecadbc2fc9a47172d090b4ba7e2ef2
+WPT_HEAD=507af0c03617714bfd4134c54da4d534906ee52b
function clone {
# Remove existing repo if already exists.
...
-diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
+diff --git a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
---- a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
+--- a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
-+++ b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py
++++ b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
@@ -752,6 +752,7 @@ def create_parser():
help="Run CSS testsuite specific lints")
parser.add_argument("--repo-root", help="The WPT directory. Use this"
@@ -33,9 +33,9 @@ diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/
output_errors = {"json": output_errors_json,
"markdown": output_errors_markdown,
"normal": output_errors_text}[output_format]
-diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths
+diff --git a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
---- a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths
+--- a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
-+++ b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths
++++ b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
@@ -1,4 +1,3 @@
-tools/ci/
tools/lint/
...
@@ -18,7 +18,7 @@ from .. import localpaths
from ..gitignore.gitignore import PathFilter
from ..wpt import testfiles
-from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars
+from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants, get_default_any_variants
from six import binary_type, iteritems, itervalues
from six.moves import range
from six.moves.urllib.parse import urlsplit, urljoin
@@ -32,7 +32,10 @@ def setup_logging(prefix=False):
if logger is None:
logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
handler = logging.StreamHandler(sys.stdout)
-logger.addHandler(handler)
+# Only add a handler if the parent logger is missing a handler
if logger.parent and len(logger.parent.handlers) == 0:
handler = logging.StreamHandler(sys.stdout)
logger.addHandler(handler)
if prefix:
format = logging.BASIC_FORMAT
else:
@@ -613,6 +616,31 @@ broken_js_metadata = re.compile(b"//\s*META:")
broken_python_metadata = re.compile(b"#\s*META:")
def check_global_metadata(value):
global_values = {item.strip() for item in value.split(b",") if item.strip()}
included_variants = set.union(get_default_any_variants(),
*(get_any_variants(v) for v in global_values if not v.startswith(b"!")))
for global_value in global_values:
if global_value.startswith(b"!"):
excluded_value = global_value[1:]
if not get_any_variants(excluded_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
elif excluded_value in global_values:
yield ("BROKEN-GLOBAL-METADATA", "Cannot specify both %s and %s" % (global_value, excluded_value))
else:
excluded_variants = get_any_variants(excluded_value)
if not (excluded_variants & included_variants):
yield ("BROKEN-GLOBAL-METADATA", "Cannot exclude %s if it is not included" % (excluded_value,))
else:
if not get_any_variants(global_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
def check_script_metadata(repo_root, path, f):
if path.endswith((".worker.js", ".any.js")):
meta_re = js_meta_re
@@ -631,7 +659,9 @@ def check_script_metadata(repo_root, path, f):
m = meta_re.match(line)
if m:
key, value = m.groups()
-if key == b"timeout":
+if key == b"global":
+    errors.extend((kind, message, path, idx + 1) for (kind, message) in check_global_metadata(value))
+elif key == b"timeout":
if value != b"long":
errors.append(("UNKNOWN-TIMEOUT-METADATA", "Unexpected value for timeout metadata", path, idx + 1))
elif key == b"script":
...
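
To see what the new lint rule catches, a minimal sketch (the metadata value is made up; this is an illustration, not part of the CL):

    # check_global_metadata is the generator added to lint.py above.
    errors = list(check_global_metadata(b"window,!foo"))
    # -> [("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")]
    # "window" is a known keyword and passes; "!foo" excludes an unknown variant.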
@@ -49,6 +49,87 @@ def read_script_metadata(f, regexp):
yield (m.groups()[0], m.groups()[1])
_any_variants = {
b"default": {"longhand": {b"window", b"dedicatedworker"}},
b"window": {"suffix": ".any.html"},
b"serviceworker": {"force_https": True},
b"sharedworker": {},
b"dedicatedworker": {"suffix": ".any.worker.html"},
b"worker": {"longhand": {b"dedicatedworker", b"sharedworker", b"serviceworker"}}
}
def get_any_variants(item):
"""
Returns a set of variants (bytestrings) defined by the given keyword.
"""
assert isinstance(item, binary_type), item
assert not item.startswith(b"!"), item
variant = _any_variants.get(item, None)
if variant is None:
return set()
return variant.get("longhand", {item})
def get_default_any_variants():
"""
Returns a set of variants (bytestrings) that will be used by default.
"""
return set(_any_variants[b"default"]["longhand"])
def parse_variants(value):
"""
Returns a set of variants (bytestrings) defined by a comma-separated value.
"""
assert isinstance(value, binary_type), value
globals = get_default_any_variants()
for item in value.split(b","):
item = item.strip()
if item.startswith(b"!"):
globals -= get_any_variants(item[1:])
else:
globals |= get_any_variants(item)
return globals
def global_suffixes(value):
"""
Yields the relevant filename suffixes (strings) for the variants defined by
the given comma-separated value.
"""
assert isinstance(value, binary_type), value
rv = set()
global_types = parse_variants(value)
for global_type in global_types:
variant = _any_variants[global_type]
suffix = variant.get("suffix", ".any.%s.html" % global_type.decode("utf-8"))
if variant.get("force_https", False):
suffix = ".https" + suffix
rv.add(suffix)
return rv
def global_variant_url(url, suffix):
"""
Returns a url created from the given url and suffix (all strings).
"""
url = url.replace(".any.", ".")
# If the url must be loaded over https, ensure that it will have
# the form .https.any.js
if ".https." in url and suffix.startswith(".https."):
url = url.replace(".https.", ".")
return replace_end(url, ".js", suffix)
class SourceFile(object):
parsers = {"html":lambda x:html5lib.parse(x, treebuilder="etree"),
"xhtml":lambda x:ElementTree.parse(x, XMLParser.XMLParser()),
@@ -225,8 +306,9 @@ class SourceFile(object):
# wdspec tests are in subdirectories of /webdriver excluding __init__.py
# files.
rel_dir_tree = self.rel_path.split(os.path.sep)
-return (rel_dir_tree[0] == "webdriver" and
-        len(rel_dir_tree) > 1 and
+return (((rel_dir_tree[0] == "webdriver" and len(rel_dir_tree) > 1) or
+        (rel_dir_tree[:2] == ["infrastructure", "webdriver"] and
+        len(rel_dir_tree) > 2)) and
self.filename not in ("__init__.py", "conftest.py") and
fnmatch(self.filename, wd_pattern))
@@ -369,11 +451,18 @@ class SourceFile(object):
@cached_property
def test_variants(self):
rv = []
-for element in self.variant_nodes:
-    if "content" in element.attrib:
-        variant = element.attrib["content"]
-        assert variant == "" or variant[0] in ["#", "?"]
-        rv.append(variant)
+if self.ext == ".js":
+    for (key, value) in self.script_metadata:
+        if key == b"variant":
+            rv.append(value.decode("utf-8"))
+else:
+    for element in self.variant_nodes:
+        if "content" in element.attrib:
+            variant = element.attrib["content"]
+            rv.append(variant)
+for variant in rv:
+    assert variant == "" or variant[0] in ["#", "?"], variant
if not rv:
rv = [""]
@@ -509,22 +598,34 @@ class SourceFile(object):
rv = VisualTest.item_type, [VisualTest(self, self.url)]
elif self.name_is_multi_global:
-    rv = TestharnessTest.item_type, [
-        TestharnessTest(self, replace_end(self.url, ".any.js", ".any.html"),
-                        timeout=self.timeout),
-        TestharnessTest(self, replace_end(self.url, ".any.js", ".any.worker.html"),
-                        timeout=self.timeout),
-    ]
+    globals = b""
+    for (key, value) in self.script_metadata:
+        if key == b"global":
+            globals = value
+            break
+    tests = [
+        TestharnessTest(self, global_variant_url(self.url, suffix) + variant, timeout=self.timeout)
+        for suffix in sorted(global_suffixes(globals))
+        for variant in self.test_variants
+    ]
+    rv = TestharnessTest.item_type, tests
elif self.name_is_worker:
-    rv = (TestharnessTest.item_type,
-          [TestharnessTest(self, replace_end(self.url, ".worker.js", ".worker.html"),
-                           timeout=self.timeout)])
+    test_url = replace_end(self.url, ".worker.js", ".worker.html")
+    tests = [
+        TestharnessTest(self, test_url + variant, timeout=self.timeout)
+        for variant in self.test_variants
+    ]
+    rv = TestharnessTest.item_type, tests
elif self.name_is_window:
-    rv = (TestharnessTest.item_type,
-          [TestharnessTest(self, replace_end(self.url, ".window.js", ".window.html"),
-                           timeout=self.timeout)])
+    test_url = replace_end(self.url, ".window.js", ".window.html")
+    tests = [
+        TestharnessTest(self, test_url + variant, timeout=self.timeout)
+        for variant in self.test_variants
+    ]
+    rv = TestharnessTest.item_type, tests
elif self.name_is_webdriver:
rv = WebdriverSpecTest.item_type, [WebdriverSpecTest(self, self.url,
...
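
To make the expansion concrete, a rough sketch of what the new helpers above compute (the path is hypothetical; this is an illustration, not part of the CL):

    sorted(global_suffixes(b"window,worker"))
    # -> ['.any.html', '.any.sharedworker.html', '.any.worker.html',
    #     '.https.any.serviceworker.html']
    [global_variant_url("/foo/bar.any.js", s) for s in sorted(global_suffixes(b"window,worker"))]
    # -> ['/foo/bar.any.html', '/foo/bar.any.sharedworker.html',
    #     '/foo/bar.any.worker.html', '/foo/bar.https.any.serviceworker.html']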
@@ -136,7 +136,7 @@ certs = $dir
new_certs_dir = $certs
crl_dir = $dir%(sep)scrl
database = $dir%(sep)sindex.txt
-private_key = $dir%(sep)scakey.pem
+private_key = $dir%(sep)scacert.key
certificate = $dir%(sep)scacert.pem
serial = $dir%(sep)sserial
crldir = $dir%(sep)scrl
@@ -294,7 +294,7 @@ class OpenSSLEnvironment(object):
return self._ca_cert_path
def _load_ca_cert(self):
-key_path = self.path("cakey.pem")
+key_path = self.path("cacert.key")
cert_path = self.path("cacert.pem")
if self.check_key_cert(key_path, cert_path, None):
@@ -327,7 +327,7 @@ class OpenSSLEnvironment(object):
path = self.path
self.logger.info("Generating new CA in %s" % self.base_path)
-key_path = path("cakey.pem")
+key_path = path("cacert.key")
req_path = path("careq.pem")
cert_path = path("cacert.pem")
...
@@ -179,22 +179,70 @@ class Firefox(Browser):
def find_webdriver(self):
return find_executable("geckodriver")
-def install_prefs(self, dest=None):
+def get_version_number(self, binary):
version_re = re.compile("Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?")
proc = subprocess.Popen([binary, "--version"], stdout=subprocess.PIPE)
stdout, _ = proc.communicate()
stdout.strip()
m = version_re.match(stdout)
if not m:
return None, "nightly"
version, status = m.groups()
channel = {"a": "nightly", "b": "beta"}
return version, channel.get(status, "stable")
def get_prefs_url(self, version, channel):
if channel == "stable":
repo = "https://hg.mozilla.org/releases/mozilla-release"
tag = "FIREFOX_%s_RELEASE" % version.replace(".", "_")
else:
repo = "https://hg.mozilla.org/mozilla-central"
if channel == "beta":
tag = "FIREFOX_%s_BETA" % version.split(".", 1)[0]
else:
# Always use tip as the tag for nightly; this isn't quite right
# but to do better we need the actual build revision, which we
# can get if we have an application.ini file
tag = "tip"
return "%s/raw-file/%s/testing/profiles/prefs_general.js" % (repo, tag)
def install_prefs(self, binary, dest=None):
version, channel = self.get_version_number(binary)
if dest is None:
dest = os.pwd
dest = os.path.join(dest, "profiles")
if not os.path.exists(dest):
os.makedirs(dest)
-prefs_path = os.path.join(dest, "prefs_general.js")
-now = datetime.now()
-if (not os.path.exists(prefs_path) or
-    (datetime.fromtimestamp(os.stat(prefs_path).st_mtime) <
-     now - timedelta(days=2))):
-    with open(prefs_path, "wb") as f:
-        resp = get("https://hg.mozilla.org/mozilla-central/raw-file/tip/testing/profiles/prefs_general.js")
-        f.write(resp.content)
+prefs_file = os.path.join(dest, "prefs_general.js")
+cache_file = os.path.join(dest,
+                          "%s-%s.cache" % (version, channel)
+                          if channel != "nightly"
+                          else "nightly.cache")
+have_cache = False
+if os.path.exists(cache_file):
+    if channel != "nightly":
+        have_cache = True
+    else:
+        now = datetime.now()
+        have_cache = (datetime.fromtimestamp(os.stat(cache_file).st_mtime) >
+                      now - timedelta(days=1))
+# If we don't have a recent download, grab the url
+if not have_cache:
+    url = self.get_prefs_url(version, channel)
+    with open(cache_file, "wb") as f:
+        print("Installing test prefs from %s" % url)
+        resp = get(url)
+        f.write(resp.content)
+else:
+    print("Using cached test prefs from %s" % cache_file)
+shutil.copyfile(cache_file, prefs_file)
return dest
...
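
As a sanity check on the new prefs caching above, a hypothetical walk-through (the version string is invented; this is an illustration, not part of the CL):

    # "Mozilla Firefox 60.0.1" parses to ("60.0.1", "stable"), so get_prefs_url("60.0.1", "stable")
    # returns "https://hg.mozilla.org/releases/mozilla-release/raw-file/FIREFOX_60_0_1_RELEASE/testing/profiles/prefs_general.js";
    # the download is cached as profiles/60.0.1-stable.cache and then copied to profiles/prefs_general.js.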
@@ -98,7 +98,6 @@ def check_environ(product):
if product not in ("firefox", "servo"):
config = serve.load_config(os.path.join(wpt_root, "config.default.json"),
os.path.join(wpt_root, "config.json"))
-config = serve.normalise_config(config, {})
expected_hosts = (set(config["domains"].itervalues()) ^
set(config["not_domains"].itervalues()))
missing_hosts = set(expected_hosts)
@@ -202,8 +201,7 @@ Consider installing certutil via your OS package manager or directly.""")
kwargs["test_types"].remove("wdspec")
if kwargs["prefs_root"] is None:
-    print("Downloading gecko prefs")
-    prefs_root = self.browser.install_prefs(self.venv.path)
+    prefs_root = self.browser.install_prefs(kwargs["binary"], self.venv.path)
kwargs["prefs_root"] = prefs_root
...
import json
import logging
import os
from collections import defaultdict, Mapping
import sslutils
from localpaths import repo_root
from .utils import get_port
_renamed_props = {
"host": "browser_host",
"bind_hostname": "bind_address",
"external_host": "server_host",
"host_ip": "server_host",
}
def _merge_dict(base_dict, override_dict):
rv = base_dict.copy()
for key, value in base_dict.iteritems():
if key in override_dict:
if isinstance(value, dict):
rv[key] = _merge_dict(value, override_dict[key])
else:
rv[key] = override_dict[key]
return rv
class Config(Mapping):
"""wptserve config
Inherits from Mapping for backwards compatibility with the old dict-based config"""
with open(os.path.join(repo_root, "config.default.json"), "rb") as _fp:
_default = json.load(_fp)
def __init__(self,
logger=None,
subdomains=set(),
not_subdomains=set(),
**kwargs):
self.log_level = kwargs.get("log_level", "DEBUG")
if logger is None:
self._logger_name = "web-platform-tests"
else:
level_name = logging.getLevelName(logger.level)
if level_name != "NOTSET":
self.log_level = level_name
self._logger_name = logger.name
for k, v in self._default.iteritems():
setattr(self, k, kwargs.pop(k, v))
self.subdomains = subdomains
self.not_subdomains = not_subdomains
for k, new_k in _renamed_props.iteritems():
if k in kwargs:
self.logger.warning(
"%s in config is deprecated; use %s instead" % (
k,
new_k
)
)
setattr(self, new_k, kwargs.pop(k))
self.override_ssl_env = kwargs.pop("override_ssl_env", None)
if kwargs:
raise TypeError("__init__() got unexpected keyword arguments %r" % (tuple(kwargs),))
def __getitem__(self, k):
try:
return getattr(self, k)
except AttributeError:
raise KeyError(k)
def __iter__(self):
return iter([x for x in dir(self) if not x.startswith("_")])
def __len__(self):
return len([x for x in dir(self) if not x.startswith("_")])
def update(self, override):
"""Load an overrides dict to override config values"""
override = override.copy()
for k in self._default:
if k in override:
self._set_override(k, override.pop(k))
for k, new_k in _renamed_props.iteritems():
if k in override:
self.logger.warning(
"%s in config is deprecated; use %s instead" % (
k,
new_k
)
)
self._set_override(new_k, override.pop(k))
if override:
k = next(iter(override))
raise KeyError("unknown config override '%s'" % k)
def _set_override(self, k, v):
old_v = getattr(self, k)
if isinstance(old_v, dict):
setattr(self, k, _merge_dict(old_v, v))
else:
setattr(self, k, v)
@property
def ports(self):
try:
old_ports = self._computed_ports
except AttributeError:
old_ports = {}
self._computed_ports = defaultdict(list)
for scheme, ports in self._ports.iteritems():
for i, port in enumerate(ports):
if scheme in ["wss", "https"] and not self.ssl_env.ssl_enabled:
port = None
if port == "auto":
try:
port = old_ports[scheme][i]
except (KeyError, IndexError):
port = get_port(self.server_host)
else:
port = port
self._computed_ports[scheme].append(port)
return self._computed_ports
@ports.setter
def ports(self, v):
self._ports = v
@property
def doc_root(self):
return self._doc_root if self._doc_root is not None else repo_root
@doc_root.setter
def doc_root(self, v):
self._doc_root = v
@property
def ws_doc_root(self):
if self._ws_doc_root is not None:
return self._ws_doc_root
else:
return os.path.join(self.doc_root, "websockets", "handlers")
@ws_doc_root.setter
def ws_doc_root(self, v):
self._ws_doc_root = v
@property
def server_host(self):
return self._server_host if self._server_host is not None else self.browser_host
@server_host.setter
def server_host(self, v):
self._server_host = v
@property
def domains(self):
assert self.browser_host.encode("idna") == self.browser_host
domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
for subdomain in self.subdomains}
domains[""] = self.browser_host
return domains
@property
def not_domains(self):
assert self.browser_host.encode("idna") == self.browser_host
domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
for subdomain in self.not_subdomains}
return domains
@property
def all_domains(self):
domains = self.domains.copy()
domains.update(self.not_domains)
return domains
@property
def ssl_env(self):
try:
if self.override_ssl_env is not None:
return self.override_ssl_env
except AttributeError:
pass
implementation_type = self.ssl["type"]
try:
cls = sslutils.environments[implementation_type]
except KeyError:
raise ValueError("%s is not a vaid ssl type." % implementation_type)
kwargs = self.ssl.get(implementation_type, {}).copy()
return cls(self.logger, **kwargs)
@property
def paths(self):
return {"doc_root": self.doc_root,
"ws_doc_root": self.ws_doc_root}
@property
def ssl_config(self):
key_path, cert_path = self.ssl_env.host_cert_path(self.domains.itervalues())
return {"key_path": key_path,
"cert_path": cert_path,
"encrypt_after_connect": self.ssl["encrypt_after_connect"]}
@property
def log_level(self):
return getattr(logging, self._log_level)
@log_level.setter
def log_level(self, value):
self._log_level = value.upper()
@property
def logger(self):
logger = logging.getLogger(self._logger_name)
logger.setLevel(self.log_level)
return logger
def as_dict(self):
rv = {
"domains": list(self.domains),
"sundomains": list(self.subdomains),
}
for item in self._default.iterkeys():
rv[item] = getattr(self, item)
return rv
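
A quick sketch of how the new Config object is meant to be used (the override values are invented, and this assumes config.default.json defines browser_host, as it does upstream; illustration only, not part of the CL):

    config = Config(subdomains={"www"})
    assert config["browser_host"] == config.browser_host   # Mapping-style access still works
    config.update({"host": "example.test"})   # deprecated key: warned about, mapped to browser_host
    assert config.browser_host == "example.test"
    assert config.domains["www"] == "www.example.test"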
import cgi
import json
import os
import sys
import traceback
from six.moves.urllib.parse import parse_qs, quote, unquote, urljoin
@@ -231,8 +232,11 @@ class PythonScriptHandler(object):
def __call__(self, request, response):
path = filesystem_path(self.base_path, request, self.url_base)
sys_path = sys.path[:]
sys_modules = sys.modules.copy()
try:
environ = {"__file__": path}
sys.path.insert(0, os.path.dirname(path))
execfile(path, environ, environ)
if "main" in environ:
handler = FunctionHandler(environ["main"])
@@ -242,6 +246,10 @@ class PythonScriptHandler(object):
raise HTTPException(500, "No main function in script %s" % path)
except IOError:
raise HTTPException(404)
finally:
sys.path = sys_path
sys.modules = sys_modules
python_script_handler = PythonScriptHandler()
@@ -252,6 +260,8 @@ class FunctionHandler(object):
def __call__(self, request, response):
try:
rv = self.func(request, response)
except HTTPException:
raise
except Exception:
msg = traceback.format_exc()
raise HTTPException(500, message=msg)
...
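
The sys.path/sys.modules save-and-restore above lets a Python handler import modules that live next to it without leaking state into later requests. A hypothetical handler (the helpers module and build_body function are made up for illustration):

    # /handlers/echo.py
    import helpers  # resolvable because os.path.dirname(path) is prepended to sys.path

    def main(request, response):
        return helpers.build_body(request)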
from cgi import escape
import gzip as gzip_module
import hashlib
import os
import re
import time
import types
@@ -277,6 +279,10 @@ def slice(request, response, start, end=None):
class ReplacementTokenizer(object):
def arguments(self, token):
unwrapped = token[1:-1]
return ("arguments", re.split(r",\s*", token[1:-1]) if unwrapped else [])
def ident(self, token):
return ("ident", token)
@@ -296,8 +302,9 @@ class ReplacementTokenizer(object):
return self.scanner.scan(string)[0]
scanner = re.Scanner([(r"\$\w+:", var),
-                      (r"\$?\w+(?:\(\))?", ident),
-                      (r"\[[^\]]*\]", index)])
+                      (r"\$?\w+", ident),
+                      (r"\[[^\]]*\]", index),
+                      (r"\([^)]*\)", arguments)])
class FirstWrapper(object):
@@ -339,6 +346,11 @@ def sub(request, response, escape_type="html"):
A dictionary of query parameters supplied with the request.
uuid()
A pesudo-random UUID suitable for usage with stash
file_hash(algorithm, filepath)
The cryptographic hash of a file. Supported algorithms: md5, sha1,
sha224, sha256, sha384, and sha512. For example:
{{file_hash(md5, dom/interfaces.html)}}
So for example in a setup running on localhost with a www
subdomain and a http server on ports 80 and 81::
@@ -351,7 +363,7 @@
It is also possible to assign a value to a variable name, which must start with
the $ character, using the ":" syntax e.g.
-{{$id:uuid()}
+{{$id:uuid()}}
Later substitutions in the same file may then refer to the variable
by name e.g.
@@ -365,6 +377,39 @@
response.content = new_content
return response
class SubFunctions(object):
@staticmethod
def uuid(request):
return str(uuid.uuid4())
# Maintain a whitelist of supported algorithms, restricted to those that
# are available on all platforms [1]. This ensures that test authors do not
# unknowingly introduce platform-specific tests.
#
# [1] https://docs.python.org/2/library/hashlib.html
supported_algorithms = ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")
@staticmethod
def file_hash(request, algorithm, path):
if algorithm not in SubFunctions.supported_algorithms:
raise ValueError("Unsupported encryption algorithm: '%s'" % algorithm)
hash_obj = getattr(hashlib, algorithm)()
absolute_path = os.path.join(request.doc_root, path)
try:
with open(absolute_path) as f:
hash_obj.update(f.read())
except IOError:
# In this context, an unhandled IOError will be interpreted by the
# server as an indication that the template file is non-existent.
# Although the generic "Exception" is less precise, it avoids
# triggering a potentially-confusing HTTP 404 error in cases where
# the path to the file to be hashed is invalid.
raise Exception('Cannot open file for hash computation: "%s"' % absolute_path)
return hash_obj.digest().encode('base64').strip()
def template(request, content, escape_type="html"):
#TODO: There basically isn't any error handling here
tokenizer = ReplacementTokenizer()
@@ -382,12 +427,15 @@ def template(request, content, escape_type="html"):
else:
variable = None
-assert tokens[0][0] == "ident" and all(item[0] == "index" for item in tokens[1:]), tokens
+assert tokens[0][0] == "ident", tokens
assert all(item[0] in ("index", "arguments") for item in tokens[1:]), tokens
field = tokens[0][1]
if field in variables:
value = variables[field]
elif hasattr(SubFunctions, field):
value = getattr(SubFunctions, field)
elif field == "headers": elif field == "headers":
value = request.headers value = request.headers
elif field == "GET": elif field == "GET":
...@@ -414,15 +462,16 @@ def template(request, content, escape_type="html"): ...@@ -414,15 +462,16 @@ def template(request, content, escape_type="html"):
"path": request.url_parts.path, "path": request.url_parts.path,
"pathname": request.url_parts.path, "pathname": request.url_parts.path,
"query": "?%s" % request.url_parts.query} "query": "?%s" % request.url_parts.query}
elif field == "uuid()":
value = str(uuid.uuid4())
elif field == "url_base": elif field == "url_base":
value = request.url_base value = request.url_base
else: else:
raise Exception("Undefined template variable %s" % field) raise Exception("Undefined template variable %s" % field)
for item in tokens[1:]: for item in tokens[1:]:
value = value[item[1]] if item[0] == "index":
value = value[item[1]]
else:
value = value(request, *item[1])
assert isinstance(value, (int,) + types.StringTypes), tokens
...
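
To show what the extended tokenizer accepts, a hypothetical expansion (the file path is invented, and this assumes the tokenizer's tokenize() method wraps the scanner shown above; illustration only, not part of the CL):

    ReplacementTokenizer().tokenize(b"file_hash(sha256, dom/interfaces.html)")
    # -> [('ident', 'file_hash'), ('arguments', ['sha256', 'dom/interfaces.html'])]
    # template() then resolves it as SubFunctions.file_hash(request, 'sha256', 'dom/interfaces.html').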
@@ -400,9 +400,10 @@ class WebTestHttpd(object):
server_cls = WebTestServer
if use_ssl:
-    if key_file is not None:
-        assert os.path.exists(key_file)
-    assert certificate is not None and os.path.exists(certificate)
+    if not os.path.exists(key_file):
+        raise ValueError("SSL certificate not found: {}".format(key_file))
+    if not os.path.exists(certificate):
+        raise ValueError("SSL key not found: {}".format(certificate))
try:
self.httpd = server_cls((host, port),
...
import socket
def invert_dict(dict):
rv = {}
for key, values in dict.iteritems():
@@ -12,3 +14,93 @@ class HTTPException(Exception):
def __init__(self, code, message=""):
self.code = code
self.message = message
def _open_socket(host, port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if port != 0:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(5)
return sock
def is_bad_port(port):
"""
Bad port as per https://fetch.spec.whatwg.org/#port-blocking
"""
return port in [
1, # tcpmux
7, # echo
9, # discard
11, # systat
13, # daytime
15, # netstat
17, # qotd
19, # chargen
20, # ftp-data
21, # ftp
22, # ssh
23, # telnet
25, # smtp
37, # time
42, # name
43, # nicname
53, # domain
77, # priv-rjs
79, # finger
87, # ttylink
95, # supdup
101, # hostriame
102, # iso-tsap
103, # gppitnp
104, # acr-nema
109, # pop2
110, # pop3
111, # sunrpc
113, # auth
115, # sftp
117, # uucp-path
119, # nntp
123, # ntp
135, # loc-srv / epmap
139, # netbios
143, # imap2
179, # bgp
389, # ldap
465, # smtp+ssl
512, # print / exec
513, # login
514, # shell
515, # printer
526, # tempo
530, # courier
531, # chat
532, # netnews
540, # uucp
556, # remotefs
563, # nntp+ssl
587, # smtp
601, # syslog-conn
636, # ldap+ssl
993, # imap+ssl
995, # pop3+ssl
2049, # nfs
3659, # apple-sasl
4045, # lockd
6000, # x11
6665, # irc (alternate)
6666, # irc (alternate)
6667, # irc (default)
6668, # irc (alternate)
6669, # irc (alternate)
]
def get_port(host):
port = 0
while True:
free_socket = _open_socket(host, 0)
port = free_socket.getsockname()[1]
free_socket.close()
if not is_bad_port(port):
break
return port
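
For completeness, the intended use of the new port helpers above (the host value is arbitrary; illustration only, not part of the CL):

    port = get_port("127.0.0.1")
    assert not is_bad_port(port)  # get_port loops until it finds a free, non-blocked port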