Commit 79310a72 authored by Robert Ma's avatar Robert Ma Committed by Commit Bot

Roll in new version of WPT tools

Most notably we'd like to include
https://github.com/w3c/web-platform-tests/pull/10599,
which added support for test variants in .any.js tests.

Other changes in this roll:
* Fixed paths in chromium.patch after the Blink mv.
* Added the new wptserve/config.py to the whitelist.

This roll also exposed an upstream issue. Filed issue
836377 to track it (fix is under way in the upstream).

Bug: 836276, 829697, 836377
Change-Id: I5fc0af49c0149871222eaacd1bf5473b187c38a5
Reviewed-on: https://chromium-review.googlesource.com/1026165
Commit-Queue: Robert Ma <robertma@chromium.org>
Reviewed-by: default avatarQuinten Yearsley <qyearsley@chromium.org>
Cr-Commit-Position: refs/heads/master@{#553324}
parent 933b81ec
...@@ -2302,6 +2302,10 @@ crbug.com/832071 virtual/navigation-mojo-response/external/wpt/service-workers/s ...@@ -2302,6 +2302,10 @@ crbug.com/832071 virtual/navigation-mojo-response/external/wpt/service-workers/s
crbug.com/833215 [ Mac10.12 Mac10.13 Retina ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Timeout ] crbug.com/833215 [ Mac10.12 Mac10.13 Retina ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Timeout ]
crbug.com/833215 [ Linux ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Failure ] crbug.com/833215 [ Linux ] external/wpt/accelerometer/Accelerometer.https.html [ Pass Failure ]
# TODO(robertma): Remove these once https://github.com/w3c/web-platform-tests/pull/10613 is imported.
crbug.com/836377 external/wpt/service-workers/service-worker/worker-interception-redirect.https.html [ Timeout ]
crbug.com/836377 virtual/outofblink-cors/external/wpt/service-workers/service-worker/worker-interception-redirect.https.html [ Timeout ]
# ====== New tests from wpt-importer added here ====== # ====== New tests from wpt-importer added here ======
crbug.com/626703 [ Mac10.12 ] external/wpt/html/syntax/parsing/html5lib_template.html?run_type=uri [ Timeout ] crbug.com/626703 [ Mac10.12 ] external/wpt/html/syntax/parsing/html5lib_template.html?run_type=uri [ Timeout ]
crbug.com/626703 external/wpt/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-classic.sub.html [ Skip ] crbug.com/626703 external/wpt/html/semantics/scripting-1/the-script-element/module/dynamic-import/string-compilation-integrity-classic.sub.html [ Skip ]
......
...@@ -32,7 +32,7 @@ Local Modifications: None ...@@ -32,7 +32,7 @@ Local Modifications: None
Name: web-platform-tests - Test Suites for Web Platform specifications Name: web-platform-tests - Test Suites for Web Platform specifications
Short Name: wpt Short Name: wpt
URL: https://github.com/w3c/web-platform-tests/ URL: https://github.com/w3c/web-platform-tests/
Version: d00f7bab7aecadbc2fc9a47172d090b4ba7e2ef2 Version: 507af0c03617714bfd4134c54da4d534906ee52b
License: LICENSES FOR W3C TEST SUITES (http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html) License: LICENSES FOR W3C TEST SUITES (http://www.w3.org/Consortium/Legal/2008/04-testsuite-copyright.html)
License File: wpt/wpt/LICENSE.md License File: wpt/wpt/LICENSE.md
Security Critical: no Security Critical: no
......
...@@ -109,6 +109,7 @@ ...@@ -109,6 +109,7 @@
./tools/wptserve/README.md ./tools/wptserve/README.md
./tools/wptserve/wptserve ./tools/wptserve/wptserve
./tools/wptserve/wptserve/__init__.py ./tools/wptserve/wptserve/__init__.py
./tools/wptserve/wptserve/config.py
./tools/wptserve/wptserve/constants.py ./tools/wptserve/wptserve/constants.py
./tools/wptserve/wptserve/handlers.py ./tools/wptserve/wptserve/handlers.py
./tools/wptserve/wptserve/logger.py ./tools/wptserve/wptserve/logger.py
......
...@@ -9,7 +9,7 @@ cd $DIR ...@@ -9,7 +9,7 @@ cd $DIR
TARGET_DIR=$DIR/wpt TARGET_DIR=$DIR/wpt
REMOTE_REPO="https://chromium.googlesource.com/external/w3c/web-platform-tests.git" REMOTE_REPO="https://chromium.googlesource.com/external/w3c/web-platform-tests.git"
WPT_HEAD=d00f7bab7aecadbc2fc9a47172d090b4ba7e2ef2 WPT_HEAD=507af0c03617714bfd4134c54da4d534906ee52b
function clone { function clone {
# Remove existing repo if already exists. # Remove existing repo if already exists.
......
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py diff --git a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
--- a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py --- a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/lint/lint.py +++ b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/lint/lint.py
@@ -752,6 +752,7 @@ def create_parser(): @@ -752,6 +752,7 @@ def create_parser():
help="Run CSS testsuite specific lints") help="Run CSS testsuite specific lints")
parser.add_argument("--repo-root", help="The WPT directory. Use this" parser.add_argument("--repo-root", help="The WPT directory. Use this"
...@@ -33,9 +33,9 @@ diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/ ...@@ -33,9 +33,9 @@ diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/
output_errors = {"json": output_errors_json, output_errors = {"json": output_errors_json,
"markdown": output_errors_markdown, "markdown": output_errors_markdown,
"normal": output_errors_text}[output_format] "normal": output_errors_text}[output_format]
diff --git a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths diff --git a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
--- a/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths --- a/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
+++ b/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/wpt/wpt/tools/wpt/paths +++ b/third_party/blink/tools/blinkpy/third_party/wpt/wpt/tools/wpt/paths
@@ -1,4 +1,3 @@ @@ -1,4 +1,3 @@
-tools/ci/ -tools/ci/
tools/lint/ tools/lint/
......
...@@ -18,7 +18,7 @@ from .. import localpaths ...@@ -18,7 +18,7 @@ from .. import localpaths
from ..gitignore.gitignore import PathFilter from ..gitignore.gitignore import PathFilter
from ..wpt import testfiles from ..wpt import testfiles
from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars from manifest.sourcefile import SourceFile, js_meta_re, python_meta_re, space_chars, get_any_variants, get_default_any_variants
from six import binary_type, iteritems, itervalues from six import binary_type, iteritems, itervalues
from six.moves import range from six.moves import range
from six.moves.urllib.parse import urlsplit, urljoin from six.moves.urllib.parse import urlsplit, urljoin
...@@ -31,6 +31,9 @@ def setup_logging(prefix=False): ...@@ -31,6 +31,9 @@ def setup_logging(prefix=False):
global logger global logger
if logger is None: if logger is None:
logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0])) logger = logging.getLogger(os.path.basename(os.path.splitext(__file__)[0]))
handler = logging.StreamHandler(sys.stdout)
# Only add a handler if the parent logger is missing a handler
if logger.parent and len(logger.parent.handlers) == 0:
handler = logging.StreamHandler(sys.stdout) handler = logging.StreamHandler(sys.stdout)
logger.addHandler(handler) logger.addHandler(handler)
if prefix: if prefix:
...@@ -613,6 +616,31 @@ broken_js_metadata = re.compile(b"//\s*META:") ...@@ -613,6 +616,31 @@ broken_js_metadata = re.compile(b"//\s*META:")
broken_python_metadata = re.compile(b"#\s*META:") broken_python_metadata = re.compile(b"#\s*META:")
def check_global_metadata(value):
global_values = {item.strip() for item in value.split(b",") if item.strip()}
included_variants = set.union(get_default_any_variants(),
*(get_any_variants(v) for v in global_values if not v.startswith(b"!")))
for global_value in global_values:
if global_value.startswith(b"!"):
excluded_value = global_value[1:]
if not get_any_variants(excluded_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
elif excluded_value in global_values:
yield ("BROKEN-GLOBAL-METADATA", "Cannot specify both %s and %s" % (global_value, excluded_value))
else:
excluded_variants = get_any_variants(excluded_value)
if not (excluded_variants & included_variants):
yield ("BROKEN-GLOBAL-METADATA", "Cannot exclude %s if it is not included" % (excluded_value,))
else:
if not get_any_variants(global_value):
yield ("UNKNOWN-GLOBAL-METADATA", "Unexpected value for global metadata")
def check_script_metadata(repo_root, path, f): def check_script_metadata(repo_root, path, f):
if path.endswith((".worker.js", ".any.js")): if path.endswith((".worker.js", ".any.js")):
meta_re = js_meta_re meta_re = js_meta_re
...@@ -631,7 +659,9 @@ def check_script_metadata(repo_root, path, f): ...@@ -631,7 +659,9 @@ def check_script_metadata(repo_root, path, f):
m = meta_re.match(line) m = meta_re.match(line)
if m: if m:
key, value = m.groups() key, value = m.groups()
if key == b"timeout": if key == b"global":
errors.extend((kind, message, path, idx + 1) for (kind, message) in check_global_metadata(value))
elif key == b"timeout":
if value != b"long": if value != b"long":
errors.append(("UNKNOWN-TIMEOUT-METADATA", "Unexpected value for timeout metadata", path, idx + 1)) errors.append(("UNKNOWN-TIMEOUT-METADATA", "Unexpected value for timeout metadata", path, idx + 1))
elif key == b"script": elif key == b"script":
......
...@@ -49,6 +49,87 @@ def read_script_metadata(f, regexp): ...@@ -49,6 +49,87 @@ def read_script_metadata(f, regexp):
yield (m.groups()[0], m.groups()[1]) yield (m.groups()[0], m.groups()[1])
_any_variants = {
b"default": {"longhand": {b"window", b"dedicatedworker"}},
b"window": {"suffix": ".any.html"},
b"serviceworker": {"force_https": True},
b"sharedworker": {},
b"dedicatedworker": {"suffix": ".any.worker.html"},
b"worker": {"longhand": {b"dedicatedworker", b"sharedworker", b"serviceworker"}}
}
def get_any_variants(item):
"""
Returns a set of variants (bytestrings) defined by the given keyword.
"""
assert isinstance(item, binary_type), item
assert not item.startswith(b"!"), item
variant = _any_variants.get(item, None)
if variant is None:
return set()
return variant.get("longhand", {item})
def get_default_any_variants():
"""
Returns a set of variants (bytestrings) that will be used by default.
"""
return set(_any_variants[b"default"]["longhand"])
def parse_variants(value):
"""
Returns a set of variants (bytestrings) defined by a comma-separated value.
"""
assert isinstance(value, binary_type), value
globals = get_default_any_variants()
for item in value.split(b","):
item = item.strip()
if item.startswith(b"!"):
globals -= get_any_variants(item[1:])
else:
globals |= get_any_variants(item)
return globals
def global_suffixes(value):
"""
Yields the relevant filename suffixes (strings) for the variants defined by
the given comma-separated value.
"""
assert isinstance(value, binary_type), value
rv = set()
global_types = parse_variants(value)
for global_type in global_types:
variant = _any_variants[global_type]
suffix = variant.get("suffix", ".any.%s.html" % global_type.decode("utf-8"))
if variant.get("force_https", False):
suffix = ".https" + suffix
rv.add(suffix)
return rv
def global_variant_url(url, suffix):
"""
Returns a url created from the given url and suffix (all strings).
"""
url = url.replace(".any.", ".")
# If the url must be loaded over https, ensure that it will have
# the form .https.any.js
if ".https." in url and suffix.startswith(".https."):
url = url.replace(".https.", ".")
return replace_end(url, ".js", suffix)
class SourceFile(object): class SourceFile(object):
parsers = {"html":lambda x:html5lib.parse(x, treebuilder="etree"), parsers = {"html":lambda x:html5lib.parse(x, treebuilder="etree"),
"xhtml":lambda x:ElementTree.parse(x, XMLParser.XMLParser()), "xhtml":lambda x:ElementTree.parse(x, XMLParser.XMLParser()),
...@@ -225,8 +306,9 @@ class SourceFile(object): ...@@ -225,8 +306,9 @@ class SourceFile(object):
# wdspec tests are in subdirectories of /webdriver excluding __init__.py # wdspec tests are in subdirectories of /webdriver excluding __init__.py
# files. # files.
rel_dir_tree = self.rel_path.split(os.path.sep) rel_dir_tree = self.rel_path.split(os.path.sep)
return (rel_dir_tree[0] == "webdriver" and return (((rel_dir_tree[0] == "webdriver" and len(rel_dir_tree) > 1) or
len(rel_dir_tree) > 1 and (rel_dir_tree[:2] == ["infrastructure", "webdriver"] and
len(rel_dir_tree) > 2)) and
self.filename not in ("__init__.py", "conftest.py") and self.filename not in ("__init__.py", "conftest.py") and
fnmatch(self.filename, wd_pattern)) fnmatch(self.filename, wd_pattern))
...@@ -369,12 +451,19 @@ class SourceFile(object): ...@@ -369,12 +451,19 @@ class SourceFile(object):
@cached_property @cached_property
def test_variants(self): def test_variants(self):
rv = [] rv = []
if self.ext == ".js":
for (key, value) in self.script_metadata:
if key == b"variant":
rv.append(value.decode("utf-8"))
else:
for element in self.variant_nodes: for element in self.variant_nodes:
if "content" in element.attrib: if "content" in element.attrib:
variant = element.attrib["content"] variant = element.attrib["content"]
assert variant == "" or variant[0] in ["#", "?"]
rv.append(variant) rv.append(variant)
for variant in rv:
assert variant == "" or variant[0] in ["#", "?"], variant
if not rv: if not rv:
rv = [""] rv = [""]
...@@ -509,22 +598,34 @@ class SourceFile(object): ...@@ -509,22 +598,34 @@ class SourceFile(object):
rv = VisualTest.item_type, [VisualTest(self, self.url)] rv = VisualTest.item_type, [VisualTest(self, self.url)]
elif self.name_is_multi_global: elif self.name_is_multi_global:
rv = TestharnessTest.item_type, [ globals = b""
TestharnessTest(self, replace_end(self.url, ".any.js", ".any.html"), for (key, value) in self.script_metadata:
timeout=self.timeout), if key == b"global":
TestharnessTest(self, replace_end(self.url, ".any.js", ".any.worker.html"), globals = value
timeout=self.timeout), break
tests = [
TestharnessTest(self, global_variant_url(self.url, suffix) + variant, timeout=self.timeout)
for suffix in sorted(global_suffixes(globals))
for variant in self.test_variants
] ]
rv = TestharnessTest.item_type, tests
elif self.name_is_worker: elif self.name_is_worker:
rv = (TestharnessTest.item_type, test_url = replace_end(self.url, ".worker.js", ".worker.html")
[TestharnessTest(self, replace_end(self.url, ".worker.js", ".worker.html"), tests = [
timeout=self.timeout)]) TestharnessTest(self, test_url + variant, timeout=self.timeout)
for variant in self.test_variants
]
rv = TestharnessTest.item_type, tests
elif self.name_is_window: elif self.name_is_window:
rv = (TestharnessTest.item_type, test_url = replace_end(self.url, ".window.js", ".window.html")
[TestharnessTest(self, replace_end(self.url, ".window.js", ".window.html"), tests = [
timeout=self.timeout)]) TestharnessTest(self, test_url + variant, timeout=self.timeout)
for variant in self.test_variants
]
rv = TestharnessTest.item_type, tests
elif self.name_is_webdriver: elif self.name_is_webdriver:
rv = WebdriverSpecTest.item_type, [WebdriverSpecTest(self, self.url, rv = WebdriverSpecTest.item_type, [WebdriverSpecTest(self, self.url,
......
...@@ -20,11 +20,13 @@ from multiprocessing import Process, Event ...@@ -20,11 +20,13 @@ from multiprocessing import Process, Event
from localpaths import repo_root from localpaths import repo_root
import sslutils import sslutils
from manifest.sourcefile import read_script_metadata, js_meta_re from manifest.sourcefile import read_script_metadata, js_meta_re, parse_variants
from wptserve import server as wptserve, handlers from wptserve import server as wptserve, handlers
from wptserve import stash from wptserve import stash
from wptserve import config
from wptserve.logger import set_logger from wptserve.logger import set_logger
from wptserve.handlers import filesystem_path, wrap_pipeline from wptserve.handlers import filesystem_path, wrap_pipeline
from wptserve.utils import get_port, HTTPException
from mod_pywebsocket import standalone as pywebsocket from mod_pywebsocket import standalone as pywebsocket
def replace_end(s, old, new): def replace_end(s, old, new):
...@@ -54,9 +56,14 @@ class WrapperHandler(object): ...@@ -54,9 +56,14 @@ class WrapperHandler(object):
for header_name, header_value in self.headers: for header_name, header_value in self.headers:
response.headers.set(header_name, header_value) response.headers.set(header_name, header_value)
self.check_exposure(request)
path = self._get_path(request.url_parts.path, True) path = self._get_path(request.url_parts.path, True)
query = request.url_parts.query
if query:
query = "?" + query
meta = "\n".join(self._get_meta(request)) meta = "\n".join(self._get_meta(request))
response.content = self.wrapper % {"meta": meta, "path": path} response.content = self.wrapper % {"meta": meta, "path": path, "query": query}
wrap_pipeline(path, request, response) wrap_pipeline(path, request, response)
def _get_path(self, path, resource_path): def _get_path(self, path, resource_path):
...@@ -83,15 +90,24 @@ class WrapperHandler(object): ...@@ -83,15 +90,24 @@ class WrapperHandler(object):
path = replace_end(path, src, dest) path = replace_end(path, src, dest)
return path return path
def _get_meta(self, request): def _get_metadata(self, request):
"""Get an iterator over strings to inject into the wrapper document """Get an iterator over script metadata based on //META comments in the
based on //META comments in the associated js file. associated js file.
:param request: The Request being processed. :param request: The Request being processed.
""" """
path = self._get_path(filesystem_path(self.base_path, request, self.url_base), False) path = self._get_path(filesystem_path(self.base_path, request, self.url_base), False)
with open(path, "rb") as f: with open(path, "rb") as f:
for key, value in read_script_metadata(f, js_meta_re): for key, value in read_script_metadata(f, js_meta_re):
yield key, value
def _get_meta(self, request):
"""Get an iterator over strings to inject into the wrapper document
based on //META comments in the associated js file.
:param request: The Request being processed.
"""
for key, value in self._get_metadata(request):
replacement = self._meta_replacement(key, value) replacement = self._meta_replacement(key, value)
if replacement: if replacement:
yield replacement yield replacement
...@@ -116,8 +132,27 @@ class WrapperHandler(object): ...@@ -116,8 +132,27 @@ class WrapperHandler(object):
# a specific metadata key: value pair. # a specific metadata key: value pair.
pass pass
@abc.abstractmethod
def check_exposure(self, request):
# Raise an exception if this handler shouldn't be exposed after all.
pass
class HtmlWrapperHandler(WrapperHandler): class HtmlWrapperHandler(WrapperHandler):
global_type = None
def check_exposure(self, request):
if self.global_type:
globals = b""
for (key, value) in self._get_metadata(request):
if key == b"global":
globals = value
break
if self.global_type not in parse_variants(globals):
raise HTTPException(404, "This test cannot be loaded in %s mode" %
self.global_type)
def _meta_replacement(self, key, value): def _meta_replacement(self, key, value):
if key == b"timeout": if key == b"timeout":
if value == b"long": if value == b"long":
...@@ -129,6 +164,7 @@ class HtmlWrapperHandler(WrapperHandler): ...@@ -129,6 +164,7 @@ class HtmlWrapperHandler(WrapperHandler):
class WorkersHandler(HtmlWrapperHandler): class WorkersHandler(HtmlWrapperHandler):
global_type = b"dedicatedworker"
path_replace = [(".any.worker.html", ".any.js", ".any.worker.js"), path_replace = [(".any.worker.html", ".any.js", ".any.worker.js"),
(".worker.html", ".worker.js")] (".worker.html", ".worker.js")]
wrapper = """<!doctype html> wrapper = """<!doctype html>
...@@ -138,7 +174,7 @@ class WorkersHandler(HtmlWrapperHandler): ...@@ -138,7 +174,7 @@ class WorkersHandler(HtmlWrapperHandler):
<script src="/resources/testharnessreport.js"></script> <script src="/resources/testharnessreport.js"></script>
<div id=log></div> <div id=log></div>
<script> <script>
fetch_tests_from_worker(new Worker("%(path)s")); fetch_tests_from_worker(new Worker("%(path)s%(query)s"));
</script> </script>
""" """
...@@ -156,6 +192,7 @@ class WindowHandler(HtmlWrapperHandler): ...@@ -156,6 +192,7 @@ class WindowHandler(HtmlWrapperHandler):
class AnyHtmlHandler(HtmlWrapperHandler): class AnyHtmlHandler(HtmlWrapperHandler):
global_type = b"window"
path_replace = [(".any.html", ".any.js")] path_replace = [(".any.html", ".any.js")]
wrapper = """<!doctype html> wrapper = """<!doctype html>
<meta charset=utf-8> <meta charset=utf-8>
...@@ -173,6 +210,42 @@ self.GLOBAL = { ...@@ -173,6 +210,42 @@ self.GLOBAL = {
""" """
class SharedWorkersHandler(HtmlWrapperHandler):
global_type = b"sharedworker"
path_replace = [(".any.sharedworker.html", ".any.js", ".any.worker.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id=log></div>
<script>
fetch_tests_from_worker(new SharedWorker("%(path)s%(query)s"));
</script>
"""
class ServiceWorkersHandler(HtmlWrapperHandler):
global_type = b"serviceworker"
path_replace = [(".https.any.serviceworker.html", ".any.js", ".any.worker.js")]
wrapper = """<!doctype html>
<meta charset=utf-8>
%(meta)s
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<div id=log></div>
<script>
(async function() {
const scope = 'does/not/exist';
let reg = await navigator.serviceWorker.getRegistration(scope);
if (reg) await reg.unregister();
reg = await navigator.serviceWorker.register("%(path)s%(query)s", {scope});
fetch_tests_from_worker(reg.installing);
})();
</script>
"""
class AnyWorkerHandler(WrapperHandler): class AnyWorkerHandler(WrapperHandler):
headers = [('Content-Type', 'text/javascript')] headers = [('Content-Type', 'text/javascript')]
path_replace = [(".any.worker.js", ".any.js")] path_replace = [(".any.worker.js", ".any.js")]
...@@ -187,8 +260,6 @@ done(); ...@@ -187,8 +260,6 @@ done();
""" """
def _meta_replacement(self, key, value): def _meta_replacement(self, key, value):
if key == b"timeout":
return None
if key == b"script": if key == b"script":
attribute = value.decode('utf-8').replace("\\", "\\\\").replace('"', '\\"') attribute = value.decode('utf-8').replace("\\", "\\\\").replace('"', '\\"')
return 'importScripts("%s")' % attribute return 'importScripts("%s")' % attribute
...@@ -197,14 +268,6 @@ done(); ...@@ -197,14 +268,6 @@ done();
rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")] rewrites = [("GET", "/resources/WebIDLParser.js", "/resources/webidl2/lib/webidl2.js")]
subdomains = [u"www",
u"www1",
u"www2",
u"天気の良い日",
u"élève"]
not_subdomains = [u"nonexistent-origin"]
class RoutesBuilder(object): class RoutesBuilder(object):
def __init__(self): def __init__(self):
self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler), self.forbidden_override = [("GET", "/tools/runner/*", handlers.file_handler),
...@@ -249,6 +312,8 @@ class RoutesBuilder(object): ...@@ -249,6 +312,8 @@ class RoutesBuilder(object):
("GET", "*.worker.html", WorkersHandler), ("GET", "*.worker.html", WorkersHandler),
("GET", "*.window.html", WindowHandler), ("GET", "*.window.html", WindowHandler),
("GET", "*.any.html", AnyHtmlHandler), ("GET", "*.any.html", AnyHtmlHandler),
("GET", "*.any.sharedworker.html", SharedWorkersHandler),
("GET", "*.https.any.serviceworker.html", ServiceWorkersHandler),
("GET", "*.any.worker.js", AnyWorkerHandler), ("GET", "*.any.worker.js", AnyWorkerHandler),
("GET", "*.asis", handlers.AsIsHandler), ("GET", "*.asis", handlers.AsIsHandler),
("*", "*.py", handlers.PythonScriptHandler), ("*", "*.py", handlers.PythonScriptHandler),
...@@ -282,105 +347,6 @@ def build_routes(aliases): ...@@ -282,105 +347,6 @@ def build_routes(aliases):
return builder.get_routes() return builder.get_routes()
def setup_logger(level):
import logging
global logger
logger = logging.getLogger("web-platform-tests")
logger.setLevel(getattr(logging, level.upper()))
set_logger(logger)
def open_socket(port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if port != 0:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('127.0.0.1', port))
sock.listen(5)
return sock
def bad_port(port):
"""
Bad port as per https://fetch.spec.whatwg.org/#port-blocking
"""
return port in [
1, # tcpmux
7, # echo
9, # discard
11, # systat
13, # daytime
15, # netstat
17, # qotd
19, # chargen
20, # ftp-data
21, # ftp
22, # ssh
23, # telnet
25, # smtp
37, # time
42, # name
43, # nicname
53, # domain
77, # priv-rjs
79, # finger
87, # ttylink
95, # supdup
101, # hostriame
102, # iso-tsap
103, # gppitnp
104, # acr-nema
109, # pop2
110, # pop3
111, # sunrpc
113, # auth
115, # sftp
117, # uucp-path
119, # nntp
123, # ntp
135, # loc-srv / epmap
139, # netbios
143, # imap2
179, # bgp
389, # ldap
465, # smtp+ssl
512, # print / exec
513, # login
514, # shell
515, # printer
526, # tempo
530, # courier
531, # chat
532, # netnews
540, # uucp
556, # remotefs
563, # nntp+ssl
587, # smtp
601, # syslog-conn
636, # ldap+ssl
993, # imap+ssl
995, # pop3+ssl
2049, # nfs
3659, # apple-sasl
4045, # lockd
6000, # x11
6665, # irc (alternate)
6666, # irc (alternate)
6667, # irc (default)
6668, # irc (alternate)
6669, # irc (alternate)
]
def get_port():
port = 0
while True:
free_socket = open_socket(0)
port = free_socket.getsockname()[1]
free_socket.close()
if not bad_port(port):
break
logger.debug("Going to use port %s" % port)
return port
class ServerProc(object): class ServerProc(object):
def __init__(self): def __init__(self):
self.proc = None self.proc = None
...@@ -432,9 +398,11 @@ class ServerProc(object): ...@@ -432,9 +398,11 @@ class ServerProc(object):
return self.proc.is_alive() return self.proc.is_alive()
def check_subdomains(host, paths, bind_address, ssl_config, aliases): def check_subdomains(domains, paths, bind_address, ssl_config, aliases):
port = get_port() domains = domains.copy()
subdomains = get_subdomains(host) host = domains.pop("")
port = get_port(host)
logger.debug("Going to use port %d to check subdomains" % port)
wrapper = ServerProc() wrapper = ServerProc()
wrapper.start(start_http_server, host, port, paths, build_routes(aliases), bind_address, wrapper.start(start_http_server, host, port, paths, build_routes(aliases), bind_address,
...@@ -454,8 +422,7 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases): ...@@ -454,8 +422,7 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases):
"You may need to edit /etc/hosts or similar, see README.md." % (host, port)) "You may need to edit /etc/hosts or similar, see README.md." % (host, port))
sys.exit(1) sys.exit(1)
for subdomain, (punycode, host) in subdomains.iteritems(): for domain in domains.itervalues():
domain = "%s.%s" % (punycode, host)
try: try:
urllib2.urlopen("http://%s:%d/" % (domain, port)) urllib2.urlopen("http://%s:%d/" % (domain, port))
except Exception as e: except Exception as e:
...@@ -466,18 +433,6 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases): ...@@ -466,18 +433,6 @@ def check_subdomains(host, paths, bind_address, ssl_config, aliases):
wrapper.wait() wrapper.wait()
def get_subdomains(host):
#This assumes that the tld is ascii-only or already in punycode
return {subdomain: (subdomain.encode("idna"), host)
for subdomain in subdomains}
def get_not_subdomains(host):
#This assumes that the tld is ascii-only or already in punycode
return {subdomain: (subdomain.encode("idna"), host)
for subdomain in not_subdomains}
def make_hosts_file(config, host): def make_hosts_file(config, host):
rv = [] rv = []
...@@ -630,80 +585,14 @@ def start_wss_server(host, port, paths, routes, bind_address, config, ssl_config ...@@ -630,80 +585,14 @@ def start_wss_server(host, port, paths, routes, bind_address, config, ssl_config
ssl_config) ssl_config)
def get_ports(config, ssl_environment):
rv = defaultdict(list)
for scheme, ports in config["ports"].iteritems():
for i, port in enumerate(ports):
if scheme in ["wss", "https"] and not ssl_environment.ssl_enabled:
port = None
if port == "auto":
port = get_port()
else:
port = port
rv[scheme].append(port)
return rv
def normalise_config(config, ports):
if "host" in config:
logger.warning("host in config is deprecated; use browser_host instead")
host = config["host"]
else:
host = config["browser_host"]
domains = get_subdomains(host)
not_domains = get_not_subdomains(host)
ports_ = {}
for scheme, ports_used in ports.iteritems():
ports_[scheme] = ports_used
for key, value in domains.iteritems():
domains[key] = ".".join(value)
for key, value in not_domains.iteritems():
not_domains[key] = ".".join(value)
domains[""] = host
if "bind_hostname" in config:
logger.warning("bind_hostname in config is deprecated; use bind_address instead")
bind_address = config["bind_hostname"]
else:
bind_address = config["bind_address"]
# make a (shallow) copy of the config and update that, so that the
# normalized config can be used in place of the original one.
config_ = config.copy()
config_["domains"] = domains
config_["not_domains"] = not_domains
config_["ports"] = ports_
config_["bind_address"] = bind_address
if config.get("server_host", None) is None:
config_["server_host"] = host
return config_
def get_paths(config):
return {"doc_root": config["doc_root"],
"ws_doc_root": config["ws_doc_root"]}
def get_ssl_config(config, ssl_environment):
external_domains = config["domains"].values()
key_path, cert_path = ssl_environment.host_cert_path(external_domains)
return {"key_path": key_path,
"cert_path": cert_path,
"encrypt_after_connect": config["ssl"]["encrypt_after_connect"]}
def start(config, ssl_environment, routes, **kwargs): def start(config, ssl_environment, routes, **kwargs):
host = config["server_host"] host = config["server_host"]
ports = get_ports(config, ssl_environment) ports = config.ports
paths = get_paths(config) paths = config.paths
bind_address = config["bind_address"] bind_address = config["bind_address"]
ssl_config = get_ssl_config(config, ssl_environment) ssl_config = config.ssl_config
logger.debug("Using ports: %r" % ports)
servers = start_servers(host, ports, paths, routes, bind_address, config, servers = start_servers(host, ports, paths, routes, bind_address, config,
ssl_config, **kwargs) ssl_config, **kwargs)
...@@ -717,49 +606,6 @@ def iter_procs(servers): ...@@ -717,49 +606,6 @@ def iter_procs(servers):
yield server.proc yield server.proc
def value_set(config, key):
return key in config and config[key] is not None
def get_value_or_default(config, key, default=None):
return config[key] if value_set(config, key) else default
def set_computed_defaults(config):
if not value_set(config, "doc_root"):
config["doc_root"] = repo_root
if not value_set(config, "ws_doc_root"):
root = get_value_or_default(config, "doc_root", default=repo_root)
config["ws_doc_root"] = os.path.join(root, "websockets", "handlers")
if not value_set(config, "aliases"):
config["aliases"] = []
def merge_json(base_obj, override_obj):
rv = {}
for key, value in base_obj.iteritems():
if key not in override_obj:
rv[key] = value
else:
if isinstance(value, dict):
rv[key] = merge_json(value, override_obj[key])
else:
rv[key] = override_obj[key]
return rv
def get_ssl_environment(config):
implementation_type = config["ssl"]["type"]
cls = sslutils.environments[implementation_type]
try:
kwargs = config["ssl"][implementation_type].copy()
except KeyError:
raise ValueError("%s is not a vaid ssl type." % implementation_type)
return cls(logger, **kwargs)
def load_config(default_path, override_path=None, **kwargs): def load_config(default_path, override_path=None, **kwargs):
if os.path.exists(default_path): if os.path.exists(default_path):
with open(default_path) as f: with open(default_path) as f:
...@@ -767,20 +613,19 @@ def load_config(default_path, override_path=None, **kwargs): ...@@ -767,20 +613,19 @@ def load_config(default_path, override_path=None, **kwargs):
else: else:
raise ValueError("Config path %s does not exist" % default_path) raise ValueError("Config path %s does not exist" % default_path)
rv = Config(**base_obj)
if os.path.exists(override_path): if os.path.exists(override_path):
with open(override_path) as f: with open(override_path) as f:
override_obj = json.load(f) override_obj = json.load(f)
else: rv.update(override_obj)
override_obj = {}
rv = merge_json(base_obj, override_obj)
if kwargs.get("config_path"): if kwargs.get("config_path"):
other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path"))) other_path = os.path.abspath(os.path.expanduser(kwargs.get("config_path")))
if os.path.exists(other_path): if os.path.exists(other_path):
base_obj = rv
with open(other_path) as f: with open(other_path) as f:
override_obj = json.load(f) override_obj = json.load(f)
rv = merge_json(base_obj, override_obj) rv.update(override_obj)
else: else:
raise ValueError("Config path %s does not exist" % other_path) raise ValueError("Config path %s does not exist" % other_path)
...@@ -793,11 +638,30 @@ def load_config(default_path, override_path=None, **kwargs): ...@@ -793,11 +638,30 @@ def load_config(default_path, override_path=None, **kwargs):
value = os.path.abspath(os.path.expanduser(value)) value = os.path.abspath(os.path.expanduser(value))
if not os.path.exists(value): if not os.path.exists(value):
raise ValueError("%s path %s does not exist" % (title, value)) raise ValueError("%s path %s does not exist" % (title, value))
rv[key] = value setattr(rv, key, value)
set_computed_defaults(rv)
return rv return rv
# Subdomains that wpt serve makes available for tests. The non-ASCII
# entries exercise IDNA encoding of hostnames.
_subdomains = {u"www",
               u"www1",
               u"www2",
               u"天気の良い日",
               u"élève"}

# Hostnames that tests rely on NOT resolving (cross-origin failure cases).
_not_subdomains = {u"nonexistent-origin"}
class Config(config.Config):
    """Configuration for wpt serve.

    Subclasses wptserve.config.Config to pre-populate the wpt-specific
    subdomain sets used when computing the served domain maps.
    """

    def __init__(self, *args, **kwargs):
        # Inject the canonical wpt subdomain sets; all other options are
        # forwarded unchanged to wptserve.config.Config (which will raise
        # TypeError if a caller also passes these keywords explicitly).
        super(Config, self).__init__(
            subdomains=_subdomains,
            not_subdomains=_not_subdomains,
            *args,
            **kwargs
        )
def get_parser(): def get_parser():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
...@@ -817,26 +681,24 @@ def run(**kwargs): ...@@ -817,26 +681,24 @@ def run(**kwargs):
os.path.join(repo_root, "config.json"), os.path.join(repo_root, "config.json"),
**kwargs) **kwargs)
setup_logger(config["log_level"]) global logger
logger = config.logger
set_logger(logger)
with get_ssl_environment(config) as ssl_env:
ports = get_ports(config, ssl_env)
config = normalise_config(config, ports)
browser_host = config["browser_host"]
server_host = config["server_host"]
bind_address = config["bind_address"] bind_address = config["bind_address"]
if config["check_subdomains"]: if config["check_subdomains"]:
paths = get_paths(config) paths = config.paths
ssl_config = get_ssl_config(config, ssl_env) ssl_config = config.ssl_config
check_subdomains(browser_host, paths, bind_address, ssl_config, config["aliases"]) check_subdomains(config.domains, paths, bind_address, ssl_config, config["aliases"])
stash_address = None stash_address = None
if bind_address: if bind_address:
stash_address = (server_host, get_port()) stash_address = (config.server_host, get_port(config.server_host))
logger.debug("Going to use port %d for stash" % stash_address[1])
with stash.StashServer(stash_address, authkey=str(uuid.uuid4())): with stash.StashServer(stash_address, authkey=str(uuid.uuid4())):
servers = start(config, ssl_env, build_routes(config["aliases"]), **kwargs) servers = start(config, config.ssl_env, build_routes(config["aliases"]), **kwargs)
try: try:
while any(item.is_alive() for item in iter_procs(servers)): while any(item.is_alive() for item in iter_procs(servers)):
......
...@@ -136,7 +136,7 @@ certs = $dir ...@@ -136,7 +136,7 @@ certs = $dir
new_certs_dir = $certs new_certs_dir = $certs
crl_dir = $dir%(sep)scrl crl_dir = $dir%(sep)scrl
database = $dir%(sep)sindex.txt database = $dir%(sep)sindex.txt
private_key = $dir%(sep)scakey.pem private_key = $dir%(sep)scacert.key
certificate = $dir%(sep)scacert.pem certificate = $dir%(sep)scacert.pem
serial = $dir%(sep)sserial serial = $dir%(sep)sserial
crldir = $dir%(sep)scrl crldir = $dir%(sep)scrl
...@@ -294,7 +294,7 @@ class OpenSSLEnvironment(object): ...@@ -294,7 +294,7 @@ class OpenSSLEnvironment(object):
return self._ca_cert_path return self._ca_cert_path
def _load_ca_cert(self): def _load_ca_cert(self):
key_path = self.path("cakey.pem") key_path = self.path("cacert.key")
cert_path = self.path("cacert.pem") cert_path = self.path("cacert.pem")
if self.check_key_cert(key_path, cert_path, None): if self.check_key_cert(key_path, cert_path, None):
...@@ -327,7 +327,7 @@ class OpenSSLEnvironment(object): ...@@ -327,7 +327,7 @@ class OpenSSLEnvironment(object):
path = self.path path = self.path
self.logger.info("Generating new CA in %s" % self.base_path) self.logger.info("Generating new CA in %s" % self.base_path)
key_path = path("cakey.pem") key_path = path("cacert.key")
req_path = path("careq.pem") req_path = path("careq.pem")
cert_path = path("cacert.pem") cert_path = path("cacert.pem")
......
...@@ -179,22 +179,70 @@ class Firefox(Browser): ...@@ -179,22 +179,70 @@ class Firefox(Browser):
def find_webdriver(self): def find_webdriver(self):
return find_executable("geckodriver") return find_executable("geckodriver")
def get_version_number(self, binary):
    """Run ``binary --version`` and parse it into (version, channel).

    Returns (None, "nightly") when the output does not look like a Firefox
    version string. Suffix "a" maps to nightly, "b" to beta, no suffix to
    stable.
    """
    version_re = re.compile(r"Mozilla Firefox (\d+\.\d+(?:\.\d+)?)(a|b)?")
    proc = subprocess.Popen([binary, "--version"], stdout=subprocess.PIPE)
    stdout, _ = proc.communicate()
    # Fixes: communicate() yields bytes on Python 3, and str.strip()
    # returns a new string (the original discarded its result).
    if isinstance(stdout, bytes):
        stdout = stdout.decode("utf-8", "replace")
    stdout = stdout.strip()
    m = version_re.match(stdout)
    if not m:
        return None, "nightly"
    version, status = m.groups()
    channel = {"a": "nightly", "b": "beta"}
    return version, channel.get(status, "stable")
def get_prefs_url(self, version, channel):
    """Build the URL of prefs_general.js in the hg repo matching this build.

    Stable builds map to a release tag on mozilla-release, beta builds to a
    beta tag on mozilla-central, and anything else (nightly) uses tip.
    """
    if channel == "stable":
        repo = "https://hg.mozilla.org/releases/mozilla-release"
        tag = "FIREFOX_%s_RELEASE" % version.replace(".", "_")
    elif channel == "beta":
        repo = "https://hg.mozilla.org/mozilla-central"
        tag = "FIREFOX_%s_BETA" % version.split(".", 1)[0]
    else:
        # Nightly: tip is only an approximation; doing better would require
        # the actual build revision from an application.ini file.
        repo = "https://hg.mozilla.org/mozilla-central"
        tag = "tip"
    return "%s/raw-file/%s/testing/profiles/prefs_general.js" % (repo, tag)
def install_prefs(self, binary, dest=None):
version, channel = self.get_version_number(binary)
if dest is None: if dest is None:
dest = os.pwd dest = os.pwd
dest = os.path.join(dest, "profiles") dest = os.path.join(dest, "profiles")
if not os.path.exists(dest): if not os.path.exists(dest):
os.makedirs(dest) os.makedirs(dest)
prefs_path = os.path.join(dest, "prefs_general.js") prefs_file = os.path.join(dest, "prefs_general.js")
cache_file = os.path.join(dest,
"%s-%s.cache" % (version, channel)
if channel != "nightly"
else "nightly.cache")
have_cache = False
if os.path.exists(cache_file):
if channel != "nightly":
have_cache = True
else:
now = datetime.now() now = datetime.now()
if (not os.path.exists(prefs_path) or have_cache = (datetime.fromtimestamp(os.stat(cache_file).st_mtime) >
(datetime.fromtimestamp(os.stat(prefs_path).st_mtime) < now - timedelta(days=1))
now - timedelta(days=2))):
with open(prefs_path, "wb") as f: # If we don't have a recent download, grab the url
resp = get("https://hg.mozilla.org/mozilla-central/raw-file/tip/testing/profiles/prefs_general.js") if not have_cache:
url = self.get_prefs_url(version, channel)
with open(cache_file, "wb") as f:
print("Installing test prefs from %s" % url)
resp = get(url)
f.write(resp.content) f.write(resp.content)
else:
print("Using cached test prefs from %s" % cache_file)
shutil.copyfile(cache_file, prefs_file)
return dest return dest
......
...@@ -98,7 +98,6 @@ def check_environ(product): ...@@ -98,7 +98,6 @@ def check_environ(product):
if product not in ("firefox", "servo"): if product not in ("firefox", "servo"):
config = serve.load_config(os.path.join(wpt_root, "config.default.json"), config = serve.load_config(os.path.join(wpt_root, "config.default.json"),
os.path.join(wpt_root, "config.json")) os.path.join(wpt_root, "config.json"))
config = serve.normalise_config(config, {})
expected_hosts = (set(config["domains"].itervalues()) ^ expected_hosts = (set(config["domains"].itervalues()) ^
set(config["not_domains"].itervalues())) set(config["not_domains"].itervalues()))
missing_hosts = set(expected_hosts) missing_hosts = set(expected_hosts)
...@@ -202,8 +201,7 @@ Consider installing certutil via your OS package manager or directly.""") ...@@ -202,8 +201,7 @@ Consider installing certutil via your OS package manager or directly.""")
kwargs["test_types"].remove("wdspec") kwargs["test_types"].remove("wdspec")
if kwargs["prefs_root"] is None: if kwargs["prefs_root"] is None:
print("Downloading gecko prefs") prefs_root = self.browser.install_prefs(kwargs["binary"], self.venv.path)
prefs_root = self.browser.install_prefs(self.venv.path)
kwargs["prefs_root"] = prefs_root kwargs["prefs_root"] = prefs_root
......
import json
import logging
import os
from collections import defaultdict, Mapping
import sslutils
from localpaths import repo_root
from .utils import get_port
# Mapping from deprecated config keys to their current names; Config warns
# and translates these when they appear in constructor kwargs or overrides.
_renamed_props = {
    "host": "browser_host",
    "bind_hostname": "bind_address",
    "external_host": "server_host",
    "host_ip": "server_host",
}
def _merge_dict(base_dict, override_dict):
rv = base_dict.copy()
for key, value in base_dict.iteritems():
if key in override_dict:
if isinstance(value, dict):
rv[key] = _merge_dict(value, override_dict[key])
else:
rv[key] = override_dict[key]
return rv
class Config(Mapping):
    """wptserve configuration.

    Inherits from Mapping for backwards compatibility with the old
    dict-based config, so both ``config["key"]`` and ``config.key`` work.
    Settings default to the contents of config.default.json and may be
    overridden via keyword arguments or :meth:`update`.
    """

    # Defaults are read once, at class-definition time.
    with open(os.path.join(repo_root, "config.default.json"), "rb") as _fp:
        _default = json.load(_fp)

    def __init__(self,
                 logger=None,
                 subdomains=set(),
                 not_subdomains=set(),
                 **kwargs):
        # NOTE: the mutable set() defaults are never mutated here, so
        # sharing them between instances is harmless.
        self.log_level = kwargs.get("log_level", "DEBUG")

        if logger is None:
            self._logger_name = "web-platform-tests"
        else:
            # Adopt the caller's logger name and, if it has one, its level.
            level_name = logging.getLevelName(logger.level)
            if level_name != "NOTSET":
                self.log_level = level_name
            self._logger_name = logger.name

        # Populate every known setting from kwargs, falling back to defaults.
        for k, v in self._default.iteritems():
            setattr(self, k, kwargs.pop(k, v))

        self.subdomains = subdomains
        self.not_subdomains = not_subdomains

        # Accept, with a deprecation warning, keys that were renamed.
        for k, new_k in _renamed_props.iteritems():
            if k in kwargs:
                self.logger.warning(
                    "%s in config is deprecated; use %s instead" % (
                        k,
                        new_k
                    )
                )
                setattr(self, new_k, kwargs.pop(k))

        self.override_ssl_env = kwargs.pop("override_ssl_env", None)

        if kwargs:
            raise TypeError("__init__() got unexpected keyword arguments %r" % (tuple(kwargs),))

    def __getitem__(self, k):
        try:
            return getattr(self, k)
        except AttributeError:
            raise KeyError(k)

    def __iter__(self):
        # Iterates public attribute names; this includes properties and
        # methods, preserving the historical Mapping behaviour.
        return iter([x for x in dir(self) if not x.startswith("_")])

    def __len__(self):
        return len([x for x in dir(self) if not x.startswith("_")])

    def update(self, override):
        """Apply an overrides dict on top of the current config values.

        Raises KeyError for keys that are neither known settings nor
        recognised renamed settings.
        """
        override = override.copy()

        for k in self._default:
            if k in override:
                self._set_override(k, override.pop(k))

        for k, new_k in _renamed_props.iteritems():
            if k in override:
                self.logger.warning(
                    "%s in config is deprecated; use %s instead" % (
                        k,
                        new_k
                    )
                )
                self._set_override(new_k, override.pop(k))

        if override:
            k = next(iter(override))
            raise KeyError("unknown config override '%s'" % k)

    def _set_override(self, k, v):
        # Dict-valued settings are merged key-by-key rather than replaced.
        old_v = getattr(self, k)
        if isinstance(old_v, dict):
            setattr(self, k, _merge_dict(old_v, v))
        else:
            setattr(self, k, v)

    @property
    def ports(self):
        """Resolved port numbers per scheme.

        "auto" entries are allocated on first read and kept stable across
        subsequent reads; ssl schemes yield None when ssl is unavailable.
        """
        try:
            old_ports = self._computed_ports
        except AttributeError:
            old_ports = {}
        self._computed_ports = defaultdict(list)

        for scheme, ports in self._ports.iteritems():
            for i, port in enumerate(ports):
                if scheme in ["wss", "https"] and not self.ssl_env.ssl_enabled:
                    port = None
                if port == "auto":
                    try:
                        # Reuse a previously allocated port if we have one.
                        port = old_ports[scheme][i]
                    except (KeyError, IndexError):
                        port = get_port(self.server_host)
                self._computed_ports[scheme].append(port)
        return self._computed_ports

    @ports.setter
    def ports(self, v):
        self._ports = v

    @property
    def doc_root(self):
        # Falls back to the repository root when unset.
        return self._doc_root if self._doc_root is not None else repo_root

    @doc_root.setter
    def doc_root(self, v):
        self._doc_root = v

    @property
    def ws_doc_root(self):
        # Defaults to the websockets handlers directory under doc_root.
        if self._ws_doc_root is not None:
            return self._ws_doc_root
        else:
            return os.path.join(self.doc_root, "websockets", "handlers")

    @ws_doc_root.setter
    def ws_doc_root(self, v):
        self._ws_doc_root = v

    @property
    def server_host(self):
        # Defaults to browser_host when no separate server host is set.
        return self._server_host if self._server_host is not None else self.browser_host

    @server_host.setter
    def server_host(self, v):
        self._server_host = v

    @property
    def domains(self):
        """Map of subdomain name -> full IDNA-encoded hostname, plus the
        bare browser host under the "" key."""
        # browser_host itself must already be in IDNA form.
        assert self.browser_host.encode("idna") == self.browser_host
        domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
                   for subdomain in self.subdomains}
        domains[""] = self.browser_host
        return domains

    @property
    def not_domains(self):
        """Map of subdomain name -> hostname expected NOT to resolve."""
        assert self.browser_host.encode("idna") == self.browser_host
        domains = {subdomain: (subdomain.encode("idna") + u"." + self.browser_host)
                   for subdomain in self.not_subdomains}
        return domains

    @property
    def all_domains(self):
        domains = self.domains.copy()
        domains.update(self.not_domains)
        return domains

    @property
    def ssl_env(self):
        try:
            if self.override_ssl_env is not None:
                return self.override_ssl_env
        except AttributeError:
            # override_ssl_env is assigned late in __init__; treat its
            # absence the same as None.
            pass

        implementation_type = self.ssl["type"]
        try:
            cls = sslutils.environments[implementation_type]
        except KeyError:
            # Fix: error message previously read "vaid".
            raise ValueError("%s is not a valid ssl type." % implementation_type)
        kwargs = self.ssl.get(implementation_type, {}).copy()
        return cls(self.logger, **kwargs)

    @property
    def paths(self):
        return {"doc_root": self.doc_root,
                "ws_doc_root": self.ws_doc_root}

    @property
    def ssl_config(self):
        key_path, cert_path = self.ssl_env.host_cert_path(self.domains.itervalues())
        return {"key_path": key_path,
                "cert_path": cert_path,
                "encrypt_after_connect": self.ssl["encrypt_after_connect"]}

    @property
    def log_level(self):
        # Stored as a name ("DEBUG"); exposed as the numeric logging level.
        return getattr(logging, self._log_level)

    @log_level.setter
    def log_level(self, value):
        self._log_level = value.upper()

    @property
    def logger(self):
        logger = logging.getLogger(self._logger_name)
        logger.setLevel(self.log_level)
        return logger

    def as_dict(self):
        rv = {
            "domains": list(self.domains),
            # Fix: this key was previously misspelled "sundomains".
            "subdomains": list(self.subdomains),
        }
        for item in self._default.iterkeys():
            rv[item] = getattr(self, item)
        return rv
import cgi import cgi
import json import json
import os import os
import sys
import traceback import traceback
from six.moves.urllib.parse import parse_qs, quote, unquote, urljoin from six.moves.urllib.parse import parse_qs, quote, unquote, urljoin
...@@ -231,8 +232,11 @@ class PythonScriptHandler(object): ...@@ -231,8 +232,11 @@ class PythonScriptHandler(object):
def __call__(self, request, response): def __call__(self, request, response):
path = filesystem_path(self.base_path, request, self.url_base) path = filesystem_path(self.base_path, request, self.url_base)
sys_path = sys.path[:]
sys_modules = sys.modules.copy()
try: try:
environ = {"__file__": path} environ = {"__file__": path}
sys.path.insert(0, os.path.dirname(path))
execfile(path, environ, environ) execfile(path, environ, environ)
if "main" in environ: if "main" in environ:
handler = FunctionHandler(environ["main"]) handler = FunctionHandler(environ["main"])
...@@ -242,6 +246,10 @@ class PythonScriptHandler(object): ...@@ -242,6 +246,10 @@ class PythonScriptHandler(object):
raise HTTPException(500, "No main function in script %s" % path) raise HTTPException(500, "No main function in script %s" % path)
except IOError: except IOError:
raise HTTPException(404) raise HTTPException(404)
finally:
sys.path = sys_path
sys.modules = sys_modules
python_script_handler = PythonScriptHandler() python_script_handler = PythonScriptHandler()
...@@ -252,6 +260,8 @@ class FunctionHandler(object): ...@@ -252,6 +260,8 @@ class FunctionHandler(object):
def __call__(self, request, response): def __call__(self, request, response):
try: try:
rv = self.func(request, response) rv = self.func(request, response)
except HTTPException:
raise
except Exception: except Exception:
msg = traceback.format_exc() msg = traceback.format_exc()
raise HTTPException(500, message=msg) raise HTTPException(500, message=msg)
......
from cgi import escape from cgi import escape
import gzip as gzip_module import gzip as gzip_module
import hashlib
import os
import re import re
import time import time
import types import types
...@@ -277,6 +279,10 @@ def slice(request, response, start, end=None): ...@@ -277,6 +279,10 @@ def slice(request, response, start, end=None):
class ReplacementTokenizer(object): class ReplacementTokenizer(object):
def arguments(self, token):
    """Tokenize a parenthesised argument list such as ``(a, b)``.

    Returns ("arguments", [args...]); an empty ``()`` yields an empty list
    (re.split on "" would otherwise produce [""]).
    """
    unwrapped = token[1:-1]
    # Fix: use the already-computed unwrapped value instead of
    # recomputing token[1:-1] in the split.
    args = re.split(r",\s*", unwrapped) if unwrapped else []
    return ("arguments", args)
def ident(self, token): def ident(self, token):
return ("ident", token) return ("ident", token)
...@@ -296,8 +302,9 @@ class ReplacementTokenizer(object): ...@@ -296,8 +302,9 @@ class ReplacementTokenizer(object):
return self.scanner.scan(string)[0] return self.scanner.scan(string)[0]
scanner = re.Scanner([(r"\$\w+:", var), scanner = re.Scanner([(r"\$\w+:", var),
(r"\$?\w+(?:\(\))?", ident), (r"\$?\w+", ident),
(r"\[[^\]]*\]", index)]) (r"\[[^\]]*\]", index),
(r"\([^)]*\)", arguments)])
class FirstWrapper(object): class FirstWrapper(object):
...@@ -339,6 +346,11 @@ def sub(request, response, escape_type="html"): ...@@ -339,6 +346,11 @@ def sub(request, response, escape_type="html"):
A dictionary of query parameters supplied with the request. A dictionary of query parameters supplied with the request.
uuid() uuid()
A pesudo-random UUID suitable for usage with stash A pesudo-random UUID suitable for usage with stash
file_hash(algorithm, filepath)
The cryptographic hash of a file. Supported algorithms: md5, sha1,
sha224, sha256, sha384, and sha512. For example:
{{file_hash(md5, dom/interfaces.html)}}
So for example in a setup running on localhost with a www So for example in a setup running on localhost with a www
subdomain and a http server on ports 80 and 81:: subdomain and a http server on ports 80 and 81::
...@@ -351,7 +363,7 @@ def sub(request, response, escape_type="html"): ...@@ -351,7 +363,7 @@ def sub(request, response, escape_type="html"):
It is also possible to assign a value to a variable name, which must start with It is also possible to assign a value to a variable name, which must start with
the $ character, using the ":" syntax e.g. the $ character, using the ":" syntax e.g.
{{$id:uuid()} {{$id:uuid()}}
Later substitutions in the same file may then refer to the variable Later substitutions in the same file may then refer to the variable
by name e.g. by name e.g.
...@@ -365,6 +377,39 @@ def sub(request, response, escape_type="html"): ...@@ -365,6 +377,39 @@ def sub(request, response, escape_type="html"):
response.content = new_content response.content = new_content
return response return response
class SubFunctions(object):
    """Template substitution functions, invoked as ``{{name(args)}}`` in
    templated response files."""

    @staticmethod
    def uuid(request):
        """Return a fresh pseudo-random UUID string."""
        return str(uuid.uuid4())

    # Maintain a whitelist of supported algorithms, restricted to those that
    # are available on all platforms [1]. This ensures that test authors do not
    # unknowingly introduce platform-specific tests.
    #
    # [1] https://docs.python.org/2/library/hashlib.html
    supported_algorithms = ("md5", "sha1", "sha224", "sha256", "sha384", "sha512")

    @staticmethod
    def file_hash(request, algorithm, path):
        """Return the base64-encoded *algorithm* digest of the file at
        *path*, resolved relative to the request's document root."""
        import base64

        if algorithm not in SubFunctions.supported_algorithms:
            # Fix: these are hash algorithms, not encryption algorithms.
            raise ValueError("Unsupported hash algorithm: '%s'" % algorithm)

        hash_obj = getattr(hashlib, algorithm)()
        absolute_path = os.path.join(request.doc_root, path)

        try:
            # Fix: open in binary mode -- hashing must see the raw bytes,
            # and text mode would corrupt the digest on Windows due to
            # newline translation.
            with open(absolute_path, "rb") as f:
                hash_obj.update(f.read())
        except IOError:
            # In this context, an unhandled IOError will be interpreted by the
            # server as an indication that the template file is non-existent.
            # Although the generic "Exception" is less precise, it avoids
            # triggering a potentially-confusing HTTP 404 error in cases where
            # the path to the file to be hashed is invalid.
            raise Exception('Cannot open file for hash computation: "%s"' % absolute_path)

        # Fix: base64.b64encode instead of .encode('base64') -- works on
        # Python 3 and never inserts line breaks inside long (sha384/sha512)
        # digests, which the old codec did at 76 characters.
        return base64.b64encode(hash_obj.digest()).decode("ascii")
def template(request, content, escape_type="html"): def template(request, content, escape_type="html"):
#TODO: There basically isn't any error handling here #TODO: There basically isn't any error handling here
tokenizer = ReplacementTokenizer() tokenizer = ReplacementTokenizer()
...@@ -382,12 +427,15 @@ def template(request, content, escape_type="html"): ...@@ -382,12 +427,15 @@ def template(request, content, escape_type="html"):
else: else:
variable = None variable = None
assert tokens[0][0] == "ident" and all(item[0] == "index" for item in tokens[1:]), tokens assert tokens[0][0] == "ident", tokens
assert all(item[0] in ("index", "arguments") for item in tokens[1:]), tokens
field = tokens[0][1] field = tokens[0][1]
if field in variables: if field in variables:
value = variables[field] value = variables[field]
elif hasattr(SubFunctions, field):
value = getattr(SubFunctions, field)
elif field == "headers": elif field == "headers":
value = request.headers value = request.headers
elif field == "GET": elif field == "GET":
...@@ -414,15 +462,16 @@ def template(request, content, escape_type="html"): ...@@ -414,15 +462,16 @@ def template(request, content, escape_type="html"):
"path": request.url_parts.path, "path": request.url_parts.path,
"pathname": request.url_parts.path, "pathname": request.url_parts.path,
"query": "?%s" % request.url_parts.query} "query": "?%s" % request.url_parts.query}
elif field == "uuid()":
value = str(uuid.uuid4())
elif field == "url_base": elif field == "url_base":
value = request.url_base value = request.url_base
else: else:
raise Exception("Undefined template variable %s" % field) raise Exception("Undefined template variable %s" % field)
for item in tokens[1:]: for item in tokens[1:]:
if item[0] == "index":
value = value[item[1]] value = value[item[1]]
else:
value = value(request, *item[1])
assert isinstance(value, (int,) + types.StringTypes), tokens assert isinstance(value, (int,) + types.StringTypes), tokens
......
...@@ -400,9 +400,10 @@ class WebTestHttpd(object): ...@@ -400,9 +400,10 @@ class WebTestHttpd(object):
server_cls = WebTestServer server_cls = WebTestServer
if use_ssl: if use_ssl:
if key_file is not None: if not os.path.exists(key_file):
assert os.path.exists(key_file) raise ValueError("SSL certificate not found: {}".format(key_file))
assert certificate is not None and os.path.exists(certificate) if not os.path.exists(certificate):
raise ValueError("SSL key not found: {}".format(certificate))
try: try:
self.httpd = server_cls((host, port), self.httpd = server_cls((host, port),
......
import socket
def invert_dict(dict): def invert_dict(dict):
rv = {} rv = {}
for key, values in dict.iteritems(): for key, values in dict.iteritems():
...@@ -12,3 +14,93 @@ class HTTPException(Exception): ...@@ -12,3 +14,93 @@ class HTTPException(Exception):
def __init__(self, code, message=""): def __init__(self, code, message=""):
self.code = code self.code = code
self.message = message self.message = message
def _open_socket(host, port):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if port != 0:
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind((host, port))
sock.listen(5)
return sock
def is_bad_port(port):
    """Return True if *port* is blocked by browsers.

    Bad port as per https://fetch.spec.whatwg.org/#port-blocking
    """
    blocked_ports = {
        1,     # tcpmux
        7,     # echo
        9,     # discard
        11,    # systat
        13,    # daytime
        15,    # netstat
        17,    # qotd
        19,    # chargen
        20,    # ftp-data
        21,    # ftp
        22,    # ssh
        23,    # telnet
        25,    # smtp
        37,    # time
        42,    # name
        43,    # nicname
        53,    # domain
        77,    # priv-rjs
        79,    # finger
        87,    # ttylink
        95,    # supdup
        101,   # hostriame
        102,   # iso-tsap
        103,   # gppitnp
        104,   # acr-nema
        109,   # pop2
        110,   # pop3
        111,   # sunrpc
        113,   # auth
        115,   # sftp
        117,   # uucp-path
        119,   # nntp
        123,   # ntp
        135,   # loc-srv / epmap
        139,   # netbios
        143,   # imap2
        179,   # bgp
        389,   # ldap
        465,   # smtp+ssl
        512,   # print / exec
        513,   # login
        514,   # shell
        515,   # printer
        526,   # tempo
        530,   # courier
        531,   # chat
        532,   # netnews
        540,   # uucp
        556,   # remotefs
        563,   # nntp+ssl
        587,   # smtp
        601,   # syslog-conn
        636,   # ldap+ssl
        993,   # imap+ssl
        995,   # pop3+ssl
        2049,  # nfs
        3659,  # apple-sasl
        4045,  # lockd
        6000,  # x11
        6665,  # irc (alternate)
        6666,  # irc (alternate)
        6667,  # irc (default)
        6668,  # irc (alternate)
        6669,  # irc (alternate)
    }
    return port in blocked_ports
def get_port(host):
    """Return a free port on *host* that is not on the Fetch bad-port list."""
    while True:
        probe = _open_socket(host, 0)
        candidate = probe.getsockname()[1]
        probe.close()
        # The kernel may hand back a blocked port; keep probing until we
        # get one that browsers will actually connect to.
        if not is_bad_port(candidate):
            return candidate
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment