Commit 8dc2204a authored by nednguyen, committed by Commit bot

Refactor prepareToMeasureValuesAsync into startMeasureValuesAsync, which runs the test through a callback

Motivation:
In order to support tracing metrics for prepareToMeasureValuesAsync tests,
we need to be able to trigger tracing between the prepareToMeasureValuesAsync
call and the actual test run. This CL renames the prepareToMeasureValuesAsync
method to startMeasureValuesAsync and refactors it so that the actual test
code is specified as a run callback, which allows the framework to insert the
startTracing() call before running the test in the future.
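
For illustration, the calling pattern in a test changes roughly as follows
(a sketch based on the diffs below; runTest/testDone stand in for a test's
own functions, and the description string is a placeholder):

    // Before: the test kicks itself off after preparing the runner.
    PerfTestRunner.prepareToMeasureValuesAsync({
        description: "...",
        unit: 'ms',
        done: testDone,
    });
    runTest();

    // After: the test body is handed to the runner as a callback, so the
    // framework can later interpose startTracing() before invoking it.
    PerfTestRunner.startMeasureValuesAsync({
        description: "...",
        unit: 'ms',
        run: runTest,
        done: testDone,
    });

Internally, startMeasureValuesAsync calls start(test, undefined,
function() { test.run() }), so tests without their own scheduler have
scheduleNextRun invoke test.run directly.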

BUG=701059

Review-Url: https://codereview.chromium.org/2858783003
Cr-Commit-Position: refs/heads/master@{#469233}
parent 3c2d1c61
@@ -27,12 +27,12 @@ function testDone() {
 }
 
 window.onload = function () {
-    PerfTestRunner.prepareToMeasureValuesAsync({
+    PerfTestRunner.startMeasureValuesAsync({
         description: "Measures time to run 1000 postMessage round trips in serial.",
         unit: 'ms',
+        run: runTest,
         done: testDone,
     });
-    runTest();
 };
 </script>
 <div id="log"></div>
...
@@ -22,12 +22,13 @@ const StructuredClonePerfTestRunner = (function() {
   return {
     measureTimeAsync(test) {
       let isDone = false;
-      PerfTestRunner.prepareToMeasureValuesAsync({
+      PerfTestRunner.startMeasureValuesAsync({
         description: test.description,
         unit: 'ms',
         warmUpCount: test.warmUpCount || 10,
         iterationCount: test.iterationCount || 250,
         done() { isDone = true; },
+        run: pingPongUntilDone,
       });
 
       function pingPongUntilDone() {
@@ -40,7 +41,6 @@ const StructuredClonePerfTestRunner = (function() {
           if (!isDone) pingPongUntilDone();
         });
       }
-      pingPongUntilDone();
     },
   };
 })();
@@ -14,9 +14,11 @@ if (!window.GCController && !window.gc)
     test.unit = 'ms';
     if (!test.warmUpCount)
         test.warmUpCount = warmUpCount;
+    if (!test.run)
+        test.run = function() {};
 
     completedIterations = 0;
-    PerfTestRunner.prepareToMeasureValuesAsync(test);
+    PerfTestRunner.startMeasureValuesAsync(test);
 
     // Force a V8 GC before running Blink GC test to avoid measuring marking from stale V8 wrappers.
     if (window.GCController)
...
@@ -9,14 +9,18 @@
 var CanvasRunner = {};
 
 CanvasRunner.start = function (test) {
-    PerfTestRunner.prepareToMeasureValuesAsync({unit: 'runs/s',
-        description: test.description, done: testDone});
-    if (!test.doRun) {
-        CanvasRunner.logFatalError("doRun must be set.");
-        return;
-    }
-    currentTest = test;
-    runTest();
+    PerfTestRunner.startMeasureValuesAsync({
+        unit: 'runs/s',
+        description: test.description,
+        done: testDone,
+        run: function() {
+            if (!test.doRun) {
+                CanvasRunner.logFatalError("doRun must be set.");
+                return;
+            }
+            currentTest = test;
+            runTest();
+        }});
 }
 
 function runTest() {
...
@@ -39,14 +39,16 @@ function runTest(myCanvas) {
 
 window.onload = function () {
     canvas_idle = createCanvas4kBy4k("canvas_idle");
-    PerfTestRunner.prepareToMeasureValuesAsync({
+    PerfTestRunner.startMeasureValuesAsync({
         unit: 'ms',
         done: function () {
             isDone = true;
         },
-        description: "Measures performance of canvas."
+        run: function() {
+            runTest(canvas_idle);
+        },
+        description: "Measures performance of canvas."
     });
-    runTest(canvas_idle);
 };
 </script>
 </body>
...
@@ -33,14 +33,16 @@ function runTest(myCanvas) {
 
 window.onload = function () {
     canvas_idle = createCanvas4k("canvas_idle");
-    PerfTestRunner.prepareToMeasureValuesAsync({
+    PerfTestRunner.startMeasureValuesAsync({
         unit: 'ms',
         done: function () {
             isDone = true;
         },
+        run: function() {
+            runTest(canvas_idle);
+        },
         description: "Measures performance of canvas."
     });
-    runTest(canvas_idle);
 };
 </script>
 </body>
...
@@ -4,38 +4,39 @@
     setup: function(testName) {
         var ITERATION_COUNT = 5;
 
-        PerfTestRunner.prepareToMeasureValuesAsync({
+        PerfTestRunner.startMeasureValuesAsync({
             iterationCount: ITERATION_COUNT,
             doNotMeasureMemoryUsage: true,
             doNotIgnoreInitialRun: true,
-            unit: 'runs/s'
-        });
-
-        var iframe = document.createElement("iframe");
-        var url = DRT.baseURL + "?" + testName + '&numTests=' + ITERATION_COUNT;
-        iframe.setAttribute("src", url);
-        document.body.insertBefore(iframe, document.body.firstChild);
-        iframe.addEventListener(
-            "load", function() {
-                DRT.targetDocument = iframe.contentDocument;
-                DRT.targetWindow = iframe.contentDocument.defaultView;
-            });
+            unit: 'runs/s',
+            run: function() {
+                var iframe = document.createElement("iframe");
+                var url = DRT.baseURL + "?" + testName + '&numTests=' + ITERATION_COUNT;
+                iframe.setAttribute("src", url);
+                document.body.insertBefore(iframe, document.body.firstChild);
+                iframe.addEventListener(
+                    "load", function() {
+                        DRT.targetDocument = iframe.contentDocument;
+                        DRT.targetWindow = iframe.contentDocument.defaultView;
+                    });
 
         window.addEventListener(
             "message",
             function(event) {
                 switch(event.data.name) {
                 case "dromaeo:ready":
                     DRT.start();
                     break;
                 case "dromaeo:progress":
                     DRT.progress(event.data);
                     break;
                 case "dromaeo:alldone":
                     DRT.teardown(event.data);
                     break;
                 }
             });
+            }
+        });
     },
 
     start: function() {
...
@@ -9,26 +9,29 @@ iframe.style.display = "none"; // Prevent creation of the rendering tree, so we
 iframe.sandbox = ''; // Prevent external script loads which could cause write() to return before completing the parse.
 document.body.appendChild(iframe);
 
-PerfTestRunner.prepareToMeasureValuesAsync({
-    description: "Measures performance of the threaded HTML parser (if available).",
-    done: onCompletedRun,
-    unit: 'ms'
-});
-
-iframe.onload = function() {
-    var now = PerfTestRunner.now();
-    PerfTestRunner.measureValueAsync(now - then);
-    then = now;
-    iframe.src = specURL;
-}
-
-var then = PerfTestRunner.now();
-iframe.src = specURL;
-
 function onCompletedRun() {
     iframe.onload = null;
     // FIXME: This should probably remove the iframe, but that currently
     // causes the threaded-parser to never send the load event for the main page.
     // document.body.removeChild(iframe);
 }
+
+PerfTestRunner.startMeasureValuesAsync({
+    description: "Measures performance of the threaded HTML parser (if available).",
+    done: onCompletedRun,
+    unit: 'ms',
+    run: function() {
+        iframe.onload = function() {
+            var now = PerfTestRunner.now();
+            PerfTestRunner.measureValueAsync(now - then);
+            then = now;
+            iframe.src = specURL;
+        }
+        var then = PerfTestRunner.now();
+        iframe.src = specURL;
+    }
+});
 </script>
 </body>
@@ -145,7 +145,7 @@ if (window.testRunner) {
     };
 
     function start(test, scheduler, runner) {
-        if (!test) {
+        if (!test || !runner) {
            PerfTestRunner.logFatalError("Got a bad test object.");
            return;
         }
@@ -170,24 +170,36 @@ if (window.testRunner) {
         if (test.doNotIgnoreInitialRun)
             completedIterations++;
 
-        if (runner && test.tracingCategories && window.testRunner &&
-            window.testRunner.supportTracing) {
-            window.testRunner.startTracing(test.tracingCategories, function() {
+        if (!test.tracingCategories) {
+            scheduleNextRun(scheduler, runner);
+            return;
+        }
+
+        if (window.testRunner && window.testRunner.supportTracing) {
+            testRunner.startTracing(test.tracingCategories, function() {
                 scheduleNextRun(scheduler, runner);
             });
-        } else if (runner) {
-            if (test.tracingCategories && !(window.testRuner &&
-                window.testRunner.supportTracing)) {
-                PerfTestRunner.log("Tracing based metrics are specified but " +
-                    "tracing is not supported on this platform. To get those " +
-                    "metrics from this test, you can run the test using " +
-                    "tools/perf/run_benchmarks script.");
-            }
-            scheduleNextRun(scheduler, runner);
+            return;
         }
+
+        PerfTestRunner.log("Tracing based metrics are specified but " +
+            "tracing is not supported on this platform. To get those " +
+            "metrics from this test, you can run the test using " +
+            "tools/perf/run_benchmarks script.");
+        scheduleNextRun(scheduler, runner);
     }
 
     function scheduleNextRun(scheduler, runner) {
+        if (!scheduler) {
+            // This is an async measurement test which has its own scheduler.
+            try {
+                runner();
+            } catch (exception) {
+                PerfTestRunner.logFatalError("Got an exception while running test.run with name=" + exception.name + ", message=" + exception.message);
+            }
+            return;
+        }
         scheduler(function () {
             // This will be used by tools/perf/benchmarks/blink_perf.py to find
             // traces during the measured runs.
@@ -264,9 +276,9 @@ if (window.testRunner) {
         }
     }
 
-    PerfTestRunner.prepareToMeasureValuesAsync = function (test) {
+    PerfTestRunner.startMeasureValuesAsync = function (test) {
         PerfTestRunner.unit = test.unit;
-        start(test);
+        start(test, undefined, function() { test.run() });
     }
 
     PerfTestRunner.measureValueAsync = function (measuredValue) {
...