Diff
Modified: trunk/LayoutTests/ChangeLog (130098 => 130099)
--- trunk/LayoutTests/ChangeLog 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/LayoutTests/ChangeLog 2012-10-01 23:48:17 UTC (rev 130099)
@@ -1,3 +1,21 @@
+2012-09-28 Ryosuke Niwa <rn...@webkit.org>
+
+ Encapsulate private properties in PerfTestRunner better
+ https://bugs.webkit.org/show_bug.cgi?id=97833
+
+ Reviewed by Ojan Vafai.
+
+ Override PerfTestRunner.now instead of PerfTestRunner._perSecondRunnerIterator since the latter
+ is no longer exposed.
+
+ * fast/harness/perftests/runs-per-second-iterations-expected.txt:
+ * fast/harness/perftests/runs-per-second-iterations.html: Increase the runtime of the last 4 runs
+ since test.timeToRun is no longer supported by PerfTestRunner.
+ * fast/harness/perftests/runs-per-second-log-expected.txt:
+ * fast/harness/perftests/runs-per-second-log.html: Avoid using numbers that contain prime factors
+ other than 2 and 5 as run counts, because they cause rounding errors in
+ PerfTestRunner.measureRunsPerSecondOnce and make the output dependent on the underlying
+ floating-point implementation.
+
2012-10-01 Roger Fong <roger_f...@apple.com>
Unreviewed. Rolling out changes from http://trac.webkit.org/changeset/119130.
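As the entry above notes, the tests now stub the public PerfTestRunner.now hook instead of the removed PerfTestRunner._perSecondRunnerIterator. A minimal sketch of that pattern, assuming resources/runner.js has already defined window.PerfTestRunner (the fake 50ms step and the empty callbacks are placeholders, not part of this changeset):

    // Sketch only: replace the real clock with a deterministic one so the
    // harness sees predictable elapsed times on any machine.
    var fakeTime = 0;
    PerfTestRunner.now = function () {
        fakeTime += 50;                // pretend 50ms pass between reads
        return fakeTime;
    };

    PerfTestRunner.runPerSecond({
        run: function () { /* work under test */ },
        runCount: 1,
        done: function () { /* assertions on the logged output */ }
    });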
Modified: trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations-expected.txt (130098 => 130099)
--- trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations-expected.txt 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations-expected.txt 2012-10-01 23:48:17 UTC (rev 130099)
@@ -2,7 +2,7 @@
On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
-Returning times: [0, 10, 30, 60, 100, 100, 100, 100, 500]
+Returning times: [0, 10, 30, 60, 200, 200, 200, 200, 750]
PASS callsInIterations[0] is 1
PASS callsInIterations[1] is 10
PASS callsInIterations[1] + 5 < callsInIterations[2] is true
Modified: trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations.html (130098 => 130099)
--- trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations.html 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/LayoutTests/fast/harness/perftests/runs-per-second-iterations.html 2012-10-01 23:48:17 UTC (rev 130099)
@@ -10,23 +10,32 @@
description("This test verifies PerfTestRunner.runPerSecond() calls runFunction as many times as expected.");
var iteration = 0;
-
+var currentTime = 100;
var callsInIterations = [];
-var timesInIterations = [0, 10, 30, 60, 100, 100, 100, 100, 500];
+var timesInIterations = [0, 10, 30, 60, 200, 200, 200, 200, 750];
var logLines = [];
-PerfTestRunner.log = function (line) { logLines.push(line); }
-PerfTestRunner._perSecondRunnerIterator = function (callsPerIteration) {
- callsInIterations[iteration] = callsPerIteration;
- return timesInIterations[iteration++];
+PerfTestRunner.now = function () {
+ iteration++;
+ if (iteration % 2 == 0)
+ currentTime += timesInIterations.shift();
+ return currentTime;
}
+PerfTestRunner.log = function (line) { logLines.push(line); }
+
+var originalTimesInIterations = timesInIterations.slice();
+
PerfTestRunner.runPerSecond({
- run: function () { },
+ run: function () {
+ var i = Math.floor(iteration / 2);
+ if (callsInIterations[i] == undefined)
+ callsInIterations[i] = 0;
+ callsInIterations[i]++;
+ },
runCount: 1,
- timeToRun: 500,
done: function () {
- debug("Returning times: [" + timesInIterations.join(", ") + "]");
+ debug("Returning times: [" + originalTimesInIterations.join(", ") + "]");
shouldEvaluateTo("callsInIterations[0]", 1);
shouldEvaluateTo("callsInIterations[1]", 10);
shouldBeTrue("callsInIterations[1] + 5 < callsInIterations[2]");
Modified: trunk/LayoutTests/fast/harness/perftests/runs-per-second-log-expected.txt (130098 => 130099)
--- trunk/LayoutTests/fast/harness/perftests/runs-per-second-log-expected.txt 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/LayoutTests/fast/harness/perftests/runs-per-second-log-expected.txt 2012-10-01 23:48:17 UTC (rev 130099)
@@ -1,18 +1,18 @@
This test verifies PerfTestRunner.runPerSecond() outputs log as expected.
Running 5 times
-Ignoring warm-up run (0 runs/s)
-1 runs/s
+Ignoring warm-up run (1 runs/s)
2 runs/s
-3 runs/s
4 runs/s
5 runs/s
+8 runs/s
+10 runs/s
Time:
-values 1, 2, 3, 4, 5 runs/s
-avg 3 runs/s
-median 3 runs/s
-stdev 1.41 runs/s
-min 1 runs/s
-max 5 runs/s
+values 2, 4, 5, 8, 10 runs/s
+avg 5.8 runs/s
+median 5 runs/s
+stdev 2.86 runs/s
+min 2 runs/s
+max 10 runs/s
Modified: trunk/LayoutTests/fast/harness/perftests/runs-per-second-log.html (130098 => 130099)
--- trunk/LayoutTests/fast/harness/perftests/runs-per-second-log.html 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/LayoutTests/fast/harness/perftests/runs-per-second-log.html 2012-10-01 23:48:17 UTC (rev 130099)
@@ -10,28 +10,37 @@
var initial = true;
var runs = 0;
-PerfTestRunner._perSecondRunnerIterator = function (callsPerIteration) {
- return 1000 / runs;
+var iteration = 0;
+var currentTime = 0;
+
+PerfTestRunner.now = function () {
+ iteration++;
+ if (iteration % 2 == 0)
+ currentTime += 1000 / runs;
+ return currentTime;
}
PerfTestRunner.storeHeapResults = function () { }
var printStatistics = PerfTestRunner.printStatistics;
-PerfTestRunner.printStatistics = function (statistics) {
+PerfTestRunner.printStatistics = function (statistics, title) {
+ if (statistics.unit == 'bytes')
+ return;
statistics.stdev = statistics.stdev.toPrecision(3);
- return printStatistics.call(PerfTestRunner, statistics, 'Time:');
+ return printStatistics.call(PerfTestRunner, statistics, title);
}
PerfTestRunner.runPerSecond({
setup: function () {
- if (initial)
- initial = false;
- else
+ runs++;
+ // Avoid 3, 6, 7, 9.
+ if (runs == 6)
+ runs += 2;
+ else if (!(runs % 3))
runs++;
},
run: function () { },
runCount: 5,
- timeToRun: 500,
});
</script>
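The "Avoid 3, 6, 7, 9" adjustment above follows from the rounding caveat in the LayoutTests ChangeLog: for run counts in this range, 1000 / runs is exactly representable in binary floating point only when runs has no prime factors other than 2 and 5. A quick illustration, independent of the harness:

    console.log(1000 / 2);   // 500                exact
    console.log(1000 / 4);   // 250                exact
    console.log(1000 / 5);   // 200                exact
    console.log(1000 / 8);   // 125                exact
    console.log(1000 / 3);   // 333.3333333333333  already rounded
    console.log(1000 / 7);   // 142.85714285714286 already rounded
    // measureRunsPerSecondOnce sums such per-batch times into totalTime and
    // then logs numberOfRuns * 1000 / totalTime, so any rounding above can
    // leak into the last digits of the reported runs/s value.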
Modified: trunk/PerformanceTests/ChangeLog (130098 => 130099)
--- trunk/PerformanceTests/ChangeLog 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/PerformanceTests/ChangeLog 2012-10-01 23:48:17 UTC (rev 130099)
@@ -1,3 +1,32 @@
+2012-10-01 Ryosuke Niwa <rn...@webkit.org>
+
+ Encapsulate private properties in PerfTestRunner better
+ https://bugs.webkit.org/show_bug.cgi?id=97833
+
+ Reviewed by Ojan Vafai.
+
+ This patch moves "private" methods and properties of PerfTestRunner into a closure so that they're
+ inaccessible from outside. Also catch exceptions from test.run, test.done, and other runner code
+ to ensure we call notifyDone() even if we broke tests. Otherwise DRT will time out and we end up
+ waiting 10 minutes for each broken test on the bots.
+
+ * resources/runner.js:
+ (PerfTestRunner.gc):
+ (logInDocument): Extracted from PerfTestRunner.log.
+ (PerfTestRunner.log): Moved.
+ (logFatalError): Added.
+ (start): Renamed from PerfTestRunner._start.
+ (scheduleNextRun): Extracted from PerfTestRunner._runLoop. Also catch any exceptions thrown by
+ the runner and by ignoreWarmUpAndLog so that we don't end up timing out. We call logFatalError in
+ such cases, which in turn ensures notifyDone() is called.
+ (ignoreWarmUpAndLog): Renamed from PerfTestRunner._ignoreWarmUpAndLog.
+ (finish): Extracted from PerfTestRunner._runLoop.
+ (PerfTestRunner.measureTime): Moved. The initialization of runCount is moved into start().
+ (measureTimeOnce): Renamed from PerfTestRunner._measureTimeOnce.
+ (PerfTestRunner.runPerSecond): Moved. Ditto about runCount.
+ (measureRunsPerSecondOnce): Renamed from PerfTestRunner._measureRunsPerSecondOnce.
+ (callRunAndMeasureTime): Renamed from PerfTestRunner._perSecondRunnerIterator.
+
2012-10-01 Florin Malita <fmal...@chromium.org>
Add a performance test for nested <use> elements
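The closure described in the entry above is the (function () { ... })() wrapper visible in the runner.js hunk below. In miniature, the pattern looks like this (illustrative names only, not the real runner):

    var Runner = {};
    (function () {
        // State declared here is unreachable from test code; only the
        // methods assigned onto Runner are public.
        var results = [];

        Runner.record = function (value) { results.push(value); };
        Runner.count = function () { return results.length; };
    })();

    Runner.record(1);
    console.log(Runner.count());    // 1
    console.log(Runner.results);    // undefined -- the array stays private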
Modified: trunk/PerformanceTests/resources/runner.js (130098 => 130099)
--- trunk/PerformanceTests/resources/runner.js 2012-10-01 23:43:10 UTC (rev 130098)
+++ trunk/PerformanceTests/resources/runner.js 2012-10-01 23:48:17 UTC (rev 130099)
@@ -29,20 +29,6 @@
PerfTestRunner.now = window.performance && window.performance.webkitNow ? function () { return window.performance.webkitNow(); } : Date.now;
-PerfTestRunner.log = function (text) {
- if (this._logLines) {
- this._logLines.push(text);
- return;
- }
- if (!document.getElementById("log")) {
- var pre = document.createElement('pre');
- pre.id = 'log';
- document.body.appendChild(pre);
- }
- document.getElementById("log").innerHTML += text + "\n";
- window.scrollTo(0, document.body.height);
-}
-
PerfTestRunner.logInfo = function (text) {
if (!window.testRunner)
this.log(text);
@@ -105,13 +91,6 @@
this.log("max " + statistics.max + " " + statistics.unit);
}
-PerfTestRunner.storeHeapResults = function() {
- if (!window.internals)
- return;
- this._jsHeapResults.push(this.getUsedJSHeap());
- this._mallocHeapResults.push(this.getUsedMallocHeap());
-}
-
PerfTestRunner.getUsedMallocHeap = function() {
var stats = window.internals.mallocStatistics();
return stats.committedVMBytes - stats.freeListBytes;
@@ -145,120 +124,164 @@
for (var i = 0; i < 1000; i++)
gcRec(10);
}
-}
+};
-PerfTestRunner._scheduleNextMeasurementOrNotifyDone = function () {
- if (this._completedRuns < this._runCount) {
- this.gc();
+(function () {
+ var logLines = null;
+ var completedRuns = -1;
+ var callsPerIteration = 1;
+ var currentTest = null;
+ var results = [];
+ var jsHeapResults = [];
+ var mallocHeapResults = [];
+ var runCount = undefined;
+
+ function logInDocument(text) {
+ if (!document.getElementById("log")) {
+ var pre = document.createElement('pre');
+ pre.id = 'log';
+ document.body.appendChild(pre);
+ }
+ document.getElementById("log").innerHTML += text + "\n";
+ window.scrollTo(0, document.body.height);
+ }
+
+ PerfTestRunner.log = function (text) {
+ if (logLines)
+ logLines.push(text);
+ else
+ logInDocument(text);
+ }
+
+ function logFatalError(text) {
+ PerfTestRunner.log(text);
+ finish();
+ }
+
+ function start(test, runner) {
+ if (!test) {
+ logFatalError('Got a bad test object.');
+ return;
+ }
+ currentTest = test;
+ runCount = test.runCount || 20;
+ logLines = window.testRunner ? [] : null;
+ PerfTestRunner.log("Running " + runCount + " times");
+ scheduleNextRun(runner);
+ }
+
+ function scheduleNextRun(runner) {
+ PerfTestRunner.gc();
window.setTimeout(function () {
- var measuredValue = PerfTestRunner._runner();
- PerfTestRunner.ignoreWarmUpAndLog(measuredValue);
- PerfTestRunner._scheduleNextMeasurementOrNotifyDone();
+ try {
+ var measuredValue = runner();
+ } catch (exception) {
+ logFatalError('Got an exception while running test.run with name=' + exception.name + ', message=' + exception.message);
+ return;
+ }
+
+ completedRuns++;
+
+ try {
+ ignoreWarmUpAndLog(measuredValue);
+ } catch (exception) {
+ logFatalError('Got an exception while logging the result with name=' + exception.name + ', message=' + exception.message);
+ return;
+ }
+
+ if (completedRuns < runCount)
+ scheduleNextRun(runner);
+ else
+ finish();
}, 0);
- } else {
- if (this._description)
- this.log("Description: " + this._description);
- this.logStatistics(this._results, this.unit, "Time:");
- if (this._jsHeapResults.length) {
- this.logStatistics(this._jsHeapResults, "bytes", "JS Heap:");
- this.logStatistics(this._mallocHeapResults, "bytes", "Malloc:");
+ }
+
+ function ignoreWarmUpAndLog(measuredValue) {
+ var labeledResult = measuredValue + " " + PerfTestRunner.unit;
+ if (completedRuns <= 0)
+ PerfTestRunner.log("Ignoring warm-up run (" + labeledResult + ")");
+ else {
+ results.push(measuredValue);
+ if (window.internals) {
+ jsHeapResults.push(PerfTestRunner.getUsedJSHeap());
+ mallocHeapResults.push(PerfTestRunner.getUsedMallocHeap());
+ }
+ PerfTestRunner.log(labeledResult);
}
- if (this._logLines) {
- var logLines = this._logLines;
- this._logLines = null;
- var self = this;
- logLines.forEach(function(text) { self.log(text); });
+ }
+
+ function finish() {
+ try {
+ if (currentTest.description)
+ PerfTestRunner.log("Description: " + currentTest.description);
+ PerfTestRunner.logStatistics(results, PerfTestRunner.unit, "Time:");
+ if (jsHeapResults.length) {
+ PerfTestRunner.logStatistics(jsHeapResults, "bytes", "JS Heap:");
+ PerfTestRunner.logStatistics(mallocHeapResults, "bytes", "Malloc:");
+ }
+ if (logLines)
+ logLines.forEach(logInDocument);
+ if (currentTest.done)
+ currentTest.done();
+ } catch (exception) {
+ logInDocument('Got an exception while finalizing the test with name=' + exception.name + ', message=' + exception.message);
}
- if (this._test.done)
- this._test.done();
+
if (window.testRunner)
testRunner.notifyDone();
}
-}
-PerfTestRunner._measureTimeOnce = function () {
- var start = this.now();
- var returnValue = this._test.run.call(window);
- var end = this.now();
-
- if (returnValue - 0 === returnValue) {
- if (returnValue <= 0)
- this.log("runFunction returned a non-positive value: " + returnValue);
- return returnValue;
+ PerfTestRunner.measureTime = function (test) {
+ PerfTestRunner.unit = 'ms';
+ start(test, measureTimeOnce);
}
- return end - start;
-}
+ function measureTimeOnce() {
+ var start = PerfTestRunner.now();
+ var returnValue = currentTest.run();
+ var end = PerfTestRunner.now();
-PerfTestRunner.ignoreWarmUpAndLog = function (result) {
- this._completedRuns++;
+ if (returnValue - 0 === returnValue) {
+ if (returnValue <= 0)
+ PerfTestRunner.log("runFunction returned a non-positive value: " + returnValue);
+ return returnValue;
+ }
- var labeledResult = result + " " + this.unit;
- if (this._completedRuns <= 0)
- this.log("Ignoring warm-up run (" + labeledResult + ")");
- else {
- this._results.push(result);
- this.storeHeapResults();
- this.log(labeledResult);
+ return end - start;
}
-}
-PerfTestRunner._start = function(test) {
- this._description = test.description || "";
- this._completedRuns = -1;
- this._callsPerIteration = 1;
- this._test = test;
+ PerfTestRunner.runPerSecond = function (test) {
+ PerfTestRunner.unit = 'runs/s';
+ start(test, measureRunsPerSecondOnce);
+ }
- this._results = [];
- this._jsHeapResults = [];
- this._mallocHeapResults = [];
+ function measureRunsPerSecondOnce() {
+ var timeToRun = 750;
+ var totalTime = 0;
+ var numberOfRuns = 0;
- this._logLines = window.testRunner ? [] : null;
- this.log("Running " + this._runCount + " times");
- this._scheduleNextMeasurementOrNotifyDone();
-}
+ if (currentTest.setup)
+ currentTest.setup();
-PerfTestRunner.measureTime = function (test) {
- this._runCount = test.runCount || 20;
- this.unit = 'ms';
- this._runner = this._measureTimeOnce;
- this._start(test);
-}
+ while (totalTime < timeToRun) {
+ totalTime += callRunAndMeasureTime(callsPerIteration);
+ numberOfRuns += callsPerIteration;
+ if (completedRuns < 0 && totalTime < 100)
+ callsPerIteration = Math.max(10, 2 * callsPerIteration);
+ }
-PerfTestRunner.runPerSecond = function (test) {
- this._runCount = test.runCount || 20; // Only used by tests in fast/harness/perftests
- this.unit = 'runs/s';
- this._runner = this._measureRunsPerSecondOnce;
- this._start(test);
-}
+ return numberOfRuns * 1000 / totalTime;
+ }
-PerfTestRunner._measureRunsPerSecondOnce = function () {
- var timeToRun = this._test.timeToRun || 750;
- var totalTime = 0;
- var i = 0;
- var callsPerIteration = this._callsPerIteration;
-
- if (this._test.setup)
- this._test.setup();
-
- while (totalTime < timeToRun) {
- totalTime += this._perSecondRunnerIterator(callsPerIteration);
- i += callsPerIteration;
- if (this._completedRuns < 0 && totalTime < 100)
- callsPerIteration = Math.max(10, 2 * callsPerIteration);
+ function callRunAndMeasureTime(callsPerIteration) {
+ var startTime = PerfTestRunner.now();
+ for (var i = 0; i < callsPerIteration; i++)
+ currentTest.run();
+ return PerfTestRunner.now() - startTime;
}
- this._callsPerIteration = callsPerIteration;
- return i * 1000 / totalTime;
-}
+})();
-PerfTestRunner._perSecondRunnerIterator = function (callsPerIteration) {
- var startTime = this.now();
- for (var i = 0; i < callsPerIteration; i++)
- this._test.run.call(window);
- return this.now() - startTime;
-}
-
if (window.testRunner) {
testRunner.waitUntilDone();
testRunner.dumpAsText();
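With this change a runs/s test is written against the public surface only; the timeToRun option is gone and the 750ms measurement window lives inside measureRunsPerSecondOnce. A minimal sketch of a caller, assuming the usual script include (the path and the body of run are placeholders):

    <script src="resources/runner.js"></script>
    <script>
    PerfTestRunner.runPerSecond({
        description: "Counts how many times the operation below runs per second.",
        setup: function () { /* optional per-measurement setup */ },
        run: function () {
            // The operation being measured; trivial placeholder work here.
            document.body.appendChild(document.createElement("div"));
        },
        done: function () { /* optional cleanup or assertions */ }
        // Note: no timeToRun -- the measurement window is internal now.
    });
    </script>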