Skip to content

Commit

Permalink
ADD: Removed warmUpTime in favour of self-warmup to reach the minimum…
Browse files Browse the repository at this point in the history
… time. Now will run new processes every 10% of gathered samples. getDiff module isolation to reduce deopts.
  • Loading branch information
Llorx committed Sep 2, 2023
1 parent 06cb1ed commit c92c24a
Show file tree
Hide file tree
Showing 9 changed files with 44 additions and 62 deletions.
5 changes: 2 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -124,9 +124,8 @@ Creates a new `IsoBench` instance to benchmark your code.
- `name`: The name of this IsoBench instance. Defaults to `IsoBench`.
- `options`: Object:
- `parallel`: The amount of parallel tests to run. Defaults to **1**.
- `time`: The minimum time (in milliseconds) to invest on each test. The library will automatically increase the amount of cycles to reach a minimum of `ms` between tests to take samples. Defaults to **50**.
- `samples`: Amount of samples to get. Defaults to **100**.
- `warmUpTime`: The minimum time (in milliseconds) to pre-run the tests, so the [JavaScript engine optimizer kicks-in](https://doar-e.github.io/blog/2019/01/28/introduction-to-turbofan/#compilation-pipeline:~:text=If%20the%20function%20gets%20executed%20a%20lot%2C%20TurboFan%20will%20generate%20some%20optimized%20code) before initializing the timer. Defaults to **500**.
- `time`: The minimum time (in milliseconds) to invest on each test. The library will automatically increase the amount of cycles to reach a minimum of `ms` between tests to take samples. Defaults to **100**.
- `samples`: Amount of samples to get. Will launch a new process for every 10% of the samples. Defaults to **50**, so a new process will be launched every **5** samples.

---
```typescript
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "iso-bench",
"version": "2.4.6",
"version": "2.4.7",
"description": "Small benchmark library focused in avoiding optimization/deoptimization pollution between tests by isolating them.",
"types": "./lib/_types/index.d.ts",
"main": "./lib/",
Expand Down
6 changes: 2 additions & 4 deletions src/IsoBench.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ export type IsoBenchOptions = {
parallel?:number;
samples?:number;
time?:number;
warmUpTime?:number;
};
export class IsoBench {
processors:Processor[] = [];
Expand All @@ -32,9 +31,8 @@ export class IsoBench {
constructor(readonly name:string = "IsoBench", options?:IsoBenchOptions) {
this.options = {...{ // Set defaults
parallel: 1,
samples: 100,
time: 50,
warmUpTime: 50
samples: 50,
time: 100
}, ...options};
this.name = getUniqueName(this.name, BENCHES);
BENCHES.set(this.name, this);
Expand Down
67 changes: 20 additions & 47 deletions src/Test.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import STREAM from "stream";
import CHILD_PROCESS from "child_process";
import { performance } from "perf_hooks";

import { Fork } from "./Fork";
import { IsoBenchOptions, Processor } from ".";
import { SetupMessage } from "./WorkerSetup";
import { Messager, RunMessage } from "./Messager";
import { getDiff } from "./getDiff";

export type Sample = {
cycles: number;
Expand All @@ -21,9 +21,8 @@ class ForkContext<T> {
const setup:SetupMessage = {
testIndex: this._test.index,
benchName: this._benchName,
samples: this._options.samples,
time: this._options.time,
warmUpTime: this._options.warmUpTime
samples: Math.min(Math.ceil(this._options.samples * 0.1), this._options.samples - this._test.samples.length),
time: this._options.time
};
const worker = Fork.fork({
["ISO_BENCH_SETUP"]: JSON.stringify(setup)
Expand All @@ -40,7 +39,11 @@ class ForkContext<T> {
this._resolve();
} else if (msg.done) {
this._ended = true;
this._resolve();
if (this._test.samples.length >= this._options.samples) {
this._resolve();
} else {
new ForkContext(this._test, this._processors, this._resolve, this._benchName, this._options).start();
}
} else {
const sample:Sample = {
cycles: msg.cycles,
Expand Down Expand Up @@ -107,7 +110,7 @@ export class Test {
totalTime = 0;
samples:Sample[] = [];
group = "";
constructor(readonly name:string, readonly index:number, private _callback:(setup?:unknown)=>void, private _setup?:()=>unknown) {}
constructor(readonly name:string, readonly index:number, private _callback:(setupData?:unknown)=>void, private _setup?:()=>unknown) {}
fork(benchName:string, processors:Processor[], options:Required<IsoBenchOptions>) {
return new Promise<void>((resolve => {
// Start new context for this specific fork run
Expand All @@ -116,50 +119,20 @@ export class Test {
}));
}
async run(setup:SetupMessage) {
const warmUpResult = setup.warmUpTime > 0 ? this._getResult(setup.warmUpTime, 1) : null;
getDiff(1, this._callback, this._setup); // warmup
let cycles = 1;
if (warmUpResult) {
const ratio = (setup.time / setup.warmUpTime) * 1.02;
cycles = warmUpResult.cycles * ratio;
}
let samples = setup.samples;
while(samples-- > 0) {
const result = this._getResult(setup.time, cycles);
cycles = result.cycles;
await Messager.send({
diff: result.diff,
cycles: result.cycles
});
}
}
private _getResult(targetTime:number, cycles:number) {
let diff:number;
while(true) {
diff = this._getCallbackTime(cycles);
if (diff >= targetTime) {
break;
} else {
const ratio = diff > 0 ? (targetTime / diff) * 1.02 : 1.1; // Go a 2% further, to avoid it ending just below the targetTime. Increase by 10% if zero is received (mostly in systems without nanosecond resolution)
cycles = Math.ceil(cycles * ratio);
}
}
return {cycles, diff};
}
private _getCallbackTime(cycles:number) {
// Individual loops so the callback doesn't receive an argument if there's no setup
if (this._setup) {
const setup = this._setup();
const startTS = performance.now();
while(cycles-- > 0) {
this._callback(setup);
}
return performance.now() - startTS;
} else {
const startTS = performance.now();
while(cycles-- > 0) {
this._callback();
while(samples > 0) {
const diff = getDiff(cycles, this._callback, this._setup);
if (diff >= setup.time) {
samples--;
await Messager.send({
diff: diff,
cycles: cycles
});
}
return performance.now() - startTS;
const ratio = diff > 0 ? (setup.time / diff) * 1.02 : 1.1; // Go a 2% further, to avoid it ending just below the targetTime. Increase by 10% if zero is received (mostly in systems without nanosecond resolution)
cycles = diff >= setup.time ? Math.round(cycles * ratio) : Math.ceil(cycles * ratio);
}
}
}
1 change: 0 additions & 1 deletion src/WorkerSetup.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ export type SetupMessage = {
benchName:string;
time:number;
samples:number;
warmUpTime:number;
};

export let WorkerSetup:SetupMessage|null = null;
Expand Down
10 changes: 10 additions & 0 deletions src/getDiff.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
import { performance } from "perf_hooks";

/**
 * Times how long it takes to invoke `callback` the given number of times.
 *
 * Isolated in its own module so the benchmark loop stays monomorphic and is
 * not polluted by deoptimizations from surrounding code.
 *
 * @param cycles Number of times to invoke the callback.
 * @param callback Function under measurement; receives the setup data (if any).
 * @param setup Optional factory run once, before the timer starts, to build
 *              the data passed to every callback invocation.
 * @returns Elapsed wall-clock time in milliseconds for the whole loop.
 */
export function getDiff(cycles:number, callback:(setupData?:unknown)=>void, setup?:()=>unknown) {
    // Run the setup outside the timed region so it doesn't skew the sample.
    const setupData = setup ? setup() : undefined;
    const begin = performance.now();
    for (let remaining = cycles; remaining > 0; remaining--) {
        callback(setupData);
    }
    return performance.now() - begin;
}
9 changes: 6 additions & 3 deletions src/processors/StreamLog/DynamicStream.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,10 +4,10 @@ import { Processor } from "../../Processor";
import { Test, Sample } from "../../Test";
import { IsoBench } from "../../IsoBench";
import { Group, getTestLog, COLORS } from "./Utils";
import { StreamTTY } from "./StreamTTY";
import { TTYOutput } from "./TTYOutput";

export class TestOutput {
constructor(private _tty:StreamTTY, readonly line:number) {}
constructor(private _tty:TTYOutput, readonly line:number) {}
log(data:string) {
this._tty.log(data, this.line);
}
Expand All @@ -20,7 +20,7 @@ export class DynamicStream implements Processor {
private _benchName = "";
private _groups = new Map<string, Group>();
constructor(protected _stream:TTY.WriteStream) {
this._tty = new StreamTTY(this._stream);
this._tty = new TTYOutput(this._stream);
this._header = new TestOutput(this._tty, 0);
}
initialize(bench:IsoBench, tests:Test[]) {
Expand Down Expand Up @@ -73,13 +73,15 @@ export class DynamicStream implements Processor {
if (output) {
const logArgs = getTestLog(this._padding, test, null, true, sample);
logArgs.push(`${COLORS.YELLOW}Running...${COLORS.CLEAR}`);
//logArgs.push("Min:", test.samples.slice().sort((a,b) => a.ops - b.ops)[0].ops, "Max:", test.samples.slice().sort((a,b) => b.ops - a.ops)[0].ops);
output.log(logArgs.join(" "));
}
}
end(test:Test) {
const output = this._outputs.get(test.index);
if (output) {
const logArgs = getTestLog(this._padding, test, null, true);
//logArgs.push("Min:", test.samples.slice().sort((a,b) => a.ops - b.ops)[0].ops, "Max:", test.samples.slice().sort((a,b) => b.ops - a.ops)[0].ops);
output.log(logArgs.join(" "));
}
const group = this._groups.get(test.group);
Expand All @@ -101,6 +103,7 @@ export class DynamicStream implements Processor {
const output = this._outputs.get(test.index);
if (output) {
const logArgs = getTestLog(this._padding, test, { min, max }, true);
//logArgs.push("Min:", test.samples.slice().sort((a,b) => a.ops - b.ops)[0].ops, "Max:", test.samples.slice().sort((a,b) => b.ops - a.ops)[0].ops);
output.log(logArgs.join(" "));
}
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import TTY from "tty";

export class StreamTTY {
export class TTYOutput {
logs:string[] = [];
top = 0;
drawheight = 0;
Expand Down
4 changes: 2 additions & 2 deletions src/tests/StreamTTY.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { StreamTTY } from "../processors/StreamLog/StreamTTY";
import { TTYOutput } from "../processors/StreamLog/TTYOutput";

const tty = new StreamTTY(process.stdout);
const tty = new TTYOutput(process.stdout);
for (let i = 0; i < 10; i++) {
setTimeout(() => {
tty.log("test" + i, i);
Expand Down

0 comments on commit c92c24a

Please sign in to comment.