I need to calculate statistics of many fixed sized arrays. I tried creating an equal length image, setting its samples, and then using image statistics methods. This works but it is maybe 100x slower than expected.
This script shows the problem. Data set is 4k arrays each of length 64. Putting all of the data in a single 64 x 4k image and then calculating statistics on the entire data set is fast. I don't actually need statistics across the entire data set, I am doing this just to provide a performance baseline.
But when I iterate through the data set, basically row by row, copying the length 64 rows into a small image, statistics is much slower, including the slice and setSamples overhead.
Is this a performance bug? Is there a faster way to do this?
Mike
PS: Of course I can write my own PJSR statistics methods. I did. They work faster. I was hoping that compiled PI code would be faster than PJSR, but it is slower by a large factor when using this small image, slice, setSamples technique.
mean: 131071.5, 0.005 s
standard deviation: 75674.59882065227, 0.015 s
Sn: 65537, 0.040 s
totalMean: 536868864, 4.331 s
totalStandardDeviation: 76263.36962570688, 8.205 s
totalSn: 69632, 3.900 s
// Synthetic data set: `height` rows of `width` samples each,
// filled with the running sample index 0 .. width*height-1.
var width = 64;
var height = 4 * 1024;
var data = [];
for (var i = 0; i < width * height; ++i) {
   data[i] = i;
}
// Baseline: load the entire data set into one large image and time a
// single mean() call across all samples.
var image = new Image(width, height);
image.setSamples(data);
var t0 = Date.now();
var mean = image.mean();
var t1 = Date.now();
console.writeln();
console.writeln(
   "mean: ", mean,
   format(", %.03f s", 0.001 * (t1 - t0))
);
// Baseline: time stdDev() over the whole image in one call.
var t0 = Date.now();
var standardDeviation = image.stdDev();
var t1 = Date.now();
console.writeln();
console.writeln(
   "standard deviation: ", standardDeviation,
   format(", %.03f s", 0.001 * (t1 - t0))
);
// Baseline: time the robust Sn scale estimator over the whole image.
var t0 = Date.now();
var Sn = image.Sn();
var t1 = Date.now();
console.writeln();
console.writeln(
   "Sn: ", Sn,
   format(", %.03f s", 0.001 * (t1 - t0))
);
// Row-by-row case: copy each length-`width` row into a width x 1 image
// and accumulate per-row mean() results. Timing includes the slice and
// setSamples overhead.
var image = new Image(width, 1);
var totalMean = 0;
var t0 = Date.now();
for (var row = 0; row < height; ++row) {
   var begin = row * width;
   image.setSamples(data.slice(begin, begin + width));
   totalMean += image.mean();
}
var t1 = Date.now();
console.writeln();
console.writeln(
   "totalMean: ", totalMean,
   format(", %.03f s", 0.001 * (t1 - t0))
);
// Row-by-row case: accumulate per-row stdDev() results through the same
// small-image / slice / setSamples path.
var image = new Image(width, 1);
var totalStandardDeviation = 0;
var t0 = Date.now();
for (var row = 0; row < height; ++row) {
   var begin = row * width;
   image.setSamples(data.slice(begin, begin + width));
   totalStandardDeviation += image.stdDev();
}
var t1 = Date.now();
console.writeln();
console.writeln(
   "totalStandardDeviation: ", totalStandardDeviation,
   format(", %.03f s", 0.001 * (t1 - t0))
);
// Row-by-row case: accumulate per-row Sn() results through the same
// small-image / slice / setSamples path.
var image = new Image(width, 1);
var totalSn = 0;
var t0 = Date.now();
for (var row = 0; row < height; ++row) {
   var begin = row * width;
   image.setSamples(data.slice(begin, begin + width));
   totalSn += image.Sn();
}
var t1 = Date.now();
console.writeln();
console.writeln(
   "totalSn: ", totalSn,
   format(", %.03f s", 0.001 * (t1 - t0))
);