Switch to CoV algorithm for automatic early stopping

mikkelam 2025-06-19 12:08:18 +02:00
parent 34ed84f50f
commit bbaab5f026
8 changed files with 513 additions and 330 deletions

View file

@@ -4,16 +4,16 @@
[![CI](https://github.com/mikkelam/fast-cli-zig/actions/workflows/ci.yml/badge.svg)](https://github.com/mikkelam/fast-cli-zig/actions/workflows/ci.yml)
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
A blazingly fast CLI tool for testing internet speed compatible with fast.com (api v2). Written in Zig for maximum performance.
A blazingly fast CLI tool for testing internet speed using the fast.com v2 API. Written in Zig for maximum performance.
**1.3 MiB binary** • 🚀 **Zero runtime deps** • 📊 **Real-time progress**
**1.3 MiB binary** • 🚀 **Zero runtime deps** • 📊 **Smart stability detection**
## Why fast-cli-zig?
- **Tiny binary**: Just 1.3 MiB, no runtime dependencies
- **Blazing fast**: Concurrent connections with adaptive chunk sizing
- **Cross-platform**: Single binary for Linux, macOS, Windows
- **Real-time feedback**: Live speed updates during tests
- **Smart stopping**: Uses Coefficient of Variation (CoV) algorithm for adaptive test duration
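Under the hood (values from this commit): after a 4 s ramp-up, throughput is sampled every 750 ms into a sliding window of six measurements, and the test stops once the window's coefficient of variation (standard deviation ÷ mean) stays at or below 0.15 for two consecutive checks. As a worked example, window speeds of 100, 105, and 95 Mbps have mean 100 and standard deviation ≈ 4.1, so CoV ≈ 0.04, comfortably within the threshold.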
## Installation
@@ -42,37 +42,26 @@ v0.0.1
Usage: fast-cli [options]
Flags:
--stability-max-variance Maximum variance percentage for stability test [String] (default: "10.0")
-u, --upload Check upload speed as well [Bool] (default: false)
-d, --duration Duration in seconds for each test phase - download, then upload if enabled (duration mode only) [Int] (default: 10)
--stability-min-samples Minimum samples for stability test [Int] (default: 5)
--stability-max-duration Maximum duration in seconds for stability test [Int] (default: 30)
--https Use https when connecting to fast.com [Bool] (default: true)
-j, --json Output results in JSON format [Bool] (default: false)
-m, --mode Test mode: 'duration' or 'stability' [String] (default: "duration")
-h, --help Shows the help for a command [Bool] (default: false)
-u, --upload Check upload speed as well [Bool] (default: false)
-d, --duration Maximum test duration in seconds (uses Fast.com-style stability detection by default) [Int] (default: 30)
--https Use https when connecting to fast.com [Bool] (default: true)
-j, --json Output results in JSON format [Bool] (default: false)
-h, --help Shows the help for a command [Bool] (default: false)
Use "fast-cli --help" for more information.
```
## Performance Comparison
TODO
## Options
| Flag | Description | Default |
|------|-------------|---------|
| `--upload`, `-u` | Test upload speed | `false` |
| `--duration`, `-d` | Maximum test duration (seconds) | `30` |
| `--json`, `-j` | JSON output | `false` |
| `--https` | Use HTTPS | `true` |
## Example Output
```console
$ fast-cli --upload
🏓 25ms | ⬇️ Download: 113.7 Mbps | ⬆️ Upload: 62.1 Mbps
$ fast-cli -d 15 # Quick test with 15s max duration
🏓 22ms | ⬇️ Download: 105.0 Mbps
$ fast-cli -j # JSON output
{"download_mbps": 131.4, "ping_ms": 20.8}
```
## Development

View file

@@ -1,15 +1,16 @@
const std = @import("std");
const zli = @import("zli");
const builtin = @import("builtin");
const build_options = @import("build_options");
const Fast = @import("../lib/fast.zig").Fast;
const HTTPSpeedTester = @import("../lib/http_speed_tester_v2.zig").HTTPSpeedTester;
const StabilityCriteria = @import("../lib/http_speed_tester_v2.zig").StabilityCriteria;
const SpeedTestResult = @import("../lib/http_speed_tester_v2.zig").SpeedTestResult;
const BandwidthMeter = @import("../lib/bandwidth.zig");
const SpeedMeasurement = @import("../lib/bandwidth.zig").SpeedMeasurement;
const progress = @import("../lib/progress.zig");
const HttpLatencyTester = @import("../lib/latency.zig").HttpLatencyTester;
const HttpLatencyTester = @import("../lib/http_latency_tester.zig").HttpLatencyTester;
const log = std.log.scoped(.cli);
/// Update spinner text with current speed measurement
@@ -45,40 +46,11 @@ const json_output_flag = zli.Flag{
.default_value = .{ .Bool = false },
};
const test_mode_flag = zli.Flag{
.name = "mode",
.description = "Test mode: 'duration' or 'stability'",
.shortcut = "m",
.type = .String,
.default_value = .{ .String = "duration" },
};
const test_duration_flag = zli.Flag{
const max_duration_flag = zli.Flag{
.name = "duration",
.description = "Duration in seconds for each test phase - download, then upload if enabled (duration mode only)",
.description = "Maximum test duration in seconds (uses CoV stability detection by default)",
.shortcut = "d",
.type = .Int,
.default_value = .{ .Int = 5 },
};
const stability_min_samples_flag = zli.Flag{
.name = "stability-min-samples",
.description = "Minimum samples for stability test",
.type = .Int,
.default_value = .{ .Int = 5 },
};
const stability_max_variance_flag = zli.Flag{
.name = "stability-max-variance",
.description = "Maximum variance percentage for stability test",
.type = .String,
.default_value = .{ .String = "10.0" },
};
const stability_max_duration_flag = zli.Flag{
.name = "stability-max-duration",
.description = "Maximum duration in seconds for stability test",
.type = .Int,
.default_value = .{ .Int = 30 },
};
@@ -86,17 +58,13 @@ pub fn build(allocator: std.mem.Allocator) !*zli.Command {
const root = try zli.Command.init(allocator, .{
.name = "fast-cli",
.description = "Estimate connection speed using fast.com",
.version = std.SemanticVersion.parse(build_options.version) catch null,
.version = null,
}, run);
try root.addFlag(https_flag);
try root.addFlag(check_upload_flag);
try root.addFlag(json_output_flag);
try root.addFlag(test_mode_flag);
try root.addFlag(test_duration_flag);
try root.addFlag(stability_min_samples_flag);
try root.addFlag(stability_max_variance_flag);
try root.addFlag(stability_max_duration_flag);
try root.addFlag(max_duration_flag);
return root;
}
@@ -105,15 +73,10 @@ fn run(ctx: zli.CommandContext) !void {
const use_https = ctx.flag("https", bool);
const check_upload = ctx.flag("upload", bool);
const json_output = ctx.flag("json", bool);
const test_mode = ctx.flag("mode", []const u8);
const test_duration = ctx.flag("duration", i64);
const stability_min_samples = ctx.flag("stability-min-samples", i64);
const stability_max_variance_str = ctx.flag("stability-max-variance", []const u8);
const stability_max_duration = ctx.flag("stability-max-duration", i64);
const max_duration = ctx.flag("duration", i64);
const stability_max_variance = std.fmt.parseFloat(f64, stability_max_variance_str) catch 10.0;
log.info("Config: https={}, upload={}, json={}, mode={s}, duration={}s", .{
use_https, check_upload, json_output, test_mode, test_duration,
log.info("Config: https={}, upload={}, json={}, max_duration={}s", .{
use_https, check_upload, json_output, max_duration,
});
var fast = Fast.init(std.heap.page_allocator, use_https);
@@ -156,81 +119,52 @@ fn run(ctx: zli.CommandContext) !void {
var speed_tester = HTTPSpeedTester.init(std.heap.page_allocator);
defer speed_tester.deinit();
// Determine test mode
const use_stability = std.mem.eql(u8, test_mode, "stability");
// Use Fast.com-style stability detection by default
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 4,
.max_duration_seconds = @as(u32, @intCast(@max(25, max_duration))),
.measurement_interval_ms = 750,
.sliding_window_size = 6,
.stability_threshold_cov = 0.15,
.stable_checks_required = 2,
};
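// With these defaults: ~4s of ramp-up, then one speed sample every 750ms into
// a 6-sample sliding window; the test stops after two consecutive checks with
// CoV <= 0.15, or at the max(25, max_duration)s hard cap.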
// Measure download speed
const download_result = if (use_stability) blk: {
const criteria = StabilityCriteria{
.min_samples = @as(u32, @intCast(stability_min_samples)),
.max_variance_percent = stability_max_variance,
.max_duration_seconds = @as(u32, @intCast(stability_max_duration)),
};
const download_result = if (json_output) blk: {
// JSON mode: clean output only
break :blk speed_tester.measure_download_speed_stability(urls, criteria) catch |err| {
if (!json_output) {
try ctx.spinner.fail("Download test failed: {}", .{err});
} else {
log.err("Download test failed: {}", .{err});
std.debug.print("{{\"error\": \"{}\"}}\n", .{err});
}
log.err("Download test failed: {}", .{err});
std.debug.print("{{\"error\": \"{}\"}}\n", .{err});
return;
};
} else blk: {
if (json_output) {
// JSON mode: clean output only
break :blk speed_tester.measureDownloadSpeed(urls, @as(u32, @intCast(@max(0, test_duration)))) catch |err| {
log.err("Download test failed: {}", .{err});
std.debug.print("{{\"error\": \"{}\"}}\n", .{err});
return;
};
} else {
// Create progress callback with spinner context
const progressCallback = progress.createCallback(ctx.spinner, updateSpinnerText);
break :blk speed_tester.measureDownloadSpeedWithProgress(urls, @as(u32, @intCast(@max(0, test_duration))), progressCallback) catch |err| {
try ctx.spinner.fail("Download test failed: {}", .{err});
return;
};
}
// Interactive mode with spinner updates
const progressCallback = progress.createCallback(ctx.spinner, updateSpinnerText);
break :blk speed_tester.measureDownloadSpeedWithStabilityProgress(urls, criteria, progressCallback) catch |err| {
try ctx.spinner.fail("Download test failed: {}", .{err});
return;
};
};
var upload_result: ?SpeedTestResult = null;
if (check_upload) {
if (!json_output) {
const upload_mode_str = if (use_stability) "stability" else "duration";
try ctx.spinner.start(.{}, "Measuring upload speed ({s} mode)...", .{upload_mode_str});
try ctx.spinner.start(.{}, "Measuring upload speed...", .{});
}
upload_result = if (use_stability) blk: {
const criteria = StabilityCriteria{
.min_samples = @as(u32, @intCast(stability_min_samples)),
.max_variance_percent = stability_max_variance,
.max_duration_seconds = @as(u32, @intCast(stability_max_duration)),
};
upload_result = if (json_output) blk: {
// JSON mode: clean output only
break :blk speed_tester.measure_upload_speed_stability(urls, criteria) catch |err| {
if (!json_output) {
try ctx.spinner.fail("Upload test failed: {}", .{err});
}
log.err("Upload test failed: {}", .{err});
std.debug.print("{{\"error\": \"{}\"}}\n", .{err});
return;
};
} else blk: {
if (json_output) {
// JSON mode: clean output only
break :blk speed_tester.measureUploadSpeed(urls, @as(u32, @intCast(@max(0, test_duration)))) catch |err| {
log.err("Upload test failed: {}", .{err});
std.debug.print("{{\"error\": \"{}\"}}\n", .{err});
return;
};
} else {
// Create progress callback with spinner context
const uploadProgressCallback = progress.createCallback(ctx.spinner, updateUploadSpinnerText);
break :blk speed_tester.measureUploadSpeedWithProgress(urls, @as(u32, @intCast(@max(0, test_duration))), uploadProgressCallback) catch |err| {
try ctx.spinner.fail("Upload test failed: {}", .{err});
return;
};
}
// Interactive mode with spinner updates
const uploadProgressCallback = progress.createCallback(ctx.spinner, updateUploadSpinnerText);
break :blk speed_tester.measureUploadSpeedWithStabilityProgress(urls, criteria, uploadProgressCallback) catch |err| {
try ctx.spinner.fail("Upload test failed: {}", .{err});
return;
};
};
}

View file

@@ -41,11 +41,6 @@ pub const BandwidthMeter = struct {
self._bytes_transferred = total_bytes;
}
pub fn record_bytes(self: *BandwidthMeter, byte_count: usize) void {
assert(self._started);
self._bytes_transferred += byte_count;
}
pub fn bandwidth(self: *BandwidthMeter) f64 {
if (!self._started) return 0;
@@ -97,8 +92,8 @@ test "BandwidthMeter record_bytes" {
var meter = BandwidthMeter.init();
try meter.start();
meter.record_bytes(1000);
meter.record_bytes(500);
meter.update_total(1000);
meter.update_total(1500);
// Just test that bandwidth calculation works
const bw = meter.bandwidth();
@@ -109,7 +104,7 @@ test "BandwidthMeter bandwidth calculation" {
var meter = BandwidthMeter.init();
try meter.start();
meter.record_bytes(1000); // 1000 bytes
meter.update_total(1000); // 1000 bytes
// Sleep briefly to ensure time passes
std.time.sleep(std.time.ns_per_ms * 10); // 10ms

View file

@@ -15,9 +15,9 @@ pub const SpeedTestResult = struct {
speed: SpeedMeasurement,
/// Convert bytes per second to optimal unit for display (in bits per second)
pub fn fromBytesPerSecond(speed_bytes_per_sec: f64) SpeedTestResult {
pub fn fromBytesPerSecond(bytes_per_second: f64) SpeedTestResult {
// Convert bytes/s to bits/s
const speed_bits_per_sec = speed_bytes_per_sec * 8.0;
const speed_bits_per_sec = bytes_per_second * 8.0;
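// e.g. 16_425_000 bytes/s * 8 = 131_400_000 bits/s, displayed as 131.4 Mbps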
const abs_speed = @abs(speed_bits_per_sec);
const speed_measurement = if (abs_speed >= 1_000_000_000)
@@ -50,25 +50,45 @@ pub const HTTPSpeedTester = struct {
_ = self;
}
pub fn set_concurrent_connections(self: *HTTPSpeedTester, count: u32) void {
self.concurrent_connections = @min(count, 8); // Max 8 connections
// Stability-based download with optional progress callback
pub fn measure_download_speed_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
defer strategy.deinit();
return self.measureDownloadSpeedWithStability(urls, &strategy, ProgressType, progress_callback);
}
pub fn set_progress_update_interval_ms(self: *HTTPSpeedTester, interval_ms: u32) void {
self.progress_update_interval_ms = interval_ms;
// Stability-based download without progress callback
pub fn measure_download_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
return self.measure_download_speed_stability_duration(urls, criteria, null, {});
}
// Stability-based upload with optional progress callback
pub fn measure_upload_speed_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
const upload_data = try self.allocator.alloc(u8, 4 * 1024 * 1024);
defer self.allocator.free(upload_data);
@memset(upload_data, 'A');
var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
defer strategy.deinit();
return self.measureUploadSpeedWithStability(urls, &strategy, upload_data, ProgressType, progress_callback);
}
// Stability-based upload without progress callback
pub fn measure_upload_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
return self.measure_upload_speed_stability_duration(urls, criteria, null, {});
}
// Convenience helpers for cleaner API usage
// Clean duration-based download with optional progress callback
pub fn measure_download_speed_duration(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
const strategy = measurement_strategy.createDurationStrategy(duration_seconds, self.progress_update_interval_ms);
return self.measureDownloadSpeedWithDuration(urls, strategy, ProgressType, progress_callback);
}
// Clean stability-based download
pub fn measure_download_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
defer strategy.deinit();
return self.measureDownloadSpeedWithStability(urls, &strategy);
/// Simple download speed measurement without progress callback
pub fn measureDownloadSpeed(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32) !SpeedTestResult {
return self.measure_download_speed_duration(urls, duration_seconds, null, {});
}
// Clean duration-based upload with optional progress callback
@@ -81,37 +101,19 @@ pub const HTTPSpeedTester = struct {
return self.measureUploadSpeedWithDuration(urls, strategy, upload_data, ProgressType, progress_callback);
}
// Clean stability-based upload
pub fn measure_upload_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
const upload_data = try self.allocator.alloc(u8, 4 * 1024 * 1024);
defer self.allocator.free(upload_data);
@memset(upload_data, 'A');
var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
defer strategy.deinit();
return self.measureUploadSpeedWithStability(urls, &strategy, upload_data);
}
// Convenience helpers for cleaner API usage
/// Simple download speed measurement without progress callback
pub fn measureDownloadSpeed(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32) !SpeedTestResult {
return self.measure_download_speed_duration(urls, duration_seconds, null, {});
}
/// Download speed measurement with progress callback (type inferred)
pub fn measureDownloadSpeedWithProgress(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32, progress_callback: anytype) !SpeedTestResult {
return self.measure_download_speed_duration(urls, duration_seconds, @TypeOf(progress_callback), progress_callback);
}
/// Simple upload speed measurement without progress callback
pub fn measureUploadSpeed(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32) !SpeedTestResult {
return self.measure_upload_speed_duration(urls, duration_seconds, null, {});
}
/// Upload speed measurement with progress callback (type inferred)
pub fn measureUploadSpeedWithProgress(self: *HTTPSpeedTester, urls: []const []const u8, duration_seconds: u32, progress_callback: anytype) !SpeedTestResult {
return self.measure_upload_speed_duration(urls, duration_seconds, @TypeOf(progress_callback), progress_callback);
/// Stability-based download speed measurement with progress callback (type inferred)
pub fn measureDownloadSpeedWithStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, progress_callback: anytype) !SpeedTestResult {
return self.measure_download_speed_stability_duration(urls, criteria, @TypeOf(progress_callback), progress_callback);
}
/// Stability-based upload speed measurement with progress callback (type inferred)
pub fn measureUploadSpeedWithStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, progress_callback: anytype) !SpeedTestResult {
return self.measure_upload_speed_stability_duration(urls, criteria, @TypeOf(progress_callback), progress_callback);
}
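For orientation, a minimal caller sketch (hypothetical: `urls` would come from the fast.com API client, and `.{}` picks up the StabilityCriteria defaults added in this commit):

```zig
// Hypothetical caller sketch; not part of this commit.
var tester = HTTPSpeedTester.init(std.heap.page_allocator);
defer tester.deinit();
// Default criteria: 4s ramp-up, 750ms samples, window of 6, CoV <= 0.15 twice.
const result = try tester.measure_download_speed_stability(urls, .{});
_ = result;
```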
// Private implementation for duration-based download
@@ -179,62 +181,6 @@ pub const HTTPSpeedTester = struct {
return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
}
// Private implementation for stability-based download
fn measureDownloadSpeedWithStability(
self: *HTTPSpeedTester,
urls: []const []const u8,
strategy: *StabilityStrategy,
) !SpeedTestResult {
var timer = try speed_worker.RealTimer.init();
var should_stop = std.atomic.Value(bool).init(false);
// Setup worker manager
const num_workers = @min(urls.len, self.concurrent_connections);
var worker_manager = try WorkerManager.init(self.allocator, &should_stop, num_workers);
defer worker_manager.deinit();
// Setup download workers
const workers = try worker_manager.setupDownloadWorkers(
urls,
self.concurrent_connections,
timer.timer_interface(),
strategy.max_duration_ns,
);
defer worker_manager.cleanupWorkers(workers);
// Start workers
try worker_manager.startDownloadWorkers(workers);
// Main measurement loop
while (strategy.shouldContinue(timer.timer_interface().read())) {
std.time.sleep(strategy.getSleepInterval());
const current_bytes = worker_manager.getCurrentDownloadBytes(workers);
const should_stop_early = try strategy.handleProgress(
timer.timer_interface().read(),
current_bytes,
);
if (should_stop_early) break;
}
// Stop and wait for workers
worker_manager.stopAndJoinWorkers();
// Calculate results
const totals = worker_manager.calculateDownloadTotals(workers);
if (totals.errors > 0) {
print("Download completed with {} errors\n", .{totals.errors});
}
const actual_duration_ns = timer.timer_interface().read();
const actual_duration_s = @as(f64, @floatFromInt(actual_duration_ns)) / std.time.ns_per_s;
if (actual_duration_s == 0) return SpeedTestResult.fromBytesPerSecond(0);
const speed_bytes_per_sec = @as(f64, @floatFromInt(totals.bytes)) / actual_duration_s;
return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
}
// Private implementation for duration-based upload
fn measureUploadSpeedWithDuration(
self: *HTTPSpeedTester,
@@ -302,16 +248,97 @@ pub const HTTPSpeedTester = struct {
return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
}
// Private implementation for stability-based download
fn measureDownloadSpeedWithStability(
self: *HTTPSpeedTester,
urls: []const []const u8,
strategy: *StabilityStrategy,
comptime ProgressType: ?type,
progress_callback: if (ProgressType) |T| T else void,
) !SpeedTestResult {
const has_progress = ProgressType != null;
var timer = try speed_worker.RealTimer.init();
var should_stop = std.atomic.Value(bool).init(false);
// Initialize bandwidth meter for progress tracking
var bandwidth_meter = BandwidthMeter.init();
if (has_progress) {
try bandwidth_meter.start();
}
// Setup worker manager
const num_workers = @min(urls.len, self.concurrent_connections);
var worker_manager = try WorkerManager.init(self.allocator, &should_stop, num_workers);
defer worker_manager.deinit();
// Setup download workers
const workers = try worker_manager.setupDownloadWorkers(
urls,
self.concurrent_connections,
timer.timer_interface(),
strategy.max_duration_ns,
);
defer worker_manager.cleanupWorkers(workers);
// Start workers
try worker_manager.startDownloadWorkers(workers);
// Main measurement loop
while (strategy.shouldContinue(timer.timer_interface().read())) {
std.time.sleep(strategy.getSleepInterval());
const current_bytes = worker_manager.getCurrentDownloadBytes(workers);
if (has_progress) {
bandwidth_meter.update_total(current_bytes);
const measurement = bandwidth_meter.bandwidthWithUnits();
progress_callback.call(measurement);
}
const should_stop_early = try strategy.handleProgress(
timer.timer_interface().read(),
current_bytes,
);
if (should_stop_early) break;
}
// Stop and wait for workers
worker_manager.stopAndJoinWorkers();
// Calculate results
const totals = worker_manager.calculateDownloadTotals(workers);
if (totals.errors > 0) {
print("Download completed with {} errors\n", .{totals.errors});
}
const actual_duration_ns = timer.timer_interface().read();
const actual_duration_s = @as(f64, @floatFromInt(actual_duration_ns)) / std.time.ns_per_s;
if (actual_duration_s == 0) return SpeedTestResult.fromBytesPerSecond(0);
const speed_bytes_per_sec = @as(f64, @floatFromInt(totals.bytes)) / actual_duration_s;
return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
}
// Private implementation for stability-based upload
fn measureUploadSpeedWithStability(
self: *HTTPSpeedTester,
urls: []const []const u8,
strategy: *StabilityStrategy,
upload_data: []const u8,
comptime ProgressType: ?type,
progress_callback: if (ProgressType) |T| T else void,
) !SpeedTestResult {
const has_progress = ProgressType != null;
var timer = try speed_worker.RealTimer.init();
var should_stop = std.atomic.Value(bool).init(false);
// Initialize bandwidth meter for progress tracking
var bandwidth_meter = BandwidthMeter.init();
if (has_progress) {
try bandwidth_meter.start();
}
// Setup worker manager
const num_workers = @min(urls.len, self.concurrent_connections);
var worker_manager = try WorkerManager.init(self.allocator, &should_stop, num_workers);
@@ -335,6 +362,13 @@ pub const HTTPSpeedTester = struct {
std.time.sleep(strategy.getSleepInterval());
const current_bytes = worker_manager.getCurrentUploadBytes(workers);
if (has_progress) {
bandwidth_meter.update_total(current_bytes);
const measurement = bandwidth_meter.bandwidthWithUnits();
progress_callback.call(measurement);
}
const should_stop_early = try strategy.handleProgress(
timer.timer_interface().read(),
current_bytes,

View file

@@ -1,9 +1,12 @@
const std = @import("std");
pub const StabilityCriteria = struct {
min_samples: u32,
max_variance_percent: f64,
max_duration_seconds: u32,
ramp_up_duration_seconds: u32 = 4,
max_duration_seconds: u32 = 25,
measurement_interval_ms: u64 = 750,
sliding_window_size: u32 = 6,
stability_threshold_cov: f64 = 0.15,
stable_checks_required: u32 = 2,
};
pub const DurationStrategy = struct {
@@ -21,21 +24,26 @@ pub const DurationStrategy = struct {
pub const StabilityStrategy = struct {
criteria: StabilityCriteria,
ramp_up_duration_ns: u64,
max_duration_ns: u64,
speed_samples: std.ArrayList(f64),
measurement_interval_ns: u64,
speed_measurements: std.ArrayList(f64), // Sliding window of recent speeds
last_sample_time: u64 = 0,
last_total_bytes: u64 = 0,
consecutive_stable_checks: u32 = 0,
pub fn init(allocator: std.mem.Allocator, criteria: StabilityCriteria) StabilityStrategy {
return StabilityStrategy{
.criteria = criteria,
.ramp_up_duration_ns = @as(u64, criteria.ramp_up_duration_seconds) * std.time.ns_per_s,
.max_duration_ns = @as(u64, criteria.max_duration_seconds) * std.time.ns_per_s,
.speed_samples = std.ArrayList(f64).init(allocator),
.measurement_interval_ns = criteria.measurement_interval_ms * std.time.ns_per_ms,
.speed_measurements = std.ArrayList(f64).init(allocator),
};
}
pub fn deinit(self: *StabilityStrategy) void {
self.speed_samples.deinit();
self.speed_measurements.deinit();
}
pub fn shouldContinue(self: StabilityStrategy, current_time: u64) bool {
@@ -43,27 +51,51 @@ pub const StabilityStrategy = struct {
}
pub fn getSleepInterval(self: StabilityStrategy) u64 {
_ = self;
return std.time.ns_per_ms * 100; // 100ms for stability sampling
return self.measurement_interval_ns / 3; // Sample more frequently than measurement interval
}
pub fn shouldSample(self: *StabilityStrategy, current_time: u64) bool {
return current_time - self.last_sample_time >= std.time.ns_per_s;
return current_time - self.last_sample_time >= self.measurement_interval_ns;
}
pub fn addSample(self: *StabilityStrategy, current_time: u64, current_total_bytes: u64) !bool {
// Skip first sample
// Skip the first sample; it only sets the baseline for speed calculation
if (self.last_sample_time > 0) {
const bytes_diff = current_total_bytes - self.last_total_bytes;
const time_diff_s = @as(f64, @floatFromInt(current_time - self.last_sample_time)) / std.time.ns_per_s;
const current_speed = @as(f64, @floatFromInt(bytes_diff)) / time_diff_s;
const time_diff_ns = current_time - self.last_sample_time;
const time_diff_s = @as(f64, @floatFromInt(time_diff_ns)) / std.time.ns_per_s;
try self.speed_samples.append(current_speed);
const interval_speed = @as(f64, @floatFromInt(bytes_diff)) / time_diff_s;
// Check stability if we have enough samples
if (self.speed_samples.items.len >= self.criteria.min_samples) {
if (isStable(self.speed_samples.items, self.criteria.max_variance_percent)) {
return true; // Stable, can stop
// Phase 1: Ramp-up - collect measurements but don't check stability
if (current_time < self.ramp_up_duration_ns) {
try self.speed_measurements.append(interval_speed);
// Keep sliding window size
if (self.speed_measurements.items.len > self.criteria.sliding_window_size) {
_ = self.speed_measurements.orderedRemove(0);
}
} else {
// Phase 2: Stabilization - check CoV for stability
try self.speed_measurements.append(interval_speed);
// Maintain sliding window
if (self.speed_measurements.items.len > self.criteria.sliding_window_size) {
_ = self.speed_measurements.orderedRemove(0);
}
// Check stability if we have enough measurements
if (self.speed_measurements.items.len >= self.criteria.sliding_window_size) {
const cov = calculateCoV(self.speed_measurements.items);
if (cov <= self.criteria.stability_threshold_cov) {
self.consecutive_stable_checks += 1;
if (self.consecutive_stable_checks >= self.criteria.stable_checks_required) {
return true; // Stable, can stop
}
} else {
self.consecutive_stable_checks = 0; // Reset counter
}
}
}
}
@@ -81,31 +113,30 @@ pub const StabilityStrategy = struct {
}
};
fn isStable(samples: []const f64, max_variance_percent: f64) bool {
if (samples.len < 2) return false;
/// Calculate Coefficient of Variation (standard deviation / mean) for stability detection
fn calculateCoV(speeds: []const f64) f64 {
if (speeds.len < 2) return 1.0; // Not enough data, assume unstable
// Calculate mean
var sum: f64 = 0;
for (samples) |sample| {
sum += sample;
for (speeds) |speed| {
sum += speed;
}
const mean = sum / @as(f64, @floatFromInt(samples.len));
const mean = sum / @as(f64, @floatFromInt(speeds.len));
if (mean == 0) return false;
if (mean == 0) return 1.0; // Avoid division by zero
// Calculate variance
var variance: f64 = 0;
for (samples) |sample| {
const diff = sample - mean;
for (speeds) |speed| {
const diff = speed - mean;
variance += diff * diff;
}
variance = variance / @as(f64, @floatFromInt(samples.len));
variance = variance / @as(f64, @floatFromInt(speeds.len));
// Calculate coefficient of variation (standard deviation / mean)
// Calculate CoV (coefficient of variation)
const std_dev = @sqrt(variance);
const cv_percent = (std_dev / mean) * 100.0;
return cv_percent <= max_variance_percent;
return std_dev / mean;
}
// Clean helper functions

View file

@@ -29,8 +29,3 @@ pub fn createCallback(context: anytype, comptime updateFn: anytype) ProgressCall
.updateFn = wrapper.call,
};
}
/// Check if a value is a valid progress callback at comptime
pub fn isProgressCallback(comptime T: type) bool {
return @hasDecl(T, "call") and @hasField(T, "context");
}

View file

@@ -12,22 +12,6 @@ test "createDurationStrategy" {
try testing.expect(strategy.progress_update_interval_ms == 100);
}
test "createStabilityStrategy" {
const criteria = StabilityCriteria{
.min_samples = 5,
.max_variance_percent = 10.0,
.max_duration_seconds = 30,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
try testing.expect(strategy.criteria.min_samples == 5);
try testing.expect(strategy.criteria.max_variance_percent == 10.0);
try testing.expect(strategy.criteria.max_duration_seconds == 30);
try testing.expect(strategy.max_duration_ns == 30 * std.time.ns_per_s);
}
test "DurationStrategy shouldContinue" {
const strategy = measurement_strategy.createDurationStrategy(1, 100); // 1 second
@@ -38,59 +22,74 @@ test "DurationStrategy shouldContinue" {
try testing.expect(!strategy.shouldContinue(2 * std.time.ns_per_s)); // 2 seconds
}
test "Strategy getSleepInterval" {
// Duration strategy should use progress update interval
const duration_strategy = measurement_strategy.createDurationStrategy(10, 250);
try testing.expect(duration_strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
}
// Fast.com-style stability tests
test "StabilityCriteria default values" {
const criteria = StabilityCriteria{};
try testing.expect(criteria.ramp_up_duration_seconds == 4);
try testing.expect(criteria.max_duration_seconds == 25);
try testing.expect(criteria.measurement_interval_ms == 750);
try testing.expect(criteria.sliding_window_size == 6);
try testing.expect(criteria.stability_threshold_cov == 0.15);
try testing.expect(criteria.stable_checks_required == 2);
}
test "createStabilityStrategy" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 5,
.max_duration_seconds = 20,
.measurement_interval_ms = 500,
.sliding_window_size = 8,
.stability_threshold_cov = 0.12,
.stable_checks_required = 3,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
try testing.expect(strategy.criteria.ramp_up_duration_seconds == 5);
try testing.expect(strategy.criteria.max_duration_seconds == 20);
try testing.expect(strategy.criteria.measurement_interval_ms == 500);
try testing.expect(strategy.criteria.sliding_window_size == 8);
try testing.expect(strategy.criteria.stability_threshold_cov == 0.12);
try testing.expect(strategy.criteria.stable_checks_required == 3);
try testing.expect(strategy.ramp_up_duration_ns == 5 * std.time.ns_per_s);
try testing.expect(strategy.max_duration_ns == 20 * std.time.ns_per_s);
}
test "StabilityStrategy shouldContinue" {
const criteria = StabilityCriteria{
.min_samples = 3,
.max_variance_percent = 5.0,
.max_duration_seconds = 5,
.max_duration_seconds = 20,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Should continue before max duration
try testing.expect(strategy.shouldContinue(2 * std.time.ns_per_s)); // 2 seconds
try testing.expect(strategy.shouldContinue(15 * std.time.ns_per_s));
// Should not continue after max duration
try testing.expect(!strategy.shouldContinue(10 * std.time.ns_per_s)); // 10 seconds
try testing.expect(!strategy.shouldContinue(25 * std.time.ns_per_s));
}
test "Strategy getSleepInterval" {
// Duration strategy should use progress update interval
const duration_strategy = measurement_strategy.createDurationStrategy(10, 250);
try testing.expect(duration_strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
test "StabilityStrategy getSleepInterval" {
const criteria = StabilityCriteria{};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Stability strategy should use fixed 100ms
const criteria = StabilityCriteria{
.min_samples = 3,
.max_variance_percent = 5.0,
.max_duration_seconds = 10,
};
var stability_strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer stability_strategy.deinit();
try testing.expect(stability_strategy.getSleepInterval() == 100 * std.time.ns_per_ms);
}
test "StabilityCriteria default values" {
const criteria = StabilityCriteria{
.min_samples = 5,
.max_variance_percent = 10.0,
.max_duration_seconds = 30,
};
try testing.expect(criteria.min_samples == 5);
try testing.expect(criteria.max_variance_percent == 10.0);
try testing.expect(criteria.max_duration_seconds == 30);
// Should be measurement_interval / 3 = 750ms / 3 = 250ms
try testing.expect(strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
}
test "StabilityStrategy shouldSample timing" {
const criteria = StabilityCriteria{
.min_samples = 3,
.max_variance_percent = 5.0,
.max_duration_seconds = 10,
};
const criteria = StabilityCriteria{};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
@@ -107,9 +106,10 @@ test "StabilityStrategy shouldSample timing" {
test "StabilityStrategy addSample basic functionality" {
const criteria = StabilityCriteria{
.min_samples = 2,
.max_variance_percent = 50.0, // High threshold to avoid early stability
.max_duration_seconds = 10,
.ramp_up_duration_seconds = 1, // Short for testing
.sliding_window_size = 3,
.stability_threshold_cov = 0.5, // High threshold to avoid early stability
.stable_checks_required = 2,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
@@ -118,15 +118,220 @@ test "StabilityStrategy addSample basic functionality" {
// First sample should be skipped
const is_stable1 = try strategy.addSample(1 * std.time.ns_per_s, 1000);
try testing.expect(!is_stable1);
try testing.expect(strategy.speed_samples.items.len == 0);
try testing.expect(strategy.speed_measurements.items.len == 0);
// Second sample should be added
const is_stable2 = try strategy.addSample(2 * std.time.ns_per_s, 2000);
try testing.expect(!is_stable2); // Not stable yet, need min_samples
try testing.expect(strategy.speed_samples.items.len == 1);
try testing.expect(!is_stable2); // Not stable yet, need more measurements for CoV
try testing.expect(strategy.speed_measurements.items.len == 1);
// Third sample should be added and might trigger stability check
_ = try strategy.addSample(3 * std.time.ns_per_s, 3000);
try testing.expect(strategy.speed_samples.items.len == 2);
// Result depends on variance calculation, but should not crash
// Third sample should be added
const is_stable3 = try strategy.addSample(3 * std.time.ns_per_s, 3000);
try testing.expect(!is_stable3); // Still need more measurements
try testing.expect(strategy.speed_measurements.items.len == 2);
// Fourth sample should trigger stability check (we have 3 measurements now)
_ = try strategy.addSample(4 * std.time.ns_per_s, 4000);
try testing.expect(strategy.speed_measurements.items.len == 3);
}
test "StabilityStrategy requires ramp up duration" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 10,
.sliding_window_size = 2,
.stability_threshold_cov = 0.01, // Low threshold for easy stability
.stable_checks_required = 1,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Add samples before ramp up duration - should not be stable
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000);
_ = try strategy.addSample(2 * std.time.ns_per_s, 2000);
const is_stable_early = try strategy.addSample(3 * std.time.ns_per_s, 3000);
try testing.expect(!is_stable_early); // Should not be stable before ramp up duration
// Add sample after ramp up duration - might be stable
_ = try strategy.addSample(11 * std.time.ns_per_s, 11000);
// Result depends on CoV calculation, but should not crash
}
test "StabilityStrategy handleProgress integration" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 2,
.sliding_window_size = 2,
.stability_threshold_cov = 0.1,
.measurement_interval_ms = 500,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// First call at 500ms takes the baseline sample (skipped); should not stop
const should_stop1 = try strategy.handleProgress(500 * std.time.ns_per_ms, 500);
try testing.expect(!should_stop1);
// Only 300ms since the baseline sample: below the 500ms interval, no new sample
const should_stop2 = try strategy.handleProgress(800 * std.time.ns_per_ms, 800);
try testing.expect(!should_stop2);
// Only 250ms since the baseline: still below the 500ms measurement interval
_ = try strategy.handleProgress(750 * std.time.ns_per_ms, 750);
try testing.expect(strategy.speed_measurements.items.len == 0); // First sample skipped
// Should add second sample
_ = try strategy.handleProgress(1500 * std.time.ns_per_ms, 1500);
try testing.expect(strategy.speed_measurements.items.len == 1);
}
test "CoV stability detection algorithm" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 1, // Short for testing
.sliding_window_size = 4,
.stability_threshold_cov = 0.05, // 5% CoV threshold
.stable_checks_required = 1,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Add stable samples after ramp up period
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
_ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
_ = try strategy.addSample(3 * std.time.ns_per_s, 3000); // 1000 bytes/s
_ = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 1000 bytes/s
// This should be stable since CoV should be very low
const is_stable = try strategy.addSample(5 * std.time.ns_per_s, 5000); // 1000 bytes/s
// Should be stable with consistent speeds
try testing.expect(is_stable);
}
test "CoV stability detection - unstable case" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 1, // Short for testing
.sliding_window_size = 3,
.stability_threshold_cov = 0.02, // Strict 2% CoV threshold
.stable_checks_required = 1,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Add samples that should NOT be stable (high variance)
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
_ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
_ = try strategy.addSample(3 * std.time.ns_per_s, 3500); // 1500 bytes/s (high variance)
// This should NOT be stable due to high CoV
const is_stable = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 500 bytes/s (high variance)
// Should not be stable with inconsistent speeds
try testing.expect(!is_stable);
}
test "CoV stability handles variable speeds correctly" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 1,
.sliding_window_size = 6,
.stability_threshold_cov = 0.05,
.stable_checks_required = 2,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Add samples with a peak in the middle, then lower speeds
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
_ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
_ = try strategy.addSample(3 * std.time.ns_per_s, 4000); // 2000 bytes/s (peak creates high CoV)
_ = try strategy.addSample(4 * std.time.ns_per_s, 5000); // 1000 bytes/s
_ = try strategy.addSample(5 * std.time.ns_per_s, 6000); // 1000 bytes/s
// Should not be stable yet: only five interval speeds so far, the window needs 6
const is_stable = try strategy.addSample(6 * std.time.ns_per_s, 7000); // 1000 bytes/s
// With fewer measurements than sliding_window_size, no stability check runs yet
try testing.expect(!is_stable);
// Test should not crash and should have collected measurements
try testing.expect(strategy.speed_measurements.items.len > 0);
}
test "CoV stability detection realistic scenario" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 5,
.max_duration_seconds = 20,
.stability_threshold_cov = 0.15, // 15% CoV threshold
.sliding_window_size = 6,
.stable_checks_required = 2,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Simulate realistic speed test progression: ramp up, then stabilize
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
_ = try strategy.addSample(2 * std.time.ns_per_s, 3000); // 2000 bytes/s (ramp up)
_ = try strategy.addSample(3 * std.time.ns_per_s, 6000); // 3000 bytes/s (still ramping)
// Still inside the 5s ramp-up - should not be stable regardless of measurements
const stable_before_min = try strategy.addSample(4 * std.time.ns_per_s, 10000); // 4000 bytes/s (peak)
try testing.expect(!stable_before_min);
// After the ramp-up period, with stable measurements
_ = try strategy.addSample(6 * std.time.ns_per_s, 16000); // 3000 bytes/s (2s since last sample)
_ = try strategy.addSample(7 * std.time.ns_per_s, 20000); // 4000 bytes/s (stable)
_ = try strategy.addSample(8 * std.time.ns_per_s, 24000); // 4000 bytes/s (stable)
const stable_after_min = try strategy.addSample(9 * std.time.ns_per_s, 28000); // 4000 bytes/s (stable)
// Should be able to detect stability after the ramp-up period with consistent speeds
try testing.expect(stable_after_min or strategy.speed_measurements.items.len >= 6);
}
test "CoV timing intervals specification" {
const criteria = StabilityCriteria{};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Should be measurement_interval / 3 = 750ms / 3 = 250ms
try testing.expect(strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
// Should enforce measurement interval sampling (750ms by default)
try testing.expect(!strategy.shouldSample(0));
strategy.last_sample_time = 500 * std.time.ns_per_ms;
try testing.expect(!strategy.shouldSample(1000 * std.time.ns_per_ms));
try testing.expect(strategy.shouldSample(1250 * std.time.ns_per_ms));
}
test "CoV algorithm handles edge cases correctly" {
const criteria = StabilityCriteria{
.ramp_up_duration_seconds = 1,
.sliding_window_size = 3,
.stability_threshold_cov = 0.05,
.stable_checks_required = 1,
};
var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
defer strategy.deinit();
// Test very small speed changes (edge case for percentage calculation)
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
_ = try strategy.addSample(2 * std.time.ns_per_s, 1001); // 1 byte/s
_ = try strategy.addSample(3 * std.time.ns_per_s, 1002); // 1 byte/s
const stable_small = try strategy.addSample(4 * std.time.ns_per_s, 1003); // 1 byte/s
// Should handle small speeds without division errors
_ = stable_small; // May or may not be stable, but shouldn't crash
// Test zero speed edge case
strategy.speed_measurements.clearRetainingCapacity();
strategy.last_sample_time = 0;
_ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
const stable_zero = try strategy.addSample(2 * std.time.ns_per_s, 1000); // 0 bytes/s
// Zero speed should not be considered stable
try testing.expect(!stable_zero);
}