Mirror of https://github.com/mikkelam/fast-cli.git (synced 2025-12-18 12:54:05 +00:00)

update stopping algorithm and use by default

This commit is contained in:
parent 1ddc7db574
commit 8000eb850d

5 changed files with 234 additions and 587 deletions
README.md

@@ -6,14 +6,16 @@

A blazingly fast CLI tool for testing internet speed compatible with fast.com (api v2). Written in Zig for maximum performance.

⚡ **1.3 MiB binary** • 🚀 **Zero runtime deps** • 📊 **Real-time progress**
Uses **Fast.com-style stability detection** by default for accurate results with adaptive stopping.

⚡ **1.3 MiB binary** • 🚀 **Zero runtime deps** • 📊 **Smart stability detection**

## Why fast-cli-zig?

- **Tiny binary**: Just 1.4 MiB, no runtime dependencies
- **Blazing fast**: Concurrent connections with adaptive chunk sizing
- **Cross-platform**: Single binary for Linux, macOS, Windows
- **Real-time feedback**: Live speed updates during tests
- **Smart stopping**: Uses Coefficient of Variation (CoV) algorithm for adaptive test duration (see the worked example below)
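
As a rough worked illustration of the CoV check (the numbers are illustrative, not measured): for a sliding window of per-interval speeds of 100, 104, 96, 102, 98, and 100 Mbps, the mean is 100 Mbps and the population standard deviation is about 2.6 Mbps, so

$$\mathrm{CoV} = \frac{\sigma}{\mu} \approx \frac{2.6}{100} = 0.026,$$

far below the default threshold of 0.15; two consecutive windows like this end the test early.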

## Installation

@@ -42,37 +44,26 @@ v0.0.1

Usage: fast-cli [options]

Flags:
--stability-max-variance Maximum variance percentage for stability test [String] (default: "10.0")
-u, --upload Check upload speed as well [Bool] (default: false)
-d, --duration Duration in seconds for each test phase - download, then upload if enabled (duration mode only) [Int] (default: 10)
--stability-min-samples Minimum samples for stability test [Int] (default: 5)
--stability-max-duration Maximum duration in seconds for stability test [Int] (default: 30)
--https Use https when connecting to fast.com [Bool] (default: true)
-j, --json Output results in JSON format [Bool] (default: false)
-m, --mode Test mode: 'duration' or 'stability' [String] (default: "duration")
-h, --help Shows the help for a command [Bool] (default: false)
-u, --upload Check upload speed as well [Bool] (default: false)
-d, --duration Maximum test duration in seconds (uses Fast.com-style stability detection by default) [Int] (default: 30)
--https Use https when connecting to fast.com [Bool] (default: true)
-j, --json Output results in JSON format [Bool] (default: false)
-h, --help Shows the help for a command [Bool] (default: false)

Use "fast-cli --help" for more information.
```

## Performance Comparison

TODO

## Options

| Flag | Description | Default |
|------|-------------|---------|
| `--upload`, `-u` | Test upload speed | `false` |
| `--duration`, `-d` | Test duration (seconds) | `10` |
| `--json`, `-j` | JSON output | `false` |
| `--https` | Use HTTPS | `true` |

## Example Output

```console
$ fast-cli --upload
🏓 25ms | ⬇️ Download: 113.7 Mbps | ⬆️ Upload: 62.1 Mbps

$ fast-cli -d 15 # Quick test with 15s max duration
🏓 22ms | ⬇️ Download: 105.0 Mbps

$ fast-cli -j # JSON output
{"download_mbps": 131.4, "ping_ms": 20.8}
```

## Development

@@ -1,11 +1,11 @@
const std = @import("std");
const zli = @import("zli");
const builtin = @import("builtin");
const build_options = @import("build_options");

const Fast = @import("../lib/fast.zig").Fast;
const HTTPSpeedTester = @import("../lib/http_speed_tester_v2.zig").HTTPSpeedTester;

const StabilityCriteria = @import("../lib/http_speed_tester_v2.zig").StabilityCriteria;
const FastStabilityCriteria = @import("../lib/http_speed_tester_v2.zig").FastStabilityCriteria;
const SpeedTestResult = @import("../lib/http_speed_tester_v2.zig").SpeedTestResult;
const BandwidthMeter = @import("../lib/bandwidth.zig");
const SpeedMeasurement = @import("../lib/bandwidth.zig").SpeedMeasurement;

@@ -58,7 +58,7 @@ pub fn build(allocator: std.mem.Allocator) !*zli.Command {
    const root = try zli.Command.init(allocator, .{
        .name = "fast-cli",
        .description = "Estimate connection speed using fast.com",
        .version = std.SemanticVersion.parse(build_options.version) catch null,
        version = null,
    }, run);

    try root.addFlag(https_flag);

@@ -120,11 +120,13 @@ fn run(ctx: zli.CommandContext) !void {
    defer speed_tester.deinit();

    // Use Fast.com-style stability detection by default
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 7,
        .max_duration_seconds = @as(u32, @intCast(@min(30, max_duration))),
        .stability_delta_percent = 5.0,
        .min_stable_measurements = 6,
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 4,
        .max_duration_seconds = @as(u32, @intCast(@max(25, max_duration))),
        .measurement_interval_ms = 750,
        .sliding_window_size = 6,
        .stability_threshold_cov = 0.15,
        .stable_checks_required = 2,
    };

    const download_result = if (json_output) blk: {
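
For orientation, a minimal sketch of how these criteria feed the tester, using only names that appear elsewhere in this diff (`StabilityCriteria`, `measure_download_speed_fast_stability`) and assuming `speed_tester`, `urls`, and `max_duration` are in scope as in `run()`; the real command wiring above additionally handles JSON output and a progress callback:

```zig
// Sketch only: assumes speed_tester (an HTTPSpeedTester), urls, and max_duration exist as in run().
const criteria = StabilityCriteria{
    // Clamp the adaptive phase to at least 25 s, as run() does above; every other
    // field keeps its default (4 s ramp-up, 750 ms interval, window of 6,
    // CoV threshold 0.15, 2 consecutive stable checks).
    .max_duration_seconds = @as(u32, @intCast(@max(25, max_duration))),
};

// Blocking call without a progress callback; returns a SpeedTestResult.
const download_result = try speed_tester.measure_download_speed_fast_stability(urls, criteria);
_ = download_result;
```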

@@ -7,9 +7,7 @@ const WorkerManager = @import("workers/worker_manager.zig").WorkerManager;
const measurement_strategy = @import("measurement_strategy.zig");
const DurationStrategy = measurement_strategy.DurationStrategy;
const StabilityStrategy = measurement_strategy.StabilityStrategy;
const FastStabilityStrategy = measurement_strategy.FastStabilityStrategy;
pub const StabilityCriteria = measurement_strategy.StabilityCriteria;
pub const FastStabilityCriteria = measurement_strategy.FastStabilityCriteria;

const print = std.debug.print;

@@ -66,22 +64,15 @@ pub const HTTPSpeedTester = struct {
        return self.measureDownloadSpeedWithDuration(urls, strategy, ProgressType, progress_callback);
    }

    // Clean stability-based download
    pub fn measure_download_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
        var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
        defer strategy.deinit();
        return self.measureDownloadSpeedWithStability(urls, &strategy);
    }

    // Fast.com-style stability-based download with optional progress callback
    pub fn measure_download_speed_fast_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
        var strategy = measurement_strategy.createFastStabilityStrategy(self.allocator, criteria);
    pub fn measure_download_speed_fast_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
        var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
        defer strategy.deinit();
        return self.measureDownloadSpeedWithFastStability(urls, &strategy, ProgressType, progress_callback);
    }

    // Fast.com-style stability-based download without progress callback
    pub fn measure_download_speed_fast_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria) !SpeedTestResult {
    pub fn measure_download_speed_fast_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
        return self.measure_download_speed_fast_stability_duration(urls, criteria, null, {});
    }

@@ -95,30 +86,19 @@ pub const HTTPSpeedTester = struct {
        return self.measureUploadSpeedWithDuration(urls, strategy, upload_data, ProgressType, progress_callback);
    }

    // Clean stability-based upload
    pub fn measure_upload_speed_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
    // Fast.com-style stability-based upload with optional progress callback
    pub fn measure_upload_speed_fast_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
        const upload_data = try self.allocator.alloc(u8, 4 * 1024 * 1024);
        defer self.allocator.free(upload_data);
        @memset(upload_data, 'A');

        var strategy = measurement_strategy.createStabilityStrategy(self.allocator, criteria);
        defer strategy.deinit();
        return self.measureUploadSpeedWithStability(urls, &strategy, upload_data);
    }

    // Fast.com-style stability-based upload with optional progress callback
    pub fn measure_upload_speed_fast_stability_duration(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria, comptime ProgressType: ?type, progress_callback: if (ProgressType) |T| T else void) !SpeedTestResult {
        const upload_data = try self.allocator.alloc(u8, 4 * 1024 * 1024);
        defer self.allocator.free(upload_data);
        @memset(upload_data, 'A');

        var strategy = measurement_strategy.createFastStabilityStrategy(self.allocator, criteria);
        defer strategy.deinit();
        return self.measureUploadSpeedWithFastStability(urls, &strategy, upload_data, ProgressType, progress_callback);
    }

    // Fast.com-style stability-based upload without progress callback
    pub fn measure_upload_speed_fast_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria) !SpeedTestResult {
    pub fn measure_upload_speed_fast_stability(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria) !SpeedTestResult {
        return self.measure_upload_speed_fast_stability_duration(urls, criteria, null, {});
    }

@@ -145,12 +125,12 @@ pub const HTTPSpeedTester = struct {
    }

    /// Fast stability download speed measurement with progress callback (type inferred)
    pub fn measureDownloadSpeedWithFastStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria, progress_callback: anytype) !SpeedTestResult {
    pub fn measureDownloadSpeedWithFastStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, progress_callback: anytype) !SpeedTestResult {
        return self.measure_download_speed_fast_stability_duration(urls, criteria, @TypeOf(progress_callback), progress_callback);
    }

    /// Fast stability upload speed measurement with progress callback (type inferred)
    pub fn measureUploadSpeedWithFastStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: FastStabilityCriteria, progress_callback: anytype) !SpeedTestResult {
    pub fn measureUploadSpeedWithFastStabilityProgress(self: *HTTPSpeedTester, urls: []const []const u8, criteria: StabilityCriteria, progress_callback: anytype) !SpeedTestResult {
        return self.measure_upload_speed_fast_stability_duration(urls, criteria, @TypeOf(progress_callback), progress_callback);
    }

@@ -219,62 +199,6 @@ pub const HTTPSpeedTester = struct {
        return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
    }

    // Private implementation for stability-based download
    fn measureDownloadSpeedWithStability(
        self: *HTTPSpeedTester,
        urls: []const []const u8,
        strategy: *StabilityStrategy,
    ) !SpeedTestResult {
        var timer = try speed_worker.RealTimer.init();
        var should_stop = std.atomic.Value(bool).init(false);

        // Setup worker manager
        const num_workers = @min(urls.len, self.concurrent_connections);
        var worker_manager = try WorkerManager.init(self.allocator, &should_stop, num_workers);
        defer worker_manager.deinit();

        // Setup download workers
        const workers = try worker_manager.setupDownloadWorkers(
            urls,
            self.concurrent_connections,
            timer.timer_interface(),
            strategy.max_duration_ns,
        );
        defer worker_manager.cleanupWorkers(workers);

        // Start workers
        try worker_manager.startDownloadWorkers(workers);

        // Main measurement loop
        while (strategy.shouldContinue(timer.timer_interface().read())) {
            std.time.sleep(strategy.getSleepInterval());

            const current_bytes = worker_manager.getCurrentDownloadBytes(workers);
            const should_stop_early = try strategy.handleProgress(
                timer.timer_interface().read(),
                current_bytes,
            );

            if (should_stop_early) break;
        }

        // Stop and wait for workers
        worker_manager.stopAndJoinWorkers();

        // Calculate results
        const totals = worker_manager.calculateDownloadTotals(workers);
        if (totals.errors > 0) {
            print("Download completed with {} errors\n", .{totals.errors});
        }

        const actual_duration_ns = timer.timer_interface().read();
        const actual_duration_s = @as(f64, @floatFromInt(actual_duration_ns)) / std.time.ns_per_s;

        if (actual_duration_s == 0) return SpeedTestResult.fromBytesPerSecond(0);
        const speed_bytes_per_sec = @as(f64, @floatFromInt(totals.bytes)) / actual_duration_s;
        return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
    }

    // Private implementation for duration-based upload
    fn measureUploadSpeedWithDuration(
        self: *HTTPSpeedTester,

@@ -342,69 +266,11 @@ pub const HTTPSpeedTester = struct {
        return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
    }

    // Private implementation for stability-based upload
    fn measureUploadSpeedWithStability(
        self: *HTTPSpeedTester,
        urls: []const []const u8,
        strategy: *StabilityStrategy,
        upload_data: []const u8,
    ) !SpeedTestResult {
        var timer = try speed_worker.RealTimer.init();
        var should_stop = std.atomic.Value(bool).init(false);

        // Setup worker manager
        const num_workers = @min(urls.len, self.concurrent_connections);
        var worker_manager = try WorkerManager.init(self.allocator, &should_stop, num_workers);
        defer worker_manager.deinit();

        // Setup upload workers
        const workers = try worker_manager.setupUploadWorkers(
            urls,
            self.concurrent_connections,
            timer.timer_interface(),
            strategy.max_duration_ns,
            upload_data,
        );
        defer worker_manager.cleanupWorkers(workers);

        // Start workers
        try worker_manager.startUploadWorkers(workers);

        // Main measurement loop
        while (strategy.shouldContinue(timer.timer_interface().read())) {
            std.time.sleep(strategy.getSleepInterval());

            const current_bytes = worker_manager.getCurrentUploadBytes(workers);
            const should_stop_early = try strategy.handleProgress(
                timer.timer_interface().read(),
                current_bytes,
            );

            if (should_stop_early) break;
        }

        // Stop and wait for workers
        worker_manager.stopAndJoinWorkers();

        // Calculate results
        const totals = worker_manager.calculateUploadTotals(workers);
        if (totals.errors > 0) {
            print("Upload completed with {} errors\n", .{totals.errors});
        }

        const actual_duration_ns = timer.timer_interface().read();
        const actual_duration_s = @as(f64, @floatFromInt(actual_duration_ns)) / std.time.ns_per_s;

        if (actual_duration_s == 0) return SpeedTestResult.fromBytesPerSecond(0);
        const speed_bytes_per_sec = @as(f64, @floatFromInt(totals.bytes)) / actual_duration_s;
        return SpeedTestResult.fromBytesPerSecond(speed_bytes_per_sec);
    }

    // Private implementation for Fast.com-style stability-based download
    fn measureDownloadSpeedWithFastStability(
        self: *HTTPSpeedTester,
        urls: []const []const u8,
        strategy: *FastStabilityStrategy,
        strategy: *StabilityStrategy,
        comptime ProgressType: ?type,
        progress_callback: if (ProgressType) |T| T else void,
    ) !SpeedTestResult {

@@ -476,7 +342,7 @@ pub const HTTPSpeedTester = struct {
    fn measureUploadSpeedWithFastStability(
        self: *HTTPSpeedTester,
        urls: []const []const u8,
        strategy: *FastStabilityStrategy,
        strategy: *StabilityStrategy,
        upload_data: []const u8,
        comptime ProgressType: ?type,
        progress_callback: if (ProgressType) |T| T else void,

@@ -1,17 +1,12 @@
const std = @import("std");

pub const FastStabilityCriteria = struct {
    min_duration_seconds: u32 = 7,
    max_duration_seconds: u32 = 30,
    stability_delta_percent: f64 = 5.0,
    min_stable_measurements: u32 = 6,
};

// Keep old struct for backward compatibility during transition
pub const StabilityCriteria = struct {
    min_samples: u32,
    max_variance_percent: f64,
    max_duration_seconds: u32,
    ramp_up_duration_seconds: u32 = 4,
    max_duration_seconds: u32 = 25,
    measurement_interval_ms: u64 = 750,
    sliding_window_size: u32 = 6,
    stability_threshold_cov: f64 = 0.15,
    stable_checks_required: u32 = 2,
};
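
With these defaults, and assuming one speed sample per 750 ms measurement interval (the very first sample only primes the byte counter), the earliest the adaptive phase can stop early is roughly

$$ t_{\min} \approx 0.75\,\text{s} + 6 \times 0.75\,\text{s} + 0.75\,\text{s} = 6\,\text{s}, $$

that is, one priming sample, six samples to fill the sliding window, and one more interval for the second consecutive stable check; this already exceeds the 4 s ramp-up, and `max_duration_seconds` (25 s by default) caps the test if the CoV never settles below 0.15 for two checks in a row.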

pub const DurationStrategy = struct {

@@ -27,103 +22,28 @@ pub const DurationStrategy = struct {
    }
};

pub const FastStabilityStrategy = struct {
    criteria: FastStabilityCriteria,
    min_duration_ns: u64,
    max_duration_ns: u64,
    speed_measurements: std.ArrayList(SpeedMeasurement),
    last_sample_time: u64 = 0,
    last_total_bytes: u64 = 0,

    const SpeedMeasurement = struct {
        speed: f64,
        time: u64,
    };

    pub fn init(allocator: std.mem.Allocator, criteria: FastStabilityCriteria) FastStabilityStrategy {
        return FastStabilityStrategy{
            .criteria = criteria,
            .min_duration_ns = @as(u64, criteria.min_duration_seconds) * std.time.ns_per_s,
            .max_duration_ns = @as(u64, criteria.max_duration_seconds) * std.time.ns_per_s,
            .speed_measurements = std.ArrayList(SpeedMeasurement).init(allocator),
        };
    }

    pub fn deinit(self: *FastStabilityStrategy) void {
        self.speed_measurements.deinit();
    }

    pub fn shouldContinue(self: FastStabilityStrategy, current_time: u64) bool {
        return current_time < self.max_duration_ns;
    }

    pub fn getSleepInterval(self: FastStabilityStrategy) u64 {
        _ = self;
        return std.time.ns_per_ms * 150; // Fast.com uses 150ms
    }

    pub fn shouldSample(self: *FastStabilityStrategy, current_time: u64) bool {
        return current_time - self.last_sample_time >= std.time.ns_per_s;
    }

    pub fn addSample(self: *FastStabilityStrategy, current_time: u64, current_total_bytes: u64) !bool {
        // Skip first sample
        if (self.last_sample_time > 0) {
            const bytes_diff = current_total_bytes - self.last_total_bytes;
            const time_diff_s = @as(f64, @floatFromInt(current_time - self.last_sample_time)) / std.time.ns_per_s;
            const current_speed = @as(f64, @floatFromInt(bytes_diff)) / time_diff_s;

            try self.speed_measurements.append(SpeedMeasurement{
                .speed = current_speed,
                .time = current_time,
            });

            // Apply Fast.com stability logic
            if (current_time >= self.min_duration_ns) {
                if (self.speed_measurements.items.len >= self.criteria.min_stable_measurements) {
                    if (isFastStable(
                        self.speed_measurements.items,
                        current_speed,
                        self.criteria.stability_delta_percent,
                        self.criteria.min_stable_measurements,
                    )) {
                        return true; // Stable, can stop
                    }
                }
            }
        }

        self.last_sample_time = current_time;
        self.last_total_bytes = current_total_bytes;
        return false; // Not stable yet
    }

    pub fn handleProgress(self: *FastStabilityStrategy, current_time: u64, current_bytes: u64) !bool {
        if (self.shouldSample(current_time)) {
            return try self.addSample(current_time, current_bytes);
        }
        return false;
    }
};

// Keep old strategy for backward compatibility
pub const StabilityStrategy = struct {
    criteria: StabilityCriteria,
    ramp_up_duration_ns: u64,
    max_duration_ns: u64,
    speed_samples: std.ArrayList(f64),
    measurement_interval_ns: u64,
    speed_measurements: std.ArrayList(f64), // Sliding window of recent speeds
    last_sample_time: u64 = 0,
    last_total_bytes: u64 = 0,
    consecutive_stable_checks: u32 = 0,

    pub fn init(allocator: std.mem.Allocator, criteria: StabilityCriteria) StabilityStrategy {
        return StabilityStrategy{
            .criteria = criteria,
            .ramp_up_duration_ns = @as(u64, criteria.ramp_up_duration_seconds) * std.time.ns_per_s,
            .max_duration_ns = @as(u64, criteria.max_duration_seconds) * std.time.ns_per_s,
            .speed_samples = std.ArrayList(f64).init(allocator),
            .measurement_interval_ns = criteria.measurement_interval_ms * std.time.ns_per_ms,
            .speed_measurements = std.ArrayList(f64).init(allocator),
        };
    }

    pub fn deinit(self: *StabilityStrategy) void {
        self.speed_samples.deinit();
        self.speed_measurements.deinit();
    }

    pub fn shouldContinue(self: StabilityStrategy, current_time: u64) bool {

@@ -131,27 +51,51 @@ pub const StabilityStrategy = struct {
    }

    pub fn getSleepInterval(self: StabilityStrategy) u64 {
        _ = self;
        return std.time.ns_per_ms * 100; // 100ms for stability sampling
        return self.measurement_interval_ns / 3; // Sample more frequently than measurement interval
    }

    pub fn shouldSample(self: *StabilityStrategy, current_time: u64) bool {
        return current_time - self.last_sample_time >= std.time.ns_per_s;
        return current_time - self.last_sample_time >= self.measurement_interval_ns;
    }

    pub fn addSample(self: *StabilityStrategy, current_time: u64, current_total_bytes: u64) !bool {
        // Skip first sample
        // Skip first sample to calculate speed
        if (self.last_sample_time > 0) {
            const bytes_diff = current_total_bytes - self.last_total_bytes;
            const time_diff_s = @as(f64, @floatFromInt(current_time - self.last_sample_time)) / std.time.ns_per_s;
            const current_speed = @as(f64, @floatFromInt(bytes_diff)) / time_diff_s;
            const time_diff_ns = current_time - self.last_sample_time;
            const time_diff_s = @as(f64, @floatFromInt(time_diff_ns)) / std.time.ns_per_s;

            try self.speed_samples.append(current_speed);
            const interval_speed = @as(f64, @floatFromInt(bytes_diff)) / time_diff_s;

            // Check stability if we have enough samples
            if (self.speed_samples.items.len >= self.criteria.min_samples) {
                if (isStable(self.speed_samples.items, self.criteria.max_variance_percent)) {
                    return true; // Stable, can stop
            // Phase 1: Ramp-up - collect measurements but don't check stability
            if (current_time < self.ramp_up_duration_ns) {
                try self.speed_measurements.append(interval_speed);

                // Keep sliding window size
                if (self.speed_measurements.items.len > self.criteria.sliding_window_size) {
                    _ = self.speed_measurements.orderedRemove(0);
                }
            } else {
                // Phase 2: Stabilization - check CoV for stability
                try self.speed_measurements.append(interval_speed);

                // Maintain sliding window
                if (self.speed_measurements.items.len > self.criteria.sliding_window_size) {
                    _ = self.speed_measurements.orderedRemove(0);
                }

                // Check stability if we have enough measurements
                if (self.speed_measurements.items.len >= self.criteria.sliding_window_size) {
                    const cov = calculateCoV(self.speed_measurements.items);

                    if (cov <= self.criteria.stability_threshold_cov) {
                        self.consecutive_stable_checks += 1;
                        if (self.consecutive_stable_checks >= self.criteria.stable_checks_required) {
                            return true; // Stable, can stop
                        }
                    } else {
                        self.consecutive_stable_checks = 0; // Reset counter
                    }
                }
            }
        }

@@ -169,64 +113,30 @@ pub const StabilityStrategy = struct {
    }
};

/// Simplified stability detection using recent measurements
fn isFastStable(
    measurements: []const FastStabilityStrategy.SpeedMeasurement,
    current_speed: f64,
    stability_delta_percent: f64,
    min_stable_measurements: u32,
) bool {
    if (measurements.len < min_stable_measurements) return false;
    if (current_speed == 0) return false;

    // Check if recent measurements are within delta threshold
    const window_size = @min(measurements.len, min_stable_measurements);
    const recent_start = measurements.len - window_size;

    // Calculate average of recent measurements
    var speed_sum: f64 = 0;
    for (measurements[recent_start..]) |measurement| {
        speed_sum += measurement.speed;
    }
    const avg_speed = speed_sum / @as(f64, @floatFromInt(window_size));

    // Check if all recent measurements are within threshold of average
    for (measurements[recent_start..]) |measurement| {
        const deviation_percent = @abs(measurement.speed - avg_speed) / avg_speed * 100.0;
        if (deviation_percent > stability_delta_percent) {
            return false;
        }
    }

    return true;
}

/// Legacy variance-based stability detection (for backward compatibility)
fn isStable(samples: []const f64, max_variance_percent: f64) bool {
    if (samples.len < 2) return false;
/// Calculate Coefficient of Variation (standard deviation / mean) for stability detection
fn calculateCoV(speeds: []const f64) f64 {
    if (speeds.len < 2) return 1.0; // Not enough data, assume unstable

    // Calculate mean
    var sum: f64 = 0;
    for (samples) |sample| {
        sum += sample;
    for (speeds) |speed| {
        sum += speed;
    }
    const mean = sum / @as(f64, @floatFromInt(samples.len));
    const mean = sum / @as(f64, @floatFromInt(speeds.len));

    if (mean == 0) return false;
    if (mean == 0) return 1.0; // Avoid division by zero

    // Calculate variance
    var variance: f64 = 0;
    for (samples) |sample| {
        const diff = sample - mean;
    for (speeds) |speed| {
        const diff = speed - mean;
        variance += diff * diff;
    }
    variance = variance / @as(f64, @floatFromInt(samples.len));
    variance = variance / @as(f64, @floatFromInt(speeds.len));

    // Calculate coefficient of variation (standard deviation / mean)
    // Calculate CoV (coefficient of variation)
    const std_dev = @sqrt(variance);
    const cv_percent = (std_dev / mean) * 100.0;

    return cv_percent <= max_variance_percent;
    return std_dev / mean;
}
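
As a quick numerical sanity check on this formula (a standalone sketch, not part of the diff; `covOf` is a hypothetical restatement of the same population-variance math), a steady six-sample window lands far below the default 0.15 threshold:

```zig
const std = @import("std");

// Standalone restatement of the CoV computation above: population variance,
// then standard deviation divided by the mean.
fn covOf(speeds: []const f64) f64 {
    if (speeds.len < 2) return 1.0;

    var sum: f64 = 0;
    for (speeds) |s| sum += s;
    const mean = sum / @as(f64, @floatFromInt(speeds.len));
    if (mean == 0) return 1.0;

    var variance: f64 = 0;
    for (speeds) |s| {
        const diff = s - mean;
        variance += diff * diff;
    }
    variance = variance / @as(f64, @floatFromInt(speeds.len));

    return @sqrt(variance) / mean;
}

test "steady window is comfortably below the 0.15 default threshold" {
    // Mean 100, population std dev ~2.58, so CoV ~0.026.
    const window = [_]f64{ 100, 104, 96, 102, 98, 100 };
    const cov = covOf(&window);
    try std.testing.expect(cov < 0.15);
    try std.testing.expectApproxEqAbs(@as(f64, 0.026), cov, 0.001);
}
```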

// Clean helper functions

@@ -237,10 +147,6 @@ pub fn createDurationStrategy(duration_seconds: u32, progress_update_interval_ms
    };
}

pub fn createFastStabilityStrategy(allocator: std.mem.Allocator, criteria: FastStabilityCriteria) FastStabilityStrategy {
    return FastStabilityStrategy.init(allocator, criteria);
}

pub fn createStabilityStrategy(allocator: std.mem.Allocator, criteria: StabilityCriteria) StabilityStrategy {
    return StabilityStrategy.init(allocator, criteria);
}

@@ -3,7 +3,6 @@ const testing = std.testing;
const measurement_strategy = @import("../measurement_strategy.zig");
const MeasurementStrategy = measurement_strategy.MeasurementStrategy;
const StabilityCriteria = measurement_strategy.StabilityCriteria;
const FastStabilityCriteria = measurement_strategy.FastStabilityCriteria;
const BandwidthMeter = @import("../bandwidth.zig").BandwidthMeter;

test "createDurationStrategy" {

@@ -13,22 +12,6 @@ test "createDurationStrategy" {
    try testing.expect(strategy.progress_update_interval_ms == 100);
}

test "createStabilityStrategy" {
    const criteria = StabilityCriteria{
        .min_samples = 5,
        .max_variance_percent = 10.0,
        .max_duration_seconds = 30,
    };

    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    try testing.expect(strategy.criteria.min_samples == 5);
    try testing.expect(strategy.criteria.max_variance_percent == 10.0);
    try testing.expect(strategy.criteria.max_duration_seconds == 30);
    try testing.expect(strategy.max_duration_ns == 30 * std.time.ns_per_s);
}

test "DurationStrategy shouldContinue" {
    const strategy = measurement_strategy.createDurationStrategy(1, 100); // 1 second

@@ -39,59 +22,74 @@ test "DurationStrategy shouldContinue" {
    try testing.expect(!strategy.shouldContinue(2 * std.time.ns_per_s)); // 2 seconds
}

test "Strategy getSleepInterval" {
    // Duration strategy should use progress update interval
    const duration_strategy = measurement_strategy.createDurationStrategy(10, 250);
    try testing.expect(duration_strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
}

// Fast.com-style stability tests

test "StabilityCriteria default values" {
    const criteria = StabilityCriteria{};

    try testing.expect(criteria.ramp_up_duration_seconds == 4);
    try testing.expect(criteria.max_duration_seconds == 25);
    try testing.expect(criteria.measurement_interval_ms == 750);
    try testing.expect(criteria.sliding_window_size == 6);
    try testing.expect(criteria.stability_threshold_cov == 0.15);
    try testing.expect(criteria.stable_checks_required == 2);
}

test "createStabilityStrategy" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 5,
        .max_duration_seconds = 20,
        .measurement_interval_ms = 500,
        .sliding_window_size = 8,
        .stability_threshold_cov = 0.12,
        .stable_checks_required = 3,
    };

    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    try testing.expect(strategy.criteria.ramp_up_duration_seconds == 5);
    try testing.expect(strategy.criteria.max_duration_seconds == 20);
    try testing.expect(strategy.criteria.measurement_interval_ms == 500);
    try testing.expect(strategy.criteria.sliding_window_size == 8);
    try testing.expect(strategy.criteria.stability_threshold_cov == 0.12);
    try testing.expect(strategy.criteria.stable_checks_required == 3);
    try testing.expect(strategy.ramp_up_duration_ns == 5 * std.time.ns_per_s);
    try testing.expect(strategy.max_duration_ns == 20 * std.time.ns_per_s);
}

test "StabilityStrategy shouldContinue" {
    const criteria = StabilityCriteria{
        .min_samples = 3,
        .max_variance_percent = 5.0,
        .max_duration_seconds = 5,
        .max_duration_seconds = 20,
    };

    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Should continue before max duration
    try testing.expect(strategy.shouldContinue(2 * std.time.ns_per_s)); // 2 seconds
    try testing.expect(strategy.shouldContinue(15 * std.time.ns_per_s));

    // Should not continue after max duration
    try testing.expect(!strategy.shouldContinue(10 * std.time.ns_per_s)); // 10 seconds
    try testing.expect(!strategy.shouldContinue(25 * std.time.ns_per_s));
}

test "Strategy getSleepInterval" {
    // Duration strategy should use progress update interval
    const duration_strategy = measurement_strategy.createDurationStrategy(10, 250);
    try testing.expect(duration_strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
test "StabilityStrategy getSleepInterval" {
    const criteria = StabilityCriteria{};
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Stability strategy should use fixed 100ms
    const criteria = StabilityCriteria{
        .min_samples = 3,
        .max_variance_percent = 5.0,
        .max_duration_seconds = 10,
    };
    var stability_strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer stability_strategy.deinit();

    try testing.expect(stability_strategy.getSleepInterval() == 100 * std.time.ns_per_ms);
}

test "StabilityCriteria default values" {
    const criteria = StabilityCriteria{
        .min_samples = 5,
        .max_variance_percent = 10.0,
        .max_duration_seconds = 30,
    };

    try testing.expect(criteria.min_samples == 5);
    try testing.expect(criteria.max_variance_percent == 10.0);
    try testing.expect(criteria.max_duration_seconds == 30);
    // Should be measurement_interval / 3 = 750ms / 3 = 250ms
    try testing.expect(strategy.getSleepInterval() == 250 * std.time.ns_per_ms);
}

test "StabilityStrategy shouldSample timing" {
    const criteria = StabilityCriteria{
        .min_samples = 3,
        .max_variance_percent = 5.0,
        .max_duration_seconds = 10,
    };

    const criteria = StabilityCriteria{};
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

@@ -108,110 +106,15 @@ test "StabilityStrategy shouldSample timing" {

test "StabilityStrategy addSample basic functionality" {
    const criteria = StabilityCriteria{
        .min_samples = 2,
        .max_variance_percent = 50.0, // High threshold to avoid early stability
        .max_duration_seconds = 10,
        .ramp_up_duration_seconds = 1, // Short for testing
        .sliding_window_size = 3,
        .stability_threshold_cov = 0.5, // High threshold to avoid early stability
        .stable_checks_required = 2,
    };

    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // First sample should be skipped
    const is_stable1 = try strategy.addSample(1 * std.time.ns_per_s, 1000);
    try testing.expect(!is_stable1);
    try testing.expect(strategy.speed_samples.items.len == 0);

    // Second sample should be added
    const is_stable2 = try strategy.addSample(2 * std.time.ns_per_s, 2000);
    try testing.expect(!is_stable2); // Not stable yet, need min_samples
    try testing.expect(strategy.speed_samples.items.len == 1);

    // Third sample should be added and might trigger stability check
    _ = try strategy.addSample(3 * std.time.ns_per_s, 3000);
    try testing.expect(strategy.speed_samples.items.len == 2);
    // Result depends on variance calculation, but should not crash
}

// Fast.com-style stability tests

test "FastStabilityCriteria default values" {
    const criteria = FastStabilityCriteria{};

    try testing.expect(criteria.min_duration_seconds == 7);
    try testing.expect(criteria.max_duration_seconds == 30);
    try testing.expect(criteria.stability_delta_percent == 2.0);
    try testing.expect(criteria.min_stable_measurements == 6);
}

test "createFastStabilityStrategy" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 10,
        .max_duration_seconds = 25,
        .stability_delta_percent = 3.0,
        .min_stable_measurements = 8,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    try testing.expect(strategy.criteria.min_duration_seconds == 10);
    try testing.expect(strategy.criteria.max_duration_seconds == 25);
    try testing.expect(strategy.criteria.stability_delta_percent == 3.0);
    try testing.expect(strategy.criteria.min_stable_measurements == 8);
    try testing.expect(strategy.min_duration_ns == 10 * std.time.ns_per_s);
    try testing.expect(strategy.max_duration_ns == 25 * std.time.ns_per_s);
}

test "FastStabilityStrategy shouldContinue" {
    const criteria = FastStabilityCriteria{
        .max_duration_seconds = 20,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Should continue before max duration
    try testing.expect(strategy.shouldContinue(15 * std.time.ns_per_s));

    // Should not continue after max duration
    try testing.expect(!strategy.shouldContinue(25 * std.time.ns_per_s));
}

test "FastStabilityStrategy getSleepInterval" {
    const criteria = FastStabilityCriteria{};
    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Should use Fast.com's 150ms interval
    try testing.expect(strategy.getSleepInterval() == 150 * std.time.ns_per_ms);
}

test "FastStabilityStrategy shouldSample timing" {
    const criteria = FastStabilityCriteria{};
    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // First call should not sample (last_sample_time is 0)
    try testing.expect(!strategy.shouldSample(0));

    // Should not sample if less than 1 second has passed
    strategy.last_sample_time = 500 * std.time.ns_per_ms; // 0.5 seconds
    try testing.expect(!strategy.shouldSample(800 * std.time.ns_per_ms)); // 0.8 seconds

    // Should sample if 1 second or more has passed
    try testing.expect(strategy.shouldSample(1600 * std.time.ns_per_ms)); // 1.6 seconds
}

test "FastStabilityStrategy addSample basic functionality" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 1, // Short for testing
        .min_stable_measurements = 3,
        .stability_delta_percent = 50.0, // High threshold to avoid early stability
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // First sample should be skipped
    const is_stable1 = try strategy.addSample(1 * std.time.ns_per_s, 1000);
    try testing.expect(!is_stable1);

@@ -219,7 +122,7 @@ test "FastStabilityStrategy addSample basic functionality" {

    // Second sample should be added
    const is_stable2 = try strategy.addSample(2 * std.time.ns_per_s, 2000);
    try testing.expect(!is_stable2); // Not stable yet, need min_stable_measurements
    try testing.expect(!is_stable2); // Not stable yet, need more measurements for CoV
    try testing.expect(strategy.speed_measurements.items.len == 1);

    // Third sample should be added

@@ -232,35 +135,37 @@ test "FastStabilityStrategy addSample basic functionality" {
    try testing.expect(strategy.speed_measurements.items.len == 3);
}

test "FastStabilityStrategy requires minimum duration" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 10,
        .min_stable_measurements = 2,
        .stability_delta_percent = 1.0, // Low threshold for easy stability
test "StabilityStrategy requires ramp up duration" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 10,
        .sliding_window_size = 2,
        .stability_threshold_cov = 0.01, // Low threshold for easy stability
        .stable_checks_required = 1,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Add samples before minimum duration - should not be stable
    // Add samples before ramp up duration - should not be stable
    _ = try strategy.addSample(1 * std.time.ns_per_s, 1000);
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000);
    const is_stable_early = try strategy.addSample(3 * std.time.ns_per_s, 3000);
    try testing.expect(!is_stable_early); // Should not be stable before min duration
    try testing.expect(!is_stable_early); // Should not be stable before ramp up duration

    // Add sample after minimum duration - might be stable
    // Add sample after ramp up duration - might be stable
    _ = try strategy.addSample(11 * std.time.ns_per_s, 11000);
    // Result depends on stability calculation, but should not crash
    // Result depends on CoV calculation, but should not crash
}

test "FastStabilityStrategy handleProgress integration" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 2,
        .min_stable_measurements = 2,
        .stability_delta_percent = 10.0,
test "StabilityStrategy handleProgress integration" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 2,
        .sliding_window_size = 2,
        .stability_threshold_cov = 0.1,
        .measurement_interval_ms = 500,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Should not trigger sampling immediately

@@ -271,124 +176,100 @@ test "FastStabilityStrategy handleProgress integration" {
    const should_stop2 = try strategy.handleProgress(800 * std.time.ns_per_ms, 800);
    try testing.expect(!should_stop2);

    // Should trigger sampling after 1 second
    _ = try strategy.handleProgress(1500 * std.time.ns_per_ms, 1500);
    // Should trigger sampling after measurement interval (750ms)
    _ = try strategy.handleProgress(750 * std.time.ns_per_ms, 750);
    try testing.expect(strategy.speed_measurements.items.len == 0); // First sample skipped

    // Should add second sample
    _ = try strategy.handleProgress(2500 * std.time.ns_per_ms, 2500);
    _ = try strategy.handleProgress(1500 * std.time.ns_per_ms, 1500);
    try testing.expect(strategy.speed_measurements.items.len == 1);
}

test "Fast.com delta stability detection algorithm" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 1, // Short for testing
        .min_stable_measurements = 4,
        .stability_delta_percent = 5.0, // 5% deviation threshold
test "CoV stability detection algorithm" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 1, // Short for testing
        .sliding_window_size = 4,
        .stability_threshold_cov = 0.05, // 5% CoV threshold
        .stable_checks_required = 1,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Add samples that should be stable (within 5% of each other)
    // Add stable samples after ramp up period
    _ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s
    _ = try strategy.addSample(3 * std.time.ns_per_s, 3050); // 1050 bytes/s (5% higher)
    _ = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 950 bytes/s (5% lower)
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
    _ = try strategy.addSample(3 * std.time.ns_per_s, 3000); // 1000 bytes/s
    _ = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 1000 bytes/s

    // This should be stable since all speeds are within 5% of 1000 bytes/s
    // This should be stable since CoV should be very low
    const is_stable = try strategy.addSample(5 * std.time.ns_per_s, 5000); // 1000 bytes/s

    // Should be stable with consistent speeds
    try testing.expect(is_stable);
}

test "Fast.com delta stability detection - unstable case" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 1, // Short for testing
        .min_stable_measurements = 3,
        .stability_delta_percent = 2.0, // Strict 2% threshold
test "CoV stability detection - unstable case" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 1, // Short for testing
        .sliding_window_size = 3,
        .stability_threshold_cov = 0.02, // Strict 2% CoV threshold
        .stable_checks_required = 1,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Add samples that should NOT be stable (outside 2% threshold)
    // Add samples that should NOT be stable (high variance)
    _ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s
    _ = try strategy.addSample(3 * std.time.ns_per_s, 3100); // 1100 bytes/s (10% higher)
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
    _ = try strategy.addSample(3 * std.time.ns_per_s, 3500); // 1500 bytes/s (high variance)

    // This should NOT be stable due to large deviation
    const is_stable = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 900 bytes/s (10% lower)
    // This should NOT be stable due to high CoV
    const is_stable = try strategy.addSample(4 * std.time.ns_per_s, 4000); // 500 bytes/s (high variance)

    // Should not be stable with inconsistent speeds
    try testing.expect(!is_stable);
}

test "Fast.com stability requires measurements after max speed" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 1,
        .min_stable_measurements = 6,
        .stability_delta_percent = 5.0,
test "CoV stability handles variable speeds correctly" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 1,
        .sliding_window_size = 6,
        .stability_threshold_cov = 0.05,
        .stable_checks_required = 2,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Add samples with a peak in the middle, then lower speeds
    _ = try strategy.addSample(1 * std.time.ns_per_s, 1000); // Skip first
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s
    _ = try strategy.addSample(3 * std.time.ns_per_s, 4000); // 2000 bytes/s (peak)
    _ = try strategy.addSample(4 * std.time.ns_per_s, 5000); // 1000 bytes/s (back down)
    _ = try strategy.addSample(2 * std.time.ns_per_s, 2000); // 1000 bytes/s (after ramp up)
    _ = try strategy.addSample(3 * std.time.ns_per_s, 4000); // 2000 bytes/s (peak creates high CoV)
    _ = try strategy.addSample(4 * std.time.ns_per_s, 5000); // 1000 bytes/s
    _ = try strategy.addSample(5 * std.time.ns_per_s, 6000); // 1000 bytes/s

    // Should not be stable yet - need more measurements after the peak
    // Should not be stable yet due to high CoV from the peak
    const is_stable = try strategy.addSample(6 * std.time.ns_per_s, 7000); // 1000 bytes/s

    // Fast.com algorithm should detect this pattern and require more stability
    // Either not stable yet OR we have collected enough measurements to make a decision
    if (is_stable) {
        try testing.expect(strategy.speed_measurements.items.len >= 6);
    }
    // CoV should still be too high due to the peak in the sliding window
    try testing.expect(!is_stable);

    // Test should not crash and should have collected measurements
    try testing.expect(strategy.speed_measurements.items.len > 0);
}

test "Fast.com API integration with legacy API" {
    // Test that both old and new APIs can coexist
    const old_criteria = StabilityCriteria{
        .min_samples = 5,
        .max_variance_percent = 10.0,
        .max_duration_seconds = 30,
    };

    const new_criteria = FastStabilityCriteria{
        .min_duration_seconds = 7,
        .max_duration_seconds = 30,
        .stability_delta_percent = 2.0,
        .min_stable_measurements = 6,
    };

    var old_strategy = measurement_strategy.createStabilityStrategy(testing.allocator, old_criteria);
    defer old_strategy.deinit();

    var new_strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, new_criteria);
    defer new_strategy.deinit();

    // Both should compile and initialize without conflicts
    try testing.expect(old_strategy.criteria.min_samples == 5);
    try testing.expect(new_strategy.criteria.min_stable_measurements == 6);
}

test "Fast.com stability detection realistic scenario" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 5,
test "CoV stability detection realistic scenario" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 5,
        .max_duration_seconds = 20,
        .stability_delta_percent = 2.0, // Fast.com's 2% threshold
        .min_stable_measurements = 6, // Fast.com's requirement
        .stability_threshold_cov = 0.15, // 15% CoV threshold
        .sliding_window_size = 6,
        .stable_checks_required = 2,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Simulate realistic speed test progression: ramp up, then stabilize

@@ -410,29 +291,30 @@ test "Fast.com stability detection realistic scenario" {
    try testing.expect(stable_after_min or strategy.speed_measurements.items.len >= 6);
}

test "Fast.com timing intervals match specification" {
    const criteria = FastStabilityCriteria{};
    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
test "CoV timing intervals specification" {
    const criteria = StabilityCriteria{};
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Fast.com uses 150ms progress frequency (vs our old 100ms)
    try testing.expect(strategy.getSleepInterval() == 150 * std.time.ns_per_ms);
    // Should be measurement_interval / 3 = 750ms / 3 = 250ms
    try testing.expect(strategy.getSleepInterval() == 250 * std.time.ns_per_ms);

    // Should enforce 1-second sampling intervals like Fast.com
    // Should enforce measurement interval sampling (750ms by default)
    try testing.expect(!strategy.shouldSample(0));
    strategy.last_sample_time = 500 * std.time.ns_per_ms;
    try testing.expect(!strategy.shouldSample(999 * std.time.ns_per_ms));
    try testing.expect(strategy.shouldSample(1500 * std.time.ns_per_ms));
    try testing.expect(!strategy.shouldSample(1000 * std.time.ns_per_ms));
    try testing.expect(strategy.shouldSample(1250 * std.time.ns_per_ms));
}

test "Fast.com delta algorithm handles edge cases correctly" {
    const criteria = FastStabilityCriteria{
        .min_duration_seconds = 1,
        .min_stable_measurements = 3,
        .stability_delta_percent = 5.0,
test "CoV algorithm handles edge cases correctly" {
    const criteria = StabilityCriteria{
        .ramp_up_duration_seconds = 1,
        .sliding_window_size = 3,
        .stability_threshold_cov = 0.05,
        .stable_checks_required = 1,
    };

    var strategy = measurement_strategy.createFastStabilityStrategy(testing.allocator, criteria);
    var strategy = measurement_strategy.createStabilityStrategy(testing.allocator, criteria);
    defer strategy.deinit();

    // Test very small speed changes (edge case for percentage calculation)