diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index 2372b4a..55e50c6 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -12,16 +12,16 @@ jobs:
name: lint
runs-on: ubuntu-latest
steps:
- - uses: actions/setup-go@v4
+ - uses: actions/setup-go@v5
with:
go-version: '1.24'
cache: false
- uses: actions/checkout@v2
- name: golangci-lint
- uses: golangci/golangci-lint-action@v3
+ uses: golangci/golangci-lint-action@v8
with:
# Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
- version: v1.49
+ version: v2.7
args: --issues-exit-code=0
only-new-issues: true
# Optional: golangci-lint command line arguments.
diff --git a/.golangci.yml b/.golangci.yml
index a2d8828..5609cf3 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -1,76 +1,72 @@
+version: "2"
+
linters:
- disable-all: true
+ default: none
enable:
- - gosimple
- ineffassign
- errcheck
- misspell
- unparam
- - gofmt
- - goimports
- - deadcode
- nestif
- govet
- - golint
+ - revive
- prealloc
- depguard
- dogsled
- dupl
- goconst
- - gocritic
- gocyclo
- goprintffuncname
- gosec
- nakedret
- rowserrcheck
- - scopelint
- - structcheck
- - stylecheck
- - typecheck
+ - staticcheck
- unconvert
- - varcheck
- exhaustive
- - exportloopref
- - goerr113
- - gofumpt
+ - copyloopvar
+ - err113
- unused
+ exclusions:
+ paths:
+ - testdata
+ rules:
+ - linters:
+ - gosec
+ text: "G204: Subprocess launched"
+ - linters:
+ - err113
+ text: "err113: do not define dynamic errors"
+ - linters:
+ - staticcheck
+ text: "ST1003: struct field Https"
+ - linters:
+ - staticcheck
+ text: "ST1003: struct field Id"
+ settings:
+ dupl:
+ threshold: 100
+ funlen:
+ lines: 100
+ statements: 50
+ goconst:
+ min-len: 2
+ min-occurrences: 2
+ misspell:
+ locale: US
+
+formatters:
+ enable:
+ - gofmt
+ - goimports
+ - gofumpt
+ exclusions:
+ paths:
+ - testdata
+ settings:
+ goimports:
+ local-prefixes:
+ - github.com/nakabonne/ali
run:
issues-exit-code: 0
tests: false
- skip-dirs:
- - testdata
-
-issues:
- exclude-rules:
- - linters:
- - gosec
- text: "G204: Subprocess launched"
- - linters:
- - goerr113
- text: "err113: do not define dynamic errors"
- - linters:
- - stylecheck
- text: "ST1003: struct field Https"
- - linters:
- - stylecheck
- text: "ST1003: struct field Id"
-
-linters-settings:
- dupl:
- threshold: 100
- funlen:
- lines: 100
- statements: 50
- goconst:
- min-len: 2
- min-occurrences: 2
- goimports:
- local-prefixes: github.com/nakabonne/ali
- golint:
- min-confidence: 0.3
- maligned:
- suggest-new: true
- misspell:
- locale: US
-
diff --git a/README.md b/README.md
index ee4b148..3485846 100644
--- a/README.md
+++ b/README.md
@@ -89,6 +89,7 @@ Flags:
-c, --connections int Amount of maximum open idle connections per target host (default 10000)
--debug Run in debug mode.
-d, --duration duration The amount of time to issue requests to the targets. Give 0s for an infinite attack. (default 10s)
+ --export-to string Export results to the given directory
-H, --header stringArray A request header to be sent. Can be used multiple times to send multiple headers.
--insecure Skip TLS verification
--key string PEM encoded tls private key file to use
@@ -100,7 +101,7 @@ Flags:
-K, --no-keepalive Don't use HTTP persistent connection.
--query-range duration The results within the given time range will be drawn on the charts (default 30s)
-r, --rate int The request rate per second to issue against the targets. Give 0 then it will send requests as fast as possible. (default 50)
- --redraw-interval duration The time interval to redraw charts (default 250ms)
+ --redraw-interval duration Specify how often it redraws the screen (default 250ms)
--resolvers string Custom DNS resolver addresses; comma-separated list.
-t, --timeout duration The timeout for each request. 0s means to disable timeouts. (default 30s)
-v, --version Print the current version.
@@ -174,6 +175,16 @@ With the help of [mum4k/termdash](https://github.com/mum4k/termdash) can be used

+### Export results
+
+You can persist load test results for downstream processing.
+
+```bash
+ali --export-to ./results/
+```
+
+See [here](./docs/export.md) for more details.
+
## Acknowledgements
This project would not have been possible without the effort of many individuals and projects but especially [vegeta](https://github.com/tsenart/vegeta) for the inspiration and powerful API.
Besides, `ali` is built with [termdash](https://github.com/mum4k/termdash) (as well as [termbox-go](https://github.com/nsf/termbox-go)) for the rendering of all those fancy graphs on the terminal.
diff --git a/attacker/attacker.go b/attacker/attacker.go
index e1a746b..24b16c6 100644
--- a/attacker/attacker.go
+++ b/attacker/attacker.go
@@ -2,8 +2,10 @@ package attacker
import (
"context"
+ "crypto/rand"
"crypto/tls"
"crypto/x509"
+ "encoding/binary"
"fmt"
"log"
"math"
@@ -13,6 +15,7 @@ import (
vegeta "github.com/tsenart/vegeta/v12/lib"
+ "github.com/nakabonne/ali/export"
"github.com/nakabonne/ali/storage"
)
@@ -52,6 +55,9 @@ type Options struct {
TLSCertificates []tls.Certificate
Attacker backedAttacker
+
+ Exporter *export.FileExporter
+ IDGenerator func() string
}
type Attacker interface {
@@ -59,7 +65,7 @@ type Attacker interface {
// Results are sent to the given channel as soon as they arrive.
// When the attack is over, it gives back final statistics.
// TODO: Use storage instead of metricsCh
- Attack(ctx context.Context, metricsCh chan *Metrics)
+ Attack(ctx context.Context, metricsCh chan *Metrics) error
// Rate gives back the rate set to itself.
Rate() int
@@ -140,6 +146,8 @@ func NewAttacker(storage storage.Writer, target string, opts *Options) (Attacker
tlsCertificates: opts.TLSCertificates,
attacker: opts.Attacker,
storage: storage,
+ exporter: opts.Exporter,
+ idGenerator: opts.IDGenerator,
}, nil
}
@@ -171,9 +179,12 @@ type attacker struct {
attacker backedAttacker
storage storage.Writer
+
+ exporter *export.FileExporter
+ idGenerator func() string
}
-func (a *attacker) Attack(ctx context.Context, metricsCh chan *Metrics) {
+func (a *attacker) Attack(ctx context.Context, metricsCh chan *Metrics) error {
rate := vegeta.Rate{Freq: a.rate, Per: time.Second}
targeter := vegeta.NewStaticTargeter(vegeta.Target{
Method: a.method,
@@ -186,12 +197,34 @@ func (a *attacker) Attack(ctx context.Context, metricsCh chan *Metrics) {
if len(a.buckets) > 0 {
metrics.Histogram = &vegeta.Histogram{Buckets: a.buckets}
}
+ idGenerator := a.idGenerator
+ if idGenerator == nil {
+ idGenerator = defaultIDGenerator
+ }
+
+ var runExporter *export.Run
+ if a.exporter != nil {
+ var err error
+ runExporter, err = a.exporter.StartRun(export.Meta{
+ ID: idGenerator(),
+ TargetURL: a.target,
+ Method: a.method,
+ Rate: a.rate,
+ Duration: a.duration,
+ })
+ if err != nil {
+ return err
+ }
+ }
for res := range a.attacker.Attack(targeter, rate, a.duration, "main") {
select {
case <-ctx.Done():
a.attacker.Stop()
- return
+ if runExporter != nil {
+ _ = runExporter.Abort()
+ }
+ return nil
default:
metrics.Add(res)
m := newMetrics(metrics)
@@ -208,17 +241,53 @@ func (a *attacker) Attack(ctx context.Context, metricsCh chan *Metrics) {
log.Printf("failed to insert results")
continue
}
+ if runExporter != nil {
+ if err := runExporter.WriteResult(export.Result{
+ Timestamp: res.Timestamp,
+ LatencyNS: float64(res.Latency.Nanoseconds()),
+ URL: a.target,
+ Method: a.method,
+ StatusCode: res.Code,
+ }); err != nil {
+ _ = runExporter.Abort()
+ return err
+ }
+ }
metricsCh <- m
}
}
metrics.Close()
- metricsCh <- newMetrics(metrics)
+ finalMetrics := newMetrics(metrics)
+ metricsCh <- finalMetrics
+ if runExporter != nil {
+ if err := runExporter.Close(newSummary(a.target, a.method, a.rate, a.duration, finalMetrics)); err != nil {
+ return err
+ }
+ }
+ return nil
}
func (a *attacker) Rate() int {
return a.rate
}
+func defaultIDGenerator() string {
+ var b [16]byte
+ if _, err := rand.Read(b[:]); err != nil {
+ return "00000000-0000-0000-0000-000000000000"
+ }
+ b[6] = (b[6] & 0x0f) | 0x40
+ b[8] = (b[8] & 0x3f) | 0x80
+
+ part1 := binary.BigEndian.Uint32(b[0:4])
+ part2 := binary.BigEndian.Uint16(b[4:6])
+ part3 := binary.BigEndian.Uint16(b[6:8])
+ part4 := binary.BigEndian.Uint16(b[8:10])
+ part5 := uint64(b[10])<<40 | uint64(b[11])<<32 | uint64(b[12])<<24 | uint64(b[13])<<16 | uint64(b[14])<<8 | uint64(b[15])
+
+ return fmt.Sprintf("%08x-%04x-%04x-%04x-%012x", part1, part2, part3, part4, part5)
+}
+
func (a *attacker) Duration() time.Duration {
return a.duration
}
diff --git a/attacker/attacker_test.go b/attacker/attacker_test.go
index c6fe8a3..997e7d2 100644
--- a/attacker/attacker_test.go
+++ b/attacker/attacker_test.go
@@ -78,7 +78,8 @@ func TestAttack(t *testing.T) {
a, err := NewAttacker(&storage.FakeStorage{}, tt.target, &tt.opts)
require.NoError(t, err)
metricsCh := make(chan *Metrics, 100)
- a.Attack(ctx, metricsCh)
+ err = a.Attack(ctx, metricsCh)
+ require.NoError(t, err)
})
}
}
diff --git a/attacker/fake_attacker.go b/attacker/fake_attacker.go
index 7c4a9ca..c935703 100644
--- a/attacker/fake_attacker.go
+++ b/attacker/fake_attacker.go
@@ -13,8 +13,8 @@ type FakeAttacker struct {
method string
}
-func (f *FakeAttacker) Attack(ctx context.Context, metricsCh chan *Metrics) {
-
+func (f *FakeAttacker) Attack(ctx context.Context, metricsCh chan *Metrics) error {
+ return nil
}
func (f *FakeAttacker) Rate() int {
diff --git a/attacker/summary.go b/attacker/summary.go
new file mode 100644
index 0000000..b38815a
--- /dev/null
+++ b/attacker/summary.go
@@ -0,0 +1,54 @@
+package attacker
+
+import (
+ "time"
+
+ "github.com/nakabonne/ali/export"
+)
+
+func newSummary(targetURL, method string, rate int, duration time.Duration, metrics *Metrics) export.Summary {
+ return export.Summary{
+ Target: export.TargetSummary{
+ URL: targetURL,
+ Method: method,
+ },
+ Parameters: export.ParametersSummary{
+ Rate: rate,
+ DurationSeconds: duration.Seconds(),
+ },
+ Timing: export.TimingSummary{
+ Earliest: metrics.Earliest,
+ Latest: metrics.Latest,
+ },
+ Requests: export.RequestsSummary{
+ Count: metrics.Requests,
+ SuccessRatio: metrics.Success,
+ },
+ Throughput: metrics.Throughput,
+ LatencyMS: export.LatencySummary{
+ Total: durationToMillis(metrics.Latencies.Total),
+ Mean: durationToMillis(metrics.Latencies.Mean),
+ P50: durationToMillis(metrics.Latencies.P50),
+ P90: durationToMillis(metrics.Latencies.P90),
+ P95: durationToMillis(metrics.Latencies.P95),
+ P99: durationToMillis(metrics.Latencies.P99),
+ Max: durationToMillis(metrics.Latencies.Max),
+ Min: durationToMillis(metrics.Latencies.Min),
+ },
+ Bytes: export.BytesSummary{
+ In: export.BytesFlowSummary{
+ Total: metrics.BytesIn.Total,
+ Mean: metrics.BytesIn.Mean,
+ },
+ Out: export.BytesFlowSummary{
+ Total: metrics.BytesOut.Total,
+ Mean: metrics.BytesOut.Mean,
+ },
+ },
+ StatusCodes: export.StatusCodesSummary(metrics.StatusCodes),
+ }
+}
+
+func durationToMillis(d time.Duration) float64 {
+ return float64(d) / float64(time.Millisecond)
+}
diff --git a/docs/export.md b/docs/export.md
new file mode 100644
index 0000000..b7b751c
--- /dev/null
+++ b/docs/export.md
@@ -0,0 +1,134 @@
+# Exporting results
+
+Use `--export-to` to persist load test results for downstream processing.
+
+## Usage
+
+```bash
+ali --export-to ./results/
+```
+
+## What gets written
+
+When `--export-to <dir>` is provided, ali creates the directory (if needed)
+and writes:
+
+- `<dir>/results.csv` with all data points for the run.
+- `<dir>/summary-<id>.json` with an aggregated summary for the run.
+
+If you start a new run by pressing `Enter` in the TUI, ali appends new rows with a
+new run `id` to `results.csv` and writes a new `summary-<id>.json`.
+
+If `--export-to <path>` points to an existing file, the command fails before rendering
+the TUI and the file is left unchanged.
+
+## CSV schema: `results.csv`
+
+Columns:
+
+| Column | Type | Description |
+|---------------|--------|-------------|
+| `id` | string | Unique identifier for the run (UUID). |
+| `timestamp` | string | RFC3339 timestamp. |
+| `latency_ns` | int | Request latency in nanoseconds. |
+| `url` | string | Target URL. |
+| `method` | string | HTTP method (e.g., GET, POST). |
+| `status_code` | int | HTTP status code. |
+
+## JSON schema: `summary-<id>.json`
+
+```json
+{
+ "target": {
+ "url": "string",
+ "method": "string"
+ },
+ "parameters": {
+ "rate": "number",
+ "duration_seconds": "number"
+ },
+ "timing": {
+ "earliest": "RFC3339 string",
+ "latest": "RFC3339 string"
+ },
+ "requests": {
+ "count": "integer",
+ "success_ratio": "number"
+ },
+ "throughput": "number",
+ "latency_ms": {
+ "total": "number",
+ "mean": "number",
+ "p50": "number",
+ "p90": "number",
+ "p95": "number",
+ "p99": "number",
+ "max": "number",
+ "min": "number"
+ },
+ "bytes": {
+ "in": { "total": "integer", "mean": "number" },
+ "out": { "total": "integer", "mean": "number" }
+ },
+ "status_codes": {
+ "200": "number"
+ }
+}
+```
+
+## Example output
+
+`./results/results.csv`:
+
+```csv
+id,timestamp,latency_ns,url,method,status_code
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:38.779088333+09:00,199035250,https://example.com/,GET,200
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:39.779554166+09:00,10721500,https://example.com/,GET,200
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:40.779522791+09:00,11019792,https://example.com/,GET,200
+```
+
+`./results/summary-f48ff413-c446-4021-8a28-f153ee2e1151.json`:
+
+```json
+{
+ "target": {
+ "url": "https://example.com/",
+ "method": "GET"
+ },
+ "parameters": {
+ "rate": 1,
+ "duration_seconds": 3
+ },
+ "timing": {
+ "earliest": "2026-01-19T13:44:38.779088333+09:00",
+ "latest": "2026-01-19T13:44:40.779522791+09:00"
+ },
+ "requests": {
+ "count": 3,
+ "success_ratio": 1
+ },
+ "throughput": 1.4914582322715022,
+ "latency_ms": {
+ "total": 220.776542,
+ "mean": 73.59218,
+ "p50": 11.019792,
+ "p90": 199.03525,
+ "p95": 199.03525,
+ "p99": 199.03525,
+ "max": 199.03525,
+ "min": 10.7215
+ },
+ "bytes": {
+ "in": {
+ "total": 70137,
+ "mean": 23379
+ },
+ "out": {
+ "total": 0,
+ "mean": 0
+ }
+ },
+ "status_codes": {
+ "200": 3
+ }
+}
+```
diff --git a/docs/rfc/001-export.md b/docs/rfc/001-export.md
new file mode 100644
index 0000000..e1cdfdd
--- /dev/null
+++ b/docs/rfc/001-export.md
@@ -0,0 +1,239 @@
+# Export results
+
+## Goal
+
+Add an export feature that persists load test results for downstream processing:
+- Export all data points to `results.csv` in a stable schema.
+- Export a summary to `summary-<id>.json`.
+
+## Deliverables
+
+- Add a new CLI flag: `--export-to <dir>`.
+- When `--export-to <dir>` is provided,
+  - Create `<dir>`
+  - Export all generated data points as CSV to `<dir>/results.csv` at the end of the run.
+  - Write the processed data to `<dir>/summary-<id>.json`.
+- Add unit tests
+
+## Acceptance Criteria
+
+### Core behavior
+
+1. Running the tool **without** `--export-to` produces **no CSV file** and behaves as before.
+2. Running with `--export-to <dir>` creates the following files under `<dir>`:
+   - `results.csv` with schema as defined in the [Schema](#schema) section.
+   - `summary-<id>.json` with schema as defined in the [Schema](#schema) section.
+3. If `--export-to <path>` points to an existing file:
+ - The command fails with a non-zero exit code before rendering TUI
+ - It does not modify the file.
+4. Export failures (permission denied, invalid path, disk full, etc.) produce:
+ - non-zero exit code,
+ - a clear error message including the path.
+
+### Compatibility / UX
+
+1. Whenever a new run is triggered by hitting the `Enter` key, append new data points with a different id to `results.csv`.
+
+### Testing
+
+1. Unit tests verify CSV serialization with deterministic fixtures.
+2. `make test` passes.
+3. `go build` builds a binary without any problem.
+
+## Context
+
+- This tool is a TUI tool that allows us to generate HTTP requests.
+- After rendering the TUI, you can hit the Enter key to start generating HTTP requests.
+- Users want to import results into external systems (TSDB, spreadsheets, BI tools). CSV is broadly supported.
+
+## Schema
+
+### results.csv
+
+Columns:
+
+| Column | Type | Required | Description |
+|------------------|---------|----------|-------------|
+| `id` | string | yes | Unique identifier for the run (UUID) |
+| `timestamp` | string | yes | RFC3339 |
+| `latency_ns` | int | yes | Request latency in nanoseconds |
+| `url` | string | yes | The target URL. |
+| `method` | string | yes | The HTTP method. (e.g. GET, POST) |
+| `status_code` | int | yes | HTTP status code |
+
+CSV Formatting Rules:
+
+- UTF-8 encoding.
+- Header row included.
+
+### summary-<id>.json
+
+```json
+{
+ "target": {
+ "url": "string",
+ "method": "string"
+ },
+ "parameters": {
+ "rate": "number",
+ "duration_seconds": "number"
+ },
+ "timing": {
+ "earliest": "RFC3339 string",
+ "latest": "RFC3339 string"
+ },
+ "requests": {
+ "count": "integer",
+ "success_ratio": "number"
+ },
+ "throughput": "number",
+ "latency_ms": {
+ "total": "number",
+ "mean": "number",
+ "p50": "number",
+ "p90": "number",
+ "p95": "number",
+ "p99": "number",
+ "max": "number",
+ "min": "number"
+ },
+ "bytes": {
+ "in": { "total": "integer", "mean": "number" },
+ "out": { "total": "integer", "mean": "number" }
+ },
+ "status_codes": {
+ "200": "number"
+ }
+}
+```
+
+## Example
+
+### Usage
+
+```bash
+ali --export-to ./results/
+```
+
+### Output file
+
+#### ./results/results.csv
+
+```csv
+id,timestamp,latency_ns,url,method,status_code
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:38.779088333+09:00,199035250,https://example.com/,GET,200
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:39.779554166+09:00,10721500,https://example.com/,GET,200
+f48ff413-c446-4021-8a28-f153ee2e1151,2026-01-19T13:44:40.779522791+09:00,11019792,https://example.com/,GET,200
+```
+
+#### ./results/summary-f48ff413-c446-4021-8a28-f153ee2e1151.json
+
+```json
+{
+ "target": {
+ "url": "https://example.com/",
+ "method": "GET"
+ },
+ "parameters": {
+ "rate": 1,
+ "duration_seconds": 3
+ },
+ "timing": {
+ "earliest": "2026-01-19T13:44:38.779088333+09:00",
+ "latest": "2026-01-19T13:44:40.779522791+09:00"
+ },
+ "requests": {
+ "count": 3,
+ "success_ratio": 1
+ },
+ "throughput": 1.4914582322715022,
+ "latency_ms": {
+ "total": 220.776542,
+ "mean": 73.59218,
+ "p50": 11.019792,
+ "p90": 199.03525,
+ "p95": 199.03525,
+ "p99": 199.03525,
+ "max": 199.03525,
+ "min": 10.7215
+ },
+ "bytes": {
+ "in": {
+ "total": 70137,
+ "mean": 23379
+ },
+ "out": {
+ "total": 0,
+ "mean": 0
+ }
+ },
+ "status_codes": {
+ "200": 3
+ }
+}
+```
+
+## Non-Functional Requirements
+
+1. **Atomic file writes**:
+ - For file exports, write to a temp file in the same directory, then rename/replace.
+ - Do not leave partial/corrupted final files on failure.
+
+2. **Streaming output**:
+ - Write CSV incrementally with buffering; do not require holding all rows in memory.
+
+3. **Deterministic output**:
+ - Ensure stable row ordering for testability.
+ - Ensure deterministic `id` in tests (inject clock / fixed ID in fixtures).
+
+4. **Backward compatibility**:
+ - No change to existing behavior unless `--export-to` is provided.
+ - Do not break existing CLI flags, exit codes, or TUI flows.
+
+5. **Clear, actionable errors**:
+ - Return non-zero exit codes on any export failure.
+
+6. **Test coverage**:
+ - Unit tests for CSV serialization include quoting/escaping, empty results, and NaN/Inf handling.
+
+## Edge Cases
+
+1. **Empty results** (e.g., run aborted immediately):
+ - CSV still includes header row.
+ - Data rows may be zero.
+
+2. **Special characters in fields**:
+ - Scenario/request names containing commas, quotes, or newlines are properly quoted/escaped (RFC4180 style).
+
+3. **NaN/Inf values**:
+ - Define and implement one of:
+ - omit the row, or
+ - serialize `value` as empty.
+ - Tests must cover the chosen behavior.
+
+4. **Very large runs**:
+ - Export should stream to the writer; avoid building the entire CSV in memory.
+
+5. **Permission/path errors**:
+ - Permission denied, invalid directory, non-existent parent directory, read-only filesystem.
+ - Must fail with a non-zero exit code and an error message that includes the path.
+
+6. **Existing file behavior (no-clobber default)**:
+  - Check if the path specified by `--export-to` exists at startup.
+  - If the target exists:
+    - fail without modifying the existing file.
+
+7. **Concurrent runs writing to the same path**:
+ - One should fail due to file existence.
+
+8. **Cross-platform paths**:
+ - Handle paths with spaces and (if supported) Windows-style paths/backslashes.
+
+9. **Interrupted export** (process terminated mid-write):
+ - Atomic-write strategy should ensure no partially written final file is left behind.
+
+## Future extensions
+
+Support these formats:
+- JSON
+- Influx line protocol.
diff --git a/export/export.go b/export/export.go
new file mode 100644
index 0000000..c628b96
--- /dev/null
+++ b/export/export.go
@@ -0,0 +1,332 @@
+package export
+
+import (
+ "bufio"
+ "bytes"
+ "encoding/csv"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "math"
+ "os"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "time"
+)
+
+const (
+ resultsFilename = "results.csv"
+)
+
+var resultsHeader = []string{"id", "timestamp", "latency_ns", "url", "method", "status_code"}
+
+type Meta struct {
+ ID string
+ TargetURL string
+ Method string
+ Rate int
+ Duration time.Duration
+}
+
+type Result struct {
+ Timestamp time.Time
+ LatencyNS float64
+ URL string
+ Method string
+ StatusCode uint16
+}
+
+type Summary struct {
+ Target TargetSummary `json:"target"`
+ Parameters ParametersSummary `json:"parameters"`
+ Timing TimingSummary `json:"timing"`
+ Requests RequestsSummary `json:"requests"`
+ Throughput float64 `json:"throughput"`
+ LatencyMS LatencySummary `json:"latency_ms"`
+ Bytes BytesSummary `json:"bytes"`
+ StatusCodes StatusCodesSummary `json:"status_codes"`
+}
+
+type TargetSummary struct {
+ URL string `json:"url"`
+ Method string `json:"method"`
+}
+
+type ParametersSummary struct {
+ Rate int `json:"rate"`
+ DurationSeconds float64 `json:"duration_seconds"`
+}
+
+type TimingSummary struct {
+ Earliest time.Time `json:"earliest"`
+ Latest time.Time `json:"latest"`
+}
+
+type RequestsSummary struct {
+ Count uint64 `json:"count"`
+ SuccessRatio float64 `json:"success_ratio"`
+}
+
+type LatencySummary struct {
+ Total float64 `json:"total"`
+ Mean float64 `json:"mean"`
+ P50 float64 `json:"p50"`
+ P90 float64 `json:"p90"`
+ P95 float64 `json:"p95"`
+ P99 float64 `json:"p99"`
+ Max float64 `json:"max"`
+ Min float64 `json:"min"`
+}
+
+type BytesSummary struct {
+ In BytesFlowSummary `json:"in"`
+ Out BytesFlowSummary `json:"out"`
+}
+
+type BytesFlowSummary struct {
+ Total uint64 `json:"total"`
+ Mean float64 `json:"mean"`
+}
+
+type StatusCodesSummary map[string]int
+
+func (s StatusCodesSummary) MarshalJSON() ([]byte, error) {
+ keys := make([]string, 0, len(s))
+ for k := range s {
+ keys = append(keys, k)
+ }
+ sort.Strings(keys)
+
+ var buf bytes.Buffer
+ buf.WriteByte('{')
+ for i, key := range keys {
+ if i > 0 {
+ buf.WriteByte(',')
+ }
+ keyJSON, err := json.Marshal(key)
+ if err != nil {
+ return nil, err
+ }
+ buf.Write(keyJSON)
+ buf.WriteByte(':')
+ buf.WriteString(strconv.Itoa(s[key]))
+ }
+ buf.WriteByte('}')
+ return buf.Bytes(), nil
+}
+
+type FileExporter struct {
+ dir string
+}
+
+func NewFileExporter(dir string) *FileExporter {
+ return &FileExporter{dir: dir}
+}
+
+type Run struct {
+ meta Meta
+
+ resultsPath string
+ summaryPath string
+
+ resultsFile *os.File
+ resultsBuf *bufio.Writer
+ resultsCSV *csv.Writer
+
+ tempResultsPath string
+ closed bool
+}
+
+func (e *FileExporter) StartRun(meta Meta) (*Run, error) {
+ if meta.ID == "" {
+ return nil, errors.New("export run id is required")
+ }
+ if e.dir == "" {
+ return nil, errors.New("export directory is required")
+ }
+ resultsPath := filepath.Join(e.dir, resultsFilename)
+ summaryPath := filepath.Join(e.dir, summaryFilename(meta.ID))
+
+ tmpFile, err := os.CreateTemp(e.dir, ".results.csv.")
+ if err != nil {
+ return nil, fmt.Errorf("failed to create temp results file in %q: %w", e.dir, err)
+ }
+ tempResultsPath := tmpFile.Name()
+ if err := tmpFile.Chmod(0o644); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to chmod temp results file %q: %w", tempResultsPath, err)
+ }
+
+ var resultsExist bool
+ info, err := os.Stat(resultsPath)
+ if err == nil {
+ if info.IsDir() {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("results path %q is a directory", resultsPath)
+ }
+ resultsExist = true
+ src, err := os.Open(resultsPath)
+ if err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to open results file %q: %w", resultsPath, err)
+ }
+ if _, err := io.Copy(tmpFile, src); err != nil {
+ _ = src.Close()
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to copy results file %q: %w", resultsPath, err)
+ }
+ if err := src.Close(); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to close results file %q: %w", resultsPath, err)
+ }
+ } else if !os.IsNotExist(err) {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to stat results file %q: %w", resultsPath, err)
+ }
+
+ buf := bufio.NewWriter(tmpFile)
+ writer := csv.NewWriter(buf)
+ if !resultsExist {
+ if err := writer.Write(resultsHeader); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tempResultsPath)
+ return nil, fmt.Errorf("failed to write results header to %q: %w", resultsPath, err)
+ }
+ }
+
+ return &Run{
+ meta: meta,
+ resultsPath: resultsPath,
+ summaryPath: summaryPath,
+ resultsFile: tmpFile,
+ resultsBuf: buf,
+ resultsCSV: writer,
+ tempResultsPath: tempResultsPath,
+ }, nil
+}
+
+func (r *Run) WriteResult(res Result) error {
+ if r.closed {
+ return errors.New("export run already closed")
+ }
+ url := res.URL
+ if url == "" {
+ url = r.meta.TargetURL
+ }
+ method := res.Method
+ if method == "" {
+ method = r.meta.Method
+ }
+ record := []string{
+ r.meta.ID,
+ res.Timestamp.Format(time.RFC3339Nano),
+ formatLatencyNS(res.LatencyNS),
+ url,
+ method,
+ strconv.FormatUint(uint64(res.StatusCode), 10),
+ }
+ if err := r.resultsCSV.Write(record); err != nil {
+ _ = r.Abort()
+ return fmt.Errorf("failed to write results to %q: %w", r.resultsPath, err)
+ }
+ return nil
+}
+
+func (r *Run) Close(summary Summary) error {
+ if r.closed {
+ return errors.New("export run already closed")
+ }
+ r.resultsCSV.Flush()
+ if err := r.resultsCSV.Error(); err != nil {
+ _ = r.Abort()
+ return fmt.Errorf("failed to flush results to %q: %w", r.resultsPath, err)
+ }
+ if err := r.resultsBuf.Flush(); err != nil {
+ _ = r.Abort()
+ return fmt.Errorf("failed to flush results buffer to %q: %w", r.resultsPath, err)
+ }
+ if err := r.resultsFile.Sync(); err != nil {
+ _ = r.Abort()
+ return fmt.Errorf("failed to sync results file %q: %w", r.resultsPath, err)
+ }
+ if err := r.resultsFile.Close(); err != nil {
+ _ = r.Abort()
+ return fmt.Errorf("failed to close results file %q: %w", r.resultsPath, err)
+ }
+ if err := os.Rename(r.tempResultsPath, r.resultsPath); err != nil {
+ _ = os.Remove(r.tempResultsPath)
+ return fmt.Errorf("failed to replace results file %q: %w", r.resultsPath, err)
+ }
+ if err := writeSummary(r.summaryPath, summary); err != nil {
+ return err
+ }
+ r.closed = true
+ return nil
+}
+
+func (r *Run) Abort() error {
+ if r.closed {
+ return nil
+ }
+ _ = r.resultsFile.Close()
+ if err := os.Remove(r.tempResultsPath); err != nil && !os.IsNotExist(err) {
+ return err
+ }
+ r.closed = true
+ return nil
+}
+
+func writeSummary(path string, summary Summary) error {
+ dir := filepath.Dir(path)
+ tmpFile, err := os.CreateTemp(dir, ".summary.")
+ if err != nil {
+ return fmt.Errorf("failed to create temp summary file in %q: %w", dir, err)
+ }
+ tmpPath := tmpFile.Name()
+ if err := tmpFile.Chmod(0o644); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tmpPath)
+ return fmt.Errorf("failed to chmod temp summary file %q: %w", tmpPath, err)
+ }
+
+ enc := json.NewEncoder(tmpFile)
+ enc.SetIndent("", " ")
+ if err := enc.Encode(summary); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tmpPath)
+ return fmt.Errorf("failed to encode summary to %q: %w", path, err)
+ }
+ if err := tmpFile.Sync(); err != nil {
+ _ = tmpFile.Close()
+ _ = os.Remove(tmpPath)
+ return fmt.Errorf("failed to sync summary file %q: %w", path, err)
+ }
+ if err := tmpFile.Close(); err != nil {
+ _ = os.Remove(tmpPath)
+ return fmt.Errorf("failed to close summary file %q: %w", path, err)
+ }
+ if err := os.Rename(tmpPath, path); err != nil {
+ _ = os.Remove(tmpPath)
+ return fmt.Errorf("failed to replace summary file %q: %w", path, err)
+ }
+ return nil
+}
+
+func formatLatencyNS(v float64) string {
+ if math.IsNaN(v) || math.IsInf(v, 0) {
+ return ""
+ }
+ return strconv.FormatInt(int64(v), 10)
+}
+
+func summaryFilename(id string) string {
+ return fmt.Sprintf("summary-%s.json", id)
+}
diff --git a/export/export_test.go b/export/export_test.go
new file mode 100644
index 0000000..2c116a0
--- /dev/null
+++ b/export/export_test.go
@@ -0,0 +1,278 @@
+package export
+
+import (
+ "encoding/csv"
+ "math"
+ "os"
+ "path/filepath"
+ "runtime"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestFileExporter_Basic(t *testing.T) {
+ dir := t.TempDir()
+ exporter := NewFileExporter(dir)
+ zone := time.FixedZone("JST", 9*60*60)
+
+ run, err := exporter.StartRun(Meta{
+ ID: "00000000-0000-0000-0000-000000000000",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 50,
+ Duration: 2 * time.Second,
+ })
+ require.NoError(t, err)
+
+ results := []Result{
+ {
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ LatencyNS: 18234567,
+ StatusCode: 200,
+ },
+ {
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 20*int(time.Millisecond), zone),
+ LatencyNS: 44900123,
+ StatusCode: 200,
+ },
+ {
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 41*int(time.Millisecond), zone),
+ LatencyNS: 935489752,
+ StatusCode: 500,
+ },
+ }
+ for _, res := range results {
+ require.NoError(t, run.WriteResult(res))
+ }
+
+ summary := Summary{
+ Target: TargetSummary{
+ URL: "https://example.com/",
+ Method: "GET",
+ },
+ Parameters: ParametersSummary{
+ Rate: 50,
+ DurationSeconds: 2,
+ },
+ Timing: TimingSummary{
+ Earliest: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ Latest: time.Date(2021, 3, 13, 15, 20, 45, 0, zone),
+ },
+ Requests: RequestsSummary{
+ Count: 100,
+ SuccessRatio: 0.98,
+ },
+ Throughput: 48.24,
+ LatencyMS: LatencySummary{
+ Total: 44000,
+ Mean: 447.88,
+ P50: 445.46,
+ P90: 806.58,
+ P95: 849.89,
+ P99: 935.49,
+ Max: 965.4,
+ Min: 55.32,
+ },
+ Bytes: BytesSummary{
+ In: BytesFlowSummary{
+ Total: 2325200,
+ Mean: 23252,
+ },
+ Out: BytesFlowSummary{
+ Total: 0,
+ Mean: 0,
+ },
+ },
+ StatusCodes: StatusCodesSummary{
+ "200": 98,
+ "500": 2,
+ },
+ }
+ require.NoError(t, run.Close(summary))
+
+ wantResults := readGolden(t, filepath.Join("..", "testdata", "export", "basic", "results.csv"))
+ gotResults := readFile(t, filepath.Join(dir, resultsFilename))
+ require.Equal(t, string(wantResults), string(gotResults))
+
+ wantSummary := readGolden(t, filepath.Join("..", "testdata", "export", "basic", "summary-00000000-0000-0000-0000-000000000000.json"))
+ gotSummary := readFile(t, filepath.Join(dir, summaryFilename("00000000-0000-0000-0000-000000000000")))
+ require.Equal(t, string(wantSummary), string(gotSummary))
+}
+
+func TestFileExporter_Quotes(t *testing.T) {
+ dir := t.TempDir()
+ exporter := NewFileExporter(dir)
+ zone := time.FixedZone("JST", 9*60*60)
+
+ run, err := exporter.StartRun(Meta{
+ ID: "11111111-1111-1111-1111-111111111111",
+ TargetURL: "https://example.com/hello, \"world\"",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+
+ require.NoError(t, run.WriteResult(Result{
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ LatencyNS: 123,
+ StatusCode: 200,
+ }))
+ require.NoError(t, run.Close(Summary{}))
+
+ wantResults := readGolden(t, filepath.Join("..", "testdata", "export", "quotes", "results.csv"))
+ gotResults := readFile(t, filepath.Join(dir, resultsFilename))
+ require.Equal(t, string(wantResults), string(gotResults))
+}
+
+func TestFileExporter_EmptyResults(t *testing.T) {
+ dir := t.TempDir()
+ exporter := NewFileExporter(dir)
+
+ run, err := exporter.StartRun(Meta{
+ ID: "33333333-3333-3333-3333-333333333333",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+ require.NoError(t, run.Close(Summary{}))
+
+ wantResults := readGolden(t, filepath.Join("..", "testdata", "export", "empty", "results.csv"))
+ gotResults := readFile(t, filepath.Join(dir, resultsFilename))
+ require.Equal(t, string(wantResults), string(gotResults))
+}
+
+func TestFileExporter_NaNInf(t *testing.T) {
+ dir := t.TempDir()
+ exporter := NewFileExporter(dir)
+ zone := time.FixedZone("JST", 9*60*60)
+
+ run, err := exporter.StartRun(Meta{
+ ID: "22222222-2222-2222-2222-222222222222",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+
+ require.NoError(t, run.WriteResult(Result{
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ LatencyNS: math.NaN(),
+ StatusCode: 200,
+ }))
+ require.NoError(t, run.Close(Summary{}))
+
+ wantResults := readGolden(t, filepath.Join("..", "testdata", "export", "naninf", "results.csv"))
+ gotResults := readFile(t, filepath.Join(dir, resultsFilename))
+ require.Equal(t, string(wantResults), string(gotResults))
+}
+
+func TestFileExporter_AppendsRuns(t *testing.T) {
+ dir := t.TempDir()
+ exporter := NewFileExporter(dir)
+ zone := time.FixedZone("JST", 9*60*60)
+
+ run1, err := exporter.StartRun(Meta{
+ ID: "aaaaaaa1-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+ require.NoError(t, run1.WriteResult(Result{
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ LatencyNS: 1,
+ StatusCode: 200,
+ }))
+ require.NoError(t, run1.Close(Summary{}))
+
+ run2, err := exporter.StartRun(Meta{
+ ID: "bbbbbbb2-bbbb-bbbb-bbbb-bbbbbbbbbbbb",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+ require.NoError(t, run2.WriteResult(Result{
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 44, 0, zone),
+ LatencyNS: 2,
+ StatusCode: 200,
+ }))
+ require.NoError(t, run2.Close(Summary{}))
+
+ records := readCSV(t, filepath.Join(dir, resultsFilename))
+ require.Len(t, records, 3)
+ require.Equal(t, resultsHeader, records[0])
+ require.Equal(t, "aaaaaaa1-aaaa-aaaa-aaaa-aaaaaaaaaaaa", records[1][0])
+ require.Equal(t, "bbbbbbb2-bbbb-bbbb-bbbb-bbbbbbbbbbbb", records[2][0])
+}
+
+func TestFileExporter_AtomicResultsWrite(t *testing.T) {
+ if runtime.GOOS == "windows" {
+ t.Skip("chmod semantics are not reliable on windows")
+ }
+
+ dir := t.TempDir()
+ original := []byte("id,timestamp,latency_ns,url,method,status_code\n")
+ resultsPath := filepath.Join(dir, resultsFilename)
+ require.NoError(t, os.WriteFile(resultsPath, original, 0o644))
+
+ exporter := NewFileExporter(dir)
+ zone := time.FixedZone("JST", 9*60*60)
+ run, err := exporter.StartRun(Meta{
+ ID: "cccccccc-cccc-cccc-cccc-cccccccccccc",
+ TargetURL: "https://example.com/",
+ Method: "GET",
+ Rate: 1,
+ Duration: time.Second,
+ })
+ require.NoError(t, err)
+ require.NoError(t, run.WriteResult(Result{
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, zone),
+ LatencyNS: 3,
+ StatusCode: 200,
+ }))
+
+ require.NoError(t, os.Chmod(dir, 0o555))
+ err = run.Close(Summary{})
+ require.Error(t, err)
+ require.NoError(t, os.Chmod(dir, 0o755))
+
+ got := readFile(t, resultsPath)
+ require.Equal(t, string(original), string(got))
+ _, err = os.Stat(filepath.Join(dir, summaryFilename("cccccccc-cccc-cccc-cccc-cccccccccccc")))
+ require.True(t, os.IsNotExist(err))
+}
+
+func readGolden(t *testing.T, path string) []byte {
+ t.Helper()
+ content, err := os.ReadFile(path)
+ require.NoError(t, err)
+ return content
+}
+
+func readFile(t *testing.T, path string) []byte {
+ t.Helper()
+ content, err := os.ReadFile(path)
+ require.NoError(t, err)
+ return content
+}
+
+func readCSV(t *testing.T, path string) [][]string {
+ t.Helper()
+ file, err := os.Open(path)
+ require.NoError(t, err)
+ defer file.Close()
+
+ reader := csv.NewReader(file)
+ records, err := reader.ReadAll()
+ require.NoError(t, err)
+ return records
+}
diff --git a/export_golden_test.go b/export_golden_test.go
new file mode 100644
index 0000000..0958896
--- /dev/null
+++ b/export_golden_test.go
@@ -0,0 +1,142 @@
+package main
+
+import (
+ "encoding/csv"
+ "encoding/json"
+ "os"
+ "path/filepath"
+ "strconv"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+)
+
+func readCSV(t *testing.T, path string) [][]string {
+ t.Helper()
+
+ file, err := os.Open(path)
+ require.NoError(t, err)
+ defer file.Close()
+
+ reader := csv.NewReader(file)
+ reader.FieldsPerRecord = -1
+ records, err := reader.ReadAll()
+ require.NoError(t, err)
+
+ return records
+}
+
+func TestExportGoldenResultsCSVBasic(t *testing.T) {
+ path := filepath.Join("testdata", "export", "basic", "results.csv")
+ records := readCSV(t, path)
+
+ require.GreaterOrEqual(t, len(records), 2)
+ require.Equal(t, []string{"id", "timestamp", "latency_ns", "url", "method", "status_code"}, records[0])
+
+ for i, row := range records[1:] {
+ require.Len(t, row, 6, "row %d", i+1)
+ require.Equal(t, "00000000-0000-0000-0000-000000000000", row[0])
+ _, err := time.Parse(time.RFC3339, row[1])
+ require.NoError(t, err)
+ _, err = strconv.ParseInt(row[2], 10, 64)
+ require.NoError(t, err)
+ require.NotEmpty(t, row[3])
+ require.NotEmpty(t, row[4])
+ _, err = strconv.ParseUint(row[5], 10, 16)
+ require.NoError(t, err)
+ }
+}
+
+func TestExportGoldenResultsCSVQuotes(t *testing.T) {
+ path := filepath.Join("testdata", "export", "quotes", "results.csv")
+ records := readCSV(t, path)
+
+ require.Len(t, records, 2)
+ require.Equal(t, []string{"id", "timestamp", "latency_ns", "url", "method", "status_code"}, records[0])
+ require.Equal(t, "https://example.com/hello, \"world\"", records[1][3])
+}
+
+func TestExportGoldenResultsCSVEmpty(t *testing.T) {
+ path := filepath.Join("testdata", "export", "empty", "results.csv")
+ records := readCSV(t, path)
+
+ require.Len(t, records, 1)
+ require.Equal(t, []string{"id", "timestamp", "latency_ns", "url", "method", "status_code"}, records[0])
+}
+
+func TestExportGoldenResultsCSVNaNInf(t *testing.T) {
+ path := filepath.Join("testdata", "export", "naninf", "results.csv")
+ records := readCSV(t, path)
+
+ require.Len(t, records, 2)
+ require.Equal(t, []string{"id", "timestamp", "latency_ns", "url", "method", "status_code"}, records[0])
+ require.Equal(t, "", records[1][2])
+}
+
+func TestExportGoldenSummaryJSONSchema(t *testing.T) {
+ path := filepath.Join("testdata", "export", "basic", "summary-00000000-0000-0000-0000-000000000000.json")
+ content, err := os.ReadFile(path)
+ require.NoError(t, err)
+
+ var doc map[string]interface{}
+ require.NoError(t, json.Unmarshal(content, &doc))
+
+ target := mustMap(t, doc["target"], "target")
+ require.NotEmpty(t, mustString(t, target["url"], "target.url"))
+ require.NotEmpty(t, mustString(t, target["method"], "target.method"))
+
+ params := mustMap(t, doc["parameters"], "parameters")
+ mustNumber(t, params["rate"], "parameters.rate")
+ mustNumber(t, params["duration_seconds"], "parameters.duration_seconds")
+
+ timing := mustMap(t, doc["timing"], "timing")
+ _, err = time.Parse(time.RFC3339, mustString(t, timing["earliest"], "timing.earliest"))
+ require.NoError(t, err)
+ _, err = time.Parse(time.RFC3339, mustString(t, timing["latest"], "timing.latest"))
+ require.NoError(t, err)
+
+ requests := mustMap(t, doc["requests"], "requests")
+ mustNumber(t, requests["count"], "requests.count")
+ mustNumber(t, requests["success_ratio"], "requests.success_ratio")
+
+ mustNumber(t, doc["throughput"], "throughput")
+
+ latency := mustMap(t, doc["latency_ms"], "latency_ms")
+ for _, key := range []string{"total", "mean", "p50", "p90", "p95", "p99", "max", "min"} {
+ mustNumber(t, latency[key], "latency_ms."+key)
+ }
+
+ bytes := mustMap(t, doc["bytes"], "bytes")
+ bytesIn := mustMap(t, bytes["in"], "bytes.in")
+ mustNumber(t, bytesIn["total"], "bytes.in.total")
+ mustNumber(t, bytesIn["mean"], "bytes.in.mean")
+
+ bytesOut := mustMap(t, bytes["out"], "bytes.out")
+ mustNumber(t, bytesOut["total"], "bytes.out.total")
+ mustNumber(t, bytesOut["mean"], "bytes.out.mean")
+
+ statusCodes := mustMap(t, doc["status_codes"], "status_codes")
+ require.NotEmpty(t, statusCodes)
+}
+
+func mustMap(t *testing.T, value interface{}, name string) map[string]interface{} {
+ t.Helper()
+ m, ok := value.(map[string]interface{})
+ require.True(t, ok, "%s must be an object", name)
+ return m
+}
+
+func mustString(t *testing.T, value interface{}, name string) string {
+ t.Helper()
+ s, ok := value.(string)
+ require.True(t, ok, "%s must be a string", name)
+ return s
+}
+
+func mustNumber(t *testing.T, value interface{}, name string) float64 {
+ t.Helper()
+ n, ok := value.(float64)
+ require.True(t, ok, "%s must be a number", name)
+ return n
+}
diff --git a/export_pending_test.go b/export_pending_test.go
new file mode 100644
index 0000000..93c45b5
--- /dev/null
+++ b/export_pending_test.go
@@ -0,0 +1,175 @@
+package main
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "testing"
+ "time"
+
+ "github.com/stretchr/testify/require"
+
+ "github.com/nakabonne/ali/attacker"
+ "github.com/nakabonne/ali/export"
+ "github.com/nakabonne/ali/gui"
+ "github.com/nakabonne/ali/storage"
+)
+
+func TestExportCLI_NoExportTo_NoFilesCreated(t *testing.T) {
+ origRunGUI := runGUI
+ origNewAttacker := newAttacker
+ defer func() {
+ runGUI = origRunGUI
+ newAttacker = origNewAttacker
+ }()
+
+ runGUI = func(string, storage.Reader, attacker.Attacker, gui.Options) error {
+ return nil
+ }
+ var gotExporter *export.FileExporter
+ newAttacker = func(_ storage.Writer, _ string, opts *attacker.Options) (attacker.Attacker, error) {
+ gotExporter = opts.Exporter
+ if gotExporter != nil {
+ return nil, fmt.Errorf("unexpected exporter %v", gotExporter)
+ }
+ return &attacker.FakeAttacker{}, nil
+ }
+
+ buf := &bytes.Buffer{}
+ c := defaultCLI(buf)
+ exitCode := c.run([]string{"https://example.com/"})
+ require.Equal(t, 0, exitCode)
+ require.Nil(t, gotExporter)
+}
+
+func TestExportCLI_CreateDirAndFiles(t *testing.T) {
+ origRunGUI := runGUI
+ origNewAttacker := newAttacker
+ defer func() {
+ runGUI = origRunGUI
+ newAttacker = origNewAttacker
+ }()
+
+ resultsDir := filepath.Join(t.TempDir(), "results")
+ runGUI = func(_ string, _ storage.Reader, a attacker.Attacker, _ gui.Options) error {
+ metricsCh := make(chan *attacker.Metrics, 10)
+ ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
+ defer cancel()
+ return a.Attack(ctx, metricsCh)
+ }
+ newAttacker = func(_ storage.Writer, target string, opts *attacker.Options) (attacker.Attacker, error) {
+ if opts.Exporter == nil {
+ return nil, fmt.Errorf("exporter is required for this test")
+ }
+ meta := export.Meta{
+ ID: "00000000-0000-0000-0000-000000000000",
+ TargetURL: target,
+ Method: opts.Method,
+ Rate: opts.Rate,
+ Duration: opts.Duration,
+ }
+ results := []export.Result{
+ {
+ Timestamp: time.Date(2021, 3, 13, 15, 20, 43, 0, time.FixedZone("JST", 9*60*60)),
+ LatencyNS: 123,
+ StatusCode: 200,
+ },
+ }
+ return &exportingAttacker{
+ exporter: opts.Exporter,
+ meta: meta,
+ results: results,
+ summary: export.Summary{},
+ }, nil
+ }
+
+ buf := &bytes.Buffer{}
+ c := defaultCLI(buf)
+ c.exportTo = resultsDir
+ exitCode := c.run([]string{"https://example.com/"})
+ require.Equal(t, 0, exitCode)
+ require.FileExists(t, filepath.Join(resultsDir, "results.csv"))
+ require.FileExists(t, filepath.Join(resultsDir, "summary-00000000-0000-0000-0000-000000000000.json"))
+}
+
+func TestExportCLI_ExistingDirFails(t *testing.T) {
+ origRunGUI := runGUI
+ origNewAttacker := newAttacker
+ defer func() {
+ runGUI = origRunGUI
+ newAttacker = origNewAttacker
+ }()
+
+ runGUICalled := false
+ newAttackerCalled := false
+ runGUI = func(string, storage.Reader, attacker.Attacker, gui.Options) error {
+ runGUICalled = true
+ return nil
+ }
+ newAttacker = func(storage.Writer, string, *attacker.Options) (attacker.Attacker, error) {
+ newAttackerCalled = true
+ return &attacker.FakeAttacker{}, nil
+ }
+
+ resultsDir := filepath.Join(t.TempDir(), "results")
+ require.NoError(t, os.MkdirAll(resultsDir, 0o755))
+ sentinelPath := filepath.Join(resultsDir, "sentinel.txt")
+ require.NoError(t, os.WriteFile(sentinelPath, []byte("keep"), 0o644))
+
+ buf := &bytes.Buffer{}
+ c := defaultCLI(buf)
+ c.exportTo = resultsDir
+ exitCode := c.run([]string{"https://example.com/"})
+ require.Equal(t, 1, exitCode)
+ require.False(t, runGUICalled)
+ require.False(t, newAttackerCalled)
+
+ content, err := os.ReadFile(sentinelPath)
+ require.NoError(t, err)
+ require.Equal(t, "keep", string(content))
+}
+
+type exportingAttacker struct {
+ exporter *export.FileExporter
+ meta export.Meta
+ results []export.Result
+ summary export.Summary
+}
+
+func (e *exportingAttacker) Attack(ctx context.Context, metricsCh chan *attacker.Metrics) error {
+ run, err := e.exporter.StartRun(e.meta)
+ if err != nil {
+ return err
+ }
+ for _, res := range e.results {
+ if err := run.WriteResult(res); err != nil {
+ return err
+ }
+ }
+ return run.Close(e.summary)
+}
+
+func (e *exportingAttacker) Rate() int {
+ return e.meta.Rate
+}
+
+func (e *exportingAttacker) Duration() time.Duration {
+ return e.meta.Duration
+}
+
+func (e *exportingAttacker) Method() string {
+ return e.meta.Method
+}
+
+func defaultCLI(buf *bytes.Buffer) *cli {
+ return &cli{
+ method: "GET",
+ localAddress: "0.0.0.0",
+ queryRange: gui.DefaultQueryRange,
+ redrawInterval: gui.DefaultRedrawInterval,
+ stdout: buf,
+ stderr: buf,
+ }
+}
diff --git a/gui/drawer.go b/gui/drawer.go
index 92bdf0d..0a1fb96 100644
--- a/gui/drawer.go
+++ b/gui/drawer.go
@@ -33,6 +33,9 @@ type drawer struct {
mu sync.RWMutex
metrics *attacker.Metrics
storage storage.Reader
+
+ errMu sync.Mutex
+ exportErr error
}
// redrawCharts sets the values held by itself as chart values, at the specified interval as redrawInterval.
@@ -234,3 +237,20 @@ func (d *drawer) updateMetrics(ctx context.Context) {
}
}
}
+
+func (d *drawer) setExportErr(err error) {
+ if err == nil {
+ return
+ }
+ d.errMu.Lock()
+ if d.exportErr == nil {
+ d.exportErr = err
+ }
+ d.errMu.Unlock()
+}
+
+func (d *drawer) exportError() error {
+ d.errMu.Lock()
+ defer d.errMu.Unlock()
+ return d.exportErr
+}
diff --git a/gui/gui.go b/gui/gui.go
index 6464e44..b9f6c79 100644
--- a/gui/gui.go
+++ b/gui/gui.go
@@ -96,7 +96,11 @@ func run(t terminalapi.Terminal, r runner, targetURL string, storage storage.Rea
k := keybinds(ctx, cancel, c, d, a)
- return r(ctx, t, c, termdash.KeyboardSubscriber(k), termdash.RedrawInterval(opts.RedrawInternal))
+ err = r(ctx, t, c, termdash.KeyboardSubscriber(k), termdash.RedrawInterval(opts.RedrawInternal))
+ if exportErr := d.exportError(); exportErr != nil {
+ return exportErr
+ }
+ return err
}
// newChartWithLegends creates a chart with legends at the bottom.
diff --git a/gui/keybinds.go b/gui/keybinds.go
index 3f4b2b7..059e17b 100644
--- a/gui/keybinds.go
+++ b/gui/keybinds.go
@@ -36,7 +36,7 @@ func keybinds(ctx context.Context, cancel context.CancelFunc, c *container.Conta
case keyboard.KeyCtrlC, 'q': // Quit
cancel()
case keyboard.KeyEnter: // Attack
- attack(ctx, dr, a)
+ attack(ctx, cancel, dr, a)
case 'H', 'h': // backwards
navigateFunc(true)
case 'L', 'l': // forwards
@@ -45,17 +45,22 @@ func keybinds(ctx context.Context, cancel context.CancelFunc, c *container.Conta
}
}
-func attack(ctx context.Context, d *drawer, a attacker.Attacker) {
+func attack(ctx context.Context, cancelParent context.CancelFunc, d *drawer, a attacker.Attacker) {
if d.chartDrawing.Load() {
return
}
- child, cancel := context.WithCancel(ctx)
+ child, cancelChild := context.WithCancel(ctx)
// To initialize, run redrawChart on a per-attack basis.
go d.redrawCharts(child)
go d.redrawGauge(child, a.Duration())
go func() {
- a.Attack(child, d.metricsCh) // this blocks until attack finishes
- cancel()
+ if err := a.Attack(child, d.metricsCh); err != nil {
+ d.setExportErr(err)
+ cancelChild()
+ cancelParent()
+ return
+ }
+ cancelChild()
}()
}
diff --git a/gui/keybinds_test.go b/gui/keybinds_test.go
index c5382bd..3eea206 100644
--- a/gui/keybinds_test.go
+++ b/gui/keybinds_test.go
@@ -208,7 +208,7 @@ func TestAttack(t *testing.T) {
},
chartDrawing: tt.chartDrawing,
}
- attack(ctx, d, &attacker.FakeAttacker{})
+ attack(ctx, cancel, d, &attacker.FakeAttacker{})
})
}
}
diff --git a/main.go b/main.go
index 82e80c7..58cb73c 100644
--- a/main.go
+++ b/main.go
@@ -24,6 +24,7 @@ import (
flag "github.com/spf13/pflag"
"github.com/nakabonne/ali/attacker"
+ "github.com/nakabonne/ali/export"
"github.com/nakabonne/ali/gui"
)
@@ -34,6 +35,9 @@ var (
version = "unversioned"
commit = "?"
date = "?"
+
+ runGUI = gui.Run
+ newAttacker = attacker.NewAttacker
)
type cli struct {
@@ -63,6 +67,9 @@ type cli struct {
queryRange time.Duration
redrawInterval time.Duration
+ // options for export
+ exportTo string
+
debug bool
version bool
stdout io.Writer
@@ -107,6 +114,7 @@ func parseFlags(stdout, stderr io.Writer) (*cli, error) {
flagSet.StringVar(&c.resolvers, "resolvers", "", "Custom DNS resolver addresses; comma-separated list.")
flagSet.DurationVar(&c.queryRange, "query-range", gui.DefaultQueryRange, "The results within the given time range will be drawn on the charts")
flagSet.DurationVar(&c.redrawInterval, "redraw-interval", gui.DefaultRedrawInterval, "Specify how often it redraws the screen")
+ flagSet.StringVar(&c.exportTo, "export-to", "", "Export results to the given directory")
flagSet.Usage = c.usage
if err := flagSet.Parse(os.Args[1:]); err != nil {
if !errors.Is(err, flag.ErrHelp) {
@@ -140,6 +148,27 @@ func (c *cli) run(args []string) int {
return 1
}
+ var exporter *export.FileExporter
+ if c.exportTo != "" {
+ if c.exportTo == "-" {
+ fmt.Fprintf(c.stderr, "export path %q is not supported\n", c.exportTo)
+ return 1
+ }
+ if _, err := os.Stat(c.exportTo); err == nil {
+ fmt.Fprintf(c.stderr, "export path %q already exists\n", c.exportTo)
+ return 1
+ } else if !os.IsNotExist(err) {
+ fmt.Fprintf(c.stderr, "failed to stat export path %q: %v\n", c.exportTo, err)
+ return 1
+ }
+ if err := os.MkdirAll(c.exportTo, 0o755); err != nil {
+ fmt.Fprintf(c.stderr, "failed to create export directory %q: %v\n", c.exportTo, err)
+ return 1
+ }
+ exporter = export.NewFileExporter(c.exportTo)
+ }
+ opts.Exporter = exporter
+
// Data points out of query range get flushed to prevent using heap more than need.
s, err := storage.NewStorage(c.queryRange * 2)
if err != nil {
@@ -147,7 +176,7 @@ func (c *cli) run(args []string) int {
c.usage()
return 1
}
- a, err := attacker.NewAttacker(s, target, opts)
+ a, err := newAttacker(s, target, opts)
if err != nil {
fmt.Fprintf(c.stderr, "failed to initialize attacker: %v\n", err)
c.usage()
@@ -155,7 +184,7 @@ func (c *cli) run(args []string) int {
}
setDebug(nil, c.debug)
- if err := gui.Run(target, s, a,
+ if err := runGUI(target, s, a,
gui.Options{
QueryRange: c.queryRange,
RedrawInternal: c.redrawInterval,
diff --git a/main_test.go b/main_test.go
index e19617d..138f339 100644
--- a/main_test.go
+++ b/main_test.go
@@ -65,6 +65,7 @@ func TestParseFlags(t *testing.T) {
resolvers: "",
queryRange: 30 * time.Second,
redrawInterval: 250 * time.Millisecond,
+ exportTo: "",
},
wantErr: false,
},
diff --git a/testdata/export/basic/results.csv b/testdata/export/basic/results.csv
new file mode 100644
index 0000000..d7d7912
--- /dev/null
+++ b/testdata/export/basic/results.csv
@@ -0,0 +1,4 @@
+id,timestamp,latency_ns,url,method,status_code
+00000000-0000-0000-0000-000000000000,2021-03-13T15:20:43+09:00,18234567,https://example.com/,GET,200
+00000000-0000-0000-0000-000000000000,2021-03-13T15:20:43.02+09:00,44900123,https://example.com/,GET,200
+00000000-0000-0000-0000-000000000000,2021-03-13T15:20:43.041+09:00,935489752,https://example.com/,GET,500
diff --git a/testdata/export/basic/summary-00000000-0000-0000-0000-000000000000.json b/testdata/export/basic/summary-00000000-0000-0000-0000-000000000000.json
new file mode 100644
index 0000000..eacda02
--- /dev/null
+++ b/testdata/export/basic/summary-00000000-0000-0000-0000-000000000000.json
@@ -0,0 +1,43 @@
+{
+ "target": {
+ "url": "https://example.com/",
+ "method": "GET"
+ },
+ "parameters": {
+ "rate": 50,
+ "duration_seconds": 2
+ },
+ "timing": {
+ "earliest": "2021-03-13T15:20:43+09:00",
+ "latest": "2021-03-13T15:20:45+09:00"
+ },
+ "requests": {
+ "count": 100,
+ "success_ratio": 0.98
+ },
+ "throughput": 48.24,
+ "latency_ms": {
+ "total": 44000,
+ "mean": 447.88,
+ "p50": 445.46,
+ "p90": 806.58,
+ "p95": 849.89,
+ "p99": 935.49,
+ "max": 965.4,
+ "min": 55.32
+ },
+ "bytes": {
+ "in": {
+ "total": 2325200,
+ "mean": 23252
+ },
+ "out": {
+ "total": 0,
+ "mean": 0
+ }
+ },
+ "status_codes": {
+ "200": 98,
+ "500": 2
+ }
+}
diff --git a/testdata/export/empty/results.csv b/testdata/export/empty/results.csv
new file mode 100644
index 0000000..a74c757
--- /dev/null
+++ b/testdata/export/empty/results.csv
@@ -0,0 +1 @@
+id,timestamp,latency_ns,url,method,status_code
diff --git a/testdata/export/naninf/results.csv b/testdata/export/naninf/results.csv
new file mode 100644
index 0000000..83a9fbe
--- /dev/null
+++ b/testdata/export/naninf/results.csv
@@ -0,0 +1,2 @@
+id,timestamp,latency_ns,url,method,status_code
+22222222-2222-2222-2222-222222222222,2021-03-13T15:20:43+09:00,,https://example.com/,GET,200
diff --git a/testdata/export/quotes/results.csv b/testdata/export/quotes/results.csv
new file mode 100644
index 0000000..b40e907
--- /dev/null
+++ b/testdata/export/quotes/results.csv
@@ -0,0 +1,2 @@
+id,timestamp,latency_ns,url,method,status_code
+11111111-1111-1111-1111-111111111111,2021-03-13T15:20:43+09:00,123,"https://example.com/hello, ""world""",GET,200