From 6dd1fc4d6e9046b3b75d64bcb8f6c43a844d20a5 Mon Sep 17 00:00:00 2001 From: seakee Date: Mon, 11 May 2026 10:54:36 +0800 Subject: [PATCH 1/3] =?UTF-8?q?=F0=9F=90=9B=20fix(usage-service):=20persis?= =?UTF-8?q?t=20auth=20account=20snapshots?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add snapshot fields to usage events and SQLite storage so collected records retain display account metadata. Thread the fields through import/export and aggregate payloads for backwards-compatible reads. Existing databases are migrated with additive nullable columns to keep upgrade risk low. --- usage-service/internal/store/store.go | 88 ++++++++++++- usage-service/internal/store/store_test.go | 72 +++++++++++ usage-service/internal/usage/event.go | 136 ++++++++++++--------- usage-service/internal/usage/import.go | 104 +++++++++------- 4 files changed, 293 insertions(+), 107 deletions(-) create mode 100644 usage-service/internal/store/store_test.go diff --git a/usage-service/internal/store/store.go b/usage-service/internal/store/store.go index 0063deda7..8e87a8bf2 100644 --- a/usage-service/internal/store/store.go +++ b/usage-service/internal/store/store.go @@ -93,6 +93,11 @@ func (s *Store) init() error { source text, source_hash text, api_key_hash text, + account_snapshot text, + auth_label_snapshot text, + auth_file_snapshot text, + auth_provider_snapshot text, + auth_snapshot_at_ms integer, input_tokens integer not null default 0, output_tokens integer not null default 0, reasoning_tokens integer not null default 0, @@ -137,6 +142,58 @@ func (s *Store) init() error { return err } } + if err := s.ensureUsageEventSnapshotColumns(); err != nil { + return err + } + return nil +} + +func (s *Store) ensureUsageEventSnapshotColumns() error { + rows, err := s.db.Query(`pragma table_info(usage_events)`) + if err != nil { + return err + } + defer rows.Close() + + existing := map[string]struct{}{} + for rows.Next() { + var cid int + var name 
string + var columnType string + var notNull int + var defaultValue any + var pk int + if err := rows.Scan(&cid, &name, &columnType, &notNull, &defaultValue, &pk); err != nil { + return err + } + existing[name] = struct{}{} + } + if err := rows.Err(); err != nil { + return err + } + + columns := []struct { + name string + definition string + }{ + {name: "account_snapshot", definition: "text"}, + {name: "auth_label_snapshot", definition: "text"}, + {name: "auth_file_snapshot", definition: "text"}, + {name: "auth_provider_snapshot", definition: "text"}, + {name: "auth_snapshot_at_ms", definition: "integer"}, + } + for _, column := range columns { + if _, ok := existing[column.name]; ok { + continue + } + if _, err := s.db.Exec(fmt.Sprintf( + `alter table usage_events add column %s %s`, + column.name, + column.definition, + )); err != nil { + return err + } + } return nil } @@ -360,9 +417,10 @@ func (s *Store) InsertEvents(ctx context.Context, events []usage.Event) (InsertR stmt, err := tx.PrepareContext(ctx, `insert or ignore into usage_events ( request_id, event_hash, timestamp_ms, timestamp, provider, model, endpoint, method, path, auth_type, auth_index, source, source_hash, api_key_hash, + account_snapshot, auth_label_snapshot, auth_file_snapshot, auth_provider_snapshot, auth_snapshot_at_ms, input_tokens, output_tokens, reasoning_tokens, cached_tokens, cache_tokens, total_tokens, latency_ms, failed, raw_json, created_at_ms - ) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`) + ) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`) if err != nil { return InsertResult{}, err } @@ -390,6 +448,11 @@ func (s *Store) InsertEvents(ctx context.Context, events []usage.Event) (InsertR nullString(event.Source), nullString(event.SourceHash), nullString(event.APIKeyHash), + nullString(event.AccountSnapshot), + nullString(event.AuthLabelSnapshot), + nullString(event.AuthFileSnapshot), + 
nullString(event.AuthProviderSnapshot), + nullPositiveInt64(event.AuthSnapshotAtMS), event.InputTokens, event.OutputTokens, event.ReasoningTokens, @@ -435,6 +498,7 @@ func (s *Store) RecentEvents(ctx context.Context, limit int) ([]usage.Event, err rows, err := s.db.QueryContext(ctx, `select request_id, event_hash, timestamp_ms, timestamp, provider, model, endpoint, method, path, auth_type, auth_index, source, source_hash, api_key_hash, + account_snapshot, auth_label_snapshot, auth_file_snapshot, auth_provider_snapshot, auth_snapshot_at_ms, input_tokens, output_tokens, reasoning_tokens, cached_tokens, cache_tokens, total_tokens, latency_ms, failed, raw_json, created_at_ms from usage_events @@ -448,7 +512,8 @@ func (s *Store) RecentEvents(ctx context.Context, limit int) ([]usage.Event, err events := make([]usage.Event, 0) for rows.Next() { var event usage.Event - var requestID, provider, endpoint, method, path, authType, authIndex, source, sourceHash, apiKeyHash, rawJSON sql.NullString + var requestID, provider, endpoint, method, path, authType, authIndex, source, sourceHash, apiKeyHash, accountSnapshot, authLabelSnapshot, authFileSnapshot, authProviderSnapshot, rawJSON sql.NullString + var authSnapshotAt sql.NullInt64 var latency sql.NullInt64 var failed int if err := rows.Scan( @@ -466,6 +531,11 @@ func (s *Store) RecentEvents(ctx context.Context, limit int) ([]usage.Event, err &source, &sourceHash, &apiKeyHash, + &accountSnapshot, + &authLabelSnapshot, + &authFileSnapshot, + &authProviderSnapshot, + &authSnapshotAt, &event.InputTokens, &event.OutputTokens, &event.ReasoningTokens, @@ -489,6 +559,13 @@ func (s *Store) RecentEvents(ctx context.Context, limit int) ([]usage.Event, err event.Source = source.String event.SourceHash = sourceHash.String event.APIKeyHash = apiKeyHash.String + event.AccountSnapshot = accountSnapshot.String + event.AuthLabelSnapshot = authLabelSnapshot.String + event.AuthFileSnapshot = authFileSnapshot.String + event.AuthProviderSnapshot = 
authProviderSnapshot.String + if authSnapshotAt.Valid { + event.AuthSnapshotAtMS = authSnapshotAt.Int64 + } event.RawJSON = rawJSON.String event.Failed = failed != 0 if latency.Valid { @@ -541,6 +618,13 @@ func nullInt(value *int64) any { return *value } +func nullPositiveInt64(value int64) any { + if value <= 0 { + return nil + } + return value +} + func (s Setup) String() string { return fmt.Sprintf("upstream=%s queue=%s popSide=%s", s.CPAUpstreamURL, s.Queue, s.PopSide) } diff --git a/usage-service/internal/store/store_test.go b/usage-service/internal/store/store_test.go new file mode 100644 index 000000000..233d082d7 --- /dev/null +++ b/usage-service/internal/store/store_test.go @@ -0,0 +1,72 @@ +package store + +import ( + "context" + "path/filepath" + "testing" + + "github.com/seakee/cpa-manager/usage-service/internal/usage" +) + +func TestStorePersistsAccountSnapshot(t *testing.T) { + db, err := Open(filepath.Join(t.TempDir(), "usage.sqlite")) + if err != nil { + t.Fatalf("open store: %v", err) + } + t.Cleanup(func() { + _ = db.Close() + }) + + _, err = db.InsertEvents(context.Background(), []usage.Event{ + { + EventHash: "event-1", + TimestampMS: 1_778_000_000_000, + Timestamp: "2026-05-06T00:00:00Z", + Model: "gpt-test", + Endpoint: "POST /v1/chat/completions", + AuthIndex: "auth-1", + AccountSnapshot: "alice@example.com", + AuthLabelSnapshot: "Alice", + AuthFileSnapshot: "alice.json", + AuthProviderSnapshot: "codex", + AuthSnapshotAtMS: 1_778_000_000_100, + CreatedAtMS: 1_778_000_000_200, + }, + }) + if err != nil { + t.Fatalf("insert events: %v", err) + } + + events, err := db.RecentEvents(context.Background(), 10) + if err != nil { + t.Fatalf("recent events: %v", err) + } + if len(events) != 1 { + t.Fatalf("len(events) = %d, want 1", len(events)) + } + event := events[0] + if event.AccountSnapshot != "alice@example.com" { + t.Fatalf("AccountSnapshot = %q", event.AccountSnapshot) + } + if event.AuthLabelSnapshot != "Alice" { + t.Fatalf("AuthLabelSnapshot 
= %q", event.AuthLabelSnapshot) + } + if event.AuthFileSnapshot != "alice.json" { + t.Fatalf("AuthFileSnapshot = %q", event.AuthFileSnapshot) + } + if event.AuthProviderSnapshot != "codex" { + t.Fatalf("AuthProviderSnapshot = %q", event.AuthProviderSnapshot) + } + if event.AuthSnapshotAtMS != 1_778_000_000_100 { + t.Fatalf("AuthSnapshotAtMS = %d", event.AuthSnapshotAtMS) + } + + payload := usage.BuildPayload(events) + detail := payload.APIs["POST /v1/chat/completions"].Models["gpt-test"].Details[0] + if detail.AccountSnapshot != "alice@example.com" { + t.Fatalf("payload AccountSnapshot = %q", detail.AccountSnapshot) + } + if detail.AuthProviderSnapshot != "codex" { + t.Fatalf("payload AuthProviderSnapshot = %q", detail.AuthProviderSnapshot) + } +} diff --git a/usage-service/internal/usage/event.go b/usage-service/internal/usage/event.go index 6e219fd9c..c6b253caf 100644 --- a/usage-service/internal/usage/event.go +++ b/usage-service/internal/usage/event.go @@ -12,30 +12,35 @@ import ( ) type Event struct { - RequestID string `json:"request_id,omitempty"` - EventHash string `json:"event_hash"` - TimestampMS int64 `json:"timestamp_ms"` - Timestamp string `json:"timestamp"` - Provider string `json:"provider,omitempty"` - Model string `json:"model"` - Endpoint string `json:"endpoint,omitempty"` - Method string `json:"method,omitempty"` - Path string `json:"path,omitempty"` - AuthType string `json:"auth_type,omitempty"` - AuthIndex string `json:"auth_index,omitempty"` - Source string `json:"source,omitempty"` - SourceHash string `json:"source_hash,omitempty"` - APIKeyHash string `json:"api_key_hash,omitempty"` - InputTokens int64 `json:"input_tokens"` - OutputTokens int64 `json:"output_tokens"` - ReasoningTokens int64 `json:"reasoning_tokens"` - CachedTokens int64 `json:"cached_tokens"` - CacheTokens int64 `json:"cache_tokens"` - TotalTokens int64 `json:"total_tokens"` - LatencyMS *int64 `json:"latency_ms,omitempty"` - Failed bool `json:"failed"` - RawJSON string 
`json:"raw_json,omitempty"` - CreatedAtMS int64 `json:"created_at_ms"` + RequestID string `json:"request_id,omitempty"` + EventHash string `json:"event_hash"` + TimestampMS int64 `json:"timestamp_ms"` + Timestamp string `json:"timestamp"` + Provider string `json:"provider,omitempty"` + Model string `json:"model"` + Endpoint string `json:"endpoint,omitempty"` + Method string `json:"method,omitempty"` + Path string `json:"path,omitempty"` + AuthType string `json:"auth_type,omitempty"` + AuthIndex string `json:"auth_index,omitempty"` + Source string `json:"source,omitempty"` + SourceHash string `json:"source_hash,omitempty"` + APIKeyHash string `json:"api_key_hash,omitempty"` + AccountSnapshot string `json:"account_snapshot,omitempty"` + AuthLabelSnapshot string `json:"auth_label_snapshot,omitempty"` + AuthFileSnapshot string `json:"auth_file_snapshot,omitempty"` + AuthProviderSnapshot string `json:"auth_provider_snapshot,omitempty"` + AuthSnapshotAtMS int64 `json:"auth_snapshot_at_ms,omitempty"` + InputTokens int64 `json:"input_tokens"` + OutputTokens int64 `json:"output_tokens"` + ReasoningTokens int64 `json:"reasoning_tokens"` + CachedTokens int64 `json:"cached_tokens"` + CacheTokens int64 `json:"cache_tokens"` + TotalTokens int64 `json:"total_tokens"` + LatencyMS *int64 `json:"latency_ms,omitempty"` + Failed bool `json:"failed"` + RawJSON string `json:"raw_json,omitempty"` + CreatedAtMS int64 `json:"created_at_ms"` } type Tokens struct { @@ -48,12 +53,17 @@ type Tokens struct { } type Detail struct { - Timestamp string `json:"timestamp"` - Source string `json:"source"` - AuthIndex string `json:"auth_index,omitempty"` - LatencyMS *int64 `json:"latency_ms,omitempty"` - Tokens Tokens `json:"tokens"` - Failed bool `json:"failed"` + Timestamp string `json:"timestamp"` + Source string `json:"source"` + AuthIndex string `json:"auth_index,omitempty"` + AccountSnapshot string `json:"account_snapshot,omitempty"` + AuthLabelSnapshot string 
`json:"auth_label_snapshot,omitempty"` + AuthFileSnapshot string `json:"auth_file_snapshot,omitempty"` + AuthProviderSnapshot string `json:"auth_provider_snapshot,omitempty"` + AuthSnapshotAtMS int64 `json:"auth_snapshot_at_ms,omitempty"` + LatencyMS *int64 `json:"latency_ms,omitempty"` + Tokens Tokens `json:"tokens"` + Failed bool `json:"failed"` } type ModelAggregate struct { @@ -121,29 +131,34 @@ func NormalizeRaw(raw []byte) (Event, error) { authIndex := readString(record, "auth_index", "authIndex", "AuthIndex") event := Event{ - RequestID: readString(record, "request_id", "requestId", "id"), - TimestampMS: timestampMS, - Timestamp: timestamp, - Provider: readString(record, "provider", "type", "auth_type", "authType"), - Model: readString(record, "model", "model_name", "modelName"), - Endpoint: endpoint, - Method: method, - Path: path, - AuthType: readString(record, "auth_type", "authType"), - AuthIndex: authIndex, - Source: source, - SourceHash: hashString(sourceRaw), - APIKeyHash: hashString(apiKey), - InputTokens: inputTokens, - OutputTokens: outputTokens, - ReasoningTokens: reasoningTokens, - CachedTokens: cachedTokens, - CacheTokens: cacheTokens, - TotalTokens: totalTokens, - LatencyMS: latencyMS, - Failed: failed, - RawJSON: string(redactedJSON), - CreatedAtMS: time.Now().UnixMilli(), + RequestID: readString(record, "request_id", "requestId", "id"), + TimestampMS: timestampMS, + Timestamp: timestamp, + Provider: readString(record, "provider", "type", "auth_type", "authType"), + Model: readString(record, "model", "model_name", "modelName"), + Endpoint: endpoint, + Method: method, + Path: path, + AuthType: readString(record, "auth_type", "authType"), + AuthIndex: authIndex, + Source: source, + SourceHash: hashString(sourceRaw), + APIKeyHash: hashString(apiKey), + AccountSnapshot: readString(record, "account_snapshot", "accountSnapshot"), + AuthLabelSnapshot: readString(record, "auth_label_snapshot", "authLabelSnapshot"), + AuthFileSnapshot: 
readString(record, "auth_file_snapshot", "authFileSnapshot"), + AuthProviderSnapshot: readString(record, "auth_provider_snapshot", "authProviderSnapshot"), + AuthSnapshotAtMS: readInt(record, "auth_snapshot_at_ms", "authSnapshotAtMs"), + InputTokens: inputTokens, + OutputTokens: outputTokens, + ReasoningTokens: reasoningTokens, + CachedTokens: cachedTokens, + CacheTokens: cacheTokens, + TotalTokens: totalTokens, + LatencyMS: latencyMS, + Failed: failed, + RawJSON: string(redactedJSON), + CreatedAtMS: time.Now().UnixMilli(), } if event.Model == "" { event.Model = "-" @@ -182,11 +197,16 @@ func BuildPayload(events []Event) Payload { apiEntry.Models[model] = modelEntry } modelEntry.Details = append(modelEntry.Details, Detail{ - Timestamp: event.Timestamp, - Source: event.Source, - AuthIndex: event.AuthIndex, - LatencyMS: event.LatencyMS, - Failed: event.Failed, + Timestamp: event.Timestamp, + Source: event.Source, + AuthIndex: event.AuthIndex, + AccountSnapshot: event.AccountSnapshot, + AuthLabelSnapshot: event.AuthLabelSnapshot, + AuthFileSnapshot: event.AuthFileSnapshot, + AuthProviderSnapshot: event.AuthProviderSnapshot, + AuthSnapshotAtMS: event.AuthSnapshotAtMS, + LatencyMS: event.LatencyMS, + Failed: event.Failed, Tokens: Tokens{ InputTokens: event.InputTokens, OutputTokens: event.OutputTokens, diff --git a/usage-service/internal/usage/import.go b/usage-service/internal/usage/import.go index b3d56f77a..4844c483d 100644 --- a/usage-service/internal/usage/import.go +++ b/usage-service/internal/usage/import.go @@ -177,30 +177,35 @@ func eventFromExportedRecord(record map[string]any) (Event, bool, error) { } event := Event{ - RequestID: readString(record, "request_id", "requestId"), - EventHash: eventHash, - TimestampMS: timestampMS, - Timestamp: timestamp, - Provider: readString(record, "provider"), - Model: readString(record, "model"), - Endpoint: readString(record, "endpoint"), - Method: readString(record, "method"), - Path: readString(record, "path"), - 
AuthType: readString(record, "auth_type", "authType"), - AuthIndex: readString(record, "auth_index", "authIndex", "AuthIndex"), - Source: readString(record, "source"), - SourceHash: readString(record, "source_hash", "sourceHash"), - APIKeyHash: readString(record, "api_key_hash", "apiKeyHash"), - InputTokens: inputTokens, - OutputTokens: outputTokens, - ReasoningTokens: reasoningTokens, - CachedTokens: cachedTokens, - CacheTokens: cacheTokens, - TotalTokens: totalTokens, - LatencyMS: readOptionalInt(record, "latency_ms", "latencyMs"), - Failed: readBool(record, "failed", "is_failed", "isFailed"), - RawJSON: readString(record, "raw_json", "rawJson"), - CreatedAtMS: readInt(record, "created_at_ms", "createdAtMs"), + RequestID: readString(record, "request_id", "requestId"), + EventHash: eventHash, + TimestampMS: timestampMS, + Timestamp: timestamp, + Provider: readString(record, "provider"), + Model: readString(record, "model"), + Endpoint: readString(record, "endpoint"), + Method: readString(record, "method"), + Path: readString(record, "path"), + AuthType: readString(record, "auth_type", "authType"), + AuthIndex: readString(record, "auth_index", "authIndex", "AuthIndex"), + Source: readString(record, "source"), + SourceHash: readString(record, "source_hash", "sourceHash"), + APIKeyHash: readString(record, "api_key_hash", "apiKeyHash"), + AccountSnapshot: readString(record, "account_snapshot", "accountSnapshot"), + AuthLabelSnapshot: readString(record, "auth_label_snapshot", "authLabelSnapshot"), + AuthFileSnapshot: readString(record, "auth_file_snapshot", "authFileSnapshot"), + AuthProviderSnapshot: readString(record, "auth_provider_snapshot", "authProviderSnapshot"), + AuthSnapshotAtMS: readInt(record, "auth_snapshot_at_ms", "authSnapshotAtMs"), + InputTokens: inputTokens, + OutputTokens: outputTokens, + ReasoningTokens: reasoningTokens, + CachedTokens: cachedTokens, + CacheTokens: cacheTokens, + TotalTokens: totalTokens, + LatencyMS: readOptionalInt(record, 
"latency_ms", "latencyMs"), + Failed: readBool(record, "failed", "is_failed", "isFailed"), + RawJSON: readString(record, "raw_json", "rawJson"), + CreatedAtMS: readInt(record, "created_at_ms", "createdAtMs"), } if event.Model == "" { event.Model = "-" @@ -322,29 +327,34 @@ func eventFromLegacyDetail( } event := Event{ - RequestID: requestID, - TimestampMS: timestampMS, - Timestamp: normalizedTimestamp, - Provider: readString(detail, "provider", "type", "auth_type", "authType"), - Model: model, - Endpoint: endpoint, - Method: method, - Path: path, - AuthType: readString(detail, "auth_type", "authType"), - AuthIndex: authIndex, - Source: maskSource(sourceRaw), - SourceHash: hashString(sourceRaw), - APIKeyHash: hashString(apiKey), - InputTokens: inputTokens, - OutputTokens: outputTokens, - ReasoningTokens: reasoningTokens, - CachedTokens: cachedTokens, - CacheTokens: cacheTokens, - TotalTokens: totalTokens, - LatencyMS: readOptionalInt(detail, "latency_ms", "latencyMs", "duration_ms", "durationMs", "elapsed_ms", "elapsedMs"), - Failed: readFailed(detail), - RawJSON: rawJSON, - CreatedAtMS: now, + RequestID: requestID, + TimestampMS: timestampMS, + Timestamp: normalizedTimestamp, + Provider: readString(detail, "provider", "type", "auth_type", "authType"), + Model: model, + Endpoint: endpoint, + Method: method, + Path: path, + AuthType: readString(detail, "auth_type", "authType"), + AuthIndex: authIndex, + Source: maskSource(sourceRaw), + SourceHash: hashString(sourceRaw), + APIKeyHash: hashString(apiKey), + AccountSnapshot: readString(detail, "account_snapshot", "accountSnapshot"), + AuthLabelSnapshot: readString(detail, "auth_label_snapshot", "authLabelSnapshot"), + AuthFileSnapshot: readString(detail, "auth_file_snapshot", "authFileSnapshot"), + AuthProviderSnapshot: readString(detail, "auth_provider_snapshot", "authProviderSnapshot"), + AuthSnapshotAtMS: readInt(detail, "auth_snapshot_at_ms", "authSnapshotAtMs"), + InputTokens: inputTokens, + OutputTokens: 
outputTokens, + ReasoningTokens: reasoningTokens, + CachedTokens: cachedTokens, + CacheTokens: cacheTokens, + TotalTokens: totalTokens, + LatencyMS: readOptionalInt(detail, "latency_ms", "latencyMs", "duration_ms", "durationMs", "elapsed_ms", "elapsedMs"), + Failed: readFailed(detail), + RawJSON: rawJSON, + CreatedAtMS: now, } if event.Model == "" { event.Model = "-" From 32258198f7f2fd5d8db0c3107c9792ca50136e5f Mon Sep 17 00:00:00 2001 From: seakee Date: Mon, 11 May 2026 10:55:00 +0800 Subject: [PATCH 2/3] =?UTF-8?q?=F0=9F=90=9B=20fix(usage-service):=20enrich?= =?UTF-8?q?=20usage=20events=20from=20auth=20files?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fetch current CPA auth files during collection and cache auth_index metadata for incoming usage events. Populate account, label, file, and provider snapshots before insert without storing tokens. Fall back to raw usage data if CPA auth metadata cannot be fetched. --- .../internal/collector/auth_snapshot.go | 238 ++++++++++++++++++ usage-service/internal/collector/collector.go | 67 +++-- .../internal/collector/collector_test.go | 23 ++ 3 files changed, 313 insertions(+), 15 deletions(-) create mode 100644 usage-service/internal/collector/auth_snapshot.go diff --git a/usage-service/internal/collector/auth_snapshot.go b/usage-service/internal/collector/auth_snapshot.go new file mode 100644 index 000000000..e2db480d9 --- /dev/null +++ b/usage-service/internal/collector/auth_snapshot.go @@ -0,0 +1,238 @@ +package collector + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "sync" + "time" +) + +const authSnapshotCacheTTL = 30 * time.Second + +type authSnapshot struct { + Account string + Label string + FileName string + Provider string + CapturedAtMS int64 +} + +type authSnapshotResolver struct { + mu sync.Mutex + client *http.Client + baseURL string + managementKey string + expiresAt time.Time + snapshots 
map[string]authSnapshot +} + +func newAuthSnapshotResolver() *authSnapshotResolver { + return &authSnapshotResolver{ + client: &http.Client{Timeout: 5 * time.Second}, + } +} + +func (r *authSnapshotResolver) lookup(ctx context.Context, cfg RuntimeConfig, authIndices map[string]struct{}) map[string]authSnapshot { + if r == nil || len(authIndices) == 0 { + return nil + } + baseURL := strings.TrimRight(strings.TrimSpace(cfg.CPAUpstreamURL), "/") + managementKey := strings.TrimSpace(cfg.ManagementKey) + if baseURL == "" || managementKey == "" { + return nil + } + + now := time.Now() + r.mu.Lock() + sameSource := r.baseURL == baseURL && r.managementKey == managementKey + if r.baseURL == baseURL && r.managementKey == managementKey && now.Before(r.expiresAt) { + result := r.lookupLocked(authIndices) + r.mu.Unlock() + return result + } + r.mu.Unlock() + + snapshots, err := r.fetch(ctx, baseURL, managementKey) + if err != nil { + r.mu.Lock() + var result map[string]authSnapshot + if sameSource { + result = r.lookupLocked(authIndices) + } + r.mu.Unlock() + return result + } + + r.mu.Lock() + r.baseURL = baseURL + r.managementKey = managementKey + r.expiresAt = now.Add(authSnapshotCacheTTL) + r.snapshots = snapshots + result := r.lookupLocked(authIndices) + r.mu.Unlock() + return result +} + +func (r *authSnapshotResolver) lookupLocked(authIndices map[string]struct{}) map[string]authSnapshot { + if len(r.snapshots) == 0 { + return nil + } + result := make(map[string]authSnapshot, len(authIndices)) + for authIndex := range authIndices { + if snapshot, ok := r.snapshots[authIndex]; ok { + result[authIndex] = snapshot + } + } + return result +} + +func (r *authSnapshotResolver) fetch(ctx context.Context, baseURL string, managementKey string) (map[string]authSnapshot, error) { + endpoint, err := authFilesEndpoint(baseURL) + if err != nil { + return nil, err + } + req, err := http.NewRequestWithContext(ctx, http.MethodGet, endpoint, nil) + if err != nil { + return nil, err + } + 
req.Header.Set("Authorization", "Bearer "+managementKey) + + client := r.client + if client == nil { + client = http.DefaultClient + } + res, err := client.Do(req) + if err != nil { + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 200 || res.StatusCode >= 300 { + _, _ = io.Copy(io.Discard, io.LimitReader(res.Body, 1024)) + return nil, errors.New("auth files request failed: " + res.Status) + } + + var payload authFilesPayload + decoder := json.NewDecoder(res.Body) + if err := decoder.Decode(&payload); err != nil { + return nil, err + } + + capturedAt := time.Now().UnixMilli() + snapshots := make(map[string]authSnapshot, len(payload.Files)) + for _, file := range payload.Files { + authIndex := readAuthFileString(file, "auth_index", "authIndex", "auth-index") + if authIndex == "" { + continue + } + account := firstSafeAccount( + readAuthFileString(file, "account"), + readAuthFileString(file, "email"), + ) + label := firstNonEmpty( + readAuthFileString(file, "label"), + readAuthFileString(file, "name"), + readAuthFileString(file, "email"), + account, + ) + fileName := readAuthFileString(file, "name") + provider := firstNonEmpty( + readAuthFileString(file, "provider"), + readAuthFileString(file, "type"), + ) + if account == "" { + account = firstNonEmpty(label, fileName) + } + snapshots[authIndex] = authSnapshot{ + Account: account, + Label: label, + FileName: fileName, + Provider: provider, + CapturedAtMS: capturedAt, + } + } + return snapshots, nil +} + +type authFilesPayload struct { + Files []map[string]any `json:"files"` +} + +func authFilesEndpoint(baseURL string) (string, error) { + base := strings.TrimRight(strings.TrimSpace(baseURL), "/") + if base == "" { + return "", errors.New("upstream URL is empty") + } + if !strings.Contains(base, "://") { + base = "http://" + base + } + parsed, err := url.Parse(base + "/v0/management/auth-files") + if err != nil { + return "", err + } + return parsed.String(), nil +} + +func readAuthFileString(file 
map[string]any, keys ...string) string { + for _, key := range keys { + value, ok := file[key] + if !ok || value == nil { + continue + } + text := strings.TrimSpace(toString(value)) + if text != "" { + return text + } + } + return "" +} + +func toString(value any) string { + switch typed := value.(type) { + case string: + return typed + case json.Number: + return typed.String() + default: + return fmt.Sprint(value) + } +} + +func firstNonEmpty(values ...string) string { + for _, value := range values { + if trimmed := strings.TrimSpace(value); trimmed != "" { + return trimmed + } + } + return "" +} + +func firstSafeAccount(values ...string) string { + for _, value := range values { + trimmed := strings.TrimSpace(value) + if trimmed == "" || looksLikeSecret(trimmed) { + continue + } + return trimmed + } + return "" +} + +func looksLikeSecret(value string) bool { + trimmed := strings.TrimSpace(value) + if trimmed == "" || strings.Contains(trimmed, "@") { + return false + } + if strings.ContainsAny(trimmed, " /\\") { + return false + } + return strings.HasPrefix(trimmed, "sk-") || + strings.HasPrefix(trimmed, "AIza") || + (len(trimmed) >= 32 && len(trimmed) <= 512) +} diff --git a/usage-service/internal/collector/collector.go b/usage-service/internal/collector/collector.go index 9c4db1ca5..d52e278b3 100644 --- a/usage-service/internal/collector/collector.go +++ b/usage-service/internal/collector/collector.go @@ -37,18 +37,20 @@ type RuntimeConfig struct { } type Manager struct { - base config.Config - store *store.Store - mu sync.Mutex - cancel context.CancelFunc - status Status - runtimeCfg RuntimeConfig + base config.Config + store *store.Store + snapshotResolver *authSnapshotResolver + mu sync.Mutex + cancel context.CancelFunc + status Status + runtimeCfg RuntimeConfig } func NewManager(base config.Config, store *store.Store) *Manager { return &Manager{ - base: base, - store: store, + base: base, + store: store, + snapshotResolver: newAuthSnapshotResolver(), 
status: Status{ Collector: "stopped", Mode: collectorMode(base.CollectorMode), @@ -120,7 +122,7 @@ func (m *Manager) runHTTP(ctx context.Context, cfg RuntimeConfig, mode string) b if ctx.Err() != nil { return true } - err := m.consumeHTTP(ctx, client) + err := m.consumeHTTP(ctx, cfg, client) if ctx.Err() != nil { return true } @@ -170,7 +172,7 @@ func (m *Manager) runRESP(ctx context.Context, cfg RuntimeConfig) { status.LastError = "" }) - err = m.consumeRESP(ctx, client, queue, popSide) + err = m.consumeRESP(ctx, cfg, client, queue, popSide) _ = client.Close() if ctx.Err() != nil { return @@ -183,7 +185,7 @@ func (m *Manager) runRESP(ctx context.Context, cfg RuntimeConfig) { } } -func (m *Manager) consumeHTTP(ctx context.Context, client *httpqueue.Client) error { +func (m *Manager) consumeHTTP(ctx context.Context, cfg RuntimeConfig, client *httpqueue.Client) error { ticker := time.NewTicker(m.pollInterval()) defer ticker.Stop() @@ -208,13 +210,13 @@ func (m *Manager) consumeHTTP(ctx context.Context, client *httpqueue.Client) err continue } } - if err := m.processItems(ctx, items); err != nil { + if err := m.processItems(ctx, cfg, items); err != nil { return err } } } -func (m *Manager) consumeRESP(ctx context.Context, client *resp.Client, queue string, popSide string) error { +func (m *Manager) consumeRESP(ctx context.Context, cfg RuntimeConfig, client *resp.Client, queue string, popSide string) error { ticker := time.NewTicker(m.pollInterval()) defer ticker.Stop() @@ -234,13 +236,13 @@ func (m *Manager) consumeRESP(ctx context.Context, client *resp.Client, queue st continue } } - if err := m.processItems(ctx, items); err != nil { + if err := m.processItems(ctx, cfg, items); err != nil { return err } } } -func (m *Manager) processItems(ctx context.Context, items []string) error { +func (m *Manager) processItems(ctx context.Context, cfg RuntimeConfig, items []string) error { if len(items) == 0 { return nil } @@ -259,6 +261,7 @@ func (m *Manager) processItems(ctx 
context.Context, items []string) error { } events = append(events, event) } + m.enrichAccountSnapshots(ctx, cfg, events) result, err := m.store.InsertEvents(ctx, events) if err != nil { return err @@ -273,6 +276,40 @@ func (m *Manager) processItems(ctx context.Context, items []string) error { return nil } +func (m *Manager) enrichAccountSnapshots(ctx context.Context, cfg RuntimeConfig, events []usage.Event) { + if len(events) == 0 || m.snapshotResolver == nil { + return + } + authIndices := make(map[string]struct{}) + for i := range events { + if events[i].AccountSnapshot != "" || events[i].AuthIndex == "" { + continue + } + authIndices[events[i].AuthIndex] = struct{}{} + } + if len(authIndices) == 0 { + return + } + snapshots := m.snapshotResolver.lookup(ctx, cfg, authIndices) + if len(snapshots) == 0 { + return + } + for i := range events { + if events[i].AuthIndex == "" || events[i].AccountSnapshot != "" { + continue + } + snapshot, ok := snapshots[events[i].AuthIndex] + if !ok { + continue + } + events[i].AccountSnapshot = snapshot.Account + events[i].AuthLabelSnapshot = snapshot.Label + events[i].AuthFileSnapshot = snapshot.FileName + events[i].AuthProviderSnapshot = snapshot.Provider + events[i].AuthSnapshotAtMS = snapshot.CapturedAtMS + } +} + func (m *Manager) markError(stage string, err error) { m.setStatus(func(status *Status) { status.Collector = "error" diff --git a/usage-service/internal/collector/collector_test.go b/usage-service/internal/collector/collector_test.go index 701d4de98..67e46a0ee 100644 --- a/usage-service/internal/collector/collector_test.go +++ b/usage-service/internal/collector/collector_test.go @@ -17,6 +17,15 @@ import ( func TestManagerConsumesHTTPUsageQueue(t *testing.T) { var calls int32 upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/v0/management/auth-files" { + if r.Header.Get("Authorization") != "Bearer management-key" { + http.Error(w, "bad key", 
http.StatusUnauthorized) + return + } + w.Header().Set("Content-Type", "application/json") + _, _ = w.Write([]byte(`{"files":[{"auth_index":"auth-1","account":"alice@example.com","label":"Alice","name":"alice.json","provider":"codex"}]}`)) + return + } if r.URL.Path != "/v0/management/usage-queue" { http.NotFound(w, r) return @@ -31,6 +40,7 @@ func TestManagerConsumesHTTPUsageQueue(t *testing.T) { "timestamp": "2026-05-06T00:00:00Z", "model": "gpt-test", "endpoint": "POST /v1/chat/completions", + "auth_index": "auth-1", "input_tokens": 10, "output_tokens": 5 }]`)) @@ -63,6 +73,19 @@ func TestManagerConsumesHTTPUsageQueue(t *testing.T) { if status.TotalInserted != 1 { t.Fatalf("total inserted = %d, want 1", status.TotalInserted) } + events, err := db.RecentEvents(context.Background(), 10) + if err != nil { + t.Fatalf("recent events: %v", err) + } + if len(events) != 1 { + t.Fatalf("len(events) = %d, want 1", len(events)) + } + if events[0].AccountSnapshot != "alice@example.com" { + t.Fatalf("account snapshot = %q", events[0].AccountSnapshot) + } + if events[0].AuthLabelSnapshot != "Alice" { + t.Fatalf("auth label snapshot = %q", events[0].AuthLabelSnapshot) + } } func TestManagerFallsBackToRESPWhenHTTPQueueUnsupported(t *testing.T) { From a07bc3b8f1fe188fe0f27d1c05696e9f5433e91b Mon Sep 17 00:00:00 2001 From: seakee Date: Mon, 11 May 2026 10:55:26 +0800 Subject: [PATCH 3/3] =?UTF-8?q?=F0=9F=90=9B=20fix(monitoring):=20recover?= =?UTF-8?q?=20historical=20auth=20account=20display?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add legacy auth_index aliases for CPA auth index changes and use account snapshots as display fallback. Keep original event auth_index values intact while resolving aliases to current auth metadata. This restores historical account grouping when current auth files still exist and reduces future display drift. 
--- .../hooks/useMonitoringData.test.ts | 26 ++- .../monitoring/hooks/useMonitoringData.ts | 71 +++++-- .../monitoring/legacyAuthIndexAliases.test.ts | 21 ++ .../monitoring/legacyAuthIndexAliases.ts | 186 ++++++++++++++++++ src/utils/usage.ts | 47 +++++ 5 files changed, 331 insertions(+), 20 deletions(-) create mode 100644 src/features/monitoring/legacyAuthIndexAliases.test.ts create mode 100644 src/features/monitoring/legacyAuthIndexAliases.ts diff --git a/src/features/monitoring/hooks/useMonitoringData.test.ts b/src/features/monitoring/hooks/useMonitoringData.test.ts index 52ccdfd05..6f02e26ff 100644 --- a/src/features/monitoring/hooks/useMonitoringData.test.ts +++ b/src/features/monitoring/hooks/useMonitoringData.test.ts @@ -1,5 +1,10 @@ import { describe, expect, it } from 'vitest'; -import { buildAccountRows, type MonitoringEventRow } from './useMonitoringData'; +import { + buildAccountRows, + buildMonitoringAuthMetaMap, + type MonitoringEventRow, +} from './useMonitoringData'; +import type { AuthFileItem } from '@/types'; const createMonitoringEventRow = ( overrides: Partial = {} @@ -55,3 +60,22 @@ describe('buildAccountRows', () => { expect(rows[0].authIndices).toEqual(['auth-123456', 'auth-999999']); }); }); + +describe('buildMonitoringAuthMetaMap', () => { + it('maps legacy auth indices to current auth metadata', () => { + const authFiles: AuthFileItem[] = [ + { + name: 'alice.json', + provider: 'codex', + authIndex: 'current-auth-index', + path: '/tmp/auths/alice.json', + account: 'alice@example.com', + }, + ]; + + const map = buildMonitoringAuthMetaMap(authFiles); + + expect(map.get('current-auth-index')?.account).toBe('alice@example.com'); + expect(map.get('6bf749cb7db0e15c')?.account).toBe('alice@example.com'); + }); +}); diff --git a/src/features/monitoring/hooks/useMonitoringData.ts b/src/features/monitoring/hooks/useMonitoringData.ts index 35ea9ad51..14d2b32bc 100644 --- a/src/features/monitoring/hooks/useMonitoringData.ts +++ 
b/src/features/monitoring/hooks/useMonitoringData.ts @@ -5,6 +5,7 @@ import type { AuthFileItem } from '@/types/authFile'; import type { Config } from '@/types/config'; import type { CredentialInfo } from '@/types/sourceInfo'; import { buildSourceInfoMap, resolveSourceDisplay } from '@/utils/sourceResolver'; +import { buildLegacyAuthIndexAliases } from '../legacyAuthIndexAliases'; import { calculateCost, collectUsageDetailsWithEndpoint, @@ -547,6 +548,24 @@ const normalizeAuthMeta = (entry: AuthFileItem): MonitoringAuthMeta | null => { }; }; +export const buildMonitoringAuthMetaMap = ( + authFiles: AuthFileItem[] +): Map => { + const map = new Map(); + authFiles.forEach((entry) => { + const normalized = normalizeAuthMeta(entry); + if (!normalized) return; + + map.set(normalized.authIndex, normalized); + buildLegacyAuthIndexAliases(entry).forEach((alias) => { + if (!map.has(alias)) { + map.set(alias, normalized); + } + }); + }); + return map; +}; + const buildRangeFilteredRows = ( rows: MonitoringEventRow[], timeRange: MonitoringTimeRange, @@ -1353,12 +1372,26 @@ const buildEventRows = ( const authIndex = normalizeAuthIndex(detail.auth_index) ?? '-'; const authMeta = authMetaMap.get(authIndex); const sourceMeta = resolveSourceDisplay(detail.source, detail.auth_index, sourceInfoMap, authFileMap); - const sourceLabel = authMeta?.label || sourceMeta.displayName || authIndex; + const snapshotAccount = readString(detail.account_snapshot ?? detail.accountSnapshot); + const snapshotLabel = readString( + detail.auth_label_snapshot ?? + detail.authLabelSnapshot ?? + detail.auth_file_snapshot ?? + detail.authFileSnapshot + ); + const snapshotProvider = readString( + detail.auth_provider_snapshot ?? 
detail.authProviderSnapshot + ); + const snapshotDisplay = snapshotAccount || snapshotLabel; + const sourceLabel = authMeta?.label || snapshotDisplay || sourceMeta.displayName || authIndex; const sourceMasked = maskEmailLike(sourceLabel); - const account = authMeta?.account || sourceLabel; + const account = authMeta?.account || snapshotAccount || sourceLabel; const accountMasked = maskEmailLike(account); - const channelMeta = channelByAuthIndex.get(authIndex); - const channelLabel = channelMeta?.name || authMeta?.provider || sourceMeta.type || '-'; + const channelMeta = + channelByAuthIndex.get(authIndex) || + (authMeta?.authIndex ? channelByAuthIndex.get(authMeta.authIndex) : undefined); + const channelLabel = + channelMeta?.name || authMeta?.provider || snapshotProvider || sourceMeta.type || '-'; const endpoint = readString(detail.__endpoint) || '-'; const endpointMethod = readString(detail.__endpointMethod) || '-'; const endpointPath = readString(detail.__endpointPath) || endpoint; @@ -1394,8 +1427,8 @@ const buildEventRows = ( accountMasked, authIndex, authIndexMasked: maskAuthIndex(authIndex), - authLabel: authMeta?.label || sourceMasked, - provider: authMeta?.provider || sourceMeta.type || '-', + authLabel: authMeta?.label || snapshotLabel || sourceMasked, + provider: authMeta?.provider || snapshotProvider || sourceMeta.type || '-', planType: authMeta?.planType || '-', channel: channelLabel, channelHost: channelMeta?.host || '-', @@ -1420,7 +1453,7 @@ const buildEventRows = ( channelMeta?.host, endpointPath, endpointMethod, - authMeta?.provider, + authMeta?.provider || snapshotProvider, authMeta?.planType ), } satisfies MonitoringEventRow; @@ -1514,15 +1547,15 @@ export function useMonitoringData({ }; }, [config]); - const authMetaMap = useMemo(() => { + const authMetaMap = useMemo(() => buildMonitoringAuthMetaMap(authFiles), [authFiles]); + + const uniqueAuthMeta = useMemo(() => { const map = new Map(); - authFiles.forEach((entry) => { - const normalized = 
normalizeAuthMeta(entry); - if (!normalized) return; - map.set(normalized.authIndex, normalized); + authMetaMap.forEach((item) => { + map.set(item.authIndex, item); }); - return map; - }, [authFiles]); + return Array.from(map.values()); + }, [authMetaMap]); const authFileMap = useMemo(() => { const map = new Map(); @@ -1592,22 +1625,22 @@ export function useMonitoringData({ const metadata = useMemo(() => { const planTypes = Array.from( - new Set(Array.from(authMetaMap.values()).map((item) => item.planType).filter((item) => item && item !== '-')) + new Set(uniqueAuthMeta.map((item) => item.planType).filter((item) => item && item !== '-')) ).sort(); return { totalAuthFiles: authFiles.length, - activeAuthFiles: Array.from(authMetaMap.values()).filter( + activeAuthFiles: uniqueAuthMeta.filter( (item) => !item.disabled && !item.unavailable && item.status === 'active' ).length, - unavailableAuthFiles: Array.from(authMetaMap.values()).filter((item) => item.unavailable).length, - runtimeOnlyAuthFiles: Array.from(authMetaMap.values()).filter((item) => item.runtimeOnly).length, + unavailableAuthFiles: uniqueAuthMeta.filter((item) => item.unavailable).length, + runtimeOnlyAuthFiles: uniqueAuthMeta.filter((item) => item.runtimeOnly).length, totalChannels: channels.length, enabledChannels: channels.filter((item) => !item.disabled).length, configuredModels: Array.from(new Set(channels.flatMap((item) => item.modelNames))).length, planTypes, }; - }, [authFiles.length, authMetaMap, channels]); + }, [authFiles.length, channels, uniqueAuthMeta]); const statusChips = useMemo(() => buildStatusChips(metadata), [metadata]); diff --git a/src/features/monitoring/legacyAuthIndexAliases.test.ts b/src/features/monitoring/legacyAuthIndexAliases.test.ts new file mode 100644 index 000000000..932dc3c9a --- /dev/null +++ b/src/features/monitoring/legacyAuthIndexAliases.test.ts @@ -0,0 +1,21 @@ +import { describe, expect, it } from 'vitest'; +import { buildLegacyAuthIndexAliases, 
stableAuthIndexFromSeed } from './legacyAuthIndexAliases'; + +describe('legacy auth index aliases', () => { + it('matches CPA stable auth index hashing', () => { + expect(stableAuthIndexFromSeed('abc')).toBe('ba7816bf8f01cfea'); + }); + + it('builds legacy file-based source aliases', () => { + const aliases = buildLegacyAuthIndexAliases({ + name: 'alice.json', + provider: 'codex', + path: '/tmp/auths/alice.json', + authIndex: 'current-auth-index', + account: 'alice@example.com', + }); + + expect(aliases).toContain('6bf749cb7db0e15c'); + expect(aliases).toContain('b2035f866a8fdbf7'); + }); +}); diff --git a/src/features/monitoring/legacyAuthIndexAliases.ts b/src/features/monitoring/legacyAuthIndexAliases.ts new file mode 100644 index 000000000..4b26b02b1 --- /dev/null +++ b/src/features/monitoring/legacyAuthIndexAliases.ts @@ -0,0 +1,186 @@ +import type { AuthFileItem } from '@/types/authFile'; + +type RecordLike = Record; + +const SHA256_K = new Uint32Array([ + 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, + 0xab1c5ed5, 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, + 0x9bdc06a7, 0xc19bf174, 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, + 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, + 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, + 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, 0xa2bfe8a1, 0xa81a664b, + 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, 0x19a4c116, + 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, + 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, + 0xc67178f2, +]); + +const readString = (value: unknown): string => + value === null || value === undefined ? 
'' : String(value).trim(); + +const readAnyString = (entry: RecordLike, keys: string[]): string => { + for (const key of keys) { + const value = readString(entry[key]); + if (value) return value; + } + return ''; +}; + +const rightRotate = (value: number, bits: number): number => + (value >>> bits) | (value << (32 - bits)); + +export const stableAuthIndexFromSeed = (seed: string): string => { + const trimmed = seed.trim(); + if (!trimmed) return ''; + + const bytes = new TextEncoder().encode(trimmed); + const bitLength = bytes.length * 8; + const paddedLength = Math.ceil((bytes.length + 9) / 64) * 64; + const data = new Uint8Array(paddedLength); + data.set(bytes); + data[bytes.length] = 0x80; + + const view = new DataView(data.buffer); + view.setUint32(paddedLength - 8, Math.floor(bitLength / 0x100000000), false); + view.setUint32(paddedLength - 4, bitLength >>> 0, false); + + const hash = new Uint32Array([ + 0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, + 0x5be0cd19, + ]); + const words = new Uint32Array(64); + + for (let offset = 0; offset < paddedLength; offset += 64) { + for (let index = 0; index < 16; index += 1) { + words[index] = view.getUint32(offset + index * 4, false); + } + for (let index = 16; index < 64; index += 1) { + const s0 = + rightRotate(words[index - 15], 7) ^ + rightRotate(words[index - 15], 18) ^ + (words[index - 15] >>> 3); + const s1 = + rightRotate(words[index - 2], 17) ^ + rightRotate(words[index - 2], 19) ^ + (words[index - 2] >>> 10); + words[index] = (words[index - 16] + s0 + words[index - 7] + s1) >>> 0; + } + + let a = hash[0]; + let b = hash[1]; + let c = hash[2]; + let d = hash[3]; + let e = hash[4]; + let f = hash[5]; + let g = hash[6]; + let h = hash[7]; + + for (let index = 0; index < 64; index += 1) { + const s1 = rightRotate(e, 6) ^ rightRotate(e, 11) ^ rightRotate(e, 25); + const ch = (e & f) ^ (~e & g); + const temp1 = (h + s1 + ch + SHA256_K[index] + words[index]) >>> 0; + const s0 = 
rightRotate(a, 2) ^ rightRotate(a, 13) ^ rightRotate(a, 22); + const maj = (a & b) ^ (a & c) ^ (b & c); + const temp2 = (s0 + maj) >>> 0; + + h = g; + g = f; + f = e; + e = (d + temp1) >>> 0; + d = c; + c = b; + b = a; + a = (temp1 + temp2) >>> 0; + } + + hash[0] = (hash[0] + a) >>> 0; + hash[1] = (hash[1] + b) >>> 0; + hash[2] = (hash[2] + c) >>> 0; + hash[3] = (hash[3] + d) >>> 0; + hash[4] = (hash[4] + e) >>> 0; + hash[5] = (hash[5] + f) >>> 0; + hash[6] = (hash[6] + g) >>> 0; + hash[7] = (hash[7] + h) >>> 0; + } + + return [hash[0], hash[1]].map((value) => value.toString(16).padStart(8, '0')).join(''); +}; + +const isUsableSourceCandidate = (value: string): boolean => { + const trimmed = value.trim(); + if (!trimmed) return false; + const lowered = trimmed.toLowerCase(); + if (lowered === 'file' || lowered === 'memory') return false; + return lowered.endsWith('.json') || trimmed.includes('/') || trimmed.includes('\\'); +}; + +const buildLegacyConfigSeed = (input: { + providerKey: string; + compatName?: string; + baseURL?: string; + proxyURL?: string; + apiKey?: string; + source?: string; +}) => { + const providerKey = input.providerKey.trim().toLowerCase(); + if (!providerKey) return ''; + + const parts = [`provider=${providerKey}`]; + if (input.compatName) parts.push(`compat=${input.compatName.trim().toLowerCase()}`); + if (input.baseURL) parts.push(`base=${input.baseURL.trim()}`); + if (input.proxyURL) parts.push(`proxy=${input.proxyURL.trim()}`); + if (input.apiKey) parts.push(`api_key=${input.apiKey.trim()}`); + if (input.source) parts.push(`source=${input.source.trim()}`); + + return parts.length > 1 ? 
`config:${parts.join('\x00')}` : ''; +}; + +export const buildLegacyAuthIndexAliases = (entry: AuthFileItem): string[] => { + const record = entry as RecordLike; + const seeds = new Set(); + const name = readAnyString(record, ['name']); + const id = readAnyString(record, ['id']); + const providerKey = readAnyString(record, ['provider_key', 'providerKey', 'provider', 'type']); + const compatName = readAnyString(record, ['compat_name', 'compatName']); + const baseURL = readAnyString(record, ['base_url', 'baseUrl', 'base-url']); + const proxyURL = readAnyString(record, ['proxy_url', 'proxyUrl', 'proxy-url']); + const apiKey = readAnyString(record, ['api_key', 'apiKey', 'api-key']); + + [name, id].forEach((value) => { + if (value) seeds.add(`file:${value}`); + }); + if (id) seeds.add(`id:${id}`); + + const sourceCandidates = [ + readAnyString(record, ['path']), + readAnyString(record, ['source']), + readAnyString(record, ['file']), + ].filter(isUsableSourceCandidate); + + sourceCandidates.forEach((source) => { + const seed = buildLegacyConfigSeed({ + providerKey, + compatName, + baseURL, + proxyURL, + apiKey, + source, + }); + if (seed) seeds.add(seed); + }); + + if (apiKey || baseURL || proxyURL || compatName) { + const seed = buildLegacyConfigSeed({ + providerKey, + compatName, + baseURL, + proxyURL, + apiKey, + }); + if (seed) seeds.add(seed); + } + + return Array.from(seeds) + .map(stableAuthIndexFromSeed) + .filter(Boolean); +}; diff --git a/src/utils/usage.ts b/src/utils/usage.ts index 0a2b4d7a5..97de15eb7 100644 --- a/src/utils/usage.ts +++ b/src/utils/usage.ts @@ -24,6 +24,16 @@ export interface UsageDetail { timestamp: string; source: string; auth_index: string | number | null; + account_snapshot?: string; + accountSnapshot?: string; + auth_label_snapshot?: string; + authLabelSnapshot?: string; + auth_file_snapshot?: string; + authFileSnapshot?: string; + auth_provider_snapshot?: string; + authProviderSnapshot?: string; + auth_snapshot_at_ms?: number; + 
authSnapshotAtMs?: number; latency_ms?: number; tokens: UsageTokens; failed: boolean; @@ -67,6 +77,17 @@ const toFiniteNumber = (value: unknown): number => { return Number.isFinite(numberValue) ? numberValue : 0; }; +const toPositiveNumber = (value: unknown): number | undefined => { + const numberValue = toFiniteNumber(value); + return numberValue > 0 ? numberValue : undefined; +}; + +const readDetailString = (value: unknown): string | undefined => { + if (value === null || value === undefined) return undefined; + const text = String(value).trim(); + return text || undefined; +}; + const getApisRecord = (usageData: unknown): Record | null => { const usageRecord = isRecord(usageData) ? usageData : null; const apisRaw = usageRecord ? usageRecord.apis : null; @@ -243,6 +264,19 @@ export function collectUsageDetails(usageData: unknown): UsageDetail[] { detailRaw.authIndex ?? detailRaw.AuthIndex ?? null) as UsageDetail['auth_index'], + account_snapshot: readDetailString(detailRaw.account_snapshot ?? detailRaw.accountSnapshot), + auth_label_snapshot: readDetailString( + detailRaw.auth_label_snapshot ?? detailRaw.authLabelSnapshot + ), + auth_file_snapshot: readDetailString( + detailRaw.auth_file_snapshot ?? detailRaw.authFileSnapshot + ), + auth_provider_snapshot: readDetailString( + detailRaw.auth_provider_snapshot ?? detailRaw.authProviderSnapshot + ), + auth_snapshot_at_ms: toPositiveNumber( + detailRaw.auth_snapshot_at_ms ?? detailRaw.authSnapshotAtMs + ), latency_ms: latencyMs ?? undefined, tokens: readTokens(detailRaw), failed: detailRaw.failed === true, @@ -295,6 +329,19 @@ export function collectUsageDetailsWithEndpoint(usageData: unknown): UsageDetail detailRaw.authIndex ?? detailRaw.AuthIndex ?? null) as UsageDetail['auth_index'], + account_snapshot: readDetailString(detailRaw.account_snapshot ?? detailRaw.accountSnapshot), + auth_label_snapshot: readDetailString( + detailRaw.auth_label_snapshot ?? 
detailRaw.authLabelSnapshot + ), + auth_file_snapshot: readDetailString( + detailRaw.auth_file_snapshot ?? detailRaw.authFileSnapshot + ), + auth_provider_snapshot: readDetailString( + detailRaw.auth_provider_snapshot ?? detailRaw.authProviderSnapshot + ), + auth_snapshot_at_ms: toPositiveNumber( + detailRaw.auth_snapshot_at_ms ?? detailRaw.authSnapshotAtMs + ), latency_ms: latencyMs ?? undefined, tokens: readTokens(detailRaw), failed: detailRaw.failed === true,