diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..8f3cab7 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +Load AGENTS.md diff --git a/cmd/jivefire/main.go b/cmd/jivefire/main.go index 94f1b1d..5fd0891 100644 --- a/cmd/jivefire/main.go +++ b/cmd/jivefire/main.go @@ -378,38 +378,27 @@ func runPass2(p *tea.Program, inputFile string, outputFile string, channels int, fftBuffer := make([]float64, config.FFTSize) // Pre-allocate reusable buffers for audio processing (avoid per-frame allocations) - newSamples := make([]float64, 0, samplesPerFrame) + newSamples := make([]float64, samplesPerFrame) audioSamples := make([]float32, samplesPerFrame) // Pre-fill buffer with first chunk - // Keep reading until we get the requested number of samples or EOF - var initialSamples []float64 - for len(initialSamples) < config.FFTSize { - chunk, err := reader.ReadChunk(config.FFTSize - len(initialSamples)) - if err != nil { - if errors.Is(err, io.EOF) { - break // Use what we have - } - cli.PrintError(fmt.Sprintf("error reading initial audio chunk: %v", err)) - p.Quit() - return - } - initialSamples = append(initialSamples, chunk...) 
+ n, err := audio.FillFFTBuffer(reader, fftBuffer) + if err != nil { + cli.PrintError(fmt.Sprintf("error reading initial audio chunk: %v", err)) + p.Quit() + return } - - if len(initialSamples) == 0 { + if n == 0 { cli.PrintError("no audio data available") p.Quit() return } - copy(fftBuffer, initialSamples) - // Write initial audio samples to encoder (first samplesPerFrame worth) // This corresponds to the audio for frame 0 initialAudioSamples := make([]float32, samplesPerFrame) - for i := 0; i < samplesPerFrame && i < len(initialSamples); i++ { - initialAudioSamples[i] = float32(initialSamples[i]) + for i := 0; i < samplesPerFrame && i < n; i++ { + initialAudioSamples[i] = float32(fftBuffer[i]) } if err := enc.WriteAudioSamples(initialAudioSamples); err != nil { cli.PrintError(fmt.Sprintf("error writing initial audio: %v", err)) @@ -556,40 +545,24 @@ func runPass2(p *tea.Program, inputFile string, outputFile string, channels int, // === AUDIO TIMING START === // Read audio, encode, and manage buffer for next frame t0 = time.Now() - newSamples = newSamples[:0] // Reset slice, reuse backing array - for len(newSamples) < samplesPerFrame { - chunk, err := reader.ReadChunk(samplesPerFrame - len(newSamples)) - if err != nil { - if errors.Is(err, io.EOF) { - // If we got no new samples at all, we're done - if len(newSamples) == 0 { - // Break out of the frame loop - no more audio - frameNum = numFrames - break - } - // Got partial frame at end of file, use what we have - break - } - cli.PrintError(fmt.Sprintf("error reading audio: %v", err)) - p.Quit() - return + nRead, readErr := audio.ReadNextFrame(reader, newSamples) + if readErr != nil { + if errors.Is(readErr, io.EOF) { + totalAudio += time.Since(t0) + break } - newSamples = append(newSamples, chunk...) 
- } - - // If we got no new samples, we're done - if len(newSamples) == 0 { - totalAudio += time.Since(t0) - break + cli.PrintError(fmt.Sprintf("error reading audio: %v", readErr)) + p.Quit() + return } // Write audio samples for this frame to encoder // Convert float64 samples to float32 for AAC encoder // Uses pre-allocated audioSamples buffer, slice to actual length - for i, s := range newSamples { - audioSamples[i] = float32(s) + for i := range nRead { + audioSamples[i] = float32(newSamples[i]) } - if err := enc.WriteAudioSamples(audioSamples[:len(newSamples)]); err != nil { + if err := enc.WriteAudioSamples(audioSamples[:nRead]); err != nil { cli.PrintError(fmt.Sprintf("error writing audio at frame %d: %v", frameNum, err)) p.Quit() return @@ -597,14 +570,14 @@ func runPass2(p *tea.Program, inputFile string, outputFile string, channels int, // Shift buffer left by samplesPerFrame, append new samples copy(fftBuffer, fftBuffer[samplesPerFrame:]) // Pad with zeros if we got fewer samples than expected - if len(newSamples) < samplesPerFrame { - copy(fftBuffer[config.FFTSize-samplesPerFrame:], newSamples) + if nRead < samplesPerFrame { + copy(fftBuffer[config.FFTSize-samplesPerFrame:], newSamples[:nRead]) // Zero-fill the remaining space - for i := config.FFTSize - samplesPerFrame + len(newSamples); i < config.FFTSize; i++ { + for i := config.FFTSize - samplesPerFrame + nRead; i < config.FFTSize; i++ { fftBuffer[i] = 0 } } else { - copy(fftBuffer[config.FFTSize-samplesPerFrame:], newSamples) + copy(fftBuffer[config.FFTSize-samplesPerFrame:], newSamples[:nRead]) } totalAudio += time.Since(t0) // === AUDIO TIMING END === diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md index d446a49..36a347a 100644 --- a/docs/ARCHITECTURE.md +++ b/docs/ARCHITECTURE.md @@ -92,7 +92,7 @@ MP4 Muxer (libavformat) ## Key Technical Choices ### Audio Frame Size Mismatch -FFT analysis requires 2048 samples for frequency resolution, but AAC encoder expects 1024 samples per frame. 
**Solution:** `SharedAudioBuffer` in `audio/shared_buffer.go` provides thread-safe multi-consumer access with independent read positions—FFT and encoder each consume at their own rate without blocking each other. +FFT analysis requires 2048 samples for frequency resolution, but AAC encoder expects 1024 samples per frame. **Solution:** `AudioFIFO` in `encoder/encoder.go` buffers incoming audio samples and drains them in encoder-sized frames, decoupling the FFT chunk size from the AAC frame size. ### Hardware-Accelerated Encoding Automatic GPU encoder detection in `encoder/hwaccel.go`: diff --git a/internal/audio/analyzer.go b/internal/audio/analyzer.go index 43807ca..54789af 100644 --- a/internal/audio/analyzer.go +++ b/internal/audio/analyzer.go @@ -5,6 +5,7 @@ import ( "fmt" "io" "math" + "math/cmplx" "time" "github.com/linuxmatters/jivefire/internal/config" @@ -74,32 +75,22 @@ func AnalyzeAudio(filename string, progressCb ProgressCallback) (*Profile, error // Sliding buffer for FFT: we advance by samplesPerFrame but need FFTSize for FFT fftBuffer := make([]float64, config.FFTSize) + frameBuf := make([]float64, samplesPerFrame) // Pre-fill buffer with first chunk - // Keep reading until we get the requested number of samples or EOF - var initialSamples []float64 - for len(initialSamples) < config.FFTSize { - chunk, err := reader.ReadChunk(config.FFTSize - len(initialSamples)) - if err != nil { - if errors.Is(err, io.EOF) { - break // Use what we have - } - return nil, fmt.Errorf("error reading initial chunk: %w", err) - } - initialSamples = append(initialSamples, chunk...) 
+ n, err := FillFFTBuffer(reader, fftBuffer) + if err != nil { + return nil, fmt.Errorf("error reading initial chunk: %w", err) } - - if len(initialSamples) == 0 { + if n == 0 { return nil, fmt.Errorf("no audio data in file") } - copy(fftBuffer, initialSamples) - startTime := time.Now() frameNum := 0 for { - // Pass fftBuffer directly to ProcessChunk - it creates its own copy via ApplyHanning + // Pass fftBuffer directly to ProcessChunk - it applies the pre-computed Hanning window // No need for intermediate allocation since analyzeFrame only reads the buffer coeffs := processor.ProcessChunk(fftBuffer) @@ -129,42 +120,29 @@ func AnalyzeAudio(filename string, progressCb ProgressCallback) (*Profile, error } // Advance sliding buffer for next frame - // Read samplesPerFrame new samples and shift buffer - // Keep reading until we get the requested number of samples or EOF - newSamples := make([]float64, 0, samplesPerFrame) - for len(newSamples) < samplesPerFrame { - chunk, err := reader.ReadChunk(samplesPerFrame - len(newSamples)) - if err != nil { - if errors.Is(err, io.EOF) { - // If we got some samples, use them; otherwise we're done - if len(newSamples) == 0 { - // Send final progress update - if progressCb != nil { - barHeights := make([]float64, config.NumBars) - for i := range config.NumBars { - barHeights[i] = analysis.BarMagnitudes[i] - } - elapsed := time.Since(startTime) - progressCb(frameNum, frameNum, analysis.RMSLevel, analysis.PeakMagnitude, barHeights, elapsed) - } - break // Finished reading all audio + nRead, err := ReadNextFrame(reader, frameBuf) + if err != nil { + if errors.Is(err, io.EOF) { + // Send final progress update + if progressCb != nil { + barHeights := make([]float64, config.NumBars) + for i := range config.NumBars { + barHeights[i] = analysis.BarMagnitudes[i] } - // Got partial frame at end of file, use what we have - break + elapsed := time.Since(startTime) + progressCb(frameNum, frameNum, analysis.RMSLevel, analysis.PeakMagnitude, 
barHeights, elapsed) } - return nil, fmt.Errorf("error reading audio at frame %d: %w", frameNum, err) + break } - newSamples = append(newSamples, chunk...) - } - - // If we got no new samples, we're done - if len(newSamples) == 0 { - break + return nil, fmt.Errorf("error reading audio at frame %d: %w", frameNum, err) } // Shift buffer left by samplesPerFrame, append new samples copy(fftBuffer, fftBuffer[samplesPerFrame:]) - copy(fftBuffer[config.FFTSize-samplesPerFrame:], newSamples) + copy(fftBuffer[config.FFTSize-samplesPerFrame:], frameBuf[:nRead]) + if nRead < samplesPerFrame { + clear(fftBuffer[config.FFTSize-samplesPerFrame+nRead:]) + } } // Set actual frame count and duration @@ -222,7 +200,7 @@ func analyzeFrame(coeffs []complex128, audioChunk []float64) FrameAnalysis { var sum float64 for i := start; i < end; i++ { - magnitude := math.Sqrt(real(coeffs[i])*real(coeffs[i]) + imag(coeffs[i])*imag(coeffs[i])) + magnitude := cmplx.Abs(coeffs[i]) sum += magnitude } diff --git a/internal/audio/analyzer_test.go b/internal/audio/analyzer_test.go index 0c25d9c..16186ae 100644 --- a/internal/audio/analyzer_test.go +++ b/internal/audio/analyzer_test.go @@ -7,11 +7,17 @@ import ( "github.com/linuxmatters/jivefire/internal/config" ) -func TestAnalyzeAudio(t *testing.T) { +func mustAnalyze(t *testing.T) *Profile { + t.Helper() profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) + t.Fatalf("Failed to analyze audio: %v", err) } + return profile +} + +func TestAnalyzeAudio(t *testing.T) { + profile := mustAnalyze(t) // Validate basic properties if profile.NumFrames <= 0 { @@ -66,10 +72,7 @@ func TestAnalyzeAudioInvalidFile(t *testing.T) { } func TestAnalyzeFrameStatistics(t *testing.T) { - profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) - if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) - } + profile := mustAnalyze(t) // Check first few frames have valid statistics for i := 0; 
i < 10 && i < len(profile.Frames); i++ { @@ -95,10 +98,7 @@ func TestAnalyzeFrameStatistics(t *testing.T) { } func TestOptimalBaseScaleCalculation(t *testing.T) { - profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) - if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) - } + profile := mustAnalyze(t) // Optimal baseScale should be calculated as: 0.85 / GlobalPeak expectedBaseScale := 0.85 / profile.GlobalPeak @@ -120,10 +120,7 @@ func TestOptimalBaseScaleCalculation(t *testing.T) { } func TestGlobalPeakIsMaximum(t *testing.T) { - profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) - if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) - } + profile := mustAnalyze(t) // GlobalPeak should be >= all frame peaks for i, frame := range profile.Frames { @@ -152,10 +149,7 @@ func TestGlobalPeakIsMaximum(t *testing.T) { } func TestGlobalRMSIsAverage(t *testing.T) { - profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) - if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) - } + profile := mustAnalyze(t) // Calculate average RMS manually var sumRMS float64 @@ -175,10 +169,7 @@ func TestGlobalRMSIsAverage(t *testing.T) { } func TestDynamicRangeCalculation(t *testing.T) { - profile, err := AnalyzeAudio("../../testdata/LMP0.mp3", nil) - if err != nil { - t.Fatalf("Failed to analyze audio: %v", err) - } + profile := mustAnalyze(t) expectedDynamicRange := profile.GlobalPeak / profile.GlobalRMS diff --git a/internal/audio/buffer.go b/internal/audio/buffer.go new file mode 100644 index 0000000..8c9c1fc --- /dev/null +++ b/internal/audio/buffer.go @@ -0,0 +1,48 @@ +package audio + +import ( + "errors" + "fmt" + "io" +) + +// FillFFTBuffer reads up to len(buf) samples from reader via repeated ReadChunk +// calls. Returns the number of samples read. Returns (0, nil) on immediate EOF, +// allowing callers to decide whether that is an error. 
+func FillFFTBuffer(reader *StreamingReader, buf []float64) (int, error) { + var total int + for total < len(buf) { + chunk, err := reader.ReadChunk(len(buf) - total) + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return total, fmt.Errorf("reading audio chunk: %w", err) + } + copy(buf[total:], chunk) + total += len(chunk) + } + return total, nil +} + +// ReadNextFrame reads up to len(buf) samples from reader into the provided +// buffer. Returns the number of samples read. Returns (0, io.EOF) when no +// samples are available. Returns (n, nil) for partial frames at end of file. +func ReadNextFrame(reader *StreamingReader, buf []float64) (int, error) { + var total int + for total < len(buf) { + chunk, err := reader.ReadChunk(len(buf) - total) + if err != nil { + if errors.Is(err, io.EOF) { + if total == 0 { + return 0, io.EOF + } + break + } + return 0, fmt.Errorf("reading audio frame: %w", err) + } + copy(buf[total:], chunk) + total += len(chunk) + } + return total, nil +} diff --git a/internal/audio/buffer_test.go b/internal/audio/buffer_test.go new file mode 100644 index 0000000..ea28d0e --- /dev/null +++ b/internal/audio/buffer_test.go @@ -0,0 +1,166 @@ +package audio + +import ( + "errors" + "io" + "testing" +) + +func TestFillFFTBufferNormal(t *testing.T) { + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + buf := make([]float64, 2048) + n, err := FillFFTBuffer(reader, buf) + if err != nil { + t.Fatalf("FillFFTBuffer returned error: %v", err) + } + if n != 2048 { + t.Errorf("Expected 2048 samples, got %d", n) + } +} + +func TestFillFFTBufferPartial(t *testing.T) { + // Read most of the file first, then try to fill a large buffer + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + // Drain most of the file + drain := make([]float64, 
4096) + for { + n, err := FillFFTBuffer(reader, drain) + if err != nil { + t.Fatalf("Unexpected error draining: %v", err) + } + if n < len(drain) { + // Partial read means we hit EOF partway through + break + } + } + + // Next fill should return 0 (EOF already reached) + buf := make([]float64, 2048) + n, err := FillFFTBuffer(reader, buf) + if err != nil { + t.Fatalf("FillFFTBuffer returned error: %v", err) + } + if n != 0 { + t.Errorf("Expected 0 samples after EOF, got %d", n) + } +} + +func TestFillFFTBufferEmpty(t *testing.T) { + // Reading after full drain should return (0, nil) + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + // Drain completely + drain := make([]float64, 8192) + for { + n, err := FillFFTBuffer(reader, drain) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if n == 0 { + break + } + } + + buf := make([]float64, 2048) + n, err := FillFFTBuffer(reader, buf) + if err != nil { + t.Fatalf("FillFFTBuffer returned error: %v", err) + } + if n != 0 { + t.Errorf("Expected (0, nil) on empty, got (%d, nil)", n) + } +} + +func TestReadNextFrameNormal(t *testing.T) { + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + buf := make([]float64, 1024) + n, err := ReadNextFrame(reader, buf) + if err != nil { + t.Fatalf("ReadNextFrame returned error: %v", err) + } + if n != 1024 { + t.Errorf("Expected 1024 samples, got %d", n) + } +} + +func TestReadNextFramePartialAtEOF(t *testing.T) { + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + // Drain most of the file, tracking whether we saw a partial frame + buf := make([]float64, 4096) + sawPartial := false + for { + n, err := ReadNextFrame(reader, buf) + if errors.Is(err, io.EOF) { + 
break + } + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + if n < len(buf) { + // Got a partial frame at EOF + if n == 0 { + t.Error("Expected partial samples, got 0") + } + sawPartial = true + break + } + } + + if !sawPartial { + t.Error("Expected to observe a partial frame before EOF, but none was returned") + } +} + +func TestReadNextFrameImmediateEOF(t *testing.T) { + reader, err := NewStreamingReader("../../testdata/LMP0.mp3") + if err != nil { + t.Fatalf("Failed to create reader: %v", err) + } + defer reader.Close() + + // Drain completely + buf := make([]float64, 8192) + for { + _, err := ReadNextFrame(reader, buf) + if errors.Is(err, io.EOF) { + break + } + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + } + + // Now should get immediate EOF + smallBuf := make([]float64, 1024) + n, err := ReadNextFrame(reader, smallBuf) + if !errors.Is(err, io.EOF) { + t.Errorf("Expected io.EOF, got err=%v n=%d", err, n) + } + if n != 0 { + t.Errorf("Expected 0 samples on EOF, got %d", n) + } +} diff --git a/internal/audio/decoder.go b/internal/audio/decoder.go index 1148889..221b313 100644 --- a/internal/audio/decoder.go +++ b/internal/audio/decoder.go @@ -1,7 +1,5 @@ package audio -import "io" - // Decoder defines the interface for all audio format decoders type Decoder interface { // ReadChunk reads the next chunk of samples as float64 @@ -17,6 +15,3 @@ type Decoder interface { // Close closes the decoder and releases resources Close() error } - -// Ensure io.EOF is available for decoder implementations -var EOF = io.EOF diff --git a/internal/audio/ffmpeg_common.go b/internal/audio/ffmpeg_common.go new file mode 100644 index 0000000..00ad563 --- /dev/null +++ b/internal/audio/ffmpeg_common.go @@ -0,0 +1,51 @@ +package audio + +import ( + "fmt" + + ffmpeg "github.com/linuxmatters/ffmpeg-statigo" +) + +// openAudioFormatCtx opens an audio file, finds stream info, and locates the +// first audio stream. 
The caller is responsible for closing the returned +// format context via AVFormatCloseInput. +func openAudioFormatCtx(filename string) (*ffmpeg.AVFormatContext, int, error) { + var formatCtx *ffmpeg.AVFormatContext + + path := ffmpeg.ToCStr(filename) + defer path.Free() + + ret, err := ffmpeg.AVFormatOpenInput(&formatCtx, path, nil, nil) + if err != nil { + return nil, 0, fmt.Errorf("failed to open audio file: %w", err) + } + if ret < 0 { + return nil, 0, fmt.Errorf("failed to open audio file: error code %d", ret) + } + + ret, err = ffmpeg.AVFormatFindStreamInfo(formatCtx, nil) + if err != nil { + ffmpeg.AVFormatCloseInput(&formatCtx) + return nil, 0, fmt.Errorf("failed to find stream info: %w", err) + } + if ret < 0 { + ffmpeg.AVFormatCloseInput(&formatCtx) + return nil, 0, fmt.Errorf("failed to find stream info: error code %d", ret) + } + + audioStreamIdx := -1 + streams := formatCtx.Streams() + for i := uintptr(0); i < uintptr(formatCtx.NbStreams()); i++ { + stream := streams.Get(i) + if stream.Codecpar().CodecType() == ffmpeg.AVMediaTypeAudio { + audioStreamIdx = int(i) + break + } + } + if audioStreamIdx == -1 { + ffmpeg.AVFormatCloseInput(&formatCtx) + return nil, 0, fmt.Errorf("no audio stream found in file") + } + + return formatCtx, audioStreamIdx, nil +} diff --git a/internal/audio/ffmpeg_decoder.go b/internal/audio/ffmpeg_decoder.go index 3c9df5f..e22c884 100644 --- a/internal/audio/ffmpeg_decoder.go +++ b/internal/audio/ffmpeg_decoder.go @@ -4,7 +4,7 @@ import ( "errors" "fmt" "io" - "unsafe" + "math" ffmpeg "github.com/linuxmatters/ffmpeg-statigo" ) @@ -31,45 +31,15 @@ func NewFFmpegDecoder(filename string) (*FFmpegDecoder, error) { sampleBuffer: make([]float64, 0, 8192), } - // Open input file - path := ffmpeg.ToCStr(filename) - defer path.Free() - - ret, err := ffmpeg.AVFormatOpenInput(&d.formatCtx, path, nil, nil) - if err != nil { - return nil, fmt.Errorf("failed to open audio file: %w", err) - } - if ret < 0 { - return nil, fmt.Errorf("failed to 
open audio file: error code %d", ret) - } - - // Find stream info - ret, err = ffmpeg.AVFormatFindStreamInfo(d.formatCtx, nil) + // Open input file and find audio stream + formatCtx, streamIndex, err := openAudioFormatCtx(filename) if err != nil { - d.Close() - return nil, fmt.Errorf("failed to find stream info: %w", err) - } - if ret < 0 { - d.Close() - return nil, fmt.Errorf("failed to find stream info: error code %d", ret) - } - - // Find audio stream - d.streamIndex = -1 - streams := d.formatCtx.Streams() - for i := uintptr(0); i < uintptr(d.formatCtx.NbStreams()); i++ { - stream := streams.Get(i) - if stream.Codecpar().CodecType() == ffmpeg.AVMediaTypeAudio { - d.streamIndex = int(i) - break - } - } - if d.streamIndex == -1 { - d.Close() - return nil, fmt.Errorf("no audio stream found in file") + return nil, err } + d.formatCtx = formatCtx + d.streamIndex = streamIndex - audioStream := streams.Get(uintptr(d.streamIndex)) //nolint:gosec // stream index is non-negative + audioStream := d.formatCtx.Streams().Get(uintptr(d.streamIndex)) //nolint:gosec // stream index is non-negative // Find decoder decoder := ffmpeg.AVCodecFindDecoder(audioStream.Codecpar().CodecId()) @@ -86,7 +56,7 @@ func NewFFmpegDecoder(filename string) (*FFmpegDecoder, error) { } // Copy codec parameters - ret, err = ffmpeg.AVCodecParametersToContext(d.codecCtx, audioStream.Codecpar()) + ret, err := ffmpeg.AVCodecParametersToContext(d.codecCtx, audioStream.Codecpar()) if err != nil { d.Close() return nil, fmt.Errorf("failed to copy codec parameters: %w", err) @@ -270,13 +240,13 @@ func (d *FFmpegDecoder) extractSamples() ([]float64, error) { uint32(leftSlice[i*4+1])<<8 | uint32(leftSlice[i*4+2])<<16 | uint32(leftSlice[i*4+3])<<24 - leftFloat := *(*float32)(unsafe.Pointer(&leftBits)) + leftFloat := math.Float32frombits(leftBits) rightBits := uint32(rightSlice[i*4]) | uint32(rightSlice[i*4+1])<<8 | uint32(rightSlice[i*4+2])<<16 | uint32(rightSlice[i*4+3])<<24 - rightFloat := 
*(*float32)(unsafe.Pointer(&rightBits)) + rightFloat := math.Float32frombits(rightBits) samples[i] = float64(leftFloat+rightFloat) / 2 } @@ -316,7 +286,7 @@ func (d *FFmpegDecoder) extractSamples() ([]float64, error) { uint32(dataSlice[i*4+1])<<8 | uint32(dataSlice[i*4+2])<<16 | uint32(dataSlice[i*4+3])<<24 - samples[i] = float64(*(*float32)(unsafe.Pointer(&bits))) + samples[i] = float64(math.Float32frombits(bits)) } default: @@ -381,7 +351,7 @@ func (d *FFmpegDecoder) extractSamples() ([]float64, error) { uint32(dataSlice[i*4+1])<<8 | uint32(dataSlice[i*4+2])<<16 | uint32(dataSlice[i*4+3])<<24 - samples[i] = float64(*(*float32)(unsafe.Pointer(&bits))) + samples[i] = float64(math.Float32frombits(bits)) } } else { // Stereo interleaved @@ -395,8 +365,8 @@ func (d *FFmpegDecoder) extractSamples() ([]float64, error) { uint32(dataSlice[i*8+5])<<8 | uint32(dataSlice[i*8+6])<<16 | uint32(dataSlice[i*8+7])<<24 - leftFloat := *(*float32)(unsafe.Pointer(&leftBits)) - rightFloat := *(*float32)(unsafe.Pointer(&rightBits)) + leftFloat := math.Float32frombits(leftBits) + rightFloat := math.Float32frombits(rightBits) samples[i] = float64(leftFloat+rightFloat) / 2 } } diff --git a/internal/audio/fft.go b/internal/audio/fft.go index 294821e..b5525f8 100644 --- a/internal/audio/fft.go +++ b/internal/audio/fft.go @@ -2,22 +2,12 @@ package audio import ( "math" + "math/cmplx" "github.com/argusdusty/gofft" "github.com/linuxmatters/jivefire/internal/config" ) -// ApplyHanning applies a Hanning window to the input data -func ApplyHanning(data []float64) []float64 { - windowed := make([]float64, len(data)) - n := len(data) - for i := range data { - window := 0.5 * (1 - math.Cos(2*math.Pi*float64(i)/float64(n-1))) - windowed[i] = data[i] * window - } - return windowed -} - // BinFFT bins FFT coefficients into bars and returns normalized values (0.0-1.0) // CAVA-style approach: work in normalized space, apply maxBarHeight scaling later // baseScale is calculated from Pass 1 analysis for 
optimal visualization @@ -41,7 +31,7 @@ func BinFFT(coeffs []complex128, sensitivity float64, baseScale float64, result // Average magnitude in this range var sum float64 for i := start; i < end; i++ { - magnitude := math.Sqrt(real(coeffs[i])*real(coeffs[i]) + imag(coeffs[i])*imag(coeffs[i])) + magnitude := cmplx.Abs(coeffs[i]) sum += magnitude } diff --git a/internal/audio/fft_test.go b/internal/audio/fft_test.go index 5d61ab0..d4d42da 100644 --- a/internal/audio/fft_test.go +++ b/internal/audio/fft_test.go @@ -3,8 +3,6 @@ package audio import ( "math" "testing" - - "github.com/argusdusty/gofft" ) // TestBinFFT_KnownSineWave verifies that BinFFT correctly identifies a known @@ -39,14 +37,9 @@ func TestBinFFT_KnownSineWave(t *testing.T) { // Take first FFT window (2048 samples) windowSamples := sine[:fftSize] - // Apply Hanning window (same as in ProcessChunk) - windowed := ApplyHanning(windowSamples) - - // Compute FFT - fftInput := gofft.Float64ToComplex128Array(windowed) - if err := gofft.FFT(fftInput); err != nil { - t.Fatalf("FFT computation failed: %v", err) - } + // Process through the live code path (Hanning window + FFT) + processor := NewProcessor() + fftInput := processor.ProcessChunk(windowSamples) // Bin the FFT results into 64 bars result := make([]float64, numBars) @@ -164,11 +157,9 @@ func TestBinFFT_NoiseGate(t *testing.T) { quietSignal[i] = 0.001 * math.Sin(2*math.Pi*float64(i)/100.0) } - windowed := ApplyHanning(quietSignal) - fftInput := gofft.Float64ToComplex128Array(windowed) - if err := gofft.FFT(fftInput); err != nil { - t.Fatalf("FFT computation failed: %v", err) - } + // Process through the live code path (Hanning window + FFT) + processor := NewProcessor() + fftInput := processor.ProcessChunk(quietSignal) result := make([]float64, numBars) BinFFT(fftInput, sensitivity, baseScale, result) @@ -206,11 +197,9 @@ func TestBinFFT_EnergyDistribution(t *testing.T) { math.Sin(2*math.Pi*1000*float64(i)/float64(fftSize))) } - windowed := 
ApplyHanning(signal) - fftInput := gofft.Float64ToComplex128Array(windowed) - if err := gofft.FFT(fftInput); err != nil { - t.Fatalf("FFT computation failed: %v", err) - } + // Process through the live code path (Hanning window + FFT) + processor := NewProcessor() + fftInput := processor.ProcessChunk(signal) result := make([]float64, numBars) BinFFT(fftInput, sensitivity, baseScale, result) @@ -241,55 +230,6 @@ func TestBinFFT_EnergyDistribution(t *testing.T) { t.Logf("Energy distribution: %.6f total energy across %d/%d bars", totalEnergy, nonzeroCount, numBars) } -// TestApplyHanning_WindowProperties verifies Hanning window coefficients -// match expected mathematical properties. -func TestApplyHanning_WindowProperties(t *testing.T) { - // Test with small known size - size := 8 - input := make([]float64, size) - for i := range input { - input[i] = 1.0 // All ones - } - - windowed := ApplyHanning(input) - - if len(windowed) != size { - t.Fatalf("Window size mismatch: got %d, want %d", len(windowed), size) - } - - // Hanning window properties: - // 1. Output length equals input length - if len(windowed) != len(input) { - t.Errorf("Window changed input length") - } - - // 2. Start and end values should be zero (or very close) - epsilon := 1e-10 - if math.Abs(windowed[0]) > epsilon { - t.Errorf("Window start value %.15f is not zero", windowed[0]) - } - if math.Abs(windowed[size-1]) > epsilon { - t.Errorf("Window end value %.15f is not zero", windowed[size-1]) - } - - // 3. Center value should be ~1.0 (maximum of Hanning window) - midPoint := windowed[size/2] - if midPoint < 0.9 || midPoint > 1.05 { - t.Errorf("Window center value %.6f not close to 1.0", midPoint) - } - - // 4. 
Window should be symmetric - for i := 0; i < size/2; i++ { - if math.Abs(windowed[i]-windowed[size-1-i]) > epsilon { - t.Errorf("Window not symmetric at position %d: %.15f != %.15f", - i, windowed[i], windowed[size-1-i]) - } - } - - t.Logf("Hanning window verified: start=%.15f, center=%.6f, end=%.15f", - windowed[0], windowed[size/2], windowed[size-1]) -} - // TestRearrangeFrequenciesCenterOut_Symmetry verifies that the output array // is perfectly symmetric around the center index. This catches off-by-one // errors in the mirroring logic that would produce visually jarring @@ -442,138 +382,6 @@ func TestRearrangeFrequenciesCenterOut_SmallInput(t *testing.T) { t.Logf("Small input test passed: %v → %v", input, result) } -// TestApplyHanning_KnownValues verifies Hanning window coefficients match expected values. -// This catches formula errors that would affect FFT accuracy. -// -// Hanning window formula: w[i] = 0.5 * (1 - cos(2π*i/(N-1))) -// Properties tested: -// - Window is symmetric: w[i] == w[N-1-i] -// - Window has zero endpoints: w[0] == w[N-1] == 0 -// - Window peaks at center: w[N/2] == 1.0 -func TestApplyHanning_KnownValues(t *testing.T) { - tests := []struct { - name string - input []float64 - validate func(t *testing.T, windowed []float64) - }{ - { - name: "4-element window", - input: []float64{1, 1, 1, 1}, - validate: func(t *testing.T, windowed []float64) { - // Hanning window for N=4: - // w[0] = 0.5 * (1 - cos(0)) = 0.5 * (1 - 1) = 0 - // w[1] = 0.5 * (1 - cos(2π*1/3)) = 0.5 * (1 - (-0.5)) = 0.75 - // w[2] = 0.5 * (1 - cos(2π*2/3)) = 0.5 * (1 - (-0.5)) = 0.75 - // w[3] = 0.5 * (1 - cos(2π)) = 0.5 * (1 - 1) = 0 - - expected := []float64{0, 0.75, 0.75, 0} - const epsilon = 1e-10 - - for i, v := range windowed { - if math.Abs(v-expected[i]) > epsilon { - t.Errorf("w[%d]: got %.10f, want %.10f", i, v, expected[i]) - } - } - - // Verify symmetry - if math.Abs(windowed[0]-windowed[3]) > epsilon { - t.Errorf("Symmetry: windowed[0]=%.10f != 
windowed[3]=%.10f", windowed[0], windowed[3]) - } - if math.Abs(windowed[1]-windowed[2]) > epsilon { - t.Errorf("Symmetry: windowed[1]=%.10f != windowed[2]=%.10f", windowed[1], windowed[2]) - } - }, - }, - { - name: "8-element window", - input: []float64{2, 2, 2, 2, 2, 2, 2, 2}, - validate: func(t *testing.T, windowed []float64) { - // Verify zero endpoints - const epsilon = 1e-10 - if math.Abs(windowed[0]) > epsilon { - t.Errorf("w[0] should be ~0, got %.10f", windowed[0]) - } - if math.Abs(windowed[7]) > epsilon { - t.Errorf("w[7] should be ~0, got %.10f", windowed[7]) - } - - // Verify symmetry around center - for i := range 4 { - if math.Abs(windowed[i]-windowed[7-i]) > epsilon { - t.Errorf("Symmetry violation at i=%d: windowed[%d]=%.10f != windowed[%d]=%.10f", - i, i, windowed[i], 7-i, windowed[7-i]) - } - } - - // Verify peak near center - peakValue := windowed[3] // or windowed[4] - if peakValue < 0.95 || peakValue > 1.05 { - t.Errorf("Peak value at center should be ~1.0, got %.10f", peakValue) - } - }, - }, - { - name: "Hanning window properties on 2048-sample", - input: make([]float64, 2048), - validate: func(t *testing.T, windowed []float64) { - const epsilon = 1e-10 - n := len(windowed) - - // Test 1: Zero endpoints - if math.Abs(windowed[0]) > epsilon { - t.Errorf("w[0] should be ~0, got %.10f", windowed[0]) - } - if math.Abs(windowed[n-1]) > epsilon { - t.Errorf("w[%d] should be ~0, got %.10f", n-1, windowed[n-1]) - } - - // Test 2: Perfect symmetry - for i := 0; i < n/2; i++ { - if math.Abs(windowed[i]-windowed[n-1-i]) > epsilon { - t.Errorf("Symmetry violation at i=%d: windowed[%d]=%.10f != windowed[%d]=%.10f", - i, i, windowed[i], n-1-i, windowed[n-1-i]) - } - } - - // Test 3: Peak at center (index 1023 or 1024 for N=2048) - centerLeft := windowed[1023] - centerRight := windowed[1024] - if centerLeft < 0.99 || centerLeft > 1.01 { - t.Errorf("Peak at center-left (1023): got %.10f, want ~1.0", centerLeft) - } - if centerRight < 0.99 || centerRight > 
1.01 { - t.Errorf("Peak at center-right (1024): got %.10f, want ~1.0", centerRight) - } - - // Test 4: Monotonic increase from left to center - for i := 0; i < n/2-1; i++ { - if windowed[i] > windowed[i+1] { - t.Errorf("Monotonic violation at i=%d: windowed[%d]=%.10f > windowed[%d]=%.10f", - i, i, windowed[i], i+1, windowed[i+1]) - } - } - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - // Input constant doesn't matter - window only depends on index - for i := range tt.input { - tt.input[i] = 1.0 - } - - windowed := ApplyHanning(tt.input) - - if len(windowed) != len(tt.input) { - t.Fatalf("Output length mismatch: got %d, want %d", len(windowed), len(tt.input)) - } - - tt.validate(t, windowed) - }) - } -} - // BenchmarkProcessChunk measures FFT processing performance with pre-computed Hanning window. // This is the hot path called for every FFT frame during both analysis and render passes. func BenchmarkProcessChunk(b *testing.B) { @@ -594,22 +402,6 @@ func BenchmarkProcessChunk(b *testing.B) { } } -// BenchmarkApplyHanning measures the standalone Hanning window function (with trig per sample). -// This is the old approach - kept for comparison with pre-computed version. -func BenchmarkApplyHanning(b *testing.B) { - samples := make([]float64, 2048) - for i := range samples { - samples[i] = math.Sin(float64(i)*0.1) * 0.5 - } - - b.ResetTimer() - b.ReportAllocs() - - for i := 0; i < b.N; i++ { - _ = ApplyHanning(samples) - } -} - // BenchmarkHanningPrecomputed measures applying pre-computed Hanning coefficients. // This isolates just the window application (multiply) without FFT overhead. 
func BenchmarkHanningPrecomputed(b *testing.B) { diff --git a/internal/audio/metadata.go b/internal/audio/metadata.go index 4dfdc44..0c32f8c 100644 --- a/internal/audio/metadata.go +++ b/internal/audio/metadata.go @@ -1,8 +1,6 @@ package audio import ( - "fmt" - ffmpeg "github.com/linuxmatters/ffmpeg-statigo" ) @@ -16,43 +14,14 @@ type Metadata struct { // GetMetadata uses ffmpeg to extract accurate audio file metadata func GetMetadata(filename string) (*Metadata, error) { - // Open audio file - var inputCtx *ffmpeg.AVFormatContext - audioPath := ffmpeg.ToCStr(filename) - defer audioPath.Free() - - ret, err := ffmpeg.AVFormatOpenInput(&inputCtx, audioPath, nil, nil) + // Open audio file and find audio stream + inputCtx, audioStreamIdx, err := openAudioFormatCtx(filename) if err != nil { - return nil, fmt.Errorf("failed to open audio file: %w", err) - } - if ret < 0 { - return nil, fmt.Errorf("failed to open audio file: %d", ret) + return nil, err } defer ffmpeg.AVFormatCloseInput(&inputCtx) - ret, err = ffmpeg.AVFormatFindStreamInfo(inputCtx, nil) - if err != nil { - return nil, fmt.Errorf("failed to find stream info: %w", err) - } - if ret < 0 { - return nil, fmt.Errorf("failed to find stream info: %d", ret) - } - - // Find audio stream - audioStreamIdx := -1 - streams := inputCtx.Streams() - for i := uintptr(0); i < uintptr(inputCtx.NbStreams()); i++ { - stream := streams.Get(i) - if stream.Codecpar().CodecType() == ffmpeg.AVMediaTypeAudio { - audioStreamIdx = int(i) - break - } - } - if audioStreamIdx == -1 { - return nil, fmt.Errorf("no audio stream found in file") - } - - audioStream := streams.Get(uintptr(audioStreamIdx)) + audioStream := inputCtx.Streams().Get(uintptr(audioStreamIdx)) //nolint:gosec // stream index is non-negative codecpar := audioStream.Codecpar() // Extract metadata diff --git a/internal/audio/reader.go b/internal/audio/reader.go index a882f3c..a7a31ec 100644 --- a/internal/audio/reader.go +++ b/internal/audio/reader.go @@ -24,16 +24,6 @@ 
func (r *StreamingReader) ReadChunk(numSamples int) ([]float64, error) { return r.decoder.ReadChunk(numSamples) } -// SeekToSample repositions reader to sample position. -// Works with FFmpegDecoder; pure Go decoders do not support seeking. -func (r *StreamingReader) SeekToSample(samplePos int64) error { - // Check if decoder supports seeking (FFmpegDecoder does) - if seeker, ok := r.decoder.(*FFmpegDecoder); ok { - return seeker.SeekToSample(samplePos) - } - return fmt.Errorf("seek not supported by this decoder - use Close and create new reader") -} - // Close closes the underlying file func (r *StreamingReader) Close() error { return r.decoder.Close() @@ -43,8 +33,3 @@ func (r *StreamingReader) Close() error { func (r *StreamingReader) SampleRate() int { return r.decoder.SampleRate() } - -// NumChannels returns the number of audio channels -func (r *StreamingReader) NumChannels() int { - return r.decoder.NumChannels() -} diff --git a/internal/audio/shared_buffer.go b/internal/audio/shared_buffer.go deleted file mode 100644 index 45793b6..0000000 --- a/internal/audio/shared_buffer.go +++ /dev/null @@ -1,240 +0,0 @@ -package audio - -import ( - "errors" - "sync" -) - -// ErrBufferClosed is returned when attempting to read from a closed buffer -var ErrBufferClosed = errors.New("buffer is closed") - -// SharedAudioBuffer provides a thread-safe buffer for sharing decoded audio -// between FFT analysis and AAC encoding. Each consumer has an independent -// read position, allowing them to consume data at different rates. 
-// -// Design: -// - Single producer (decoder) writes samples via Write() -// - Two independent consumers: FFT (ReadForFFT) and encoder (ReadForEncoder) -// - Each consumer tracks its own read position -// - Buffer automatically expands as needed (no ring buffer complexity) -// - EOF signalling via Close() propagates to both consumers -type SharedAudioBuffer struct { - mu sync.Mutex - - // Underlying sample storage (float64 for FFT precision) - samples []float64 - - // Independent read positions for each consumer - fftReadPos int - encoderReadPos int - - // EOF signalling - closed bool - - // Condition variable for blocking reads - cond *sync.Cond -} - -// NewSharedAudioBuffer creates a new shared audio buffer. -// initialCapacity is a hint for the expected total samples (can grow as needed). -func NewSharedAudioBuffer(initialCapacity int) *SharedAudioBuffer { - if initialCapacity <= 0 { - initialCapacity = 1024 * 1024 // 1M samples default (~23 seconds at 44.1kHz) - } - - b := &SharedAudioBuffer{ - samples: make([]float64, 0, initialCapacity), - } - b.cond = sync.NewCond(&b.mu) - return b -} - -// Write appends samples to the buffer. This is called by the decoder. -// Signals waiting consumers when new data is available. -func (b *SharedAudioBuffer) Write(samples []float64) error { - b.mu.Lock() - defer b.mu.Unlock() - - if b.closed { - return ErrBufferClosed - } - - b.samples = append(b.samples, samples...) - b.cond.Broadcast() // Wake up any waiting consumers - return nil -} - -// ReadForFFT reads samples for FFT analysis. -// Returns up to numSamples, or fewer if not enough available. -// Non-blocking: returns immediately with available samples (may be empty). -// Returns io.EOF when buffer is closed and all samples have been read. 
-func (b *SharedAudioBuffer) ReadForFFT(numSamples int) ([]float64, error) { - b.mu.Lock() - defer b.mu.Unlock() - - available := len(b.samples) - b.fftReadPos - if available <= 0 { - if b.closed { - return nil, ErrBufferClosed - } - return nil, nil // No samples available yet - } - - // Read up to numSamples - toRead := min(numSamples, available) - - result := make([]float64, toRead) - copy(result, b.samples[b.fftReadPos:b.fftReadPos+toRead]) - b.fftReadPos += toRead - - return result, nil -} - -// ReadForEncoder reads samples for AAC encoding. -// Returns exactly numSamples, converted to float32. -// Blocks until enough samples are available or buffer is closed. -// Returns io.EOF when buffer is closed and all samples have been read. -func (b *SharedAudioBuffer) ReadForEncoder(numSamples int) ([]float32, error) { - b.mu.Lock() - defer b.mu.Unlock() - - for { - available := len(b.samples) - b.encoderReadPos - - // If we have enough samples, return them - if available >= numSamples { - result := make([]float32, numSamples) - for i := range numSamples { - result[i] = float32(b.samples[b.encoderReadPos+i]) - } - b.encoderReadPos += numSamples - return result, nil - } - - // Buffer closed - return whatever we have - if b.closed { - if available <= 0 { - return nil, ErrBufferClosed - } - // Return remaining samples (partial frame) - result := make([]float32, available) - for i := range available { - result[i] = float32(b.samples[b.encoderReadPos+i]) - } - b.encoderReadPos += available - return result, nil - } - - // Wait for more samples - b.cond.Wait() - } -} - -// ReadForEncoderNonBlocking reads samples for encoding without blocking. -// Returns nil if not enough samples are available yet. -// This is useful for the interleaved encode pattern where we only want -// to encode if we have enough audio to match the video PTS. 
-func (b *SharedAudioBuffer) ReadForEncoderNonBlocking(numSamples int) ([]float32, error) { - b.mu.Lock() - defer b.mu.Unlock() - - available := len(b.samples) - b.encoderReadPos - - // If we have enough samples, return them - if available >= numSamples { - result := make([]float32, numSamples) - for i := range numSamples { - result[i] = float32(b.samples[b.encoderReadPos+i]) - } - b.encoderReadPos += numSamples - return result, nil - } - - // Buffer closed - return whatever we have - if b.closed && available > 0 { - result := make([]float32, available) - for i := range available { - result[i] = float32(b.samples[b.encoderReadPos+i]) - } - b.encoderReadPos += available - return result, nil - } - - if b.closed { - return nil, ErrBufferClosed - } - - // Not enough samples yet - return nil, nil -} - -// AvailableForFFT returns the number of unread samples for FFT. -func (b *SharedAudioBuffer) AvailableForFFT() int { - b.mu.Lock() - defer b.mu.Unlock() - return len(b.samples) - b.fftReadPos -} - -// AvailableForEncoder returns the number of unread samples for encoder. -func (b *SharedAudioBuffer) AvailableForEncoder() int { - b.mu.Lock() - defer b.mu.Unlock() - return len(b.samples) - b.encoderReadPos -} - -// TotalSamples returns the total number of samples written to the buffer. -func (b *SharedAudioBuffer) TotalSamples() int { - b.mu.Lock() - defer b.mu.Unlock() - return len(b.samples) -} - -// Close signals that no more samples will be written. -// Wakes up any blocked consumers. -func (b *SharedAudioBuffer) Close() { - b.mu.Lock() - defer b.mu.Unlock() - - b.closed = true - b.cond.Broadcast() // Wake up any waiting consumers -} - -// IsClosed returns whether the buffer has been closed. -func (b *SharedAudioBuffer) IsClosed() bool { - b.mu.Lock() - defer b.mu.Unlock() - return b.closed -} - -// Reset clears the buffer and resets read positions. -// Useful for reusing the buffer for a new audio file. 
-func (b *SharedAudioBuffer) Reset() { - b.mu.Lock() - defer b.mu.Unlock() - - b.samples = b.samples[:0] - b.fftReadPos = 0 - b.encoderReadPos = 0 - b.closed = false -} - -// CompactBuffer removes samples that have been consumed by both consumers. -// This frees memory for long audio files. Call periodically during processing. -func (b *SharedAudioBuffer) CompactBuffer() { - b.mu.Lock() - defer b.mu.Unlock() - - // Find the minimum read position (samples consumed by both) - minPos := min(b.fftReadPos, b.encoderReadPos) - - if minPos == 0 { - return // Nothing to compact - } - - // Shift samples and adjust positions - remaining := len(b.samples) - minPos - copy(b.samples, b.samples[minPos:]) - b.samples = b.samples[:remaining] - b.fftReadPos -= minPos - b.encoderReadPos -= minPos -} diff --git a/internal/audio/shared_buffer_test.go b/internal/audio/shared_buffer_test.go deleted file mode 100644 index d143a04..0000000 --- a/internal/audio/shared_buffer_test.go +++ /dev/null @@ -1,350 +0,0 @@ -package audio - -import ( - "errors" - "sync" - "testing" - "time" -) - -func TestSharedAudioBuffer_BasicWriteRead(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - // Write some samples - samples := []float64{0.1, 0.2, 0.3, 0.4, 0.5} - if err := buf.Write(samples); err != nil { - t.Fatalf("Write failed: %v", err) - } - - // Verify availability - if got := buf.AvailableForFFT(); got != 5 { - t.Errorf("AvailableForFFT = %d, want 5", got) - } - if got := buf.AvailableForEncoder(); got != 5 { - t.Errorf("AvailableForEncoder = %d, want 5", got) - } - - // Read for FFT - fftSamples, err := buf.ReadForFFT(3) - if err != nil { - t.Fatalf("ReadForFFT failed: %v", err) - } - if len(fftSamples) != 3 { - t.Errorf("ReadForFFT returned %d samples, want 3", len(fftSamples)) - } - if fftSamples[0] != 0.1 || fftSamples[2] != 0.3 { - t.Errorf("ReadForFFT returned wrong values: %v", fftSamples) - } - - // FFT position advanced, encoder position unchanged - if got := buf.AvailableForFFT(); 
got != 2 { - t.Errorf("AvailableForFFT after read = %d, want 2", got) - } - if got := buf.AvailableForEncoder(); got != 5 { - t.Errorf("AvailableForEncoder after FFT read = %d, want 5 (unchanged)", got) - } -} - -func TestSharedAudioBuffer_IndependentReaders(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - samples := []float64{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0} - if err := buf.Write(samples); err != nil { - t.Fatalf("Write failed: %v", err) - } - - // Read 4 samples for FFT - fft1, _ := buf.ReadForFFT(4) - if len(fft1) != 4 || fft1[0] != 1.0 { - t.Errorf("FFT read 1 wrong: %v", fft1) - } - - // Read 2 samples for encoder (non-blocking) - enc1, _ := buf.ReadForEncoderNonBlocking(2) - if len(enc1) != 2 || enc1[0] != 1.0 { - t.Errorf("Encoder read 1 wrong: %v", enc1) - } - - // Read more for FFT - fft2, _ := buf.ReadForFFT(4) - if len(fft2) != 4 || fft2[0] != 5.0 { - t.Errorf("FFT read 2 wrong: %v", fft2) - } - - // Read more for encoder - enc2, _ := buf.ReadForEncoderNonBlocking(4) - if len(enc2) != 4 || enc2[0] != 3.0 { - t.Errorf("Encoder read 2 wrong: %v", enc2) - } - - // Both have read different amounts - if got := buf.AvailableForFFT(); got != 0 { - t.Errorf("AvailableForFFT = %d, want 0", got) - } - if got := buf.AvailableForEncoder(); got != 2 { - t.Errorf("AvailableForEncoder = %d, want 2", got) - } -} - -func TestSharedAudioBuffer_EncoderBlocking(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - // Start a goroutine that will write samples after a delay - var wg sync.WaitGroup - wg.Go(func() { - time.Sleep(50 * time.Millisecond) - _ = buf.Write([]float64{1.0, 2.0, 3.0, 4.0}) - }) - - // This should block until samples are available - start := time.Now() - samples, err := buf.ReadForEncoder(4) - elapsed := time.Since(start) - - if err != nil { - t.Fatalf("ReadForEncoder failed: %v", err) - } - if len(samples) != 4 { - t.Errorf("Got %d samples, want 4", len(samples)) - } - if elapsed < 40*time.Millisecond { - t.Errorf("ReadForEncoder didn't 
block: elapsed %v", elapsed) - } - - wg.Wait() -} - -func TestSharedAudioBuffer_CloseUnblocksReaders(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - // Write partial data - _ = buf.Write([]float64{1.0, 2.0}) - - // Start a goroutine that will close after a delay - go func() { - time.Sleep(50 * time.Millisecond) - buf.Close() - }() - - // Request more than available - should block then return partial - start := time.Now() - samples, err := buf.ReadForEncoder(10) - elapsed := time.Since(start) - - // Should get partial samples, no error - if err != nil { - t.Fatalf("ReadForEncoder returned unexpected error: %v", err) - } - if len(samples) != 2 { - t.Errorf("Got %d samples, want 2 (partial)", len(samples)) - } - if elapsed < 40*time.Millisecond { - t.Errorf("ReadForEncoder didn't block: elapsed %v", elapsed) - } - - // Next read should return ErrBufferClosed - _, err = buf.ReadForEncoder(1) - if !errors.Is(err, ErrBufferClosed) { - t.Errorf("Expected ErrBufferClosed, got %v", err) - } -} - -func TestSharedAudioBuffer_NonBlockingReturnsNil(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - // Request more than available (non-blocking) - samples, err := buf.ReadForEncoderNonBlocking(10) - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if samples != nil { - t.Errorf("Expected nil, got %v", samples) - } - - // Write some but not enough - _ = buf.Write([]float64{1.0, 2.0}) - - samples, err = buf.ReadForEncoderNonBlocking(10) - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if samples != nil { - t.Errorf("Expected nil, got %v", samples) - } - - // Now write enough - _ = buf.Write([]float64{3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0}) - - samples, err = buf.ReadForEncoderNonBlocking(10) - if err != nil { - t.Errorf("Unexpected error: %v", err) - } - if len(samples) != 10 { - t.Errorf("Got %d samples, want 10", len(samples)) - } -} - -func TestSharedAudioBuffer_Compact(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - // Write 
samples - samples := make([]float64, 100) - for i := range samples { - samples[i] = float64(i) - } - _ = buf.Write(samples) - - // Both consumers read 50 samples - _, _ = buf.ReadForFFT(50) - _, _ = buf.ReadForEncoderNonBlocking(50) - - // Compact should remove the first 50 - buf.CompactBuffer() - - // Total should be 50 now - if got := buf.TotalSamples(); got != 50 { - t.Errorf("TotalSamples after compact = %d, want 50", got) - } - - // Both should still have 50 available - if got := buf.AvailableForFFT(); got != 50 { - t.Errorf("AvailableForFFT after compact = %d, want 50", got) - } - if got := buf.AvailableForEncoder(); got != 50 { - t.Errorf("AvailableForEncoder after compact = %d, want 50", got) - } - - // Next read should get correct values (50-99) - fftSamples, _ := buf.ReadForFFT(5) - if fftSamples[0] != 50.0 { - t.Errorf("After compact, first FFT sample = %v, want 50.0", fftSamples[0]) - } -} - -func TestSharedAudioBuffer_CompactPartial(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - samples := make([]float64, 100) - for i := range samples { - samples[i] = float64(i) - } - _ = buf.Write(samples) - - // FFT reads 70, encoder reads 30 - _, _ = buf.ReadForFFT(70) - _, _ = buf.ReadForEncoderNonBlocking(30) - - // Compact should only remove 30 (the minimum) - buf.CompactBuffer() - - if got := buf.TotalSamples(); got != 70 { - t.Errorf("TotalSamples after partial compact = %d, want 70", got) - } - - // FFT should have 30 remaining (was 100-70=30, now adjusted) - if got := buf.AvailableForFFT(); got != 30 { - t.Errorf("AvailableForFFT after partial compact = %d, want 30", got) - } - // Encoder should have 70 remaining - if got := buf.AvailableForEncoder(); got != 70 { - t.Errorf("AvailableForEncoder after partial compact = %d, want 70", got) - } -} - -func TestSharedAudioBuffer_Reset(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - - _ = buf.Write([]float64{1.0, 2.0, 3.0}) - _, _ = buf.ReadForFFT(2) - buf.Close() - - // Reset - buf.Reset() - - // 
Should be empty and open - if buf.TotalSamples() != 0 { - t.Errorf("TotalSamples after reset = %d, want 0", buf.TotalSamples()) - } - if buf.IsClosed() { - t.Error("Buffer should not be closed after reset") - } - if buf.AvailableForFFT() != 0 { - t.Error("FFT position should be reset") - } - if buf.AvailableForEncoder() != 0 { - t.Error("Encoder position should be reset") - } - - // Should be able to write again - if err := buf.Write([]float64{4.0, 5.0}); err != nil { - t.Errorf("Write after reset failed: %v", err) - } -} - -func TestSharedAudioBuffer_WriteAfterClose(t *testing.T) { - buf := NewSharedAudioBuffer(1024) - buf.Close() - - err := buf.Write([]float64{1.0}) - if !errors.Is(err, ErrBufferClosed) { - t.Errorf("Write after close should return ErrBufferClosed, got %v", err) - } -} - -func TestSharedAudioBuffer_ConcurrentAccess(t *testing.T) { - buf := NewSharedAudioBuffer(0) // Use default capacity - - const numSamples = 100000 - const chunkSize = 1000 - - var wg sync.WaitGroup - - // Writer goroutine - wg.Go(func() { - for i := 0; i < numSamples; i += chunkSize { - chunk := make([]float64, chunkSize) - for j := range chunkSize { - chunk[j] = float64(i + j) - } - if err := buf.Write(chunk); err != nil { - t.Errorf("Write failed: %v", err) - return - } - } - buf.Close() - }) - - // FFT reader goroutine - fftTotal := 0 - wg.Go(func() { - for { - samples, err := buf.ReadForFFT(512) - if errors.Is(err, ErrBufferClosed) { - break - } - fftTotal += len(samples) - time.Sleep(time.Microsecond) // Simulate some work - } - }) - - // Encoder reader goroutine - encTotal := 0 - wg.Go(func() { - for { - samples, err := buf.ReadForEncoder(1024) - if errors.Is(err, ErrBufferClosed) { - break - } - encTotal += len(samples) - } - }) - - wg.Wait() - - if fftTotal != numSamples { - t.Errorf("FFT read %d samples, want %d", fftTotal, numSamples) - } - if encTotal != numSamples { - t.Errorf("Encoder read %d samples, want %d", encTotal, numSamples) - } -} diff --git 
a/internal/cli/styles.go b/internal/cli/styles.go index b199ce4..f69577f 100644 --- a/internal/cli/styles.go +++ b/internal/cli/styles.go @@ -3,8 +3,6 @@ package cli import ( "fmt" "os" - "strings" - "time" "github.com/charmbracelet/lipgloss" ) @@ -13,7 +11,6 @@ import ( var ( primaryColor = lipgloss.Color("#A40000") // Jivefire red accentColor = lipgloss.Color("#FFA500") // Orange/gold - successColor = lipgloss.Color("#00AA00") // Green mutedColor = lipgloss.Color("#888888") // Gray highlightColor = lipgloss.Color("#FFFF00") // Yellow textColor = lipgloss.Color("#FFFFFF") // White @@ -27,11 +24,6 @@ var ( Foreground(primaryColor). MarginBottom(1) - // Subtitle style - muted gray - SubtitleStyle = lipgloss.NewStyle(). - Foreground(mutedColor). - Italic(true) - // Section header style HeaderStyle = lipgloss.NewStyle(). Bold(true). @@ -39,11 +31,6 @@ var ( MarginTop(1). MarginBottom(1) - // Success message style - SuccessStyle = lipgloss.NewStyle(). - Bold(true). - Foreground(successColor) - // Error message style ErrorStyle = lipgloss.NewStyle(). Bold(true). @@ -61,25 +48,8 @@ var ( ValueStyle = lipgloss.NewStyle(). Bold(true). Foreground(textColor) - - // Box style for framed content - BoxStyle = lipgloss.NewStyle(). - Border(lipgloss.RoundedBorder()). - BorderForeground(primaryColor). - Padding(1, 2). - MarginTop(1). 
- MarginBottom(1) ) -// PrintBanner prints the application banner -func PrintBanner() { - banner := TitleStyle.Render("Jivefire 🔥") - subtitle := SubtitleStyle.Render("Spin your podcast .wav into a groovy MP4 visualiser with Cava-inspired real-time audio frequencies.") - fmt.Println(banner) - fmt.Println(subtitle) - fmt.Println() -} - // PrintVersion prints version information func PrintVersion(version string) { fmt.Println(TitleStyle.Render("Jivefire 🔥")) @@ -117,85 +87,3 @@ func PrintHardwareProbe(encoders []EncoderInfo) { func PrintError(message string) { fmt.Fprintf(os.Stderr, "%s %s\n", ErrorStyle.Render("Error:"), message) } - -// PrintWarning prints a warning message -func PrintWarning(message string) { - fmt.Printf("%s %s\n", HighlightStyle.Render("Warning:"), message) -} - -// PrintSuccess prints a success message -func PrintSuccess(message string) { - fmt.Printf("%s %s\n", SuccessStyle.Render("✓"), message) -} - -// PrintInfo prints an informational message -func PrintInfo(key, value string) { - fmt.Printf("%s %s\n", KeyStyle.Render(key+":"), ValueStyle.Render(value)) -} - -// PrintSection prints a section header -func PrintSection(title string) { - fmt.Println(HeaderStyle.Render(title)) -} - -// FormatDuration formats a duration nicely -func FormatDuration(d time.Duration) string { - if d < time.Second { - return fmt.Sprintf("%.0fms", d.Seconds()*1000) - } - return fmt.Sprintf("%.1fs", d.Seconds()) -} - -// FormatSpeed formats encoding speed -func FormatSpeed(speed float64) string { - return fmt.Sprintf("%.1fx realtime", speed) -} - -// FormatBytes formats bytes into human-readable format -func FormatBytes(bytes int64) string { - const unit = 1024 - if bytes < unit { - return fmt.Sprintf("%d B", bytes) - } - div, exp := int64(unit), 0 - for n := bytes / unit; n >= unit; n /= unit { - div *= unit - exp++ - } - return fmt.Sprintf("%.1f %cB", float64(bytes)/float64(div), "KMGTPE"[exp]) -} - -// PrintBox prints content in a styled box -func PrintBox(content 
string) { - fmt.Println(BoxStyle.Render(content)) -} - -// PrintProgressSummary prints a summary in a box -func PrintProgressSummary(duration, speed, size, frames, samples string) { - var b strings.Builder - - b.WriteString(SuccessStyle.Render("✓ Encoding Complete!")) - b.WriteString("\n\n") - - b.WriteString(KeyStyle.Render("Duration: ")) - b.WriteString(ValueStyle.Render(duration)) - b.WriteString("\n") - - b.WriteString(KeyStyle.Render("Speed: ")) - b.WriteString(ValueStyle.Render(speed)) - b.WriteString("\n") - - b.WriteString(KeyStyle.Render("File Size: ")) - b.WriteString(ValueStyle.Render(size)) - b.WriteString("\n\n") - - b.WriteString(KeyStyle.Render("Quality Metrics:")) - b.WriteString("\n") - b.WriteString(" " + KeyStyle.Render("Video: ")) - b.WriteString(ValueStyle.Render(frames)) - b.WriteString("\n") - b.WriteString(" " + KeyStyle.Render("Audio: ")) - b.WriteString(ValueStyle.Render(samples)) - - PrintBox(b.String()) -} diff --git a/internal/encoder/encoder.go b/internal/encoder/encoder.go index ea00ce0..46d756a 100644 --- a/internal/encoder/encoder.go +++ b/internal/encoder/encoder.go @@ -1,8 +1,10 @@ package encoder import ( + "encoding/binary" "errors" "fmt" + "math" "sync" "unsafe" @@ -520,17 +522,12 @@ func (e *Encoder) EncoderName() string { return "libx264" } -// EncoderDescription returns a human-readable description of the encoder -func (e *Encoder) EncoderDescription() string { - if e.hwEncoder != nil { - return e.hwEncoder.Description +// outputChannels returns the configured audio channel count, defaulting to mono. +func (e *Encoder) outputChannels() int { + if e.config.AudioChannels == 0 { + return 1 } - return "Software (libx264)" -} - -// IsHardwareAccelerated returns true if using hardware encoding -func (e *Encoder) IsHardwareAccelerated() bool { - return e.hwEncoder != nil + return e.config.AudioChannels } // initializeAudioEncoder sets up the AAC encoder for direct sample input. 
@@ -559,10 +556,7 @@ func (e *Encoder) initializeAudioEncoder() error { e.audioCodec.SetSampleRate(e.config.SampleRate) // Set channel configuration based on config (default mono) - outputChannels := e.config.AudioChannels - if outputChannels == 0 { - outputChannels = 1 // Default to mono if not specified - } + outputChannels := e.outputChannels() // Set channel layout using FFmpeg 8.0 API if outputChannels == 1 { @@ -770,73 +764,6 @@ func (e *Encoder) writeFrameHWUpload(rgbaData []byte) error { return e.receiveAndWriteVideoPackets() } -// WriteFrame encodes and writes a single RGB frame -func (e *Encoder) WriteFrame(rgbData []byte) error { - // Validate frame size - expectedSize := e.config.Width * e.config.Height * 3 // RGB24 = 3 bytes per pixel - if len(rgbData) != expectedSize { - return fmt.Errorf("invalid frame size: got %d, expected %d", len(rgbData), expectedSize) - } - - // Allocate YUV frame - yuvFrame := ffmpeg.AVFrameAlloc() - if yuvFrame == nil { - return fmt.Errorf("failed to allocate YUV frame") - } - defer ffmpeg.AVFrameFree(&yuvFrame) - - yuvFrame.SetWidth(e.config.Width) - yuvFrame.SetHeight(e.config.Height) - yuvFrame.SetFormat(int(ffmpeg.AVPixFmtYuv420P)) - - ret, err := ffmpeg.AVFrameGetBuffer(yuvFrame, 0) - if err := checkFFmpeg(ret, err, "allocate YUV buffer"); err != nil { - return err - } - - // Convert RGB to YUV420p using stdlib-optimized implementation - convertRGBToYUV(rgbData, yuvFrame, e.config.Width, e.config.Height) - - // Set presentation timestamp - yuvFrame.SetPts(e.nextVideoPts) - e.nextVideoPts++ - - // Send frame to encoder - ret, err = ffmpeg.AVCodecSendFrame(e.videoCodec, yuvFrame) - if err := checkFFmpeg(ret, err, "send frame to encoder"); err != nil { - return err - } - - // Receive and write encoded packets - for { - pkt := ffmpeg.AVPacketAlloc() - - _, err := ffmpeg.AVCodecReceivePacket(e.videoCodec, pkt) - if err != nil { - ffmpeg.AVPacketFree(&pkt) - // EAGAIN and EOF are expected - means no more packets available - if 
errors.Is(err, ffmpeg.EAgain) || errors.Is(err, ffmpeg.AVErrorEOF) { - break - } - return fmt.Errorf("receive packet: %w", err) - } - - // Set stream index and rescale timestamps - pkt.SetStreamIndex(e.videoStream.Index()) - ffmpeg.AVPacketRescaleTs(pkt, e.videoCodec.TimeBase(), e.videoStream.TimeBase()) - - // Write packet to output - ret, err = ffmpeg.AVInterleavedWriteFrame(e.formatCtx, pkt) - ffmpeg.AVPacketFree(&pkt) - - if err := checkFFmpeg(ret, err, "write packet"); err != nil { - return err - } - } - - return nil -} - // receiveAndWriteVideoPackets receives encoded packets from video codec and writes to output func (e *Encoder) receiveAndWriteVideoPackets() error { for { @@ -878,10 +805,7 @@ func (e *Encoder) WriteAudioSamples(samples []float32) error { } encoderFrameSize := e.audioCodec.FrameSize() // Should be 1024 for AAC - outputChannels := e.config.AudioChannels - if outputChannels == 0 { - outputChannels = 1 - } + outputChannels := e.outputChannels() // Push samples to FIFO e.audioFIFO.Push(samples) @@ -957,10 +881,7 @@ func (e *Encoder) FlushAudioEncoder() error { } encoderFrameSize := e.audioCodec.FrameSize() - outputChannels := e.config.AudioChannels - if outputChannels == 0 { - outputChannels = 1 - } + outputChannels := e.outputChannels() // Process any remaining samples in FIFO (may be partial frame) samplesPerFrame := encoderFrameSize * outputChannels @@ -1035,7 +956,7 @@ func writeMonoFloats(frame *ffmpeg.AVFrame, samples []float32) error { for i := range nbSamples { // Write channel - direct float32 byte copy sampleFloat := samples[i] - copy(data[i*4:(i+1)*4], (*[4]byte)(unsafe.Pointer(&sampleFloat))[:]) + binary.LittleEndian.PutUint32(data[i*4:(i+1)*4], math.Float32bits(sampleFloat)) } return nil @@ -1061,11 +982,11 @@ func writeStereoFloats(frame *ffmpeg.AVFrame, samples []float32) error { for i := range nbSamples { // Write left channel - direct float32 byte copy leftFloat := samples[i*2] - copy(leftData[i*4:(i+1)*4], 
(*[4]byte)(unsafe.Pointer(&leftFloat))[:]) + binary.LittleEndian.PutUint32(leftData[i*4:(i+1)*4], math.Float32bits(leftFloat)) // Write right channel - direct float32 byte copy rightFloat := samples[i*2+1] - copy(rightData[i*4:(i+1)*4], (*[4]byte)(unsafe.Pointer(&rightFloat))[:]) + binary.LittleEndian.PutUint32(rightData[i*4:(i+1)*4], math.Float32bits(rightFloat)) } return nil @@ -1145,8 +1066,3 @@ func (e *Encoder) Close() error { return nil } - -// GetVideoFramesEncoded returns the number of video frames encoded so far -func (e *Encoder) GetVideoFramesEncoded() int64 { - return e.nextVideoPts -} diff --git a/internal/encoder/encoder_test.go b/internal/encoder/encoder_test.go index 80b6f38..e302b95 100644 --- a/internal/encoder/encoder_test.go +++ b/internal/encoder/encoder_test.go @@ -5,18 +5,17 @@ import ( "testing" ) -// TestEncoderPOC is a proof-of-concept test that encodes a single black frame +// TestEncoderPOC is a proof-of-concept test that encodes a single black frame via RGBA path func TestEncoderPOC(t *testing.T) { outputPath := "../../testdata/poc-video.mp4" - // Keep the file for inspection - comment out cleanup - // defer os.Remove(outputPath) + defer os.Remove(outputPath) config := Config{ OutputPath: outputPath, Width: 1280, Height: 720, Framerate: 30, - HWAccel: HWAccelNone, // Force software encoding for WriteFrame (RGB24→YUV420P) + HWAccel: HWAccelNone, // Force software encoding } enc, err := New(config) @@ -30,15 +29,18 @@ func TestEncoderPOC(t *testing.T) { } defer enc.Close() - // Create a single black frame (RGB24 format) - frameSize := config.Width * config.Height * 3 // RGB24 = 3 bytes per pixel + // Create a single black frame (RGBA format) + frameSize := config.Width * config.Height * 4 // RGBA = 4 bytes per pixel blackFrame := make([]byte, frameSize) - // blackFrame is already all zeros (black) + // Set alpha channel to opaque + for i := 3; i < frameSize; i += 4 { + blackFrame[i] = 255 + } - // Write one frame - err = 
enc.WriteFrame(blackFrame) + // Write one frame using RGBA path + err = enc.WriteFrameRGBA(blackFrame) if err != nil { - t.Fatalf("Failed to write frame: %v", err) + t.Fatalf("Failed to write RGBA frame: %v", err) } // Close encoder (writes trailer) diff --git a/internal/encoder/frame.go b/internal/encoder/frame.go index 9271582..e950593 100644 --- a/internal/encoder/frame.go +++ b/internal/encoder/frame.go @@ -91,69 +91,6 @@ func parallelRows(height int, fn func(startY, endY int)) { wg.Wait() } -// convertRGBToYUV converts RGB24 data to YUV420P (planar) format. -func convertRGBToYUV(rgbData []byte, yuvFrame *ffmpeg.AVFrame, width, height int) { - yPlane := yuvFrame.Data().Get(0) - uPlane := yuvFrame.Data().Get(1) - vPlane := yuvFrame.Data().Get(2) - - yLinesize := yuvFrame.Linesize().Get(0) - uLinesize := yuvFrame.Linesize().Get(1) - vLinesize := yuvFrame.Linesize().Get(2) - - parallelRows(height, func(startY, endY int) { - // Align startY to even for correct UV row calculation - evenStart := startY - if evenStart&1 != 0 { - evenStart++ - } - - // Process even rows: Y + UV - for y := evenStart; y < endY; y += 2 { - yPtr := unsafe.Add(yPlane, y*yLinesize) - uvY := y >> 1 - uRowPtr := unsafe.Add(uPlane, uvY*uLinesize) - vRowPtr := unsafe.Add(vPlane, uvY*vLinesize) - rgbIdx := y * width * 3 - - for x := range width { - r := int32(rgbData[rgbIdx]) - g := int32(rgbData[rgbIdx+1]) - b := int32(rgbData[rgbIdx+2]) - rgbIdx += 3 - - *(*uint8)(unsafe.Add(yPtr, x)) = rgbToY(r, g, b) - - // UV subsampling: every other pixel on even rows - if (x & 1) == 0 { - uvX := x >> 1 - *(*uint8)(unsafe.Add(uRowPtr, uvX)) = rgbToCb(r, g, b) - *(*uint8)(unsafe.Add(vRowPtr, uvX)) = rgbToCr(r, g, b) - } - } - } - - // Process odd rows: Y only (no UV) - oddStart := startY - if oddStart&1 == 0 { - oddStart++ - } - for y := oddStart; y < endY; y += 2 { - yPtr := unsafe.Add(yPlane, y*yLinesize) - rgbIdx := y * width * 3 - - for x := range width { - r := int32(rgbData[rgbIdx]) - g := 
int32(rgbData[rgbIdx+1]) - b := int32(rgbData[rgbIdx+2]) - rgbIdx += 3 - - *(*uint8)(unsafe.Add(yPtr, x)) = rgbToY(r, g, b) - } - } - }) -} - // convertRGBAToYUV converts RGBA data directly to YUV420P (planar) format. // Skips the intermediate RGB24 buffer allocation for significantly faster software encoding. func convertRGBAToYUV(rgbaData []byte, yuvFrame *ffmpeg.AVFrame, width, height int) { diff --git a/internal/encoder/hwaccel.go b/internal/encoder/hwaccel.go index 65adcb2..6654e74 100644 --- a/internal/encoder/hwaccel.go +++ b/internal/encoder/hwaccel.go @@ -3,7 +3,6 @@ package encoder import ( "os" "runtime" - "strings" ffmpeg "github.com/linuxmatters/ffmpeg-statigo" ) @@ -259,27 +258,3 @@ func SelectBestEncoder(requestedType HWAccelType) *HWEncoder { return nil // Requested type not found } - -// GetEncoderStatus returns a human-readable status of all hardware encoders -func GetEncoderStatus() string { - encoders := DetectHWEncoders() - - var sb strings.Builder - sb.WriteString("Hardware Encoder Status:\n") - - for _, enc := range encoders { - status := "not available" - if enc.Available { - status = "available" - } - sb.WriteString(" ") - sb.WriteString(enc.Description) - sb.WriteString(" (") - sb.WriteString(enc.Name) - sb.WriteString("): ") - sb.WriteString(status) - sb.WriteString("\n") - } - - return sb.String() -} diff --git a/internal/encoder/hwaccel_test.go b/internal/encoder/hwaccel_test.go index 1afe208..7a4f6d6 100644 --- a/internal/encoder/hwaccel_test.go +++ b/internal/encoder/hwaccel_test.go @@ -33,8 +33,3 @@ func TestSelectBestEncoder(t *testing.T) { t.Errorf("Expected nil for HWAccelNone, got %s", enc.Name) } } - -func TestGetEncoderStatus(t *testing.T) { - status := GetEncoderStatus() - t.Logf("\n%s", status) -} diff --git a/internal/encoder/sws_benchmark_test.go b/internal/encoder/sws_benchmark_test.go index cadff9a..2e169e5 100644 --- a/internal/encoder/sws_benchmark_test.go +++ b/internal/encoder/sws_benchmark_test.go @@ -286,155 +286,6 
@@ func BenchmarkRGBAToYUVDirect(b *testing.B) { } } -// BenchmarkRGBAToYUVViaRGB24 measures RGBA→RGB24→YUV420P (old path). -// This was the original software encoding path before direct RGBA→YUV. -func BenchmarkRGBAToYUVViaRGB24(b *testing.B) { - // Create RGBA test data - rgbaSize := benchWidth * benchHeight * 4 - rgbaData := make([]byte, rgbaSize) - for i := 0; i < rgbaSize; i += 4 { - rgbaData[i] = uint8(i % 256) // R - rgbaData[i+1] = uint8(i % 128) // G - rgbaData[i+2] = uint8(i % 64) // B - rgbaData[i+3] = 255 // A - } - - // Allocate RGB24 buffer and YUV frame - rgb24Size := benchWidth * benchHeight * 3 - rgb24Data := make([]byte, rgb24Size) - - yuvFrame := ffmpeg.AVFrameAlloc() - yuvFrame.SetWidth(benchWidth) - yuvFrame.SetHeight(benchHeight) - yuvFrame.SetFormat(int(ffmpeg.AVPixFmtYuv420P)) - _, _ = ffmpeg.AVFrameGetBuffer(yuvFrame, 0) - defer ffmpeg.AVFrameFree(&yuvFrame) - - b.ResetTimer() - for i := 0; i < b.N; i++ { - // Step 1: RGBA → RGB24 (strip alpha) - srcIdx := 0 - dstIdx := 0 - for dstIdx < rgb24Size { - rgb24Data[dstIdx] = rgbaData[srcIdx] // R - rgb24Data[dstIdx+1] = rgbaData[srcIdx+1] // G - rgb24Data[dstIdx+2] = rgbaData[srcIdx+2] // B - srcIdx += 4 - dstIdx += 3 - } - // Step 2: RGB24 → YUV420P - convertRGBToYUV(rgb24Data, yuvFrame, benchWidth, benchHeight) - } -} - -// ============================================================================= -// Equivalence Tests -// ============================================================================= - -// TestRGBAConversionEquivalence verifies RGBA→YUV direct and via-RGB24 produce identical output. 
-func TestRGBAConversionEquivalence(t *testing.T) { - // Create RGBA test data with varied pattern - rgbaSize := benchWidth * benchHeight * 4 - rgbaData := make([]byte, rgbaSize) - for i := 0; i < rgbaSize; i += 4 { - rgbaData[i] = uint8((i * 7) % 256) // R - rgbaData[i+1] = uint8((i * 3) % 256) // G - rgbaData[i+2] = uint8((i * 5) % 256) // B - rgbaData[i+3] = 255 // A (ignored) - } - - // Allocate YUV frames for both paths - yuvDirect := ffmpeg.AVFrameAlloc() - yuvDirect.SetWidth(benchWidth) - yuvDirect.SetHeight(benchHeight) - yuvDirect.SetFormat(int(ffmpeg.AVPixFmtYuv420P)) - _, _ = ffmpeg.AVFrameGetBuffer(yuvDirect, 0) - defer ffmpeg.AVFrameFree(&yuvDirect) - - yuvViaRGB := ffmpeg.AVFrameAlloc() - yuvViaRGB.SetWidth(benchWidth) - yuvViaRGB.SetHeight(benchHeight) - yuvViaRGB.SetFormat(int(ffmpeg.AVPixFmtYuv420P)) - _, _ = ffmpeg.AVFrameGetBuffer(yuvViaRGB, 0) - defer ffmpeg.AVFrameFree(&yuvViaRGB) - - // Convert using direct path - convertRGBAToYUV(rgbaData, yuvDirect, benchWidth, benchHeight) - - // Convert using via-RGB24 path - rgb24Size := benchWidth * benchHeight * 3 - rgb24Data := make([]byte, rgb24Size) - srcIdx := 0 - dstIdx := 0 - for dstIdx < rgb24Size { - rgb24Data[dstIdx] = rgbaData[srcIdx] // R - rgb24Data[dstIdx+1] = rgbaData[srcIdx+1] // G - rgb24Data[dstIdx+2] = rgbaData[srcIdx+2] // B - srcIdx += 4 - dstIdx += 3 - } - convertRGBToYUV(rgb24Data, yuvViaRGB, benchWidth, benchHeight) - - // Compare Y planes (should be identical) - yLinesize := yuvDirect.Linesize().Get(0) - yPlaneDirect := yuvDirect.Data().Get(0) - yPlaneViaRGB := yuvViaRGB.Data().Get(0) - - yDiffCount := 0 - for y := range benchHeight { - for x := range benchWidth { - offset := y*yLinesize + x - directVal := *(*uint8)(unsafe.Add(yPlaneDirect, offset)) - viaRGBVal := *(*uint8)(unsafe.Add(yPlaneViaRGB, offset)) - if directVal != viaRGBVal { - yDiffCount++ - } - } - } - - // Compare U planes - uLinesize := yuvDirect.Linesize().Get(1) - uPlaneDirect := yuvDirect.Data().Get(1) - 
uPlaneViaRGB := yuvViaRGB.Data().Get(1) - - uDiffCount := 0 - for y := range benchHeight / 2 { - for x := range benchWidth / 2 { - offset := y*uLinesize + x - directVal := *(*uint8)(unsafe.Add(uPlaneDirect, offset)) - viaRGBVal := *(*uint8)(unsafe.Add(uPlaneViaRGB, offset)) - if directVal != viaRGBVal { - uDiffCount++ - } - } - } - - // Compare V planes - vLinesize := yuvDirect.Linesize().Get(2) - vPlaneDirect := yuvDirect.Data().Get(2) - vPlaneViaRGB := yuvViaRGB.Data().Get(2) - - vDiffCount := 0 - for y := range benchHeight / 2 { - for x := range benchWidth / 2 { - offset := y*vLinesize + x - directVal := *(*uint8)(unsafe.Add(vPlaneDirect, offset)) - viaRGBVal := *(*uint8)(unsafe.Add(vPlaneViaRGB, offset)) - if directVal != viaRGBVal { - vDiffCount++ - } - } - } - - // Both paths should produce identical output - if yDiffCount > 0 || uDiffCount > 0 || vDiffCount > 0 { - t.Errorf("RGBA conversion paths differ: Y=%d, U=%d, V=%d pixel differences", - yDiffCount, uDiffCount, vDiffCount) - } else { - t.Log("RGBA→YUV direct and via-RGB24 paths produce identical output ✓") - } -} - // TestConversionEquivalence verifies both implementations produce similar output. // Some pixel differences are expected due to different rounding in coefficient // implementations (Go uses integer arithmetic, FFmpeg uses floating-point). diff --git a/internal/renderer/assets.go b/internal/renderer/assets.go index 67dd983..a13a1a7 100644 --- a/internal/renderer/assets.go +++ b/internal/renderer/assets.go @@ -21,22 +21,20 @@ import ( //go:embed assets/Poppins-Bold.ttf var embeddedAssets embed.FS +// loadImageData reads image bytes from the filesystem if customPath is set, +// otherwise from the embedded assets using assetPath. 
+func loadImageData(customPath string, assetPath string) ([]byte, error) { + if customPath != "" { + return os.ReadFile(customPath) + } + return embeddedAssets.ReadFile(assetPath) +} + // LoadBackgroundImage loads and scales the background image (from custom path or embedded asset) func LoadBackgroundImage(runtimeConfig *config.RuntimeConfig) (*image.RGBA, error) { imagePath := runtimeConfig.GetBackgroundImagePath() - var data []byte - var err error - - // Check if using custom image path or embedded asset - if runtimeConfig.BackgroundImagePath != "" { - // Load from filesystem - data, err = os.ReadFile(imagePath) - } else { - // Load from embedded assets - data, err = embeddedAssets.ReadFile(imagePath) - } - + data, err := loadImageData(runtimeConfig.BackgroundImagePath, imagePath) if err != nil { return nil, err } @@ -78,12 +76,7 @@ func LoadFont(size float64) (font.Face, error) { // DrawCenterText draws text centered horizontally at the specified Y position func DrawCenterText(img *image.RGBA, face font.Face, text string, centerY int, textColor color.RGBA) { - // Create a drawer - d := &font.Drawer{ - Dst: img, - Src: image.NewUniform(textColor), - Face: face, - } + d := newTextDrawer(img, face, textColor) // Measure text dimensions bounds, _ := d.BoundString(text) @@ -102,12 +95,7 @@ func DrawCenterText(img *image.RGBA, face font.Face, text string, centerY int, t // DrawEpisodeNumber draws the episode number in the top right corner func DrawEpisodeNumber(img *image.RGBA, face font.Face, episodeNum string, textColor color.RGBA) { - // Create a drawer - d := &font.Drawer{ - Dst: img, - Src: image.NewUniform(textColor), - Face: face, - } + d := newTextDrawer(img, face, textColor) // Measure text dimensions bounds, _ := d.BoundString(episodeNum) diff --git a/internal/renderer/frame.go b/internal/renderer/frame.go index 893b1c6..e4faec4 100644 --- a/internal/renderer/frame.go +++ b/internal/renderer/frame.go @@ -4,7 +4,6 @@ import ( "fmt" "image" "image/color" - 
"sync" "github.com/linuxmatters/jivefire/internal/config" "golang.org/x/image/font" @@ -32,12 +31,6 @@ type Frame struct { hasBackground bool } -var framePool = sync.Pool{ - New: func() any { - return image.NewRGBA(image.Rect(0, 0, config.Width, config.Height)) - }, -} - // NewFrame creates a new optimized frame renderer func NewFrame(bgImage *image.RGBA, fontFace font.Face, episodeNum int, title string, runtimeConfig *config.RuntimeConfig) *Frame { totalWidth := config.NumBars*config.BarWidth + (config.NumBars-1)*config.BarGap @@ -87,7 +80,7 @@ func NewFrame(bgImage *image.RGBA, fontFace font.Face, episodeNum int, title str } f := &Frame{ - img: framePool.Get().(*image.RGBA), + img: image.NewRGBA(image.Rect(0, 0, config.Width, config.Height)), bgImage: bgImage, fontFace: fontFace, centerY: centerY, @@ -319,11 +312,3 @@ func formatEpisodeNumber(num int) string { func (f *Frame) GetImage() *image.RGBA { return f.img } - -// Release returns the frame buffer to the pool -func (f *Frame) Release() { - if f.img != nil { - framePool.Put(f.img) - f.img = nil - } -} diff --git a/internal/renderer/text.go b/internal/renderer/text.go new file mode 100644 index 0000000..4796a1e --- /dev/null +++ b/internal/renderer/text.go @@ -0,0 +1,33 @@ +package renderer + +import ( + "image" + "image/color" + + "golang.org/x/image/font" + "golang.org/x/image/math/fixed" +) + +// newTextDrawer constructs a font.Drawer for drawing coloured text onto an RGBA image. +func newTextDrawer(img *image.RGBA, face font.Face, col color.RGBA) *font.Drawer { + return &font.Drawer{ + Dst: img, + Src: image.NewUniform(col), + Face: face, + } +} + +// measureStringWidth returns the pixel width of text as rendered by the drawer. +func measureStringWidth(d *font.Drawer, text string) int { + bounds, _ := d.BoundString(text) + return (bounds.Max.X - bounds.Min.X).Ceil() +} + +// measureTextBounds returns the pixel width and full bounds of text as rendered by the given face. 
+// Used when callers need both width and the vertical extent (ascent/descent). +func measureTextBounds(face font.Face, text string) (int, fixed.Rectangle26_6) { + d := &font.Drawer{Face: face} + bounds, _ := d.BoundString(text) + width := (bounds.Max.X - bounds.Min.X).Ceil() + return width, bounds +} diff --git a/internal/renderer/thumbnail.go b/internal/renderer/thumbnail.go index 1a374e1..11c38ac 100644 --- a/internal/renderer/thumbnail.go +++ b/internal/renderer/thumbnail.go @@ -16,7 +16,6 @@ import ( "golang.org/x/image/draw" "golang.org/x/image/font" "golang.org/x/image/math/f64" - "golang.org/x/image/math/fixed" ) // getThumbnailTextColor returns the text color for thumbnail (uses runtime config or default) @@ -73,18 +72,7 @@ func GenerateThumbnail(outputPath string, title string, runtimeConfig *config.Ru func loadThumbnailBackground(runtimeConfig *config.RuntimeConfig) (*image.RGBA, error) { imagePath := runtimeConfig.GetThumbnailImagePath() - var data []byte - var err error - - // Check if using custom image path or embedded asset - if runtimeConfig.ThumbnailImagePath != "" { - // Load from filesystem - data, err = os.ReadFile(imagePath) - } else { - // Load from embedded assets - data, err = embeddedAssets.ReadFile(imagePath) - } - + data, err := loadImageData(runtimeConfig.ThumbnailImagePath, imagePath) if err != nil { return nil, err } @@ -144,8 +132,8 @@ func findOptimalFontSize(parsedFont *truetype.Font, line1, line2 string) float64 }) // Measure both lines - width1, bounds1 := measureText(face, line1) - width2, bounds2 := measureText(face, line2) + width1, bounds1 := measureTextBounds(face, line1) + width2, bounds2 := measureTextBounds(face, line2) face.Close() @@ -177,23 +165,14 @@ func findOptimalFontSize(parsedFont *truetype.Font, line1, line2 string) float64 return 10.0 // Minimum fallback size } -// measureText returns the width and actual bounds of rendered text -// Returns width, and the bounds rectangle (Min.Y is negative for ascent, Max.Y is 
positive for descent) -func measureText(face font.Face, text string) (int, fixed.Rectangle26_6) { - d := &font.Drawer{Face: face} - bounds, _ := d.BoundString(text) - width := (bounds.Max.X - bounds.Min.X).Ceil() - return width, bounds -} - // drawThumbnailText draws the title text on the thumbnail with a slight rotation // Line 1 is top-aligned at the ThumbnailMargin // Bottom edge of line 2 must not extend below center line // Text is rotated ThumbnailTextRotationDegrees clockwise for dynamic effect func drawThumbnailText(img *image.RGBA, face font.Face, line1, line2 string, runtimeConfig *config.RuntimeConfig) { // Measure text dimensions - bounds.Min.Y is negative (ascent), bounds.Max.Y is positive (descent) - width1, bounds1 := measureText(face, line1) - width2, bounds2 := measureText(face, line2) + width1, bounds1 := measureTextBounds(face, line1) + width2, bounds2 := measureTextBounds(face, line2) // Calculate line spacing (50% of font size for more vertical spacing) metrics := face.Metrics() @@ -295,15 +274,8 @@ func drawCenteredLineOnTemp(img *image.RGBA, face font.Face, text string, imgWid return } - d := &font.Drawer{ - Dst: img, - Src: image.NewUniform(getThumbnailTextColor(runtimeConfig)), - Face: face, - } - - // Measure text width - bounds, _ := d.BoundString(text) - textWidth := (bounds.Max.X - bounds.Min.X).Ceil() + d := newTextDrawer(img, face, getThumbnailTextColor(runtimeConfig)) + textWidth := measureStringWidth(d, text) // Center horizontally x := (imgWidth - textWidth) / 2 diff --git a/internal/ui/progress.go b/internal/ui/progress.go index be95364..4e7562b 100644 --- a/internal/ui/progress.go +++ b/internal/ui/progress.go @@ -64,12 +64,6 @@ type RenderProgress struct { AudioCodec string } -// AudioFlushProgress represents progress during audio flushing -type AudioFlushProgress struct { - PacketsProcessed int - Elapsed time.Duration -} - // RenderComplete signals completion of Pass 2 type RenderComplete struct { OutputFile string @@ -538,15 
+532,34 @@ func (m *Model) renderComplete() string { valueStyle := lipgloss.NewStyle() highlightValueStyle := lipgloss.NewStyle().Foreground(fireOrange) + totalMs := m.complete.TotalTime.Milliseconds() + if totalMs == 0 { + totalMs = 1 + } + + // writeRow writes a simple label-value row + writeRow := func(label, value string) { + fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", label)), valueStyle.Render(value)) + } + + // writeBarRow writes a row with percentage and summary bar + writeBarRow := func(label string, duration time.Duration) { + pct := int(float64(duration.Milliseconds()) * 100 / float64(totalMs)) + fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", + labelStyle.Render(fmt.Sprintf("%-18s", label)), + valueStyle.Render(fmt.Sprintf("~%-6s", formatDuration(duration))), + pct, m.summaryBar.ViewAs(float64(duration.Milliseconds())/float64(totalMs))) + } + s.WriteString(headerStyle.Render("Pass 1: Audio Analysis")) s.WriteString("\n") if m.audioProfile != nil { - fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "Duration:")), valueStyle.Render(fmt.Sprintf("%.1fs", m.audioProfile.Duration.Seconds()))) - fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "Peak Level:")), valueStyle.Render(fmt.Sprintf("%.1f dB", m.audioProfile.PeakLevel))) - fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "RMS Level:")), valueStyle.Render(fmt.Sprintf("%.1f dB", m.audioProfile.RMSLevel))) - fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "Dynamic Range:")), valueStyle.Render(fmt.Sprintf("%.1f dB", m.audioProfile.DynamicRange))) - fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "Optimal Scale:")), valueStyle.Render(fmt.Sprintf("%.3f", m.audioProfile.OptimalScale))) + writeRow("Duration:", fmt.Sprintf("%.1fs", m.audioProfile.Duration.Seconds())) + writeRow("Peak Level:", fmt.Sprintf("%.1f dB", m.audioProfile.PeakLevel)) + writeRow("RMS Level:", fmt.Sprintf("%.1f dB", m.audioProfile.RMSLevel)) 
+ writeRow("Dynamic Range:", fmt.Sprintf("%.1f dB", m.audioProfile.DynamicRange)) + writeRow("Optimal Scale:", fmt.Sprintf("%.3f", m.audioProfile.OptimalScale)) fmt.Fprintf(&s, " %s%s\n", labelStyle.Render(fmt.Sprintf("%-18s", "Analysis Time:")), highlightValueStyle.Render(formatDuration(m.audioProfile.AnalysisTime))) } @@ -556,37 +569,15 @@ func (m *Model) renderComplete() string { s.WriteString(headerStyle.Render("Pass 2: Rendering & Encoding")) s.WriteString("\n") - totalMs := m.complete.TotalTime.Milliseconds() - if totalMs == 0 { - totalMs = 1 - } - if m.complete.ThumbnailTime > 0 { - fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", - labelStyle.Render(fmt.Sprintf("%-18s", "Thumbnail:")), - valueStyle.Render(fmt.Sprintf("~%-6s", formatDuration(m.complete.ThumbnailTime))), - int(float64(m.complete.ThumbnailTime.Milliseconds())*100/float64(totalMs)), - m.summaryBar.ViewAs(float64(m.complete.ThumbnailTime.Milliseconds())/float64(totalMs))) + writeBarRow("Thumbnail:", m.complete.ThumbnailTime) } - fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", - labelStyle.Render(fmt.Sprintf("%-18s", "Visualisation:")), - valueStyle.Render(fmt.Sprintf("~%-6s", formatDuration(m.complete.VisTime))), - int(float64(m.complete.VisTime.Milliseconds())*100/float64(totalMs)), - m.summaryBar.ViewAs(float64(m.complete.VisTime.Milliseconds())/float64(totalMs))) - - fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", - labelStyle.Render(fmt.Sprintf("%-18s", "Video encoding:")), - valueStyle.Render(fmt.Sprintf("~%-6s", formatDuration(m.complete.EncodeTime))), - int(float64(m.complete.EncodeTime.Milliseconds())*100/float64(totalMs)), - m.summaryBar.ViewAs(float64(m.complete.EncodeTime.Milliseconds())/float64(totalMs))) + writeBarRow("Visualisation:", m.complete.VisTime) + writeBarRow("Video encoding:", m.complete.EncodeTime) if m.complete.AudioTime > 0 { - fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", - labelStyle.Render(fmt.Sprintf("%-18s", "Audio encoding:")), - valueStyle.Render(fmt.Sprintf("~%-6s", 
formatDuration(m.complete.AudioTime))), - int(float64(m.complete.AudioTime.Milliseconds())*100/float64(totalMs)), - m.summaryBar.ViewAs(float64(m.complete.AudioTime.Milliseconds())/float64(totalMs))) + writeBarRow("Audio encoding:", m.complete.AudioTime) } // Calculate unaccounted time including finalisation (Pass 2 only) @@ -605,11 +596,7 @@ func (m *Model) renderComplete() string { strings.Contains(m.complete.EncoderName, "videotoolbox") { otherLabel = "GPU pipeline:" } - fmt.Fprintf(&s, " %s%s (~%2d%%) %s\n", - labelStyle.Render(fmt.Sprintf("%-18s", otherLabel)), - valueStyle.Render(fmt.Sprintf("~%-6s", formatDuration(otherTime))), - int(float64(otherTime.Milliseconds())*100/float64(totalMs)), - m.summaryBar.ViewAs(float64(otherTime.Milliseconds())/float64(totalMs))) + writeBarRow(otherLabel, otherTime) } fmt.Fprintf(&s, " %s%s", labelStyle.Render(fmt.Sprintf("%-18s", "Total time:")), highlightValueStyle.Render(formatDuration(m.complete.TotalTime)))