Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
240 changes: 240 additions & 0 deletions airsync-mac/Core/Media/NowPlayingPublisher.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,240 @@
//
// NowPlayingPublisher.swift
// AirSync
//
// Publishes Android now-playing info into macOS MPNowPlayingInfoCenter
// so boring.notch (via MediaRemote.framework) picks it up naturally.
// Uses silent audio to make the app audio-eligible for MediaRemote reporting.
//

import Foundation
import AppKit
import AVFoundation
import MediaPlayer

/// Publishes Android now-playing info into macOS `MPNowPlayingInfoCenter`
/// so boring.notch (via MediaRemote.framework) picks it up naturally.
/// Plays a silent audio loop so the app counts as audio-eligible for
/// MediaRemote reporting — without it, macOS fires corrective pause commands
/// whenever we publish now-playing data with no backing audio.
final class NowPlayingPublisher {
    static let shared = NowPlayingPublisher()

    // MARK: - Silent Audio Engine (makes app audio-eligible for MediaRemote)
    private var audioEngine: AVAudioEngine?
    private var playerNode: AVAudioPlayerNode?
    private var isSilentAudioRunning: Bool = false

    // MARK: - State

    /// Most recent media info received from Android (nil when nothing is playing).
    private var currentInfo: NowPlayingInfo?
    /// Guards against registering MPRemoteCommandCenter targets twice.
    private var commandCenterRegistered = false

    /// Timestamp of the last remote command we sent to Android.
    private var lastCommandSentAt: Date = .distantPast
    /// Timestamp of the last time we updated MPNowPlayingInfoCenter.
    private var lastStateUpdateAt: Date = .distantPast

    // Short debounces to provide an instant UI while preventing macOS feedback loops:
    // 0.35s limits how fast the user can mash buttons, and blocks automated
    // counter-commands that macOS fires right after we update the info center.
    private let commandDebounceInterval: TimeInterval = 0.35
    private let stateUpdateDebounceInterval: TimeInterval = 0.35

    private init() {}

    // MARK: - Public API

    /// Call once at app startup. Sets up remote commands and starts silent audio.
    func start() {
        registerRemoteCommands()
        // Start silent audio immediately so the app is ALWAYS audio-eligible.
        // If we wait until the first play command, macOS sees us publish
        // MPNowPlayingInfoCenter data without backing audio and fires a pauseCommand
        // to "correct" the state — which is the root cause of the glitch loop.
        startSilentAudio()
    }

    /// Update now-playing with Android media info.
    /// During the 1-second window after the user clicks a button, we ignore incoming
    /// status updates. This protects our instant optimistic UI from being overwritten
    /// by stale network packets that Android dispatched before the command took effect.
    func update(info: NowPlayingInfo) {
        let timeSinceCommand = Date().timeIntervalSince(lastCommandSentAt)
        if timeSinceCommand < 1.0 {
            return
        }

        currentInfo = info

        // BUG FIX: clear() stops the silent audio engine (e.g. on Android
        // disconnect) and nothing restarted it afterwards, so on reconnect the
        // app was no longer audio-eligible and macOS resumed firing corrective
        // pause commands. Re-ensure the engine here; startSilentAudio() is a
        // cheap no-op while it is already running.
        startSilentAudio()

        // Always publish metadata on the main thread (MPNowPlayingInfoCenter requirement)
        DispatchQueue.main.async {
            self.lastStateUpdateAt = Date()
            self.publishToNowPlayingInfoCenter(info: info)
        }
    }

    /// Clear now-playing info (e.g., Android disconnected)
    func clear() {
        currentInfo = nil
        stopSilentAudio() // Only place we stop the engine
        DispatchQueue.main.async {
            MPNowPlayingInfoCenter.default().nowPlayingInfo = nil
            MPNowPlayingInfoCenter.default().playbackState = .stopped
        }
    }

    // MARK: - Silent Audio

    /// Starts a looping one-second buffer of silence so macOS treats this app
    /// as an active audio producer. No-op if already running.
    private func startSilentAudio() {
        guard !isSilentAudioRunning else { return }
        isSilentAudioRunning = true

        let engine = AVAudioEngine()
        let player = AVAudioPlayerNode()
        engine.attach(player)
        engine.connect(player, to: engine.mainMixerNode, format: nil)

        // Generate one second of silence
        let format = AVAudioFormat(standardFormatWithSampleRate: 44100, channels: 2)!
        let frameCount = AVAudioFrameCount(format.sampleRate)
        guard let buffer = AVAudioPCMBuffer(pcmFormat: format, frameCapacity: frameCount) else {
            isSilentAudioRunning = false
            return
        }
        buffer.frameLength = frameCount
        // Buffer is already zeroed by default — silence

        // engine.start() MUST come before player.play() / scheduleBuffer.
        // Calling player.play() on an un-started engine produces:
        // "Engine is not running because it was not explicitly started"
        do {
            try engine.start()
        } catch {
            print("[NowPlayingPublisher] Failed to start silent audio engine: \(error)")
            isSilentAudioRunning = false
            return
        }

        audioEngine = engine
        playerNode = player

        player.scheduleBuffer(buffer, at: nil, options: .loops)
        player.play()

        print("[NowPlayingPublisher] Silent audio engine started — app is now audio-eligible")
    }

    /// Tears the engine down. Called only from clear(); update() restarts it
    /// when media info resumes.
    private func stopSilentAudio() {
        guard isSilentAudioRunning else { return }
        playerNode?.stop()
        audioEngine?.stop()
        audioEngine?.reset()
        audioEngine = nil
        playerNode = nil
        isSilentAudioRunning = false
        print("[NowPlayingPublisher] Silent audio engine stopped")
    }

    // MARK: - Publish to MPNowPlayingInfoCenter

    /// Translates our NowPlayingInfo into the MPNowPlayingInfoCenter dictionary.
    /// Must be called on the main thread.
    private func publishToNowPlayingInfoCenter(info: NowPlayingInfo) {
        let center = MPNowPlayingInfoCenter.default()

        var mpInfo: [String: Any] = [
            MPMediaItemPropertyTitle: info.title ?? "",
            MPMediaItemPropertyArtist: info.artist ?? "",
            MPMediaItemPropertyAlbumTitle: info.album ?? "",
        ]

        if let duration = info.duration, duration > 0 {
            mpInfo[MPMediaItemPropertyPlaybackDuration] = duration
        }
        if let elapsed = info.elapsedTime {
            mpInfo[MPNowPlayingInfoPropertyElapsedPlaybackTime] = elapsed
        }
        // Rate 0 reads as "paused" to MediaRemote consumers; 1 as "playing".
        mpInfo[MPNowPlayingInfoPropertyPlaybackRate] = info.isPlaying == true ? 1.0 : 0.0

        if let artworkData = info.artworkData,
           let nsImage = NSImage(data: artworkData) {
            let artwork = MPMediaItemArtwork(boundsSize: CGSize(width: nsImage.size.width, height: nsImage.size.height)) { _ in
                return nsImage
            }
            mpInfo[MPMediaItemPropertyArtwork] = artwork
        }

        center.nowPlayingInfo = mpInfo
        // Restore playbackState so UI elements like boringNotch know it's explicitly playing/paused.
        // Automated counter-commands triggered by this change will be dropped by stateUpdateDebounceInterval.
        center.playbackState = info.isPlaying == true ? .playing : .paused
    }

    // MARK: - Remote Commands

    /// Common path for every MPRemoteCommandCenter command: drop it if it arrived
    /// too soon after our own command or after our own info-center update (macOS
    /// fires automated counter-commands in that window), otherwise forward it to
    /// Android and apply an optional optimistic UI update.
    private func processCommand(name: String, action: String, optimisticUpdate: ((NowPlayingPublisher) -> Void)? = nil) -> MPRemoteCommandHandlerStatus {
        let now = Date()
        let timeSinceCommand = now.timeIntervalSince(lastCommandSentAt)
        let timeSinceState = now.timeIntervalSince(lastStateUpdateAt)

        // Debounced — report .success so macOS doesn't retry or surface an error.
        if timeSinceCommand < commandDebounceInterval {
            return .success
        }
        if timeSinceState < stateUpdateDebounceInterval {
            return .success
        }

        lastCommandSentAt = now
        WebSocketServer.shared.sendAndroidMediaControl(action: action)
        optimisticUpdate?(self)

        return .success
    }

    /// Registers play/pause/next/previous handlers with MPRemoteCommandCenter.
    /// Safe to call multiple times; registration happens once.
    private func registerRemoteCommands() {
        guard !commandCenterRegistered else { return }
        commandCenterRegistered = true

        let commandCenter = MPRemoteCommandCenter.shared()

        // NOTE: Commands are forwarded to Android via WebSocket (not NowPlayingCLI which
        // controls LOCAL Mac media via the `media-control` binary). This music is from
        // the phone, so control actions must go back over the WebSocket connection.
        // IMPORTANT: macOS often fires automated counter-commands when we update MPNowPlayingInfoCenter.
        // We drop any commands received within `stateUpdateDebounceInterval` of our last update.
        // We also do optimistic updates so the UI responds instantly to clicks.

        commandCenter.playCommand.addTarget { [weak self] _ in
            return self?.processCommand(name: "Play", action: "play") { $0.publishPlaybackStateUpdate(playing: true) } ?? .commandFailed
        }

        commandCenter.pauseCommand.addTarget { [weak self] _ in
            return self?.processCommand(name: "Pause", action: "pause") { $0.publishPlaybackStateUpdate(playing: false) } ?? .commandFailed
        }

        commandCenter.togglePlayPauseCommand.addTarget { [weak self] _ in
            // Resolve the toggle into an explicit play/pause based on our last
            // known Android state, so both sides agree on the outcome.
            let isPlaying = self?.currentInfo?.isPlaying == true
            let explicitAction = isPlaying ? "pause" : "play"
            return self?.processCommand(name: "TogglePlayPause", action: explicitAction) { publisher in
                publisher.publishPlaybackStateUpdate(playing: !isPlaying)
            } ?? .commandFailed
        }

        commandCenter.nextTrackCommand.addTarget { [weak self] _ in
            return self?.processCommand(name: "NextTrack", action: "nextTrack") ?? .commandFailed
        }

        commandCenter.previousTrackCommand.addTarget { [weak self] _ in
            return self?.processCommand(name: "PreviousTrack", action: "previousTrack") ?? .commandFailed
        }

        // Seeking not yet supported for Android remote
        commandCenter.changePlaybackPositionCommand.isEnabled = false

        print("[NowPlayingPublisher] Remote commands registered")
    }

    /// Optimistically flip the cached playing flag and republish, so the UI
    /// reflects a button press instantly instead of waiting for Android's
    /// next status packet.
    private func publishPlaybackStateUpdate(playing: Bool) {
        guard var info = currentInfo else { return }
        info.isPlaying = playing
        currentInfo = info
        DispatchQueue.main.async {
            self.lastStateUpdateAt = Date()
            self.publishToNowPlayingInfoCenter(info: info)
        }
    }
}
10 changes: 10 additions & 0 deletions airsync-mac/Core/Storage/UserDefaults.swift
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ extension UserDefaults {
static let continueApp = "continueApp"
static let directKeyInput = "directKeyInput"
static let sendNowPlayingStatus = "sendNowPlayingStatus"
static let syncAndroidPlaybackSeekbar = "syncAndroidPlaybackSeekbar"
static let isMusicCardHidden = "isMusicCardHidden"
static let lastOnboarding = "lastOnboarding"

Expand Down Expand Up @@ -130,6 +131,15 @@ extension UserDefaults {
set { set(newValue, forKey: Keys.sendNowPlayingStatus)}
}

/// When enabled, AirSync plays a silent audio loop to claim macOS Now Playing focus,
/// allowing the Android playback seekbar to be exposed in boringNotch / Control Center.
/// Disabled by default because it causes Bluetooth multipoint headphones to route
/// audio to the Mac, preventing Android media from playing through the headphones.
var syncAndroidPlaybackSeekbar: Bool {
get { bool(forKey: Keys.syncAndroidPlaybackSeekbar) }
set { set(newValue, forKey: Keys.syncAndroidPlaybackSeekbar) }
}

var isMusicCardHidden: Bool {
get { bool(forKey: Keys.isMusicCardHidden) }
set { set(newValue, forKey: Keys.isMusicCardHidden) }
Expand Down
15 changes: 15 additions & 0 deletions airsync-mac/Core/Util/MacInfo/MacInfoSyncManager.swift
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,21 @@ class MacInfoSyncManager: ObservableObject {
self?.sendDeviceStatusWithoutMusic()
return
}

// IMPORTANT: Filter out AirSync's own bundle ID.
// NowPlayingPublisher writes Android's media info into macOS
// MPNowPlayingInfoCenter so boringNotch can display it.
// media-control reads from the same source, so without this guard
// we'd forward AirSync's own published entry back to Android,
// creating a play/pause feedback loop.
let ownBundleId = Bundle.main.bundleIdentifier ?? ""
if let bundleId = info.bundleIdentifier, !ownBundleId.isEmpty,
bundleId == ownBundleId {
// This is our own reflection — treat as nothing playing on Mac
self?.sendDeviceStatusWithoutMusic()
return
}

// MUST update @Published properties on main thread
DispatchQueue.main.async {
// print("Now Playing fetched:", info) // debug
Expand Down
57 changes: 56 additions & 1 deletion airsync-mac/Core/WebSocket/WebSocketServer+Handlers.swift
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,32 @@ extension WebSocketServer {
{
let albumArt = (music["albumArt"] as? String) ?? ""
let likeStatus = (music["likeStatus"] as? String) ?? "none"
let isBuffering = (music["isBuffering"] as? Bool) ?? false

// Android sends duration/position in ms; convert to seconds.
// Using NSNumber because Swift's `as? Double` fails if the JSON parser inferred an Int.
let durationSec = (music["duration"] as? NSNumber).map { $0.doubleValue / 1000.0 } ?? -1.0
var positionSec = (music["position"] as? NSNumber).map { $0.doubleValue / 1000.0 } ?? -1.0

// Timestamp-based position correction:
// Android includes the wall-clock ms when the position snapshot was taken.
// We add the elapsed time since then (which includes WiFi transit) to get a
// much more accurate "current" position — effectively NTP-style compensation.
// Clamp: only correct for realistic WiFi delays (< 5s). Larger deltas likely
// indicate clock skew between devices, which would worsen accuracy if applied.
if positionSec >= 0, playing, !isBuffering,
let tsMs = music["positionTimestamp"] as? NSNumber {
let capturedAt = tsMs.doubleValue / 1000.0
let nowSec = Date().timeIntervalSince1970
let networkDelta = nowSec - capturedAt
if networkDelta > -2.0 && networkDelta < 5.0 {
positionSec += max(0.0, networkDelta)
}
}
// Clamp to duration to prevent the seekbar going past the end
if durationSec > 0 && positionSec > durationSec {
positionSec = durationSec
}

AppState.shared.status = DeviceStatus(
battery: .init(level: level, isCharging: isCharging),
Expand All @@ -265,9 +291,38 @@ extension WebSocketServer {
volume: volume,
isMuted: isMuted,
albumArt: albumArt,
likeStatus: likeStatus
likeStatus: likeStatus,
duration: durationSec,
position: positionSec,
isBuffering: isBuffering
)
)

// Publish Android now-playing info to MPNowPlayingInfoCenter only when
// the user has opted in, because this requires playing silent audio which
// causes multipoint Bluetooth headphones to route audio to the Mac.
if UserDefaults.standard.syncAndroidPlaybackSeekbar {
var npInfo = NowPlayingInfo()
npInfo.title = title
npInfo.artist = artist
npInfo.isPlaying = playing
if let data = Data(base64Encoded: albumArt) {
npInfo.artworkData = data
}
// Seekbar: Android sends duration/position in ms; MPNowPlayingInfoCenter needs seconds.
// positionMs uses optDouble so missing/null safely falls back to -1.
// NOTE: Use NSNumber because Swift's JSON parser returns an Int type for flat numbers.
if let nsNum = music["duration"] as? NSNumber, nsNum.doubleValue > 0 {
npInfo.duration = nsNum.doubleValue / 1000.0
}
if let pMs = music["position"] as? NSNumber, pMs.doubleValue >= 0 {
npInfo.elapsedTime = pMs.doubleValue / 1000.0
}
NowPlayingPublisher.shared.update(info: npInfo)
} else {
// If the setting is off, ensure any previously running session is cleared
NowPlayingPublisher.shared.clear()
}
}
}

Expand Down
22 changes: 22 additions & 0 deletions airsync-mac/Core/WebSocket/WebSocketServer+Outgoing.swift
Original file line number Diff line number Diff line change
Expand Up @@ -108,10 +108,32 @@ extension WebSocketServer {
/// Ask Android to mark the currently playing track as liked.
func like() { sendMediaAction("like") }
/// Ask Android to remove the like from the currently playing track.
func unlike() { sendMediaAction("unlike") }

/// Seek Android playback to a specific position (in seconds).
/// - Parameter positionSeconds: Target position; non-finite values are ignored
///   and negative values clamp to 0.
func seekTo(positionSeconds: Double) {
    // BUG FIX: Int(Double) is a trapping conversion — a NaN or infinite input
    // (e.g. from a 0-duration track's progress math) would crash the app.
    guard positionSeconds.isFinite else { return }
    // Clamp so Android never receives a negative millisecond offset.
    let positionMs = max(0, Int(positionSeconds * 1000))
    sendMessage(type: "mediaControl", data: ["action": "seekTo", "positionMs": positionMs])
}

/// Send a bare media-control action (already in Android's vocabulary) over the WebSocket.
private func sendMediaAction(_ action: String) {
    sendMessage(type: "mediaControl", data: ["action": action])
}

/// Forward a system media command (from MPRemoteCommandCenter) back to Android.
/// - action: "play", "pause", "playPause", "nextTrack", "previousTrack"
func sendAndroidMediaControl(action: String) {
    // Only the track-navigation names differ between MPRemoteCommandCenter-style
    // names and the Android protocol; everything else passes through unchanged.
    let renames: [String: String] = [
        "nextTrack": "next",
        "previousTrack": "previous",
    ]
    sendMediaAction(renames[action] ?? action)
}

// MARK: - Volume Controls

func volumeUp() { sendVolumeAction("volumeUp") }
Expand Down
Loading