diff --git a/README.md b/README.md
index a28763e..b83141b 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@ Pipewire client library, statically linked, ported to the Zig build system.
 
 ## Motivation
 
-I want a static executable that can play audio and turn screen contents into a video feed. The pipewire library makes heavy use of `dlopen` internally, so this is nontrivial. 
+I want a static executable that can play audio and turn screen contents into a video feed. The pipewire library makes heavy use of `dlopen` internally, so this is nontrivial.
 
 ## Strategy
 
@@ -17,10 +17,9 @@ This project follows the pristine tarball approach. No modifications are require
 
 ## Status
 
-You can run the `video-play` example with `zig build video-play` to see the current webcam feed. This currently works without pipewire accessing the dynamic linker, but the example executable isn't fully static since it relies on SDL. I plan to port the example away from SDL so that this can be changed.
-
-Only the pipewire plugins/modules required for this example are currently built. To use other parts of the pipewire API, you may need to add more symbols to [src/wrap/dlfcn.zig](src/wrap/dlfcn.zig) and regenerate `c.zig` if additional pipewire headers are required.
+Only the pipewire plugins/modules required for the provided example are currently built. To use other parts of the pipewire API, you may need to add more symbols to [src/wrap/dlfcn.zig](src/wrap/dlfcn.zig) and regenerate `c.zig` if additional pipewire headers are required.
 
+You can run the `video-play` example with `zig build video-play` to see the current webcam feed. Use something like `-Dtarget=x86_64-linux-musl` if you want full static linking. Note that the video will be fairly low resolution: the example doesn't have a real graphics stack, so it renders pixels one at a time. The example only supports the YUY2 video format.
 
 ## Usage
 
@@ -54,3 +53,9 @@ defer pw.pw_deinit();
 ```
 
 See [`src/examples`](`src/examples`) for more information.
+
+### Help, I'm getting undefined symbols!
+
+If you import the pipewire Zig module but don't reference it, the import won't get evaluated and the wrapper functions won't get exported.
+
+To resolve this, use something from the pipewire module, or declare `comptime { _ = @import("pipewire"); }` to force evaluation.
diff --git a/build.zig b/build.zig
index 3f722d9..331fe4e 100644
--- a/build.zig
+++ b/build.zig
@@ -134,11 +134,9 @@ pub fn build(b: *std.Build) void {
         .HAVE_GRP_H = {},
         .HAVE_GSTREAMER_DEVICE_PROVIDER = {},
         .HAVE_MALLOC_INFO = {},
-        .HAVE_MALLOC_TRIM = {},
         .HAVE_MEMFD_CREATE = {},
         .HAVE_PIDFD_OPEN = {},
         .HAVE_PWD_H = {},
-        .HAVE_RANDOM_R = {},
         .HAVE_REALLOCARRAY = {},
         .HAVE_SIGABBREV_NP = {},
         .HAVE_SPA_PLUGINS = {},
@@ -162,6 +160,12 @@ pub fn build(b: *std.Build) void {
         .RTPRIO_CLIENT = rtprio_client,
         .RTPRIO_SERVER = rtprio_server,
     });
+    if (target.result.isGnuLibC()) {
+        config_h.addValues(.{
+            .HAVE_MALLOC_TRIM = {},
+            .HAVE_RANDOM_R = {},
+        });
+    }
 
     // Build the library plugins and modules
     {
@@ -304,20 +308,23 @@ pub fn build(b: *std.Build) void {
 
         // Build the video play example.
{ + const zin = b.dependency("zin", .{ + .optimize = optimize, + .target = target, + }).module("zin"); + const video_play = b.addExecutable(.{ .name = "video-play", .root_module = b.createModule(.{ .root_source_file = b.path("src/examples/video_play.zig"), .target = target, .optimize = optimize, + .imports = &.{ + .{ .name = "zin", .module = zin }, + }, }), }); - const sdl = b.dependency("sdl", .{ - .optimize = optimize, - .target = target, - }); - if (use_zig_module) { video_play.root_module.addImport("pipewire", libpipewire_zig); } else { @@ -327,7 +334,6 @@ pub fn build(b: *std.Build) void { video_play.root_module.addOptions("example_options", example_options); - video_play.linkLibrary(sdl.artifact("SDL3")); b.installArtifact(video_play); const run_step = b.step("video-play", "Run the video-play example"); @@ -382,11 +388,6 @@ const flags: []const []const u8 = &.{ // we just wrap the aliases as well. "-D__open_2=__wrap_open_2", "-D__open_alias=__wrap___open_alias", - - // Since `spa_autoclose` points to a function defined in a header, its close doesn't get - // wrapped. Wrap it manually. - "-Dspa_autoclose=__attribute__((__cleanup__(__wrap_close)))", - "-Dspa_autoclose=__attribute__((__cleanup__(__wrap_close)))", }; pub const PluginAndModuleCtx = struct { diff --git a/build.zig.zon b/build.zig.zon index 8a2f58e..ed4c427 100644 --- a/build.zig.zon +++ b/build.zig.zon @@ -16,13 +16,9 @@ .hash = "N-V-__8AAKYw2AD301ZQsWszbYSWZQF5y-q4WXJif0UGRvFh", }, // Used by the examples - .sdl = .{ - .url = "git+https://github.com/allyourcodebase/SDL3#af9c32ce4824e0ad4337447434c40f2b672faf94", - .hash = "sdl-0.0.0-i4QD0btgqAABajEXrQnyZr1xVsk7LM48w2nBmuQ5gdfr", - }, .zin = .{ - .url = "git+https://github.com/marler8997/zin#d3e230690f09debdb99dffe4586ad19f9dfb4716", - .hash = "zin-0.0.0-W7QDx6kKAwCVg-wWMO4FdpvZMH66NJpc9wFDdxp5c8E2", + .url = "git+https://github.com/marler8997/zin#62706713b7089b4220d8e1eb49f8d776138a9058", + .hash = "zin-0.0.0-W7QDx9BaAwC-H1uS9Cz68oMn9uh5fWuVis5b-eqhCeeq", }, }, .paths = .{ diff --git a/src/examples/video_play.zig b/src/examples/video_play.zig index bc5691c..d837d59 100644 --- a/src/examples/video_play.zig +++ b/src/examples/video_play.zig @@ -1,34 +1,81 @@ -// `pipewire/src/examples/video-play.c` translated to Zig. +//! `pipewire/src/examples/video-play.c` translated to Zig and ported from SDL to Zin to demonstrate +//! video without a dynamic linker. This is not an efficient way to render, each pixel is rendered +//! as a rectangle, in a real application you'll want a better strategy for this. +const builtin = @import("builtin"); const std = @import("std"); +const zin = @import("zin"); +const win32 = zin.platform.win32; const log = std.log; const example_options = @import("example_options"); +const assert = std.debug.assert; -// Normal code wouldn't do this, this +const Allocator = std.mem.Allocator; + +// Configure logging +pub const std_options: std.Options = .{ + .log_level = .info, +}; + +// Normal code wouldn't need this conditional, we're just demonstrating both the static library and +// the Zig module here. Prefer the Zig module when possible. We wrap the C module in a struct just +// to make it look like the Zig module so that the rest of the file can use it as is. const pw = if (example_options.use_zig_module) - // Example of linking with the pipewire zig module @import("pipewire") else - // Example of linking with the pipewire static library. We're wrapping it like this just to - // match the Zig module so the rest of the code doesn't need conditionals. 
struct { pub const c = @import("pipewire"); }; -const sdl = @cImport({ - @cDefine("WIDTH", std.fmt.comptimePrint("{}", .{width})); - @cDefine("HEIGHT", std.fmt.comptimePrint("{}", .{height})); - @cDefine("RATE", std.fmt.comptimePrint("{}", .{rate})); - @cInclude("SDL3/SDL.h"); -}); +// Configure zin +pub const zin_config: zin.Config = .{ + .StaticWindowId = enum { + main, + pub fn getConfig(self: @This()) zin.WindowConfigData { + return switch (self) { + .main => .{ + .window_size_events = true, + .key_events = true, + .mouse_events = true, + .timers = .one, + .background = .{ .r = 49, .g = 49, .b = 49 }, + .dynamic_background = false, + .win32 = .{ .render = .{ .gdi = .{} } }, + .x11 = .{ .render_kind = .double_buffered }, + }, + }; + } + }, +}; -const width = 1920; -const height = 1080; -const rate = 30; +// Video settings +const texel_width = 10; const max_buffers = 64; +const default_video_width = 160; +const default_video_height = 90; +const default_frame_rate = 60; +const max_frame_rate = 120; -pub const std_options: std.Options = .{ - .log_level = .info, +// Global state used by Zin and Pipewire +const global = struct { + const default_timer_period_ns = 16 * std.time.ns_per_ms; + + var last_render: ?std.time.Instant = null; + var timer_period_ns: u64 = 0; + + var loop: ?*pw.c.pw_main_loop = null; + var stream: ?*pw.c.pw_stream = null; + + var position: ?*pw.c.spa_io_position = null; + + var format: pw.c.spa_video_info = .{}; + var stride: i32 = 0; + var size: pw.c.spa_rectangle = .{}; + + var rect: Rect = .{}; + var is_yuv: bool = false; + + var current_buffer: ?*pw.c.pw_buffer = null; }; pub fn main() !void { @@ -39,337 +86,221 @@ pub fn main() !void { pw.c.pw_log_set_level(pw.Logger.default_level); } + // Initialize pipewire pw.c.pw_init(0, null); defer pw.c.pw_deinit(); - var data: Data = .{}; - - // Create a main loop - data.loop = pw.c.pw_main_loop_new(null).?; - defer pw.c.pw_main_loop_destroy(data.loop); - - _ = pw.c.pw_loop_add_signal(pw.c.pw_main_loop_get_loop(data.loop), pw.c.SIGINT, &doQuit, &data); - _ = pw.c.pw_loop_add_signal(pw.c.pw_main_loop_get_loop(data.loop), pw.c.SIGTERM, &doQuit, &data); - - // create a simple stream, the simple stream manages to core and remote objects for you if you - // don't need to deal with them - // - // If you plan to autoconnect your stream, you need to provide at least media, category and role - // properties - // - // Pass your events and a user_data pointer as the last arguments. This will inform you about - // the stream state. The most important event you need to listen to is the process event where - // you need to consume the data provided to you. 
-    const props = pw.c.pw_properties_new(
-        pw.c.PW_KEY_MEDIA_TYPE,
-        "Video",
-        pw.c.PW_KEY_MEDIA_CATEGORY,
-        "Capture",
-        pw.c.PW_KEY_MEDIA_ROLE,
-        "Camera",
-        @as(?*anyopaque, null),
-    ).?;
-
-    var args: std.process.ArgIterator = .init();
-    _ = args.next();
-    if (args.next()) |arg| {
-        _ = pw.c.pw_properties_set(props, pw.c.PW_KEY_TARGET_OBJECT, arg);
-    }
-
-    data.stream = pw.c.pw_stream_new_simple(
-        pw.c.pw_main_loop_get_loop(data.loop),
-        "video-play",
-        props,
-        &.{
-            .version = pw.c.PW_VERSION_STREAM_EVENTS,
-            .state_changed = &onStreamStateChanged,
-            .io_changed = &onStreamIoChanged,
-            .param_changed = &onStreamParamChanged,
-            .process = &onProcess,
-        },
-        &data,
-    ).?;
-    defer pw.c.pw_stream_destroy(data.stream);
-
-    if (!sdl.SDL_Init(sdl.SDL_INIT_VIDEO)) {
-        log.err("can't initialize SDL: {s}", .{sdl.SDL_GetError()});
-        std.process.exit(1);
-    }
-
-    if (!sdl.SDL_CreateWindowAndRenderer(
-        "Demo",
-        width,
-        height,
-        sdl.SDL_WINDOW_RESIZABLE,
-        &data.window,
-        &data.renderer,
-    )) {
-        log.err("can't create window: {s}", .{sdl.SDL_GetError()});
-        std.process.exit(1);
-    }
-    defer {
-        if (data.texture) |texture| sdl.SDL_DestroyTexture(texture);
-        if (data.cursor) |cursor| sdl.SDL_DestroyTexture(cursor);
-        sdl.SDL_DestroyRenderer(data.renderer);
-        sdl.SDL_DestroyWindow(data.window);
-    }
-
-    var buffer: [1024]u8 align(@alignOf(u32)) = undefined;
-    var b = std.mem.zeroInit(pw.c.spa_pod_builder, .{
-        .data = &buffer,
-        .size = buffer.len,
-    });
-
-    // build the extra parameters to connect with. To connect, we can provide a list of supported
-    // formats. We use a builder that writes the param object to the stack.
-    var params_buf: [3]?*const pw.c.spa_pod = undefined;
-    var params: std.ArrayList(?*const pw.c.spa_pod) = .initBuffer(&params_buf);
-    buildFormat(&data, &b, &params);
+    // Create the pipewire loop
+    global.loop = pw.c.pw_main_loop_new(null).?;
+    defer pw.c.pw_main_loop_destroy(global.loop);
 
+    // Create the pipewire stream
    {
-        var f: pw.c.spa_pod_frame = undefined;
-        // send a tag, input tags travel upstream
-        pw.c.spa_tag_build_start(&b, &f, pw.c.SPA_PARAM_Tag, pw.c.SPA_DIRECTION_INPUT);
-        const items: [1]pw.c.spa_dict_item = .{
-            pw.c.SPA_DICT_ITEM_INIT("my-tag-other-key", "my-special-other-tag-value"),
-        };
-        pw.c.spa_tag_build_add_dict(&b, &pw.c.SPA_DICT_INIT(items, 1));
-        params.appendBounded(pw.c.spa_tag_build_end(&b, &f)) catch @panic("OOB");
-    }
+        const props = pw.c.pw_properties_new(
+            pw.c.PW_KEY_MEDIA_TYPE,
+            "Video",
+            pw.c.PW_KEY_MEDIA_CATEGORY,
+            "Capture",
+            pw.c.PW_KEY_MEDIA_ROLE,
+            "Camera",
+            @as(?*anyopaque, null),
+        ).?;
+
+        var args: std.process.ArgIterator = .init();
+        _ = args.next();
+        if (args.next()) |arg| {
+            // pw_properties_set returns the number of properties that changed rather than an
+            // error code, so it must not be routed through check().
+            _ = pw.c.pw_properties_set(props, pw.c.PW_KEY_TARGET_OBJECT, arg);
+        }
 
-    // now connect the stream, we need a direction (input/output),
-    // an optional target node to connect to, some flags and parameters
-    //
-    const res = pw.c.pw_stream_connect(
-        data.stream,
-        pw.c.PW_DIRECTION_INPUT,
-        pw.c.PW_ID_ANY,
-        pw.c.PW_STREAM_FLAG_AUTOCONNECT | // try to automatically connect this stream
-            pw.c.PW_STREAM_FLAG_INACTIVE | // we will activate ourselves
-            pw.c.PW_STREAM_FLAG_MAP_BUFFERS, // mmap the buffer data for us
-        // extra parameters, see above
-        params.items.ptr,
-        @intCast(params.items.len),
-    );
-    if (res < 0) {
-        log.err("can't connect: {s}", .{pw.c.spa_strerror(res)});
-        std.process.exit(1);
+        global.stream = pw.c.pw_stream_new_simple(
+            pw.c.pw_main_loop_get_loop(global.loop),
+            "video-play",
+            props,
+            &.{
+                .version = pw.c.PW_VERSION_STREAM_EVENTS,
+                .state_changed = &onStreamStateChanged,
+                .io_changed = &onStreamIoChanged,
+                .param_changed = &onStreamParamChanged,
+                .process = &onProcess,
+            },
+            null,
+        ).?;
+    }
+    defer pw.c.pw_stream_destroy(global.stream);
 
-    // /do things until we quit the mainloop
-    _ = pw.c.pw_main_loop_run(data.loop);
-}
-
-const Pixel = extern struct {
-    r: f32,
-    g: f32,
-    b: f32,
-    a: f32,
-};
-
-const Data = struct {
-    renderer: ?*sdl.SDL_Renderer = null,
-    window: ?*sdl.SDL_Window = null,
-    texture: ?*sdl.SDL_Texture = null,
-    cursor: ?*sdl.SDL_Texture = null,
-
-    loop: ?*pw.c.pw_main_loop = null,
-    stream: ?*pw.c.pw_stream = null,
-
-    position: ?*pw.c.spa_io_position = null,
-
-    format: pw.c.spa_video_info = .{},
-    stride: i32 = 0,
-    size: pw.c.spa_rectangle = .{},
-
-    rect: sdl.SDL_FRect = .{},
-    cursor_rect: sdl.SDL_FRect = .{},
-    is_yuv: bool = false,
-};
+    // Connect to the stream
+    {
+        var builder_buf: [1024]u8 align(@alignOf(u32)) = undefined;
+        var b = std.mem.zeroInit(pw.c.spa_pod_builder, .{
+            .data = &builder_buf,
+            .size = builder_buf.len,
+        });
+
+        var params_buf: [2]?*const pw.c.spa_pod = undefined;
+        var params: std.ArrayList(?*const pw.c.spa_pod) = .initBuffer(&params_buf);
+
+        // Tell pipewire which formats we support
+        {
+            var format_frame: pw.c.spa_pod_frame = undefined;
+            check(pw.c.spa_pod_builder_push_object(
+                &b,
+                &format_frame,
+                pw.c.SPA_TYPE_OBJECT_Format,
+                pw.c.SPA_PARAM_EnumFormat,
+            ));
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_mediaType, 0));
+            check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_MEDIA_TYPE_video));
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_mediaSubtype, 0));
+            check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_MEDIA_SUBTYPE_raw));
+
+            // Tell pipewire we prefer YUY2 since it's the only format we currently render, but
+            // list all the other formats as fallbacks so we still have a chance to respond if
+            // that's what the webcam gives us.
+            {
+                var choice_frame: pw.c.spa_pod_frame = undefined;
+                check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_VIDEO_format, 0));
+                check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Enum, 0));
+                check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_VIDEO_FORMAT_YUY2));
+                for (all_formats) |format| {
+                    check(pw.c.spa_pod_builder_id(&b, format));
+                }
+                assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null);
+            }
 
-fn doQuit(userdata: ?*anyopaque, signal_number: c_int) callconv(.c) void {
-    _ = signal_number;
-    const data: *Data = @ptrCast(@alignCast(userdata));
-    _ = pw.c.pw_main_loop_quit(data.loop);
-}
+            // Set the resolutions we support. We default pretty low since we're rendering a
+            // rectangle per pixel.
+            {
+                var choice_frame: pw.c.spa_pod_frame = undefined;
+                check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_VIDEO_size, 0));
+                check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Range, 0));
+                check(pw.c.spa_pod_builder_rectangle(&b, default_video_width, default_video_height));
+                check(pw.c.spa_pod_builder_rectangle(&b, 1, 1));
+                check(pw.c.spa_pod_builder_rectangle(&b, default_video_width, default_video_height));
+                assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null);
+            }
 
-// our data processing function is in general:
-// ```
-// struct pw_buffer *b;
-// b = pw_stream_dequeue_buffer(stream);
-//
-// .. do stuff with buffer ...
-//
-// pw_stream_queue_buffer(stream, b);
-// ```
-fn onProcess(userdata: ?*anyopaque) callconv(.c) void {
-    const data: *Data = @ptrCast(@alignCast(userdata));
-    const stream = data.stream;
+            // Tell pipewire what framerates we support.
+            {
+                var choice_frame: pw.c.spa_pod_frame = undefined;
+                check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_VIDEO_framerate, 0));
+                check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Range, 0));
+                check(pw.c.spa_pod_builder_fraction(&b, default_frame_rate, 1));
+                check(pw.c.spa_pod_builder_fraction(&b, 0, 1));
+                check(pw.c.spa_pod_builder_fraction(&b, max_frame_rate, 1));
+                assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null);
+            }
 
-    var render_cursor = false;
+            // Log the supported formats
+            const format: *pw.c.spa_pod = @ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &format_frame).?));
+            log.info("supported formats:", .{});
+            check(pw.c.spa_debug_format(2, null, format));
 
-    var maybe_buffer: ?*pw.c.pw_buffer = null;
-    while (true) {
-        const t = pw.c.pw_stream_dequeue_buffer(stream) orelse break;
-        if (maybe_buffer) |b| _ = pw.c.pw_stream_queue_buffer(stream, b);
-        maybe_buffer = t;
-    }
-    const b = maybe_buffer orelse {
-        log.warn("out of buffers", .{});
-        return;
-    };
-    defer _ = pw.c.pw_stream_queue_buffer(stream, b);
+            // Add the supported formats to our params
+            params.appendBounded(format) catch @panic("OOB");
+        }
 
-    const buf: *pw.c.spa_buffer = b.buffer;
+        // Also advertise the raw DSP float format
+        {
+            var format_frame: pw.c.spa_pod_frame = undefined;
+            check(pw.c.spa_pod_builder_push_object(&b, &format_frame, pw.c.SPA_TYPE_OBJECT_Format, pw.c.SPA_PARAM_EnumFormat));
 
-    log.debug("new buffer {*}", .{buf});
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_mediaType, 0));
+            check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_MEDIA_TYPE_video));
 
-    handleEvents(data);
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_mediaSubtype, 0));
+            check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_MEDIA_SUBTYPE_dsp));
 
-    const sdata = buf.datas[0].data orelse return;
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_FORMAT_VIDEO_format, 0));
+            check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_VIDEO_FORMAT_DSP_F32));
 
-    const maybe_h: ?*pw.c.spa_meta_header = @ptrCast(@alignCast(pw.c.spa_buffer_find_meta_data(buf, pw.c.SPA_META_Header, @sizeOf(pw.c.spa_meta_header))));
-    if (maybe_h) |h| {
-        const now = pw.c.pw_stream_get_nsec(stream);
-        log.debug("now:{} pts:{} diff:{}", .{ now, h.pts, now - @as(u64, @intCast(h.pts)) });
-    }
+            const format: *const pw.c.spa_pod = @ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &format_frame)));
+            check(pw.c.spa_debug_format(2, null, format));
+            params.appendBounded(format) catch @panic("OOB");
+        }
 
-    // get the videocrop metadata if any
-    const maybe_mc: ?*pw.c.spa_meta_region = @ptrCast(@alignCast(pw.c.spa_buffer_find_meta_data(buf, pw.c.SPA_META_VideoCrop, @sizeOf(pw.c.spa_meta_region))));
-    if (maybe_mc) |mc| {
-        if (pw.c.spa_meta_region_is_valid(mc)) {
-            data.rect.x = @floatFromInt(mc.region.position.x);
-            data.rect.y = @floatFromInt(mc.region.position.y);
-            data.rect.w = @floatFromInt(mc.region.size.width);
-            data.rect.h = @floatFromInt(mc.region.size.height);
+        // Connect to the stream. We start inactive and activate it ourselves later.
+        const res = pw.c.pw_stream_connect(
+            global.stream,
+            pw.c.PW_DIRECTION_INPUT,
+            pw.c.PW_ID_ANY,
+            pw.c.PW_STREAM_FLAG_AUTOCONNECT |
+                pw.c.PW_STREAM_FLAG_INACTIVE |
+                pw.c.PW_STREAM_FLAG_MAP_BUFFERS,
+            params.items.ptr,
+            @intCast(params.items.len),
+        );
+        if (res < 0) {
+            log.err("can't connect: {s}", .{pw.c.spa_strerror(res)});
+            std.process.exit(1);
        }
    }
 
-    // get cursor metadata
-    const maybe_mcs: ?*pw.c.spa_meta_cursor = @ptrCast(@alignCast(pw.c.spa_buffer_find_meta_data(buf, pw.c.SPA_META_Cursor, @sizeOf(pw.c.spa_meta_cursor))));
-    if (maybe_mcs) |mcs| {
-        if (pw.c.spa_meta_cursor_is_valid(mcs)) {
-            data.cursor_rect.x = @floatFromInt(mcs.position.x);
-            data.cursor_rect.y = @floatFromInt(mcs.position.y);
-
-            const mb: *pw.c.spa_meta_bitmap = @ptrFromInt(@intFromPtr(mcs) + mcs.bitmap_offset);
-            data.cursor_rect.w = @floatFromInt(mb.size.width);
-            data.cursor_rect.h = @floatFromInt(mb.size.height);
-
-            if (data.cursor == null) {
-                data.cursor = sdl.SDL_CreateTexture(
-                    data.renderer,
-                    idToSdlFormat(mb.format),
-                    sdl.SDL_TEXTUREACCESS_STREAMING,
-                    @intCast(mb.size.width),
-                    @intCast(mb.size.height),
-                );
-                _ = sdl.SDL_SetTextureBlendMode(data.cursor, sdl.SDL_BLENDMODE_BLEND);
-            }
-
-            var cdata: [*c]u8 = undefined;
-            var cstride: c_int = undefined;
-            if (!sdl.SDL_LockTexture(data.cursor, null, &cdata, &cstride)) {
-                log.err("Couldn't lock cursor texture: {s}", .{sdl.SDL_GetError()});
-                return;
-            }
-            defer sdl.SDL_UnlockTexture(data.cursor);
-            // copy the cursor bitmap into the texture
-            var src: [*]u8 = @ptrFromInt(@intFromPtr(mb) + mb.offset);
-            var dst = cdata;
-            const ostride: usize = @intCast(@min(cstride, mb.stride));
+    // Setup Zin
+    try zin.processInit(.{});
+    {
+        var err: zin.X11ConnectError = undefined;
+        zin.x11Connect(&err) catch std.debug.panic("X11 connect failed: {f}", .{err});
+    }
+    defer zin.x11Disconnect();
+
+    zin.staticWindow(.main).registerClass(.{
+        .callback = windowEvent,
+        .win32_name = zin.L("VideoPlay"),
+        .macos_view = "VideoPlay",
+    }, .{
+        .win32_icon_large = .none,
+        .win32_icon_small = .none,
+    });
+    defer zin.staticWindow(.main).unregisterClass();
+
+    try zin.staticWindow(.main).create(.{
+        .title = "Video Play",
+        .size = .{ .client_points = .{
+            .x = default_video_width * texel_width,
+            .y = default_video_height * texel_width,
+        } },
+        .pos = null,
+    });
+    defer zin.staticWindow(.main).destroy();
+    zin.staticWindow(.main).show();
 
-            for (0..mb.size.height) |_| {
-                @memcpy(dst[0..ostride], src[0..ostride]);
-                dst += @intCast(cstride);
-                src += @intCast(mb.stride);
-            }
+    // Start a timer with the default timer period since we don't have a video feed yet, and call
+    // our callback once on startup so we don't have to wait for one timer period to elapse.
+    startTimerNanos(global.default_timer_period_ns);
+    windowEvent(.{ .timer = {} });
 
-            render_cursor = true;
-        }
-    }
+    // Start the main loop.
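+    // Note: zin owns the process's event loop from here on; the pipewire loop is never run
+    // directly. The window timer fires windowEvent(.timer), which pumps any pending pipewire
+    // events via pipewireFlush (pw_loop_iterate with a zero timeout) and invalidates the window.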
+    try zin.mainLoop();
+}
 
-    // copy video image in texture
-    if (data.is_yuv) {
-        var datas: [4]?[*]u8 = undefined;
-        const sstride = data.stride;
-        if (buf.n_datas == 1) {
-            _ = sdl.SDL_UpdateTexture(data.texture, null, sdata, sstride);
-        } else {
-            datas[0] = @ptrCast(sdata);
-            datas[1] = @ptrCast(buf.datas[1].data);
-            datas[2] = @ptrCast(buf.datas[2].data);
-            _ = sdl.SDL_UpdateYUVTexture(
-                data.texture,
-                null,
-                datas[0],
-                sstride,
-                datas[1],
-                @divExact(sstride, 2),
-                datas[2],
-                @divExact(sstride, 2),
-            );
-        }
-    } else {
-        var dstride: c_int = undefined;
-        var ddata: ?*anyopaque = undefined;
-        if (!sdl.SDL_LockTexture(data.texture, null, &ddata, &dstride)) {
-            log.err("Couldn't lock texture: {s}", .{sdl.SDL_GetError()});
-        }
-        defer sdl.SDL_UnlockTexture(data.texture);
-
-        var sstride: u32 = @intCast(buf.datas[0].chunk.*.stride);
-        if (sstride == 0) sstride = buf.datas[0].chunk.*.size / data.size.height;
-        const ostride = @min(sstride, dstride);
-
-        var src: [*]u8 = @ptrCast(sdata);
-        var dst: [*]u8 = @ptrCast(ddata);
-
-        if (data.format.media_subtype == pw.c.SPA_MEDIA_SUBTYPE_dsp) {
-            for (0..data.size.height) |_| {
-                const pixel: [*]Pixel = @ptrCast(@alignCast(src));
-                for (0..data.size.width) |j| {
-                    dst[j * 4 + 0] = @intFromFloat(std.math.clamp(pixel[j].r * 255.0, 0, 255));
-                    dst[j * 4 + 1] = @intFromFloat(std.math.clamp(pixel[j].g * 255.0, 0, 255));
-                    dst[j * 4 + 2] = @intFromFloat(std.math.clamp(pixel[j].b * 255.0, 0, 255));
-                    dst[j * 4 + 3] = @intFromFloat(std.math.clamp(pixel[j].a * 255.0, 0, 255));
-                }
-                src += sstride;
-                dst += @intCast(dstride);
-            }
-        } else {
-            for (0..data.size.height) |_| {
-                @memcpy(dst[0..@intCast(ostride)], src[0..@intCast(ostride)]);
-                src += sstride;
-                dst += @intCast(dstride);
-            }
-        }
+/// Process a window event.
+fn windowEvent(cb: zin.Callback(.{ .static = .main })) void {
+    switch (cb) {
+        .close => zin.quitMainLoop(),
+        .window_size => {},
+        .draw => |d| render(d),
+        .timer => {
+            pipewireFlush();
+            zin.staticWindow(.main).invalidate();
+        },
+        else => {},
    }
-
-    _ = sdl.SDL_RenderClear(data.renderer);
-    // now render the video and then the cursor if any
-    _ = sdl.SDL_RenderTexture(data.renderer, data.texture, &data.rect, null);
-    if (render_cursor) _ = sdl.SDL_RenderTexture(
-        data.renderer,
-        data.cursor,
-        null,
-        &data.cursor_rect,
-    );
-    _ = sdl.SDL_RenderPresent(data.renderer);
 }
 
-fn handleEvents(data: *Data) void {
-    var event: sdl.SDL_Event = undefined;
-    while (sdl.SDL_PollEvent(&event)) {
-        switch (event.type) {
-            sdl.SDL_EVENT_QUIT => {
-                _ = pw.c.pw_main_loop_quit(data.loop);
-            },
-            else => {},
+/// Flush all pending pipewire events.
+fn pipewireFlush() void {
+    while (true) {
+        const result = pw.c.pw_loop_iterate(pw.c.pw_main_loop_get_loop(global.loop), 0);
+        if (result == 0) break;
+        if (result < 0) {
+            std.log.err("pipewire error {}", .{result});
+            zin.quitMainLoop();
+            break;
        }
    }
 }
 
+/// Handle the stream state changing.
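+///
+/// We connected with PW_STREAM_FLAG_INACTIVE, so the stream parks in the PAUSED state after
+/// connecting; we call pw_stream_set_active from here once that state is reached.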
 fn onStreamStateChanged(
    userdata: ?*anyopaque,
    old: pw.c.pw_stream_state,
@@ -377,38 +308,39 @@ fn onStreamStateChanged(
    err: [*c]const u8,
 ) callconv(.c) void {
    _ = old;
-    _ = err;
-    const data: *Data = @ptrCast(@alignCast(userdata));
-    log.info("stream state: \"{s}\"", .{pw.c.pw_stream_state_as_string(state)});
-    switch (state) {
-        pw.c.PW_STREAM_STATE_UNCONNECTED => _ = pw.c.pw_main_loop_quit(data.loop),
-        // because we started inactive, activate ourselves now
-        pw.c.PW_STREAM_STATE_PAUSED => _ = pw.c.pw_stream_set_active(data.stream, true),
-        else => {},
+    _ = userdata;
+
+    global.current_buffer = null;
+
+    if (err != null) {
+        log.err("stream state: \"{s}\" (error={s})", .{ pw.c.pw_stream_state_as_string(state), err });
+    } else {
+        log.info("stream state: \"{s}\"", .{pw.c.pw_stream_state_as_string(state)});
+    }
+
+    if (state == pw.c.PW_STREAM_STATE_PAUSED) {
+        check(pw.c.pw_stream_set_active(global.stream, true));
+    }
+
+    if (state != pw.c.PW_STREAM_STATE_STREAMING) {
+        startTimerNanos(global.default_timer_period_ns);
    }
 }
 
+/// Handle the stream IO state changing.
 fn onStreamIoChanged(userdata: ?*anyopaque, id: u32, area: ?*anyopaque, size: u32) callconv(.c) void {
    _ = size;
-    const data: *Data = @ptrCast(@alignCast(userdata));
+    _ = userdata;
    if (id == pw.c.SPA_IO_Position) {
-        data.position = @ptrCast(@alignCast(area));
+        global.position = @ptrCast(@alignCast(area));
    }
 }
 
-// Be notified when the stream param changes. We're only looking at the
-// format changes.
-//
-// We are now supposed to call pw_stream_finish_format() with success or
-// failure, depending on if we can support the format. Because we gave
-// a list of supported formats, this should be ok.
-//
-// As part of pw_stream_finish_format() we can provide parameters that
-// will control the buffer memory allocation. This includes the metadata
-// that we would like on our buffer, the size, alignment, etp.
+/// Handle the stream parameters changing.
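+///
+/// When the format param arrives we parse it, compute the stride and total buffer size we need,
+/// and answer with pw_stream_update_params so pipewire knows how to allocate our buffers.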
 fn onStreamParamChanged(userdata: ?*anyopaque, id: u32, param: [*c]const pw.c.spa_pod) callconv(.c) void {
-    const data: *Data = @ptrCast(@alignCast(userdata));
-    const stream = data.stream;
+    _ = userdata;
+
+    const stream = global.stream;
    var params_buffer: [1024]u8 align(@alignOf(u32)) = undefined;
    var b: pw.c.spa_pod_builder = .{
        .data = &params_buffer,
@@ -418,383 +350,419 @@ fn onStreamParamChanged(userdata: ?*anyopaque, id: u32, param: [*c]const pw.c.sp
        .callbacks = .{ .funcs = null, .data = null },
    };
 
+    // Tag params aren't handled by this example
    if (param != null and id == pw.c.SPA_PARAM_Tag) {
        log.err("invalid pod", .{});
        return;
    }
+
+    // Handle latency changing
    if (param != null and id == pw.c.SPA_PARAM_Latency) {
        var info: pw.c.spa_latency_info = undefined;
        if (pw.c.spa_latency_parse(param, &info) >= 0) {
-            log.info("got latency: {}", .{@divTrunc((info.min_ns + info.max_ns), 2)});
+            log.info("got latency: {}ns", .{@divTrunc((info.min_ns + info.max_ns), 2)});
        }
        return;
    }
-    // NULL means to clear the format
+
+    // A null param clears the format; anything other than a new format is ignored
    if (param == null or id != pw.c.SPA_PARAM_Format) return;
 
+    // Log the new format
    log.info("got format:", .{});
-    _ = pw.c.spa_debug_format(2, null, param);
+    check(pw.c.spa_debug_format(2, null, param));
 
-    if (pw.c.spa_format_parse(param, &data.format.media_type, &data.format.media_subtype) < 0) {
+    // Parse the new format and reset our timer to the new frame interval
+    var parsed: pw.c.spa_video_info_raw = undefined;
+    if (pw.c.spa_format_video_raw_parse(param, &parsed) < 0) {
+        std.debug.panic("failed to parse format", .{});
+    }
+    if (pw.c.spa_format_parse(param, &global.format.media_type, &global.format.media_subtype) < 0) {
        return;
    }
-
-    if (data.format.media_type != pw.c.SPA_MEDIA_TYPE_video) return;
-
-    const sdl_format, const mult: i32 = switch (data.format.media_subtype) {
+    if (global.format.media_type != pw.c.SPA_MEDIA_TYPE_video) return;
+    if (parsed.framerate.num != 0) {
+        const num: f32 = @floatFromInt(parsed.framerate.num);
+        const denom: f32 = @floatFromInt(parsed.framerate.denom);
+        const seconds_per_frame = denom / num;
+        startTimerNanos(@intFromFloat(seconds_per_frame * std.time.ns_per_s));
+    }
+
+    // Check what format we got
+    const format, const mult: i32 = switch (global.format.media_subtype) {
        pw.c.SPA_MEDIA_SUBTYPE_raw => b: {
            // call a helper function to parse the format for us.
- _ = pw.c.spa_format_video_raw_parse(param, &data.format.info.raw); - data.size = pw.c.SPA_RECTANGLE(data.format.info.raw.size.width, data.format.info.raw.size.height); - break :b .{ idToSdlFormat(data.format.info.raw.format), 1 }; + _ = pw.c.spa_format_video_raw_parse(param, &global.format.info.raw); + global.size = pw.c.SPA_RECTANGLE(global.format.info.raw.size.width, global.format.info.raw.size.height); + break :b .{ global.format.info.raw.format, 1 }; }, pw.c.SPA_MEDIA_SUBTYPE_dsp => b: { - _ = pw.c.spa_format_video_dsp_parse(param, &data.format.info.dsp); - if (data.format.info.dsp.format != pw.c.SPA_VIDEO_FORMAT_DSP_F32) return; - data.size = pw.c.SPA_RECTANGLE(data.position.?.video.size.width, data.position.?.video.size.height); - break :b .{ sdl.SDL_PIXELFORMAT_RGBA32, 4 }; + check(pw.c.spa_format_video_dsp_parse(param, &global.format.info.dsp)); + if (global.format.info.dsp.format != pw.c.SPA_VIDEO_FORMAT_DSP_F32) return; + global.size = pw.c.SPA_RECTANGLE(global.position.?.video.size.width, global.position.?.video.size.height); + break :b .{ pw.c.SPA_VIDEO_FORMAT_DSP_F32, 4 }; }, - else => .{ sdl.SDL_PIXELFORMAT_UNKNOWN, 0 }, + else => .{ pw.c.SPA_VIDEO_FORMAT_UNKNOWN, 0 }, }; - - if (sdl_format == sdl.SDL_PIXELFORMAT_UNKNOWN) { + if (format == pw.c.SPA_VIDEO_FORMAT_UNKNOWN) { _ = pw.c.pw_stream_set_error(stream, -pw.c.EINVAL, "unknown pixel format"); return; } - if (data.size.width == 0 or data.size.height == 0) { + if (global.size.width == 0 or global.size.height == 0) { _ = pw.c.pw_stream_set_error(stream, -pw.c.EINVAL, "invalid size"); return; } - data.texture = sdl.SDL_CreateTexture( - data.renderer, - sdl_format, - sdl.SDL_TEXTUREACCESS_STREAMING, - @intCast(data.size.width), - @intCast(data.size.height), - ); - var d: ?*anyopaque = null; - const size: i32, const blocks: i32 = switch (sdl_format) { - sdl.SDL_PIXELFORMAT_YV12, sdl.SDL_PIXELFORMAT_IYUV => b: { - data.stride = @intCast(data.size.width); - data.is_yuv = true; + // Check what size we got + const size: i32, const blocks: i32 = switch (format) { + pw.c.SPA_VIDEO_FORMAT_YV12, pw.c.SPA_VIDEO_FORMAT_I420 => b: { + global.stride = @intCast(global.size.width); + global.is_yuv = true; break :b .{ - @divExact((data.stride * @as(i32, @intCast(data.size.height))) * 3, 2), + @divExact((global.stride * @as(i32, @intCast(global.size.height))) * 3, 2), 3, }; }, - sdl.SDL_PIXELFORMAT_YUY2 => b: { - data.is_yuv = true; - data.stride = @intCast(data.size.width * 2); + pw.c.SPA_VIDEO_FORMAT_YUY2 => b: { + global.is_yuv = true; + global.stride = @intCast(global.size.width * 2); break :b .{ - data.stride * @as(i32, @intCast(data.size.height)), + global.stride * @as(i32, @intCast(global.size.height)), 1, }; }, else => b: { - if (!sdl.SDL_LockTexture(data.texture, null, &d, &data.stride)) { - log.err("Couldn't lock texture: {s}", .{sdl.SDL_GetError()}); - data.stride = @intCast(data.size.width * 2); - } else { - sdl.SDL_UnlockTexture(data.texture); - } + global.stride = @intCast(global.size.width * 2); break :b .{ - data.stride * @as(i32, @intCast(data.size.height)), + global.stride * @as(i32, @intCast(global.size.height)), 1, }; }, }; - data.rect.x = 0; - data.rect.y = 0; - data.rect.w = @floatFromInt(data.size.width); - data.rect.h = @floatFromInt(data.size.height); + // Update the global rect + global.rect.x = 0; + global.rect.y = 0; + global.rect.w = @floatFromInt(global.size.width); + global.rect.h = @floatFromInt(global.size.height); - // a SPA_TYPE_OBJECT_ParamBuffers object defines the acceptable size, - // number, stride etc 
-    var params_buf: [5]?*const pw.c.spa_pod = undefined;
+    // Specify our buffer options
+    var params_buf: [3]?*const pw.c.spa_pod = undefined;
    var params: std.ArrayList(?*const pw.c.spa_pod) = .initBuffer(&params_buf);
-    var f: pw.c.spa_pod_frame = undefined;
-
-    _ = pw.c.spa_pod_builder_push_object(
-        &b,
-        &f,
-        pw.c.SPA_TYPE_OBJECT_ParamBuffers,
-        pw.c.SPA_PARAM_Buffers,
-    );
-    _ = pw.c.spa_pod_builder_add(
-        &b,
-
-        pw.c.SPA_PARAM_BUFFERS_buffers,
-        "?ri",
-        @as(c_int, 3),
-        @as(c_int, 8),
-        @as(c_int, 2),
-        @as(c_int, max_buffers),
-
-        pw.c.SPA_PARAM_BUFFERS_blocks,
-        "i",
-        blocks,
-
-        pw.c.SPA_PARAM_BUFFERS_size,
-        "i",
-        size * mult,
-
-        pw.c.SPA_PARAM_BUFFERS_stride,
-        "i",
-        data.stride * mult,
-
-        pw.c.SPA_PARAM_BUFFERS_dataType,
-        "?fi",
-        @as(c_int, 1),
-        @as(c_int, 1 << pw.c.SPA_DATA_MemPtr),
-
-        @as(c_int, 0),
-    );
-    params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &f)))) catch @panic("OOB");
-
-    // a header metadata with timing information
-    _ = pw.c.spa_pod_builder_push_object(
-        &b,
-        &f,
-        pw.c.SPA_TYPE_OBJECT_ParamMeta,
-        pw.c.SPA_PARAM_Meta,
-    );
-    _ = pw.c.spa_pod_builder_add(
-        &b,
-
-        pw.c.SPA_PARAM_META_type,
-        "I",
-        pw.c.SPA_META_Header,
-
-        pw.c.SPA_PARAM_META_size,
-        "i",
-        @as(usize, @sizeOf(pw.c.spa_meta_header)),
-
-        @as(c_int, 0),
-    );
-    params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &f)))) catch @panic("OOB");
-
-    // video cropping information
-    _ = pw.c.spa_pod_builder_push_object(
-        &b,
-        &f,
-        pw.c.SPA_TYPE_OBJECT_ParamMeta,
-        pw.c.SPA_PARAM_Meta,
-    );
-    _ = pw.c.spa_pod_builder_add(
-        &b,
-
-        pw.c.SPA_PARAM_META_type,
-        "I",
-        pw.c.SPA_META_VideoCrop,
-
-        pw.c.SPA_PARAM_META_size,
-        "i",
-        @as(usize, @sizeOf(pw.c.spa_meta_region)),
-
-        @as(c_int, 0),
-    );
-    params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &f)))) catch @panic("OOB");
-
-    // cursor information
-    _ = pw.c.spa_pod_builder_push_object(
-        &b,
-        &f,
-        pw.c.SPA_TYPE_OBJECT_ParamMeta,
-        pw.c.SPA_PARAM_Meta,
-    );
-    _ = pw.c.spa_pod_builder_add(
-        &b,
-
-        pw.c.SPA_PARAM_META_type,
-        "I",
-        pw.c.SPA_META_Cursor,
-
-        pw.c.SPA_PARAM_META_size,
-        "?ri",
-        @as(c_int, 3),
-        cursorMetaSize(64, 64),
-        cursorMetaSize(1, 1),
-        cursorMetaSize(256, 256),
-
-        @as(c_int, 0),
-    );
-    params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &f)))) catch @panic("OOB");
-
-    // we are done
-    _ = pw.c.pw_stream_update_params(stream, params.items.ptr, @intCast(params.items.len));
-}
+    {
+        var param_buffers_frame: pw.c.spa_pod_frame = undefined;
+        check(pw.c.spa_pod_builder_push_object(
+            &b,
+            &param_buffers_frame,
+            pw.c.SPA_TYPE_OBJECT_ParamBuffers,
+            pw.c.SPA_PARAM_Buffers,
+        ));
+
+        {
+            var choice_frame: pw.c.spa_pod_frame = undefined;
+            check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_buffers, 0));
+            check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Range, 0));
+            check(pw.c.spa_pod_builder_int(&b, 8));
+            check(pw.c.spa_pod_builder_int(&b, 2));
+            check(pw.c.spa_pod_builder_int(&b, max_buffers));
+            assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null);
+        }
 
-fn cursorMetaSize(w: usize, h: usize) usize {
-    return @sizeOf(pw.c.spa_meta_cursor) + @sizeOf(pw.c.spa_meta_bitmap) + w * h * 4;
-}
+        check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_blocks, 0));
 
-fn buildFormat(data: *Data, b: *pw.c.spa_pod_builder, params: *std.ArrayList(?*const pw.c.spa_pod)) void {
-    {
-        const format = sdlBuildFormats(data.renderer.?, b);
-        log.info("supported SDL formats:", .{});
SDL formats:", .{}); - _ = pw.c.spa_debug_format(2, null, format); - params.appendBounded(format) catch @panic("OOB"); + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_size, 0)); + check(pw.c.spa_pod_builder_int(&b, size * mult)); + + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_stride, 0)); + check(pw.c.spa_pod_builder_int(&b, global.stride * mult)); + + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_stride, 0)); + check(pw.c.spa_pod_builder_int(&b, global.stride * mult)); + + { + var choice_frame: pw.c.spa_pod_frame = undefined; + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_dataType, 0)); + check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Range, 0)); + check(pw.c.spa_pod_builder_int(&b, 8)); + check(pw.c.spa_pod_builder_int(&b, 2)); + check(pw.c.spa_pod_builder_int(&b, max_buffers)); + assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null); + } + + { + var choice_frame: pw.c.spa_pod_frame = undefined; + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_BUFFERS_dataType, 0)); + check(pw.c.spa_pod_builder_push_choice(&b, &choice_frame, pw.c.SPA_CHOICE_Flags, 0)); + check(pw.c.spa_pod_builder_int(&b, 1 << pw.c.SPA_DATA_MemPtr)); + assert(pw.c.spa_pod_builder_pop(&b, &choice_frame) != null); + } + + params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, ¶m_buffers_frame)))) catch @panic("OOB"); } + // Specify the timing options { - var f: pw.c.spa_pod_frame = undefined; - _ = pw.c.spa_pod_builder_push_object(b, &f, pw.c.SPA_TYPE_OBJECT_Format, pw.c.SPA_PARAM_EnumFormat); - _ = pw.c.spa_pod_builder_add( - b, - pw.c.SPA_FORMAT_mediaType, - "I", - pw.c.SPA_MEDIA_TYPE_video, + var timing_frame: pw.c.spa_pod_frame = undefined; + check(pw.c.spa_pod_builder_push_object( + &b, + &timing_frame, + pw.c.SPA_TYPE_OBJECT_ParamMeta, + pw.c.SPA_PARAM_Meta, + )); + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_META_type, 0)); + check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_META_Header)); + + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_META_size, 0)); + check(pw.c.spa_pod_builder_int(&b, @sizeOf(pw.c.spa_meta_header))); + + params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &timing_frame)))) catch @panic("OOB"); + } - pw.c.SPA_FORMAT_mediaSubtype, - "I", - pw.c.SPA_MEDIA_SUBTYPE_dsp, + // Specify the cropping options + { + var crop_frame: pw.c.spa_pod_frame = undefined; + check(pw.c.spa_pod_builder_push_object( + &b, + &crop_frame, + pw.c.SPA_TYPE_OBJECT_ParamMeta, + pw.c.SPA_PARAM_Meta, + )); - pw.c.SPA_FORMAT_VIDEO_format, + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_META_type, 0)); + check(pw.c.spa_pod_builder_id(&b, pw.c.SPA_META_VideoCrop)); - "I", - pw.c.SPA_VIDEO_FORMAT_DSP_F32, + check(pw.c.spa_pod_builder_prop(&b, pw.c.SPA_PARAM_META_size, 0)); + check(pw.c.spa_pod_builder_id(&b, @sizeOf(pw.c.spa_meta_region))); - @as(c_int, 0), - ); - const format: *const pw.c.spa_pod = @ptrCast(@alignCast(pw.c.spa_pod_builder_pop(b, &f))); - _ = pw.c.spa_debug_format(2, null, format); - params.appendBounded(format) catch @panic("OOB"); + params.appendBounded(@ptrCast(@alignCast(pw.c.spa_pod_builder_pop(&b, &crop_frame)))) catch @panic("OOB"); } -} -const FormatPair = struct { - format: u32, - id: u32, -}; + // Success + check(pw.c.pw_stream_update_params(stream, params.items.ptr, @intCast(params.items.len))); +} -const sdl_video_formats = [_]FormatPair{ - .{ .format = sdl.SDL_PIXELFORMAT_UNKNOWN, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN }, - .{ .format = sdl.SDL_PIXELFORMAT_INDEX1LSB, 
-    .{ .format = sdl.SDL_PIXELFORMAT_UNKNOWN, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_INDEX1LSB, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_INDEX1MSB, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_INDEX4LSB, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_INDEX4MSB, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_INDEX8, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGB332, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_XRGB4444, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_XRGB1555, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_XBGR1555, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_ARGB4444, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGBA4444, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_ABGR4444, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGRA4444, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_ARGB1555, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGBA5551, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_ABGR1555, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGRA5551, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGB565, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGR565, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGB24, .id = pw.c.SPA_VIDEO_FORMAT_BGR },
-    .{ .format = sdl.SDL_PIXELFORMAT_XRGB8888, .id = pw.c.SPA_VIDEO_FORMAT_BGR },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGBX8888, .id = pw.c.SPA_VIDEO_FORMAT_xBGR },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGR24, .id = pw.c.SPA_VIDEO_FORMAT_RGB },
-    .{ .format = sdl.SDL_PIXELFORMAT_XBGR8888, .id = pw.c.SPA_VIDEO_FORMAT_RGB },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGRX8888, .id = pw.c.SPA_VIDEO_FORMAT_xRGB },
-    .{ .format = sdl.SDL_PIXELFORMAT_ARGB2101010, .id = pw.c.SPA_VIDEO_FORMAT_UNKNOWN },
-    .{ .format = sdl.SDL_PIXELFORMAT_RGBA8888, .id = pw.c.SPA_VIDEO_FORMAT_ABGR },
-    .{ .format = sdl.SDL_PIXELFORMAT_ARGB8888, .id = pw.c.SPA_VIDEO_FORMAT_BGRA },
-    .{ .format = sdl.SDL_PIXELFORMAT_BGRA8888, .id = pw.c.SPA_VIDEO_FORMAT_ARGB },
-    .{ .format = sdl.SDL_PIXELFORMAT_ABGR8888, .id = pw.c.SPA_VIDEO_FORMAT_RGBA },
-    .{ .format = sdl.SDL_PIXELFORMAT_YV12, .id = pw.c.SPA_VIDEO_FORMAT_YV12 },
-    .{ .format = sdl.SDL_PIXELFORMAT_IYUV, .id = pw.c.SPA_VIDEO_FORMAT_I420 },
-    .{ .format = sdl.SDL_PIXELFORMAT_YUY2, .id = pw.c.SPA_VIDEO_FORMAT_YUY2 },
-    .{ .format = sdl.SDL_PIXELFORMAT_UYVY, .id = pw.c.SPA_VIDEO_FORMAT_UYVY },
-    .{ .format = sdl.SDL_PIXELFORMAT_YVYU, .id = pw.c.SPA_VIDEO_FORMAT_YVYU },
-    .{ .format = sdl.SDL_PIXELFORMAT_NV12, .id = pw.c.SPA_VIDEO_FORMAT_NV12 },
-    .{ .format = sdl.SDL_PIXELFORMAT_NV21, .id = pw.c.SPA_VIDEO_FORMAT_NV21 },
-};
 
+/// Process a new buffer.
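+///
+/// Called on the pipewire loop. Drains the queue, keeps only the newest buffer for render() to
+/// consume, and requeues the rest so the stream never starves for buffers.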
+fn onProcess(userdata: ?*anyopaque) callconv(.c) void {
+    _ = userdata;
+    const stream = global.stream;
 
-fn sdlFormatToId(format: u32) u32 {
-    for (sdl_video_formats) |f| {
-        if (f.format == format) {
-            return f.id;
+    var maybe_buffer: ?*pw.c.pw_buffer = null;
+    while (true) {
+        const t = pw.c.pw_stream_dequeue_buffer(stream) orelse break;
+        if (maybe_buffer) |b| check(pw.c.pw_stream_queue_buffer(stream, b));
+        maybe_buffer = t;
+    }
+    if (maybe_buffer) |b| {
+        if (global.current_buffer) |current| {
+            check(pw.c.pw_stream_queue_buffer(stream, current));
        }
+        global.current_buffer = b;
    }
-    return pw.c.SPA_VIDEO_FORMAT_UNKNOWN;
 }
 
-fn idToSdlFormat(id: u32) u32 {
-    for (sdl_video_formats) |f| {
-        if (f.id == id) {
-            return f.format;
+/// Render the current buffer.
+fn render(draw: zin.Draw(.{ .static = .main })) void {
+    // Early out if we're redrawing too fast (e.g. during a resize)
+    {
+        const now = std.time.Instant.now() catch |err| @panic(@errorName(err));
+        if (global.last_render) |last_render| {
+            const elapsed_ns = now.since(last_render);
+            if (elapsed_ns < global.timer_period_ns / 2) return;
        }
+        global.last_render = now;
    }
-    return sdl.SDL_PIXELFORMAT_UNKNOWN;
-}
 
-fn sdlBuildFormats(renderer: *sdl.SDL_Renderer, b: *pw.c.spa_pod_builder) *pw.c.spa_pod {
-    var f: [2]pw.c.spa_pod_frame = undefined;
-
-    // make an object of type SPA_TYPE_OBJECT_Format and id SPA_PARAM_EnumFormat. The object type is
-    // important because it defines the properties that are acceptable. The id gives more context
-    // about what the object is meant to contain. In this case we enumerate supported formats.
-    _ = pw.c.spa_pod_builder_push_object(b, &f[0], pw.c.SPA_TYPE_OBJECT_Format, pw.c.SPA_PARAM_EnumFormat);
-    // add media type and media subtype properties
-    _ = pw.c.spa_pod_builder_prop(b, pw.c.SPA_FORMAT_mediaType, 0);
-    _ = pw.c.spa_pod_builder_id(b, pw.c.SPA_MEDIA_TYPE_video);
-    _ = pw.c.spa_pod_builder_prop(b, pw.c.SPA_FORMAT_mediaSubtype, 0);
-    _ = pw.c.spa_pod_builder_id(b, pw.c.SPA_MEDIA_SUBTYPE_raw);
-
-    // build an enumeration of formats
-    _ = pw.c.spa_pod_builder_prop(b, pw.c.SPA_FORMAT_VIDEO_format, 0);
-    _ = pw.c.spa_pod_builder_push_choice(b, &f[1], pw.c.SPA_CHOICE_Enum, 0);
-
-    const props: sdl.SDL_PropertiesID = sdl.SDL_GetRendererProperties(renderer);
-
-    const texture_formats: [*]sdl.SDL_PixelFormat = @ptrCast(@alignCast(sdl.SDL_GetPointerProperty(
-        props,
-        sdl.SDL_PROP_RENDERER_TEXTURE_FORMATS_POINTER,
-        null,
-    )));
-
-    // first the formats supported by the textures
-    var i: u32 = 0;
-    var ci: u32 = 0;
-    while (texture_formats[i] != sdl.SDL_PIXELFORMAT_UNKNOWN) : (i += 1) {
-        const id: u32 = sdlFormatToId(texture_formats[i]);
-        if (id == 0) continue;
-        if (ci == 0) _ = pw.c.spa_pod_builder_id(b, pw.c.SPA_VIDEO_FORMAT_UNKNOWN);
-        ci += 1;
-        _ = pw.c.spa_pod_builder_id(b, id);
+    // Clear the screen
+    draw.clear();
+
+    // Render the current frame
+    const client_size = zin.staticWindow(.main).getClientSize();
+
+    const buf: *pw.c.spa_buffer = (global.current_buffer orelse {
+        draw.text("waiting for webcam...", @divTrunc(client_size.x, 2) - 50, @divTrunc(client_size.y, 2), .white);
+        return;
+    }).buffer;
+
+    const sdata = buf.datas[0].data orelse return;
+
+    const maybe_mc: ?*pw.c.spa_meta_region = @ptrCast(@alignCast(pw.c.spa_buffer_find_meta_data(buf, pw.c.SPA_META_VideoCrop, @sizeOf(pw.c.spa_meta_region))));
+    if (maybe_mc) |mc| {
+        if (pw.c.spa_meta_region_is_valid(mc)) {
+            global.rect.x = @floatFromInt(mc.region.position.x);
+            global.rect.y = @floatFromInt(mc.region.position.y);
+            global.rect.w = @floatFromInt(mc.region.size.width);
+            global.rect.h = @floatFromInt(mc.region.size.height);
+        }
+    }
 
-    // then all the other ones SDL can convert from/to
-    for (sdl_video_formats) |format| {
-        const id: u32 = format.id;
-        if (id != pw.c.SPA_VIDEO_FORMAT_UNKNOWN) {
-            _ = pw.c.spa_pod_builder_id(b, id);
-        }
-    }
+
+    if (global.is_yuv and buf.n_datas == 1) {
+        const sstride = global.stride;
+        const udata: [*]u8 = @ptrCast(sdata);
+        const size = zin.staticWindow(.main).getClientSize();
+        const rect_size = zin.scale(i32, texel_width, draw.getDpiScale().x);
+        for (0..@intCast(@min(size.y, global.size.height))) |y| {
+            var x: usize = 0;
+            while (x < @min(size.x, global.size.width)) : (x += 2) {
+                const i: usize = @intCast(y * @as(usize, @intCast(sstride)) + x * 2);
+                const colors = yuyvToRgb(udata[i..][0..4].*);
+                draw.rect(
+                    .ltwh(
+                        @as(i32, @intCast(x)) * rect_size,
+                        @as(i32, @intCast(y)) * rect_size,
+                        rect_size,
+                        rect_size,
+                    ),
+                    colors[0],
+                );
+                draw.rect(
+                    .ltwh(
+                        (@as(i32, @intCast(x)) + 1) * rect_size,
+                        @as(i32, @intCast(y)) * rect_size,
+                        rect_size,
+                        rect_size,
+                    ),
+                    colors[1],
+                );
+            }
+        }
+    } else {
+        draw.text(
+            "unsupported format...",
+            @divTrunc(client_size.x, 2) - 50,
+            @divTrunc(client_size.y, 2),
+            .white,
+        );
+        return;
+    }
+}
+
+pub fn clampUnorm(val: anytype) u8 {
+    return @intCast(std.math.clamp(val, 0, 255));
+}
+
+fn yuyvToRgb(yuyv: [4]u8) [2]zin.Rgb8 {
+    const d = @as(i32, yuyv[1]) - 128;
+    const e = @as(i32, yuyv[3]) - 128;
+    const c0 = @as(i32, yuyv[0]) - 16;
+    const c1 = @as(i32, yuyv[2]) - 16;
+    return .{
+        .{
+            .r = clampUnorm(((298 * c0) + (409 * e) + 128) >> 8),
+            .g = clampUnorm(((298 * c0) - (100 * d) - (208 * e) + 128) >> 8),
+            .b = clampUnorm(((298 * c0) + (516 * d) + 128) >> 8),
+        },
+        .{
+            .r = clampUnorm(((298 * c1) + (409 * e) + 128) >> 8),
+            .g = clampUnorm(((298 * c1) - (100 * d) - (208 * e) + 128) >> 8),
+            .b = clampUnorm(((298 * c1) + (516 * d) + 128) >> 8),
+        },
+    };
+}
+
+fn check(res: c_int) void {
+    if (res != 0) {
+        std.debug.panic("pipewire call failed: {s}", .{pw.c.spa_strerror(res)});
    }
-    _ = pw.c.spa_pod_builder_id(b, pw.c.SPA_VIDEO_FORMAT_RGBA_F32);
-    _ = pw.c.spa_pod_builder_pop(b, &f[1]);
 
-    // add size and framerate ranges
-    const max_texture_size: u32 = @intCast(sdl.SDL_GetNumberProperty(
-        props,
-        sdl.SDL_PROP_RENDERER_MAX_TEXTURE_SIZE_NUMBER,
-        0,
-    ));
-    _ = pw.c.spa_pod_builder_add(
-        b,
-        pw.c.SPA_FORMAT_VIDEO_size,
-        pw.c.SPA_POD_CHOICE_RANGE_Rectangle(
-            &pw.c.SPA_RECTANGLE(width, height),
-            &pw.c.SPA_RECTANGLE(1, 1),
-            &pw.c.SPA_RECTANGLE(max_texture_size, max_texture_size),
-        ),
-        pw.c.SPA_FORMAT_VIDEO_framerate,
-        pw.c.SPA_POD_CHOICE_RANGE_Fraction(
-            &pw.c.SPA_FRACTION(rate, 1),
-            &pw.c.SPA_FRACTION(0, 1),
-            &pw.c.SPA_FRACTION(30, 1),
-        ),
-        @as(c_int, 0),
-    );
-    return @ptrCast(@alignCast(pw.c.spa_pod_builder_pop(b, &f[0])));
 }
+
+fn startTimerNanos(ns: u64) void {
+    global.timer_period_ns = ns;
+    zin.staticWindow(.main).startTimerNanos({}, ns);
+}
+
+const Rect = struct {
+    x: f32 = 0,
+    y: f32 = 0,
+    w: f32 = 0,
+    h: f32 = 0,
+};
+
+const all_formats: []const pw.c.spa_video_format = &.{
+    pw.c.SPA_VIDEO_FORMAT_ENCODED,
+    pw.c.SPA_VIDEO_FORMAT_I420,
+    pw.c.SPA_VIDEO_FORMAT_YV12,
+    pw.c.SPA_VIDEO_FORMAT_YUY2,
+    pw.c.SPA_VIDEO_FORMAT_UYVY,
+    pw.c.SPA_VIDEO_FORMAT_AYUV,
+    pw.c.SPA_VIDEO_FORMAT_RGBx,
+    pw.c.SPA_VIDEO_FORMAT_BGRx,
+    pw.c.SPA_VIDEO_FORMAT_xRGB,
+    pw.c.SPA_VIDEO_FORMAT_xBGR,
+    pw.c.SPA_VIDEO_FORMAT_RGBA,
+    pw.c.SPA_VIDEO_FORMAT_BGRA,
+    pw.c.SPA_VIDEO_FORMAT_ARGB,
+    pw.c.SPA_VIDEO_FORMAT_ABGR,
+    pw.c.SPA_VIDEO_FORMAT_RGB,
+    pw.c.SPA_VIDEO_FORMAT_BGR,
+    pw.c.SPA_VIDEO_FORMAT_Y41B,
+    pw.c.SPA_VIDEO_FORMAT_Y42B,
+    pw.c.SPA_VIDEO_FORMAT_YVYU,
+    pw.c.SPA_VIDEO_FORMAT_Y444,
+    pw.c.SPA_VIDEO_FORMAT_v210,
+    pw.c.SPA_VIDEO_FORMAT_v216,
+    pw.c.SPA_VIDEO_FORMAT_NV12,
+    pw.c.SPA_VIDEO_FORMAT_NV21,
+    pw.c.SPA_VIDEO_FORMAT_GRAY8,
+    pw.c.SPA_VIDEO_FORMAT_GRAY16_BE,
+    pw.c.SPA_VIDEO_FORMAT_GRAY16_LE,
+    pw.c.SPA_VIDEO_FORMAT_v308,
+    pw.c.SPA_VIDEO_FORMAT_RGB16,
+    pw.c.SPA_VIDEO_FORMAT_BGR16,
+    pw.c.SPA_VIDEO_FORMAT_RGB15,
+    pw.c.SPA_VIDEO_FORMAT_BGR15,
+    pw.c.SPA_VIDEO_FORMAT_UYVP,
+    pw.c.SPA_VIDEO_FORMAT_A420,
+    pw.c.SPA_VIDEO_FORMAT_RGB8P,
+    pw.c.SPA_VIDEO_FORMAT_YUV9,
+    pw.c.SPA_VIDEO_FORMAT_YVU9,
+    pw.c.SPA_VIDEO_FORMAT_IYU1,
+    pw.c.SPA_VIDEO_FORMAT_ARGB64,
+    pw.c.SPA_VIDEO_FORMAT_AYUV64,
+    pw.c.SPA_VIDEO_FORMAT_r210,
+    pw.c.SPA_VIDEO_FORMAT_I420_10BE,
+    pw.c.SPA_VIDEO_FORMAT_I420_10LE,
+    pw.c.SPA_VIDEO_FORMAT_I422_10BE,
+    pw.c.SPA_VIDEO_FORMAT_I422_10LE,
+    pw.c.SPA_VIDEO_FORMAT_Y444_10BE,
+    pw.c.SPA_VIDEO_FORMAT_Y444_10LE,
+    pw.c.SPA_VIDEO_FORMAT_GBR,
+    pw.c.SPA_VIDEO_FORMAT_GBR_10BE,
+    pw.c.SPA_VIDEO_FORMAT_GBR_10LE,
+    pw.c.SPA_VIDEO_FORMAT_NV16,
+    pw.c.SPA_VIDEO_FORMAT_NV24,
+    pw.c.SPA_VIDEO_FORMAT_NV12_64Z32,
+    pw.c.SPA_VIDEO_FORMAT_A420_10BE,
+    pw.c.SPA_VIDEO_FORMAT_A420_10LE,
+    pw.c.SPA_VIDEO_FORMAT_A422_10BE,
+    pw.c.SPA_VIDEO_FORMAT_A422_10LE,
+    pw.c.SPA_VIDEO_FORMAT_A444_10BE,
+    pw.c.SPA_VIDEO_FORMAT_A444_10LE,
+    pw.c.SPA_VIDEO_FORMAT_NV61,
+    pw.c.SPA_VIDEO_FORMAT_P010_10BE,
+    pw.c.SPA_VIDEO_FORMAT_P010_10LE,
+    pw.c.SPA_VIDEO_FORMAT_IYU2,
+    pw.c.SPA_VIDEO_FORMAT_VYUY,
+    pw.c.SPA_VIDEO_FORMAT_GBRA,
+    pw.c.SPA_VIDEO_FORMAT_GBRA_10BE,
+    pw.c.SPA_VIDEO_FORMAT_GBRA_10LE,
+    pw.c.SPA_VIDEO_FORMAT_GBR_12BE,
+    pw.c.SPA_VIDEO_FORMAT_GBR_12LE,
+    pw.c.SPA_VIDEO_FORMAT_GBRA_12BE,
+    pw.c.SPA_VIDEO_FORMAT_GBRA_12LE,
+    pw.c.SPA_VIDEO_FORMAT_I420_12BE,
+    pw.c.SPA_VIDEO_FORMAT_I420_12LE,
+    pw.c.SPA_VIDEO_FORMAT_I422_12BE,
+    pw.c.SPA_VIDEO_FORMAT_I422_12LE,
+    pw.c.SPA_VIDEO_FORMAT_Y444_12BE,
+    pw.c.SPA_VIDEO_FORMAT_Y444_12LE,
+    pw.c.SPA_VIDEO_FORMAT_RGBA_F16,
+    pw.c.SPA_VIDEO_FORMAT_RGBA_F32,
+    pw.c.SPA_VIDEO_FORMAT_xRGB_210LE,
+    pw.c.SPA_VIDEO_FORMAT_xBGR_210LE,
+    pw.c.SPA_VIDEO_FORMAT_RGBx_102LE,
+    pw.c.SPA_VIDEO_FORMAT_BGRx_102LE,
+    pw.c.SPA_VIDEO_FORMAT_ARGB_210LE,
+    pw.c.SPA_VIDEO_FORMAT_ABGR_210LE,
+    pw.c.SPA_VIDEO_FORMAT_RGBA_102LE,
+    pw.c.SPA_VIDEO_FORMAT_BGRA_102LE,
+    pw.c.SPA_VIDEO_FORMAT_DSP_F32,
+};
diff --git a/src/wrap/format.zig b/src/wrap/format.zig
index 58ccfd6..686457c 100644
--- a/src/wrap/format.zig
+++ b/src/wrap/format.zig
@@ -52,10 +52,10 @@ pub fn FmtFlags(T: type) type {
    };
 }
 
-// Forked from std to fix bug, will upstream.
+// https://codeberg.org/ziglang/zig/pulls/30156
 pub fn enumFromInt(comptime E: type, integer: anytype) ?E {
    const enum_info = @typeInfo(E).@"enum";
-    if (comptime !enum_info.is_exhaustive) {
+    if (!enum_info.is_exhaustive) {
        if (std.math.cast(enum_info.tag_type, integer)) |tag| {
            return @enumFromInt(tag);
        }