From 16620ea4c9d65c36fd710c947a81b5167b4f7125 Mon Sep 17 00:00:00 2001
From: Jerzy Wilczek
Date: Thu, 14 Nov 2024 11:33:48 +0100
Subject: [PATCH] Add a readme.

---
 vk-video/Cargo.toml |  8 +++-
 vk-video/README.md  | 92 +++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 98 insertions(+), 2 deletions(-)
 create mode 100644 vk-video/README.md

diff --git a/vk-video/Cargo.toml b/vk-video/Cargo.toml
index a31f2ff0d..650d3f926 100644
--- a/vk-video/Cargo.toml
+++ b/vk-video/Cargo.toml
@@ -6,12 +6,15 @@ authors = ["Software Mansion "]
 readme = "README.md"
 license = "MIT"
 repository = "https://github.com/software-mansion/live-compositor"
+rust-version = "1.81"
+description = "A library for hardware video coding using Vulkan Video, with wgpu integration."
+categories = ["multimedia::video", "multimedia::encoding", "hardware-support", "encoding", "graphics"]
+keywords = ["vulkan", "video", "wgpu", "decoding", "h264"]
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
 ash = "0.38.0"
-bytemuck = { version = "1.19.0", features = ["derive"] }
 bytes = "1"
 derivative = "2.2.0"
 h264-reader = { git = "https://github.com/membraneframework-labs/h264-reader.git", branch = "live-compositor" }
@@ -21,9 +24,10 @@ vk-mem = "0.4.0"
 wgpu = "23.0.0"
 
 [dev-dependencies]
+bytemuck = { version = "1.19.0", features = ["derive"] }
+clap = { version = "4.5.20", features = ["derive"] }
 tracing-subscriber = "0.3.18"
 winit = "0.29"
-clap = { version = "4.5.20", features = ["derive"] }
 
 [build-dependencies]
 cfg_aliases = "0.2.1"

diff --git a/vk-video/README.md b/vk-video/README.md
new file mode 100644
index 000000000..874d593bf
--- /dev/null
+++ b/vk-video/README.md
@@ -0,0 +1,92 @@

# vk-video

A library for hardware video coding using Vulkan Video, with [wgpu] integration.

[![Crates.io][crates-badge]][crates-url]
[![docs.rs][docs-badge]][docs-url]
[![MIT licensed][mit-badge]][mit-url]
[![Build Status][actions-badge]][actions-url]

[crates-badge]: https://img.shields.io/crates/v/vk-video
[crates-url]: https://crates.io/crates/vk-video
[mit-badge]: https://img.shields.io/badge/license-MIT-blue.svg
[mit-url]: https://github.com/software-mansion/live-compositor/blob/master/vk-video/LICENSE
[actions-badge]: https://github.com/software-mansion/live-compositor/actions/workflows/test.yml/badge.svg
[actions-url]: https://github.com/software-mansion/live-compositor/actions/workflows/test.yml?query=branch%3Amaster
[docs-badge]: https://img.shields.io/docsrs/vk-video
[docs-url]: https://docs.rs/vk-video/latest/vk-video/

## Overview

The goal of this library is to provide easy access to hardware video coding. You can use it to decode a video frame into a `Vec<u8>` with pixel data, or into a [`wgpu::Texture`]. Currently, we only support H.264 (also known as AVC or MPEG-4 Part 10) decoding, but we plan to support at least H.264 encoding and hopefully other codecs supported by Vulkan Video.

An advantage of using this library with wgpu is that decoded video frames never leave GPU memory. The frames are not copied to RAM and back to the GPU, so using them for rendering should be quite fast.
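
As a taste of the rendering path, the sketch below shows one way to prepare a decoded frame for sampling in a render pass using plain wgpu calls (the Usage section below shows how to obtain the frames themselves). It assumes the decoded `wgpu::Texture` uses the NV12 format, as is typical for hardware H.264 decoders, and that a matching `wgpu::BindGroupLayout` already exists. Treat it as an illustration, not as part of this crate's API.

```rs
// A sketch, not part of vk-video: prepare a decoded NV12 frame for sampling.
// `texture` is the wgpu::Texture taken from a decoded frame and `device` is a
// wgpu::Device (for example the one exposed by the vk-video device).
fn bind_decoded_frame(
    device: &wgpu::Device,
    layout: &wgpu::BindGroupLayout,
    texture: &wgpu::Texture,
) -> wgpu::BindGroup {
    // NV12 textures are sampled per plane: Y as R8Unorm, interleaved UV as Rg8Unorm.
    let y_plane = texture.create_view(&wgpu::TextureViewDescriptor {
        aspect: wgpu::TextureAspect::Plane0,
        format: Some(wgpu::TextureFormat::R8Unorm),
        ..Default::default()
    });
    let uv_plane = texture.create_view(&wgpu::TextureViewDescriptor {
        aspect: wgpu::TextureAspect::Plane1,
        format: Some(wgpu::TextureFormat::Rg8Unorm),
        ..Default::default()
    });
    let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
        mag_filter: wgpu::FilterMode::Linear,
        min_filter: wgpu::FilterMode::Linear,
        ..Default::default()
    });

    // Bind both plane views and the sampler; a fragment shader can then sample
    // the planes and convert YUV to RGB.
    device.create_bind_group(&wgpu::BindGroupDescriptor {
        label: Some("decoded frame"),
        layout,
        entries: &[
            wgpu::BindGroupEntry {
                binding: 0,
                resource: wgpu::BindingResource::TextureView(&y_plane),
            },
            wgpu::BindGroupEntry {
                binding: 1,
                resource: wgpu::BindingResource::TextureView(&uv_plane),
            },
            wgpu::BindGroupEntry {
                binding: 2,
                resource: wgpu::BindingResource::Sampler(&sampler),
            },
        ],
    })
}
```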

## Usage

```rs
fn decode_video(
    window: &winit::window::Window,
    mut encoded_video_reader: impl std::io::Read,
) {
    let instance = vk_video::VulkanInstance::new().unwrap();
    let mut surface = instance.wgpu_instance.create_surface(window).unwrap();
    let device = instance
        .create_device(
            wgpu::Features::empty(),
            wgpu::Limits::default(),
            &mut Some(&mut surface),
        )
        .unwrap();

    let mut decoder = device.create_wgpu_textures_decoder().unwrap();
    let mut buffer = vec![0; 4096];

    while let Ok(n) = encoded_video_reader.read(&mut buffer) {
        if n == 0 {
            return;
        }

        let decoded_frames = decoder.decode(&buffer[..n], None).unwrap();

        for frame in decoded_frames {
            // Each frame contains a wgpu::Texture you can sample for drawing.
            // device.wgpu_device is a wgpu::Device and device.wgpu_queue is a
            // wgpu::Queue. You can use these for interacting with the frames.
        }
    }
}
```

Be sure to check out our examples, especially the `player` example, which is a simple video player built using this library and wgpu. Because the player is very simple, you need to extract the raw H.264 data from its container before use. Here's how to extract the H.264 bytestream from an MP4 file using ffmpeg:

```sh
ffmpeg -i input.mp4 -c:v copy -bsf:v h264_mp4toannexb -an output.h264
```

Then you can run the example with:

```sh
git clone https://github.com/software-mansion/live-compositor.git
cd live-compositor/vk-video
cargo run --example player -- output.h264 FRAMERATE
```

## Compatibility

On Linux, the library should work on NVIDIA GPUs out of the box. For AMD GPUs with recent Mesa drivers, you need to set the `RADV_PERFTEST` environment variable to `video_decode` for now:

```sh
RADV_PERFTEST=video_decode cargo run
```

It should work out of the box on Windows with recent drivers, but this has not been tested. If you have verified that this library works on Windows, send us a PR updating this message.

[wgpu]: https://wgpu.rs/
[`wgpu::Texture`]: https://docs.rs/wgpu/latest/wgpu/struct.Texture.html

## vk-video is created by Software Mansion

[![swm](https://logo.swmansion.com/logo?color=white&variant=desktop&width=150&tag=live-compositor-github 'Software Mansion')](https://swmansion.com)

Founded in 2012, [Software Mansion](https://swmansion.com) is a software agency with experience in building web and mobile apps as well as complex multimedia solutions. We are core React Native contributors and experts in live streaming and broadcasting technologies. We can help you build your next dream product – [Hire us](https://swmansion.com/contact/projects?utm_source=live-compositor&utm_medium=readme).