Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

encoder: Add VP9 support and refactor common encoder code #64

Closed
wants to merge 7 commits into from
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions src/encoder/stateless.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ use crate::encoder::FrameMetadata;
use crate::BlockingMode;

pub mod h264;
pub mod vp9;

#[derive(Error, Debug)]
pub enum StatelessBackendError {
Expand Down
147 changes: 147 additions & 0 deletions src/encoder/stateless/vp9.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,147 @@
// Copyright 2024 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use std::rc::Rc;

use crate::codec::vp9::parser::Header;
use crate::encoder::stateless::vp9::predictor::LowDelay;
pub use crate::encoder::stateless::vp9::predictor::PredictionStructure;
use crate::encoder::stateless::BitstreamPromise;
use crate::encoder::stateless::EncodeResult;
use crate::encoder::stateless::Predictor;
use crate::encoder::stateless::StatelessBackendResult;
use crate::encoder::stateless::StatelessCodec;
use crate::encoder::stateless::StatelessCodecSpecific;
use crate::encoder::stateless::StatelessEncoder;
use crate::encoder::stateless::StatelessEncoderExecute;
use crate::encoder::stateless::StatelessVideoEncoderBackend;
use crate::encoder::Bitrate;
use crate::encoder::FrameMetadata;
use crate::BlockingMode;
use crate::Resolution;

mod predictor;

#[cfg(feature = "vaapi")]
pub mod vaapi;

/// Configuration of the stateless VP9 encoder.
#[derive(Clone)]
pub struct EncoderConfig {
    /// Bitrate settings for the coded stream.
    pub bitrate: Bitrate,
    /// Framerate of the stream.
    pub framerate: u32,
    /// Resolution of the coded stream.
    pub resolution: Resolution,
    /// Prediction structure the predictor shall use (see [`PredictionStructure`]).
    pub pred_structure: PredictionStructure,
}

impl Default for EncoderConfig {
    /// Deliberately modest defaults (320x240 @ 30 fps, constant bitrate,
    /// low-delay prediction) chosen with the intent of being widely supported.
    fn default() -> Self {
        let resolution = Resolution {
            width: 320,
            height: 240,
        };

        Self {
            bitrate: Bitrate::Constant(30_000_000),
            framerate: 30,
            resolution,
            pred_structure: PredictionStructure::LowDelay { limit: 2048 },
        }
    }
}

/// Determines how a reference frame shall be used for prediction.
pub enum ReferenceUse {
    /// The frame will be used for single prediction
    Single,
    /// The frame will be used for compound prediction
    Compound,
    /// The frame will be used for both single and compound prediction
    Hybrid,
}

/// A single-frame encode request handed to the backend.
pub struct BackendRequest<P, R> {
    /// VP9 uncompressed frame header to be emitted for this frame.
    header: Header,

    /// Input frame to be encoded
    input: P,

    /// Input frame metadata
    input_meta: FrameMetadata,

    /// Reference frames
    last_frame_ref: Option<(Rc<R>, ReferenceUse)>,
    golden_frame_ref: Option<(Rc<R>, ReferenceUse)>,
    altref_frame_ref: Option<(Rc<R>, ReferenceUse)>,

    /// Current expected bitrate
    bitrate: Bitrate,

    /// Container for the request output. [`StatelessVP9EncoderBackend`] impl shall move it and
    /// append the slice data to it. This prevents unnecessary copying of bitstream around.
    coded_output: Vec<u8>,
}

/// Marker type representing the VP9 codec for the stateless encoder machinery.
pub struct VP9;

impl<Backend> StatelessCodecSpecific<Backend> for VP9
where
    Backend: StatelessVideoEncoderBackend<VP9>,
{
    // A backend-reconstructed frame serves directly as a VP9 reference frame.
    type Reference = Backend::Reconstructed;

    type Request = BackendRequest<Backend::Picture, Backend::Reconstructed>;

    // The backend's coded promise wrapped together with frame metadata.
    type CodedPromise = BitstreamPromise<Backend::CodedPromise>;

    type ReferencePromise = Backend::ReconPromise;
}

// Marker impl: all codec-specific behavior is carried by the
// `StatelessCodecSpecific` implementation.
impl StatelessCodec for VP9 {}

/// Trait to be implemented by hardware/backend-specific VP9 encoders.
pub trait StatelessVP9EncoderBackend: StatelessVideoEncoderBackend<VP9> {
    /// Submits `request` for encoding and returns promises for the
    /// reconstructed frame and for the coded bitstream, respectively.
    fn encode_frame(
        &mut self,
        request: BackendRequest<Self::Picture, Self::Reconstructed>,
    ) -> StatelessBackendResult<(Self::ReconPromise, Self::CodedPromise)>;
}

impl<Handle, Backend> StatelessEncoderExecute<VP9, Handle, Backend>
    for StatelessEncoder<VP9, Handle, Backend>
where
    Backend: StatelessVP9EncoderBackend,
{
    /// Hands a [`BackendRequest`] to the backend and enqueues the resulting
    /// promises on the output and reconstruction queues.
    fn execute(
        &mut self,
        request: BackendRequest<Backend::Picture, Backend::Reconstructed>,
    ) -> EncodeResult<()> {
        // Keep a copy of the metadata so the coded output can be tagged with it.
        let meta = request.input_meta.clone();

        // The [`BackendRequest`] carries a frame handed out by the predictor;
        // decrease the internal counter accordingly.
        self.predictor_frame_count -= 1;

        log::trace!("submitting new request");
        let (recon, bitstream) = self.backend.encode_frame(request)?;

        // Pair the backend's bitstream promise with the frame metadata before
        // queueing it for the client.
        self.output_queue
            .add_promise(BitstreamPromise { bitstream, meta });
        self.recon_queue.add_promise(recon);

        Ok(())
    }
}

impl<Handle, Backend> StatelessEncoder<VP9, Handle, Backend>
where
    Backend: StatelessVP9EncoderBackend,
{
    /// Creates a new VP9 [`StatelessEncoder`] using `backend`, picking the
    /// predictor implementation from [`EncoderConfig::pred_structure`].
    fn new_vp9(backend: Backend, config: EncoderConfig, mode: BlockingMode) -> EncodeResult<Self> {
        // Exhaustive match: adding a new prediction structure forces a
        // compile error here until it is handled.
        let predictor: Box<dyn Predictor<_, _, _>> = match config.pred_structure {
            PredictionStructure::LowDelay { .. } => Box::new(LowDelay::new(config)),
        };

        Self::new(backend, mode, predictor)
    }
}
169 changes: 169 additions & 0 deletions src/encoder/stateless/vp9/predictor.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,169 @@
// Copyright 2024 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use std::collections::VecDeque;
use std::rc::Rc;

use super::BackendRequest;
use super::EncoderConfig;
use crate::codec::vp9::parser::FrameType;
use crate::codec::vp9::parser::Header;
use crate::encoder::stateless::vp9::ReferenceUse;
use crate::encoder::stateless::EncodeError;
use crate::encoder::stateless::EncodeResult;
use crate::encoder::stateless::Predictor;
use crate::encoder::FrameMetadata;

#[derive(Clone)]
pub enum PredictionStructure {
    /// Simplest prediction structure, suitable eg. for RTC. A keyframe is
    /// produced at the start of the stream and every time [`limit`] frames
    /// are reached. All following frames are interframes relying solely on
    /// the last frame.
    LowDelay { limit: u16 },
}

/// See [`PredictionStructure::LowDelay`]
pub(super) struct LowDelay<P, R> {
    /// Input frames waiting to be turned into backend requests.
    queue: VecDeque<(P, FrameMetadata)>,

    /// Reconstructed frames available for use as prediction references.
    references: VecDeque<Rc<R>>,

    /// Frame counter used to decide when a keyframe must be requested.
    counter: usize,

    /// Encoder config
    config: EncoderConfig,
}

impl<P, R> LowDelay<P, R> {
pub(super) fn new(config: EncoderConfig) -> Self {
Self {
queue: Default::default(),
references: Default::default(),
counter: 0,
config,
}
}

fn request_keyframe(
&mut self,
input: P,
input_meta: FrameMetadata,
) -> EncodeResult<Vec<BackendRequest<P, R>>> {
log::trace!("Requested keyframe timestamp={}", input_meta.timestamp);

let header = Header {
frame_type: FrameType::KeyFrame,
show_frame: true,
error_resilient_mode: true,
width: self.config.resolution.width,
height: self.config.resolution.height,
render_and_frame_size_different: false,
render_width: self.config.resolution.width,
render_height: self.config.resolution.height,
intra_only: true,
refresh_frame_flags: 0x01,
ref_frame_idx: [0, 0, 0],

..Default::default()
};

let request = BackendRequest {
header,
input,
input_meta,
last_frame_ref: None,
golden_frame_ref: None,
altref_frame_ref: None,
bitrate: self.config.bitrate.clone(),
coded_output: Vec::new(),
};

self.counter += 1;

Ok(vec![request])
}

fn request_interframe(
&mut self,
input: P,
input_meta: FrameMetadata,
) -> EncodeResult<Vec<BackendRequest<P, R>>> {
log::trace!("Requested interframe timestamp={}", input_meta.timestamp);

let header = Header {
frame_type: FrameType::InterFrame,
show_frame: true,
error_resilient_mode: true,
width: self.config.resolution.width,
height: self.config.resolution.height,
render_and_frame_size_different: false,
render_width: self.config.resolution.width,
render_height: self.config.resolution.height,
intra_only: false,
refresh_frame_flags: 0x01,
ref_frame_idx: [0; 3],

..Default::default()
};

let ref_frame = self.references.pop_front().unwrap();
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I don't see a hard guarantee that the unwrap won't crash us - it would be nice to remove it at some point.

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Oh, I should add a // SAFETY: comment. It is in the next_request(). If references is empty, this function would not be called.


let request = BackendRequest {
header,
input,
input_meta,
last_frame_ref: Some((ref_frame, ReferenceUse::Single)),
golden_frame_ref: None,
altref_frame_ref: None,
bitrate: self.config.bitrate.clone(),
coded_output: Vec::new(),
};

self.counter += 1;
self.references.clear();

Ok(vec![request])
}

fn next_request(&mut self) -> EncodeResult<Vec<BackendRequest<P, R>>> {
match self.queue.pop_front() {
// Nothing to do. Quit.
None => Ok(Vec::new()),
// If first frame in the sequence or forced IDR then create IDR request.
Some((input, meta)) if self.counter == 0 || meta.force_keyframe => {
self.request_keyframe(input, meta)
}
// There is no enough frames reconstructed
Some((input, meta)) if self.references.is_empty() => {
self.queue.push_front((input, meta));
Ok(Vec::new())
}

Some((input, meta)) => self.request_interframe(input, meta),
}
}
}

impl<P, R> Predictor<P, R, BackendRequest<P, R>> for LowDelay<P, R> {
    fn new_frame(
        &mut self,
        input: P,
        frame_metadata: FrameMetadata,
    ) -> EncodeResult<Vec<BackendRequest<P, R>>> {
        // Add the new frame to the request queue and request new encoding if possible
        self.queue.push_back((input, frame_metadata));
        self.next_request()
    }

    fn reconstructed(&mut self, recon: R) -> EncodeResult<Vec<BackendRequest<P, R>>> {
        // Add the new reconstructed surface and request the next encoding if possible
        self.references.push_back(Rc::new(recon));
        self.next_request()
    }

    fn drain(&mut self) -> EncodeResult<Vec<BackendRequest<P, R>>> {
        // [`LowDelay`] does not buffer frames awaiting future inputs, therefore
        // the drain function shall never be called; report an invalid state.
        Err(EncodeError::InvalidInternalState)
    }
}
Loading