mirror of https://github.com/obsproject/obs-studio.git synced 2024-09-20 04:42:18 +02:00

obs-outputs: Add support for AV1/HEVC over RTMP

Implements AV1 and HEVC according to the enhanced RTMP spec found at
https://github.com/veovera/enhanced-rtmp

Co-authored-by: derrod <dennis@obsproject.com>
Yuriy Chumak 2023-03-25 11:45:42 +01:00 committed by derrod
parent 31ba0973e5
commit 2d4f0ac440
10 changed files with 2243 additions and 18 deletions
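For reference, the enhanced RTMP extension replaces the legacy CodecID nibble of the FLV VideoTagHeader with an IsExHeader flag, a packet-type nibble and a codec FourCC. Below is a minimal standalone sketch (not part of the commit; the constants mirror the values defined in flv-mux.c in this diff) of the flags byte and FourCC that flv_packet_ex writes for an AV1 keyframe sequence start:

#include <stdint.h>
#include <stdio.h>

/* Illustrative only; values mirror flv-mux.c in this commit. */
#define VIDEO_FRAMETYPE_OFFSET 4
#define FT_KEY (1 << VIDEO_FRAMETYPE_OFFSET)          /* keyframe flag   */
#define FRAME_HEADER_EX (8 << VIDEO_FRAMETYPE_OFFSET) /* IsExHeader bit  */
#define PACKETTYPE_SEQ_START 0                        /* sequence start  */

int main(void)
{
	/* Flags byte (IsExHeader | frame type | packet type) + FourCC "av01". */
	const uint8_t header[5] = {FRAME_HEADER_EX | FT_KEY | PACKETTYPE_SEQ_START,
				   'a', 'v', '0', '1'};

	for (size_t i = 0; i < sizeof(header); i++)
		printf("%02x ", header[i]); /* prints: 90 61 76 30 31 */
	printf("\n");
	return 0;
}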

View File

@ -28,6 +28,9 @@ target_sources(
rtmp-stream.c
rtmp-stream.h
rtmp-windows.c
rtmp-av1.c
rtmp-av1.h
utils.h
librtmp/amf.c
librtmp/amf.h
librtmp/bytes.h
@ -45,6 +48,10 @@ target_sources(
librtmp/rtmp.h
librtmp/rtmp_sys.h)
if(ENABLE_HEVC)
target_sources(obs-outputs PRIVATE rtmp-hevc.c rtmp-hevc.h)
endif()
target_link_libraries(obs-outputs PRIVATE OBS::libobs)
set_target_properties(obs-outputs PROPERTIES FOLDER "plugins" PREFIX "")

View File

@ -32,6 +32,66 @@
#define VIDEODATA_AVCVIDEOPACKET 7.0
#define AUDIODATA_AAC 10.0
#define VIDEO_FRAMETYPE_OFFSET 4
enum video_frametype_t {
FT_KEY = 1 << VIDEO_FRAMETYPE_OFFSET,
FT_INTER = 2 << VIDEO_FRAMETYPE_OFFSET,
};
// Y2023 spec
const uint8_t FRAME_HEADER_EX = 8 << VIDEO_FRAMETYPE_OFFSET;
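// Enhanced-RTMP (Y2023) packet types, carried in the lower nibble of the
// extended frame header: SEQ_START/SEQ_END bracket the codec configuration
// data, FRAMES/FRAMESX carry coded frames, and METADATA carries the
// colorInfo object written by flv_packet_metadata() below.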
enum packet_type_t {
PACKETTYPE_SEQ_START = 0,
PACKETTYPE_FRAMES = 1,
PACKETTYPE_SEQ_END = 2,
#ifdef ENABLE_HEVC
PACKETTYPE_FRAMESX = 3,
#endif
PACKETTYPE_METADATA = 4
};
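// AMF0 value type markers, used to serialize the colorInfo metadata object.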
enum datatype_t {
DATA_TYPE_NUMBER = 0,
DATA_TYPE_STRING = 2,
DATA_TYPE_OBJECT = 3,
DATA_TYPE_OBJECT_END = 9,
};
static void s_w4cc(struct serializer *s, enum video_id_t id)
{
switch (id) {
case CODEC_AV1:
s_w8(s, 'a');
s_w8(s, 'v');
s_w8(s, '0');
s_w8(s, '1');
break;
#ifdef ENABLE_HEVC
case CODEC_HEVC:
s_w8(s, 'h');
s_w8(s, 'v');
s_w8(s, 'c');
s_w8(s, '1');
break;
#endif
case CODEC_H264:
assert(0);
}
}
static void s_wstring(struct serializer *s, const char *str)
{
size_t len = strlen(str);
s_wb16(s, (uint16_t)len);
s_write(s, str, len);
}
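// FLV timestamps are split into a lower 24-bit field plus an 8-bit
// extension holding bits 24-31 (masked with 0x7F to stay non-negative).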
static inline void s_wtimestamp(struct serializer *s, int32_t i32)
{
s_wb24(s, (uint32_t)(i32 & 0xFFFFFF));
s_w8(s, (uint32_t)(i32 >> 24) & 0x7F);
}
static inline double encoder_bitrate(obs_encoder_t *encoder)
{
obs_data_t *settings = obs_encoder_get_settings(encoder);
@ -189,7 +249,7 @@ static void flv_video(struct serializer *s, int32_t dts_offset,
#endif
s_wb24(s, (uint32_t)packet->size + 5);
	s_wb24(s, (uint32_t)time_ms);
s_w8(s, (time_ms >> 24) & 0x7F);
s_wb24(s, 0);
@ -223,7 +283,7 @@ static void flv_audio(struct serializer *s, int32_t dts_offset,
#endif
s_wb24(s, (uint32_t)packet->size + 2);
	s_wb24(s, (uint32_t)time_ms);
s_w8(s, (time_ms >> 24) & 0x7F);
s_wb24(s, 0);
@ -253,6 +313,151 @@ void flv_packet_mux(struct encoder_packet *packet, int32_t dts_offset,
*size = data.bytes.num;
}
// Y2023 spec
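// Serialize one video packet with the extended header: a flags byte
// (IsExHeader | frame type | packet type), the codec FourCC, then the
// coded frame data.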
void flv_packet_ex(struct encoder_packet *packet, enum video_id_t codec_id,
int32_t dts_offset, uint8_t **output, size_t *size, int type)
{
struct array_output_data data;
struct serializer s;
array_output_serializer_init(&s, &data);
assert(packet->type == OBS_ENCODER_VIDEO);
int32_t time_ms = get_ms_time(packet, packet->dts) - dts_offset;
// packet head
s_w8(&s, RTMP_PACKET_TYPE_VIDEO);
s_wb24(&s, (uint32_t)packet->size + 5); // 5 = (w8+w4cc)
s_wtimestamp(&s, time_ms);
s_wb24(&s, 0); // always 0
// packet ext header
s_w8(&s, FRAME_HEADER_EX | type | (packet->keyframe ? FT_KEY : 0));
s_w4cc(&s, codec_id);
// packet data
s_write(&s, packet->data, packet->size);
// packet tail
s_wb32(&s, (uint32_t)serializer_get_pos(&s) - 1);
*output = data.bytes.array;
*size = data.bytes.num;
}
void flv_packet_start(struct encoder_packet *packet, enum video_id_t codec,
int32_t dts_offset, uint8_t **output, size_t *size)
{
flv_packet_ex(packet, codec, dts_offset, output, size,
PACKETTYPE_SEQ_START);
}
void flv_packet_frames(struct encoder_packet *packet, enum video_id_t codec,
int32_t dts_offset, uint8_t **output, size_t *size)
{
#ifdef ENABLE_HEVC
flv_packet_ex(packet, codec, dts_offset, output, size,
(codec == CODEC_HEVC) ? PACKETTYPE_FRAMESX
: PACKETTYPE_FRAMES);
#else
	flv_packet_ex(packet, codec, dts_offset, output, size,
		      PACKETTYPE_FRAMES);
#endif
}
void flv_packet_end(struct encoder_packet *packet, enum video_id_t codec,
int32_t dts_offset, uint8_t **output, size_t *size)
{
flv_packet_ex(packet, codec, dts_offset, output, size,
PACKETTYPE_SEQ_END);
}
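/* Emits a PACKETTYPE_METADATA video packet whose payload is the AMF0
 * string "colorInfo" followed by an object containing:
 *   colorConfig: bitDepth, colorPrimaries, transferCharacteristics,
 *                matrixCoefficients
 *   hdrMdcv (only when max_luminance != 0): maxLuminance, minLuminance
 */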
void flv_packet_metadata(enum video_id_t codec_id, uint8_t **output,
size_t *size, int bits_per_raw_sample,
uint8_t color_primaries, int color_trc,
int color_space, int min_luminance, int max_luminance)
{
// metadata array
struct array_output_data data;
struct array_output_data metadata;
struct serializer s;
array_output_serializer_init(&s, &data);
// metadata data array
{
struct serializer s;
array_output_serializer_init(&s, &metadata);
s_w8(&s, DATA_TYPE_STRING);
s_wstring(&s, "colorInfo");
s_w8(&s, DATA_TYPE_OBJECT);
{
// colorConfig:
s_wstring(&s, "colorConfig");
s_w8(&s, DATA_TYPE_OBJECT);
{
s_wstring(&s, "bitDepth");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, bits_per_raw_sample);
s_wstring(&s, "colorPrimaries");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, color_primaries);
s_wstring(&s, "transferCharacteristics");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, color_trc);
s_wstring(&s, "matrixCoefficients");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, color_space);
}
s_w8(&s, 0);
s_w8(&s, 0);
s_w8(&s, DATA_TYPE_OBJECT_END);
if (max_luminance != 0) {
// hdrMdcv
s_wstring(&s, "hdrMdcv");
s_w8(&s, DATA_TYPE_OBJECT);
{
s_wstring(&s, "maxLuminance");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, max_luminance);
s_wstring(&s, "minLuminance");
s_w8(&s, DATA_TYPE_NUMBER);
s_wbd(&s, min_luminance);
}
s_w8(&s, 0);
s_w8(&s, 0);
s_w8(&s, DATA_TYPE_OBJECT_END);
}
}
s_w8(&s, 0);
s_w8(&s, 0);
s_w8(&s, DATA_TYPE_OBJECT_END);
}
// packet head
s_w8(&s, RTMP_PACKET_TYPE_VIDEO);
s_wb24(&s, (uint32_t)metadata.bytes.num + 5); // 5 = (w8+w4cc)
s_wtimestamp(&s, 0);
s_wb24(&s, 0); // always 0
// packet ext header
// these are the 5 extra bytes mentioned above
s_w8(&s, FRAME_HEADER_EX | PACKETTYPE_METADATA);
s_w4cc(&s, codec_id);
// packet data
s_write(&s, metadata.bytes.array, metadata.bytes.num);
array_output_serializer_free(&metadata); // must be freed
// packet tail
s_wb32(&s, (uint32_t)serializer_get_pos(&s) - 1);
*output = data.bytes.array;
*size = data.bytes.num;
}
/* ------------------------------------------------------------------------- */
/* stuff for additional media streams */
@ -471,7 +676,7 @@ static void flv_additional_audio(struct serializer *s, int32_t dts_offset,
#endif
s_wb24(s, (uint32_t)size);
	s_wb24(s, (uint32_t)time_ms);
s_w8(s, (time_ms >> 24) & 0x7F);
s_wb24(s, 0);

View File

@ -21,6 +21,27 @@
#define MILLISECOND_DEN 1000
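// Video codec ids used to choose between the legacy AVC packets and the
// Y2023 extended packets.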
enum video_id_t {
CODEC_H264 = 1, // legacy
CODEC_AV1, // Y2023 spec
#ifdef ENABLE_HEVC
CODEC_HEVC,
#endif
};
static enum video_id_t to_video_type(const char *codec)
{
if (strcmp(codec, "h264") == 0)
return CODEC_H264;
if (strcmp(codec, "av1") == 0)
return CODEC_AV1;
#ifdef ENABLE_HEVC
if (strcmp(codec, "hevc") == 0)
return CODEC_HEVC;
#endif
return 0;
}
static int32_t get_ms_time(struct encoder_packet *packet, int64_t val)
{
return (int32_t)(val * MILLISECOND_DEN / packet->timebase_den);
@ -38,3 +59,17 @@ extern void flv_additional_packet_mux(struct encoder_packet *packet,
int32_t dts_offset, uint8_t **output,
size_t *size, bool is_header,
size_t index);
// Y2023 spec
extern void flv_packet_start(struct encoder_packet *packet,
enum video_id_t codec, int32_t dts_offset,
uint8_t **output, size_t *size);
extern void flv_packet_frames(struct encoder_packet *packet,
enum video_id_t codec, int32_t dts_offset,
uint8_t **output, size_t *size);
extern void flv_packet_end(struct encoder_packet *packet, enum video_id_t codec,
int32_t dts_offset, uint8_t **output, size_t *size);
extern void flv_packet_metadata(enum video_id_t codec, uint8_t **output,
size_t *size, int bits_per_raw_sample,
uint8_t color_primaries, int color_trc,
int color_space, int min_luminance,
int max_luminance);

View File

@ -0,0 +1,604 @@
/******************************************************************************
Copyright (C) 2023 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include "rtmp-av1.h"
#include "utils.h"
#include <obs.h>
#include <util/array-serializer.h>
/* Adapted from FFmpeg's libavformat/av1.c for our FLV muxer. */
#define AV1_OBU_SEQUENCE_HEADER 1
#define AV1_OBU_TEMPORAL_DELIMITER 2
#define AV1_OBU_REDUNDANT_FRAME_HEADER 7
#define AV1_OBU_TILE_LIST 8
#define AV1_OBU_PADDING 15
#define AV1_OBU_METADATA 5
#define AV1_OBU_TILE_GROUP 4
#define AV1_OBU_TILE_LIST 8
#define AV1_OBU_FRAME 6
#define FF_PROFILE_AV1_MAIN 0
#define FF_PROFILE_AV1_HIGH 1
#define FF_PROFILE_AV1_PROFESSIONAL 2
typedef struct AV1SequenceParameters {
uint8_t profile;
uint8_t level;
uint8_t tier;
uint8_t bitdepth;
uint8_t monochrome;
uint8_t chroma_subsampling_x;
uint8_t chroma_subsampling_y;
uint8_t chroma_sample_position;
uint8_t color_description_present_flag;
uint8_t color_primaries;
uint8_t transfer_characteristics;
uint8_t matrix_coefficients;
uint8_t color_range;
} AV1SequenceParameters;
#define MAX_OBU_HEADER_SIZE (2 + 8)
typedef struct Av1GetBitContext {
const uint8_t *buffer, *buffer_end;
int index;
int size_in_bits;
int size_in_bits_plus8;
} Av1GetBitContext;
static inline int init_get_bits_xe(Av1GetBitContext *s, const uint8_t *buffer,
int bit_size)
{
int buffer_size;
int ret = 0;
if (bit_size >= INT_MAX - 64 * 8 || bit_size < 0 || !buffer) {
bit_size = 0;
buffer = NULL;
ret = -1;
}
buffer_size = (bit_size + 7) >> 3;
s->buffer = buffer;
s->size_in_bits = bit_size;
s->size_in_bits_plus8 = bit_size + 8;
s->buffer_end = buffer + buffer_size;
s->index = 0;
return ret;
}
static inline int init_get_bits(Av1GetBitContext *s, const uint8_t *buffer,
int bit_size)
{
return init_get_bits_xe(s, buffer, bit_size);
}
static inline int init_get_bits8(Av1GetBitContext *s, const uint8_t *buffer,
int byte_size)
{
if (byte_size > INT_MAX / 8 || byte_size < 0)
byte_size = -1;
return init_get_bits(s, buffer, byte_size * 8);
}
static inline unsigned int get_bit1(Av1GetBitContext *s)
{
unsigned int index = s->index;
uint8_t result = s->buffer[index >> 3];
result <<= index & 7;
result >>= 8 - 1;
if (s->index < s->size_in_bits_plus8)
index++;
s->index = index;
return result;
}
static inline unsigned int get_bits(Av1GetBitContext *s, unsigned int n)
{
unsigned int out = 0;
for (unsigned int i = 0; i < n; i++)
out = (out << 1) | get_bit1(s);
return out;
}
#define skip_bits get_bits
static inline int get_bits_count(Av1GetBitContext *s)
{
return s->index;
}
static inline int get_bits_left(Av1GetBitContext *gb)
{
return gb->size_in_bits - get_bits_count(gb);
}
#define get_bits_long get_bits
#define skip_bits_long get_bits_long
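// OBU sizes are LEB128-coded: 7 payload bits per byte, with the high bit
// set on every byte except the last.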
static inline int64_t leb128(Av1GetBitContext *gb)
{
int64_t ret = 0;
int i;
for (i = 0; i < 8; i++) {
int byte = get_bits(gb, 8);
ret |= (int64_t)(byte & 0x7f) << (i * 7);
if (!(byte & 0x80))
break;
}
return ret;
}
static inline void uvlc(Av1GetBitContext *gb)
{
int leading_zeros = 0;
while (get_bits_left(gb)) {
if (get_bits(gb, 1))
break;
leading_zeros++;
}
if (leading_zeros >= 32)
return;
skip_bits_long(gb, leading_zeros);
}
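// Parse a single OBU header: forbidden bit, 4-bit type, extension and
// has_size flags, optional temporal/spatial ids, then the LEB128 size.
// Returns the total OBU length in bytes, or a negative value on error.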
static inline int parse_obu_header(const uint8_t *buf, int buf_size,
int64_t *obu_size, int *start_pos, int *type,
int *temporal_id, int *spatial_id)
{
Av1GetBitContext gb;
int ret, extension_flag, has_size_flag;
size_t size;
ret = init_get_bits8(&gb, buf, min_i32(buf_size, MAX_OBU_HEADER_SIZE));
if (ret < 0)
return ret;
if (get_bits(&gb, 1) != 0) // obu_forbidden_bit
return -1;
*type = get_bits(&gb, 4);
extension_flag = get_bits(&gb, 1);
has_size_flag = get_bits(&gb, 1);
skip_bits(&gb, 1); // obu_reserved_1bit
if (extension_flag) {
*temporal_id = get_bits(&gb, 3);
*spatial_id = get_bits(&gb, 2);
skip_bits(&gb, 3); // extension_header_reserved_3bits
} else {
*temporal_id = *spatial_id = 0;
}
*obu_size = has_size_flag ? leb128(&gb) : buf_size - 1 - extension_flag;
if (get_bits_left(&gb) < 0)
return -1;
*start_pos = get_bits_count(&gb) / 8;
size = (size_t)(*obu_size + *start_pos);
if (size > (size_t)buf_size)
return -1;
assert(size <= INT_MAX);
return (int)size;
}
static inline int get_obu_bit_length(const uint8_t *buf, int size, int type)
{
int v;
/* There are no trailing bits on these */
if (type == AV1_OBU_TILE_GROUP || type == AV1_OBU_TILE_LIST ||
type == AV1_OBU_FRAME) {
if (size > INT_MAX / 8)
return -1;
else
return size * 8;
}
while (size > 0 && buf[size - 1] == 0)
size--;
if (!size)
return 0;
v = buf[size - 1];
if (size > INT_MAX / 8)
return -1;
size *= 8;
/* Remove the trailing_one_bit and following trailing zeros */
if (v)
size -= ctz32(v) + 1;
return size;
}
static int parse_color_config(AV1SequenceParameters *seq_params,
Av1GetBitContext *gb)
{
int twelve_bit = 0;
int high_bitdepth = get_bits(gb, 1);
if (seq_params->profile == FF_PROFILE_AV1_PROFESSIONAL && high_bitdepth)
twelve_bit = get_bits(gb, 1);
seq_params->bitdepth = 8 + (high_bitdepth * 2) + (twelve_bit * 2);
if (seq_params->profile == FF_PROFILE_AV1_HIGH)
seq_params->monochrome = 0;
else
seq_params->monochrome = get_bits(gb, 1);
seq_params->color_description_present_flag = get_bits(gb, 1);
if (seq_params->color_description_present_flag) {
seq_params->color_primaries = get_bits(gb, 8);
seq_params->transfer_characteristics = get_bits(gb, 8);
seq_params->matrix_coefficients = get_bits(gb, 8);
} else {
seq_params->color_primaries = 2;
seq_params->transfer_characteristics = 2;
seq_params->matrix_coefficients = 2;
}
if (seq_params->monochrome) {
seq_params->color_range = get_bits(gb, 1);
seq_params->chroma_subsampling_x = 1;
seq_params->chroma_subsampling_y = 1;
seq_params->chroma_sample_position = 0;
return 0;
} else if (seq_params->color_primaries == 1 &&
seq_params->transfer_characteristics == 13 &&
seq_params->matrix_coefficients == 0) {
seq_params->chroma_subsampling_x = 0;
seq_params->chroma_subsampling_y = 0;
} else {
seq_params->color_range = get_bits(gb, 1);
if (seq_params->profile == FF_PROFILE_AV1_MAIN) {
seq_params->chroma_subsampling_x = 1;
seq_params->chroma_subsampling_y = 1;
} else if (seq_params->profile == FF_PROFILE_AV1_HIGH) {
seq_params->chroma_subsampling_x = 0;
seq_params->chroma_subsampling_y = 0;
} else {
if (twelve_bit) {
seq_params->chroma_subsampling_x =
get_bits(gb, 1);
if (seq_params->chroma_subsampling_x)
seq_params->chroma_subsampling_y =
get_bits(gb, 1);
else
seq_params->chroma_subsampling_y = 0;
} else {
seq_params->chroma_subsampling_x = 1;
seq_params->chroma_subsampling_y = 0;
}
}
if (seq_params->chroma_subsampling_x &&
seq_params->chroma_subsampling_y)
seq_params->chroma_sample_position = get_bits(gb, 2);
}
skip_bits(gb, 1); // separate_uv_delta_q
return 0;
}
static int parse_sequence_header(AV1SequenceParameters *seq_params,
const uint8_t *buf, int size)
{
Av1GetBitContext gb;
int reduced_still_picture_header;
int frame_width_bits_minus_1, frame_height_bits_minus_1;
int size_bits, ret;
size_bits = get_obu_bit_length(buf, size, AV1_OBU_SEQUENCE_HEADER);
if (size_bits < 0)
return size_bits;
ret = init_get_bits(&gb, buf, size_bits);
if (ret < 0)
return ret;
memset(seq_params, 0, sizeof(*seq_params));
seq_params->profile = get_bits(&gb, 3);
skip_bits(&gb, 1); // still_picture
reduced_still_picture_header = get_bits(&gb, 1);
if (reduced_still_picture_header) {
seq_params->level = get_bits(&gb, 5);
seq_params->tier = 0;
} else {
int initial_display_delay_present_flag,
operating_points_cnt_minus_1;
int decoder_model_info_present_flag,
buffer_delay_length_minus_1;
if (get_bits(&gb, 1)) { // timing_info_present_flag
skip_bits_long(&gb, 32); // num_units_in_display_tick
skip_bits_long(&gb, 32); // time_scale
if (get_bits(&gb, 1)) // equal_picture_interval
uvlc(&gb); // num_ticks_per_picture_minus_1
decoder_model_info_present_flag = get_bits(&gb, 1);
if (decoder_model_info_present_flag) {
buffer_delay_length_minus_1 = get_bits(&gb, 5);
skip_bits_long(&gb, 32);
skip_bits(&gb, 10);
}
} else
decoder_model_info_present_flag = 0;
initial_display_delay_present_flag = get_bits(&gb, 1);
operating_points_cnt_minus_1 = get_bits(&gb, 5);
for (int i = 0; i <= operating_points_cnt_minus_1; i++) {
int seq_level_idx, seq_tier;
skip_bits(&gb, 12);
seq_level_idx = get_bits(&gb, 5);
if (seq_level_idx > 7)
seq_tier = get_bits(&gb, 1);
else
seq_tier = 0;
if (decoder_model_info_present_flag) {
if (get_bits(&gb, 1)) {
skip_bits_long(
&gb,
buffer_delay_length_minus_1 +
1);
skip_bits_long(
&gb,
buffer_delay_length_minus_1 +
1);
skip_bits(&gb, 1);
}
}
if (initial_display_delay_present_flag) {
if (get_bits(&gb, 1))
skip_bits(&gb, 4);
}
if (i == 0) {
seq_params->level = seq_level_idx;
seq_params->tier = seq_tier;
}
}
}
frame_width_bits_minus_1 = get_bits(&gb, 4);
frame_height_bits_minus_1 = get_bits(&gb, 4);
skip_bits(&gb, frame_width_bits_minus_1 + 1); // max_frame_width_minus_1
skip_bits(&gb,
frame_height_bits_minus_1 + 1); // max_frame_height_minus_1
if (!reduced_still_picture_header) {
if (get_bits(&gb, 1)) // frame_id_numbers_present_flag
skip_bits(&gb, 7);
}
skip_bits(
&gb,
3); // use_128x128_superblock (1), enable_filter_intra (1), enable_intra_edge_filter (1)
if (!reduced_still_picture_header) {
int enable_order_hint, seq_force_screen_content_tools;
skip_bits(&gb, 4);
enable_order_hint = get_bits(&gb, 1);
if (enable_order_hint)
skip_bits(&gb, 2);
if (get_bits(&gb, 1)) // seq_choose_screen_content_tools
seq_force_screen_content_tools = 2;
else
seq_force_screen_content_tools = get_bits(&gb, 1);
if (seq_force_screen_content_tools) {
if (!get_bits(&gb, 1)) // seq_choose_integer_mv
skip_bits(&gb, 1); // seq_force_integer_mv
}
if (enable_order_hint)
skip_bits(&gb, 3); // order_hint_bits_minus_1
}
skip_bits(&gb, 3);
parse_color_config(seq_params, &gb);
skip_bits(&gb, 1); // film_grain_params_present
if (get_bits_left(&gb))
return -1;
return 0;
}
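/* Build the AV1CodecConfigurationRecord (av1C) used as the FLV sequence
 * header: a marker/version byte, profile/level, a flags byte packing tier,
 * bit depth, monochrome and chroma subsampling, followed by the sequence
 * header OBU and any metadata OBUs found in the input. */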
size_t obs_parse_av1_header(uint8_t **header, const uint8_t *data, size_t size)
{
if (data[0] & 0x80) {
int config_record_version = data[0] & 0x7f;
if (config_record_version != 1 || size < 4)
return 0;
*header = bmemdup(data, size);
return size;
}
// AV1S init
AV1SequenceParameters seq_params;
int nb_seq = 0, seq_size = 0, meta_size = 0;
const uint8_t *seq = 0, *meta = 0;
uint8_t *buf = (uint8_t *)data;
while (size > 0) {
int64_t obu_size;
int start_pos, type, temporal_id, spatial_id;
assert(size <= INT_MAX);
int len = parse_obu_header(buf, (int)size, &obu_size,
&start_pos, &type, &temporal_id,
&spatial_id);
if (len < 0)
return 0;
switch (type) {
case AV1_OBU_SEQUENCE_HEADER:
nb_seq++;
if (!obu_size || nb_seq > 1) {
return 0;
}
assert(obu_size <= INT_MAX);
if (parse_sequence_header(&seq_params, buf + start_pos,
(int)obu_size) < 0)
return 0;
seq = buf;
seq_size = len;
break;
case AV1_OBU_METADATA:
if (!obu_size)
return 0;
meta = buf;
meta_size = len;
break;
default:
break;
}
size -= len;
buf += len;
}
if (!nb_seq)
return 0;
uint8_t av1header[4];
av1header[0] = (1 << 7) | 1; // marker and version
av1header[1] = (seq_params.profile << 5) | (seq_params.level);
av1header[2] = (seq_params.tier << 7) | (seq_params.bitdepth > 8) << 6 |
(seq_params.bitdepth == 12) << 5 |
(seq_params.monochrome) << 4 |
(seq_params.chroma_subsampling_x) << 3 |
(seq_params.chroma_subsampling_y) << 2 |
(seq_params.chroma_sample_position);
av1header[3] = 0;
struct array_output_data output;
struct serializer s;
long ref = 1;
array_output_serializer_init(&s, &output);
serialize(&s, &ref, sizeof(ref));
s_write(&s, av1header, sizeof(av1header));
if (seq_size)
s_write(&s, seq, seq_size);
if (meta_size)
s_write(&s, meta, meta_size);
*header = output.bytes.array + sizeof(ref);
return output.bytes.num - sizeof(ref);
}
static void serialize_av1_data(struct serializer *s, const uint8_t *data,
size_t size, bool *is_keyframe, int *priority)
{
(void)is_keyframe;
(void)priority;
uint8_t *buf = (uint8_t *)data;
uint8_t *end = (uint8_t *)data + size;
enum {
START_NOT_FOUND,
START_FOUND,
END_FOUND,
OFFSET_IMPOSSIBLE,
} state = START_NOT_FOUND;
while (buf < end) {
int64_t obu_size;
int start_pos, type, temporal_id, spatial_id;
assert(end - buf <= INT_MAX);
int len = parse_obu_header(buf, (int)(end - buf), &obu_size,
&start_pos, &type, &temporal_id,
&spatial_id);
if (len < 0)
return;
switch (type) {
case AV1_OBU_TEMPORAL_DELIMITER:
case AV1_OBU_REDUNDANT_FRAME_HEADER:
case AV1_OBU_TILE_LIST:
case AV1_OBU_PADDING:
if (state == START_FOUND)
state = END_FOUND;
break;
default:
if (state == START_NOT_FOUND) {
state = START_FOUND;
} else if (state == END_FOUND) {
state = OFFSET_IMPOSSIBLE;
}
s_write(s, buf, len);
size += len;
break;
}
buf += len;
}
}
void obs_parse_av1_packet(struct encoder_packet *av1_packet,
const struct encoder_packet *src)
{
struct array_output_data output;
struct serializer s;
long ref = 1;
array_output_serializer_init(&s, &output);
serialize(&s, &ref, sizeof(ref));
*av1_packet = *src;
serialize_av1_data(&s, src->data, src->size, &av1_packet->keyframe,
&av1_packet->priority);
av1_packet->data = output.bytes.array + sizeof(ref);
av1_packet->size = output.bytes.num - sizeof(ref);
av1_packet->drop_priority = av1_packet->priority;
}

View File

@ -0,0 +1,28 @@
/******************************************************************************
Copyright (C) 2023 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#pragma once
#include <stdint.h>
#include <stddef.h>
struct encoder_packet;
extern void obs_parse_av1_packet(struct encoder_packet *avc_packet,
const struct encoder_packet *src);
extern size_t obs_parse_av1_header(uint8_t **header, const uint8_t *data,
size_t size);

View File

@ -0,0 +1,938 @@
/******************************************************************************
Copyright (C) 2023 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#include "rtmp-hevc.h"
#include "utils.h"
#include <obs.h>
#include <obs-nal.h>
#include <obs-hevc.h>
#include <util/array-serializer.h>
/* Adapted from FFmpeg's libavformat/hevc.c for our FLV muxer. */
enum {
OBS_VPS_INDEX,
OBS_SPS_INDEX,
OBS_PPS_INDEX,
OBS_SEI_PREFIX_INDEX,
OBS_SEI_SUFFIX_INDEX,
OBS_NB_ARRAYS
};
enum {
OBS_HEVC_MAX_VPS_COUNT = 16,
OBS_HEVC_MAX_SPS_COUNT = 16,
OBS_HEVC_MAX_PPS_COUNT = 64,
};
typedef struct HVCCNALUnitArray {
uint8_t array_completeness;
uint8_t NAL_unit_type;
uint16_t numNalus;
struct array_output_data nalUnitData;
struct serializer nalUnit;
} HVCCNALUnitArray;
typedef struct HEVCDecoderConfigurationRecord {
uint8_t general_profile_space;
uint8_t general_tier_flag;
uint8_t general_profile_idc;
uint32_t general_profile_compatibility_flags;
uint64_t general_constraint_indicator_flags;
uint8_t general_level_idc;
uint16_t min_spatial_segmentation_idc;
uint8_t parallelismType;
uint8_t chromaFormat;
uint8_t bitDepthLumaMinus8;
uint8_t bitDepthChromaMinus8;
uint16_t avgFrameRate;
uint8_t constantFrameRate;
uint8_t numTemporalLayers;
uint8_t temporalIdNested;
uint8_t lengthSizeMinusOne;
uint8_t numOfArrays;
HVCCNALUnitArray arrays[OBS_NB_ARRAYS];
} HEVCDecoderConfigurationRecord;
typedef struct HVCCProfileTierLevel {
uint8_t profile_space;
uint8_t tier_flag;
uint8_t profile_idc;
uint32_t profile_compatibility_flags;
uint64_t constraint_indicator_flags;
uint8_t level_idc;
} HVCCProfileTierLevel;
typedef struct HevcGetBitContext {
const uint8_t *buffer, *buffer_end;
uint64_t cache;
unsigned bits_left;
int index;
int size_in_bits;
int size_in_bits_plus8;
} HevcGetBitContext;
static inline uint32_t rb32(const uint8_t *ptr)
{
return (ptr[0] << 24) + (ptr[1] << 16) + (ptr[2] << 8) + ptr[3];
}
static inline void refill_32(HevcGetBitContext *s)
{
s->cache = s->cache | (uint64_t)rb32(s->buffer + (s->index >> 3))
<< (32 - s->bits_left);
s->index += 32;
s->bits_left += 32;
}
static inline uint64_t get_val(HevcGetBitContext *s, unsigned n)
{
uint64_t ret;
ret = s->cache >> (64 - n);
s->cache <<= n;
s->bits_left -= n;
return ret;
}
static inline int init_get_bits_xe(HevcGetBitContext *s, const uint8_t *buffer,
int bit_size)
{
int buffer_size;
int ret = 0;
if (bit_size >= INT_MAX - 64 * 8 || bit_size < 0 || !buffer) {
bit_size = 0;
buffer = NULL;
ret = -1;
}
buffer_size = (bit_size + 7) >> 3;
s->buffer = buffer;
s->size_in_bits = bit_size;
s->size_in_bits_plus8 = bit_size + 8;
s->buffer_end = buffer + buffer_size;
s->index = 0;
s->cache = 0;
s->bits_left = 0;
refill_32(s);
return ret;
}
static inline int init_get_bits(HevcGetBitContext *s, const uint8_t *buffer,
int bit_size)
{
return init_get_bits_xe(s, buffer, bit_size);
}
static inline int init_get_bits8(HevcGetBitContext *s, const uint8_t *buffer,
int byte_size)
{
if (byte_size > INT_MAX / 8 || byte_size < 0)
byte_size = -1;
return init_get_bits(s, buffer, byte_size * 8);
}
static inline unsigned int get_bits(HevcGetBitContext *s, unsigned int n)
{
register unsigned int tmp;
if (n > s->bits_left) {
refill_32(s);
}
tmp = (unsigned int)get_val(s, n);
return tmp;
}
#define skip_bits get_bits
static inline int get_bits_count(HevcGetBitContext *s)
{
return s->index - s->bits_left;
}
static inline int get_bits_left(HevcGetBitContext *gb)
{
return gb->size_in_bits - get_bits_count(gb);
}
static inline unsigned int get_bits_long(HevcGetBitContext *s, int n)
{
if (!n)
return 0;
return get_bits(s, n);
}
#define skip_bits_long get_bits_long
static inline uint64_t get_bits64(HevcGetBitContext *s, int n)
{
if (n <= 32) {
return get_bits_long(s, n);
} else {
uint64_t ret = (uint64_t)get_bits_long(s, n - 32) << 32;
return ret | get_bits_long(s, 32);
}
}
static inline int ilog2(unsigned x)
{
return (31 - clz32(x | 1));
}
static inline unsigned show_val(const HevcGetBitContext *s, unsigned n)
{
return s->cache & ((UINT64_C(1) << n) - 1);
}
static inline unsigned int show_bits(HevcGetBitContext *s, unsigned int n)
{
register unsigned int tmp;
if (n > s->bits_left)
refill_32(s);
tmp = show_val(s, n);
return tmp;
}
static inline unsigned int show_bits_long(HevcGetBitContext *s, int n)
{
if (n <= 25) {
return show_bits(s, n);
} else {
HevcGetBitContext gb = *s;
return get_bits_long(&gb, n);
}
}
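// Unsigned/signed Exp-Golomb readers (ue(v)/se(v)) used by the HEVC
// parameter-set parsing below.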
static inline unsigned get_ue_golomb_long(HevcGetBitContext *gb)
{
unsigned buf, log;
buf = show_bits_long(gb, 32);
log = 31 - ilog2(buf);
skip_bits_long(gb, log);
return get_bits_long(gb, log + 1) - 1;
}
static inline int get_se_golomb_long(HevcGetBitContext *gb)
{
unsigned int buf = get_ue_golomb_long(gb);
int sign = (buf & 1) - 1;
return ((buf >> 1) ^ sign) + 1;
}
static inline bool has_start_code(const uint8_t *data, size_t size)
{
if (size > 3 && data[0] == 0 && data[1] == 0 && data[2] == 1)
return true;
if (size > 4 && data[0] == 0 && data[1] == 0 && data[2] == 0 &&
data[3] == 1)
return true;
return false;
}
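/* Copy a NAL unit while stripping emulation_prevention_three_byte
 * (the 0x03 inserted after each 0x00 0x00 pair in the bitstream). */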
uint8_t *ff_nal_unit_extract_rbsp(uint8_t *dst, const uint8_t *src, int src_len,
uint32_t *dst_len, int header_len)
{
int i, len;
/* NAL unit header */
i = len = 0;
while (i < header_len && i < src_len)
dst[len++] = src[i++];
while (i + 2 < src_len)
if (!src[i] && !src[i + 1] && src[i + 2] == 3) {
dst[len++] = src[i++];
dst[len++] = src[i++];
i++; // remove emulation_prevention_three_byte
} else
dst[len++] = src[i++];
while (i < src_len)
dst[len++] = src[i++];
memset(dst + len, 0, 64);
*dst_len = (uint32_t)len;
return dst;
}
static int hvcc_array_add_nal_unit(uint8_t *nal_buf, uint32_t nal_size,
uint8_t nal_type, int ps_array_completeness,
HVCCNALUnitArray *array)
{
s_wb16(&array->nalUnit, nal_size);
s_write(&array->nalUnit, nal_buf, nal_size);
array->NAL_unit_type = nal_type;
array->numNalus++;
if (nal_type == OBS_HEVC_NAL_VPS || nal_type == OBS_HEVC_NAL_SPS ||
nal_type == OBS_HEVC_NAL_PPS)
array->array_completeness = ps_array_completeness;
return 0;
}
static void nal_unit_parse_header(HevcGetBitContext *gb, uint8_t *nal_type)
{
skip_bits(gb, 1); // forbidden_zero_bit
*nal_type = get_bits(gb, 6);
get_bits(gb, 9);
}
static void hvcc_update_ptl(HEVCDecoderConfigurationRecord *hvcc,
HVCCProfileTierLevel *ptl)
{
hvcc->general_profile_space = ptl->profile_space;
if (hvcc->general_tier_flag < ptl->tier_flag)
hvcc->general_level_idc = ptl->level_idc;
else
hvcc->general_level_idc =
max_u8(hvcc->general_level_idc, ptl->level_idc);
hvcc->general_tier_flag =
max_u8(hvcc->general_tier_flag, ptl->tier_flag);
hvcc->general_profile_idc =
max_u8(hvcc->general_profile_idc, ptl->profile_idc);
hvcc->general_profile_compatibility_flags &=
ptl->profile_compatibility_flags;
hvcc->general_constraint_indicator_flags &=
ptl->constraint_indicator_flags;
}
static void hvcc_parse_ptl(HevcGetBitContext *gb,
HEVCDecoderConfigurationRecord *hvcc,
unsigned int max_sub_layers_minus1)
{
unsigned int i;
HVCCProfileTierLevel general_ptl;
uint8_t sub_layer_profile_present_flag[7]; // max sublayers
uint8_t sub_layer_level_present_flag[7]; // max sublayers
general_ptl.profile_space = get_bits(gb, 2);
general_ptl.tier_flag = get_bits(gb, 1);
general_ptl.profile_idc = get_bits(gb, 5);
general_ptl.profile_compatibility_flags = get_bits_long(gb, 32);
general_ptl.constraint_indicator_flags = get_bits64(gb, 48);
general_ptl.level_idc = get_bits(gb, 8);
hvcc_update_ptl(hvcc, &general_ptl);
for (i = 0; i < max_sub_layers_minus1; i++) {
sub_layer_profile_present_flag[i] = get_bits(gb, 1);
sub_layer_level_present_flag[i] = get_bits(gb, 1);
}
// skip the rest
if (max_sub_layers_minus1 > 0)
for (i = max_sub_layers_minus1; i < 8; i++)
skip_bits(gb, 2);
for (i = 0; i < max_sub_layers_minus1; i++) {
if (sub_layer_profile_present_flag[i]) {
skip_bits_long(gb, 32);
skip_bits_long(gb, 32);
skip_bits(gb, 24);
}
if (sub_layer_level_present_flag[i])
skip_bits(gb, 8);
}
}
static int hvcc_parse_vps(HevcGetBitContext *gb,
HEVCDecoderConfigurationRecord *hvcc)
{
unsigned int vps_max_sub_layers_minus1;
skip_bits(gb, 12);
vps_max_sub_layers_minus1 = get_bits(gb, 3);
hvcc->numTemporalLayers =
max_u8(hvcc->numTemporalLayers, vps_max_sub_layers_minus1 + 1);
skip_bits(gb, 17);
hvcc_parse_ptl(gb, hvcc, vps_max_sub_layers_minus1);
return 0;
}
#define HEVC_MAX_SHORT_TERM_REF_PIC_SETS 64
static void skip_scaling_list_data(HevcGetBitContext *gb)
{
int i, j, k, num_coeffs;
for (i = 0; i < 4; i++) {
for (j = 0; j < (i == 3 ? 2 : 6); j++) {
if (!get_bits(gb, 1))
get_ue_golomb_long(gb);
else {
num_coeffs = min_i32(64, 1 << (4 + (i << 1)));
if (i > 1)
get_se_golomb_long(gb);
for (k = 0; k < num_coeffs; k++)
get_se_golomb_long(gb);
}
}
}
}
static int
parse_rps(HevcGetBitContext *gb, unsigned int rps_idx, unsigned int num_rps,
unsigned int num_delta_pocs[HEVC_MAX_SHORT_TERM_REF_PIC_SETS])
{
unsigned int i;
if (rps_idx && get_bits(gb, 1)) { // inter_ref_pic_set_prediction_flag
/* this should only happen for slice headers, and this isn't one */
if (rps_idx >= num_rps)
return -1;
get_bits(gb, 1); // delta_rps_sign
get_ue_golomb_long(gb); // abs_delta_rps_minus1
num_delta_pocs[rps_idx] = 0;
for (i = 0; i <= num_delta_pocs[rps_idx - 1]; i++) {
uint8_t use_delta_flag = 0;
uint8_t used_by_curr_pic_flag = get_bits(gb, 1);
if (!used_by_curr_pic_flag)
use_delta_flag = get_bits(gb, 1);
if (used_by_curr_pic_flag || use_delta_flag)
num_delta_pocs[rps_idx]++;
}
} else {
unsigned int num_negative_pics = get_ue_golomb_long(gb);
unsigned int num_positive_pics = get_ue_golomb_long(gb);
if ((num_positive_pics + (uint64_t)num_negative_pics) * 2 >
(uint64_t)get_bits_left(gb))
return -1;
num_delta_pocs[rps_idx] = num_negative_pics + num_positive_pics;
for (i = 0; i < num_negative_pics; i++) {
get_ue_golomb_long(gb); // delta_poc_s0_minus1[rps_idx]
get_bits(gb, 1); // used_by_curr_pic_s0_flag[rps_idx]
}
for (i = 0; i < num_positive_pics; i++) {
get_ue_golomb_long(gb); // delta_poc_s1_minus1[rps_idx]
get_bits(gb, 1); // used_by_curr_pic_s1_flag[rps_idx]
}
}
return 0;
}
static void
skip_sub_layer_hrd_parameters(HevcGetBitContext *gb,
unsigned int cpb_cnt_minus1,
uint8_t sub_pic_hrd_params_present_flag)
{
unsigned int i;
for (i = 0; i <= cpb_cnt_minus1; i++) {
get_ue_golomb_long(gb); // bit_rate_value_minus1
get_ue_golomb_long(gb); // cpb_size_value_minus1
if (sub_pic_hrd_params_present_flag) {
get_ue_golomb_long(gb); // cpb_size_du_value_minus1
get_ue_golomb_long(gb); // bit_rate_du_value_minus1
}
get_bits(gb, 1); // cbr_flag
}
}
static int skip_hrd_parameters(HevcGetBitContext *gb,
uint8_t cprms_present_flag,
unsigned int max_sub_layers_minus1)
{
unsigned int i;
uint8_t sub_pic_hrd_params_present_flag = 0;
uint8_t nal_hrd_parameters_present_flag = 0;
uint8_t vcl_hrd_parameters_present_flag = 0;
if (cprms_present_flag) {
nal_hrd_parameters_present_flag = get_bits(gb, 1);
vcl_hrd_parameters_present_flag = get_bits(gb, 1);
if (nal_hrd_parameters_present_flag ||
vcl_hrd_parameters_present_flag) {
sub_pic_hrd_params_present_flag = get_bits(gb, 1);
if (sub_pic_hrd_params_present_flag)
get_bits(gb, 19);
get_bits(gb, 8);
if (sub_pic_hrd_params_present_flag)
get_bits(gb, 4);
get_bits(gb, 15);
}
}
for (i = 0; i <= max_sub_layers_minus1; i++) {
unsigned int cpb_cnt_minus1 = 0;
uint8_t low_delay_hrd_flag = 0;
uint8_t fixed_pic_rate_within_cvs_flag = 0;
uint8_t fixed_pic_rate_general_flag = get_bits(gb, 1);
if (!fixed_pic_rate_general_flag)
fixed_pic_rate_within_cvs_flag = get_bits(gb, 1);
if (fixed_pic_rate_within_cvs_flag)
get_ue_golomb_long(gb);
else
low_delay_hrd_flag = get_bits(gb, 1);
if (!low_delay_hrd_flag) {
cpb_cnt_minus1 = get_ue_golomb_long(gb);
if (cpb_cnt_minus1 > 31)
return -1;
}
if (nal_hrd_parameters_present_flag)
skip_sub_layer_hrd_parameters(
gb, cpb_cnt_minus1,
sub_pic_hrd_params_present_flag);
if (vcl_hrd_parameters_present_flag)
skip_sub_layer_hrd_parameters(
gb, cpb_cnt_minus1,
sub_pic_hrd_params_present_flag);
}
return 0;
}
static void hvcc_parse_vui(HevcGetBitContext *gb,
HEVCDecoderConfigurationRecord *hvcc,
unsigned int max_sub_layers_minus1)
{
unsigned int min_spatial_segmentation_idc;
if (get_bits(gb, 1)) // aspect_ratio_info_present_flag
if (get_bits(gb, 8) == 255) // aspect_ratio_idc
get_bits_long(gb,
32); // sar_width u(16), sar_height u(16)
if (get_bits(gb, 1)) // overscan_info_present_flag
get_bits(gb, 1); // overscan_appropriate_flag
if (get_bits(gb, 1)) { // video_signal_type_present_flag
get_bits(gb,
4); // video_format u(3), video_full_range_flag u(1)
if (get_bits(gb, 1)) // colour_description_present_flag
get_bits(gb, 24);
}
if (get_bits(gb, 1)) {
get_ue_golomb_long(gb);
get_ue_golomb_long(gb);
}
get_bits(gb, 3);
if (get_bits(gb, 1)) { // default_display_window_flag
get_ue_golomb_long(gb); // def_disp_win_left_offset
get_ue_golomb_long(gb); // def_disp_win_right_offset
get_ue_golomb_long(gb); // def_disp_win_top_offset
get_ue_golomb_long(gb); // def_disp_win_bottom_offset
}
if (get_bits(gb, 1)) { // vui_timing_info_present_flag
// skip timing info
get_bits_long(gb, 32); // num_units_in_tick
get_bits_long(gb, 32); // time_scale
if (get_bits(gb, 1)) // poc_proportional_to_timing_flag
get_ue_golomb_long(gb); // num_ticks_poc_diff_one_minus1
if (get_bits(gb, 1)) // vui_hrd_parameters_present_flag
skip_hrd_parameters(gb, 1, max_sub_layers_minus1);
}
if (get_bits(gb, 1)) { // bitstream_restriction_flag
get_bits(gb, 3);
min_spatial_segmentation_idc = get_ue_golomb_long(gb);
hvcc->min_spatial_segmentation_idc =
min_u16(hvcc->min_spatial_segmentation_idc,
min_spatial_segmentation_idc);
get_ue_golomb_long(gb); // max_bytes_per_pic_denom
get_ue_golomb_long(gb); // max_bits_per_min_cu_denom
get_ue_golomb_long(gb); // log2_max_mv_length_horizontal
get_ue_golomb_long(gb); // log2_max_mv_length_vertical
}
}
static int hvcc_parse_sps(HevcGetBitContext *gb,
HEVCDecoderConfigurationRecord *hvcc)
{
unsigned int i, sps_max_sub_layers_minus1,
log2_max_pic_order_cnt_lsb_minus4;
unsigned int num_short_term_ref_pic_sets,
num_delta_pocs[HEVC_MAX_SHORT_TERM_REF_PIC_SETS];
get_bits(gb, 4); // sps_video_parameter_set_id
sps_max_sub_layers_minus1 = get_bits(gb, 3);
hvcc->numTemporalLayers =
max_u8(hvcc->numTemporalLayers, sps_max_sub_layers_minus1 + 1);
hvcc->temporalIdNested = get_bits(gb, 1);
hvcc_parse_ptl(gb, hvcc, sps_max_sub_layers_minus1);
get_ue_golomb_long(gb); // sps_seq_parameter_set_id
hvcc->chromaFormat = get_ue_golomb_long(gb);
if (hvcc->chromaFormat == 3)
get_bits(gb, 1); // separate_colour_plane_flag
get_ue_golomb_long(gb); // pic_width_in_luma_samples
get_ue_golomb_long(gb); // pic_height_in_luma_samples
if (get_bits(gb, 1)) { // conformance_window_flag
get_ue_golomb_long(gb); // conf_win_left_offset
get_ue_golomb_long(gb); // conf_win_right_offset
get_ue_golomb_long(gb); // conf_win_top_offset
get_ue_golomb_long(gb); // conf_win_bottom_offset
}
hvcc->bitDepthLumaMinus8 = get_ue_golomb_long(gb);
hvcc->bitDepthChromaMinus8 = get_ue_golomb_long(gb);
log2_max_pic_order_cnt_lsb_minus4 = get_ue_golomb_long(gb);
/* sps_sub_layer_ordering_info_present_flag */
i = get_bits(gb, 1) ? 0 : sps_max_sub_layers_minus1;
for (; i <= sps_max_sub_layers_minus1; i++) {
get_ue_golomb_long(gb); // max_dec_pic_buffering_minus1
get_ue_golomb_long(gb); // max_num_reorder_pics
get_ue_golomb_long(gb); // max_latency_increase_plus1
}
get_ue_golomb_long(gb); // log2_min_luma_coding_block_size_minus3
get_ue_golomb_long(gb); // log2_diff_max_min_luma_coding_block_size
get_ue_golomb_long(gb); // log2_min_transform_block_size_minus2
get_ue_golomb_long(gb); // log2_diff_max_min_transform_block_size
get_ue_golomb_long(gb); // max_transform_hierarchy_depth_inter
get_ue_golomb_long(gb); // max_transform_hierarchy_depth_intra
if (get_bits(gb, 1) && // scaling_list_enabled_flag
get_bits(gb, 1)) // sps_scaling_list_data_present_flag
skip_scaling_list_data(gb);
get_bits(gb, 1); // amp_enabled_flag
get_bits(gb, 1); // sample_adaptive_offset_enabled_flag
if (get_bits(gb, 1)) { // pcm_enabled_flag
get_bits(gb, 4); // pcm_sample_bit_depth_luma_minus1
get_bits(gb, 4); // pcm_sample_bit_depth_chroma_minus1
get_ue_golomb_long(
gb); // log2_min_pcm_luma_coding_block_size_minus3
get_ue_golomb_long(
gb); // log2_diff_max_min_pcm_luma_coding_block_size
get_bits(gb, 1); // pcm_loop_filter_disabled_flag
}
num_short_term_ref_pic_sets = get_ue_golomb_long(gb);
if (num_short_term_ref_pic_sets > HEVC_MAX_SHORT_TERM_REF_PIC_SETS)
return -1;
for (i = 0; i < num_short_term_ref_pic_sets; i++) {
int ret = parse_rps(gb, i, num_short_term_ref_pic_sets,
num_delta_pocs);
if (ret < 0)
return ret;
}
if (get_bits(gb, 1)) { // long_term_ref_pics_present_flag
unsigned num_long_term_ref_pics_sps = get_ue_golomb_long(gb);
if (num_long_term_ref_pics_sps > 31U)
return -1;
for (i = 0; i < num_long_term_ref_pics_sps;
i++) { // num_long_term_ref_pics_sps
int len = min_i32(log2_max_pic_order_cnt_lsb_minus4 + 4,
16);
get_bits(gb, len); // lt_ref_pic_poc_lsb_sps[i]
get_bits(gb, 1); // used_by_curr_pic_lt_sps_flag[i]
}
}
get_bits(gb, 1); // sps_temporal_mvp_enabled_flag
get_bits(gb, 1); // strong_intra_smoothing_enabled_flag
if (get_bits(gb, 1)) // vui_parameters_present_flag
hvcc_parse_vui(gb, hvcc, sps_max_sub_layers_minus1);
/* nothing useful for hvcC past this point */
return 0;
}
static int hvcc_parse_pps(HevcGetBitContext *gb,
HEVCDecoderConfigurationRecord *hvcc)
{
uint8_t tiles_enabled_flag, entropy_coding_sync_enabled_flag;
get_ue_golomb_long(gb); // pps_pic_parameter_set_id
get_ue_golomb_long(gb); // pps_seq_parameter_set_id
get_bits(gb, 7);
get_ue_golomb_long(gb); // num_ref_idx_l0_default_active_minus1
get_ue_golomb_long(gb); // num_ref_idx_l1_default_active_minus1
get_se_golomb_long(gb); // init_qp_minus26
get_bits(gb, 2);
if (get_bits(gb, 1)) // cu_qp_delta_enabled_flag
get_ue_golomb_long(gb); // diff_cu_qp_delta_depth
get_se_golomb_long(gb); // pps_cb_qp_offset
get_se_golomb_long(gb); // pps_cr_qp_offset
get_bits(gb, 4);
tiles_enabled_flag = get_bits(gb, 1);
entropy_coding_sync_enabled_flag = get_bits(gb, 1);
if (entropy_coding_sync_enabled_flag && tiles_enabled_flag)
hvcc->parallelismType = 0; // mixed-type parallel decoding
else if (entropy_coding_sync_enabled_flag)
hvcc->parallelismType = 3; // wavefront-based parallel decoding
else if (tiles_enabled_flag)
hvcc->parallelismType = 2; // tile-based parallel decoding
else
hvcc->parallelismType = 1; // slice-based parallel decoding
/* nothing useful for hvcC past this point */
return 0;
}
static int hvcc_add_nal_unit(uint8_t *nal_buf, uint32_t nal_size,
int ps_array_completeness,
HEVCDecoderConfigurationRecord *hvcc,
unsigned array_idx)
{
int ret = 0;
HevcGetBitContext gbc;
uint8_t nal_type;
uint8_t *rbsp_buf;
uint32_t rbsp_size;
uint8_t *dst;
dst = bmalloc(nal_size + 64);
rbsp_buf =
ff_nal_unit_extract_rbsp(dst, nal_buf, nal_size, &rbsp_size, 2);
if (!rbsp_buf) {
ret = -1;
goto end;
}
ret = init_get_bits8(&gbc, rbsp_buf, rbsp_size);
if (ret < 0)
goto end;
nal_unit_parse_header(&gbc, &nal_type);
ret = hvcc_array_add_nal_unit(nal_buf, nal_size, nal_type,
ps_array_completeness,
&hvcc->arrays[array_idx]);
if (ret < 0)
goto end;
if (hvcc->arrays[array_idx].numNalus == 1)
hvcc->numOfArrays++;
if (nal_type == OBS_HEVC_NAL_VPS)
ret = hvcc_parse_vps(&gbc, hvcc);
else if (nal_type == OBS_HEVC_NAL_SPS)
ret = hvcc_parse_sps(&gbc, hvcc);
else if (nal_type == OBS_HEVC_NAL_PPS)
ret = hvcc_parse_pps(&gbc, hvcc);
if (ret < 0)
goto end;
end:
bfree(dst);
return ret;
}
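/* Convert Annex B extradata (VPS/SPS/PPS/SEI NAL units with start codes)
 * into an HEVCDecoderConfigurationRecord (hvcC) suitable for the FLV
 * sequence header; data without start codes is passed through unchanged. */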
size_t obs_parse_hevc_header(uint8_t **header, const uint8_t *data, size_t size)
{
const uint8_t *start;
const uint8_t *end;
if (!has_start_code(data, size)) {
*header = bmemdup(data, size);
return size;
}
if (size < 6)
return 0; // invalid
if (*data == 1) { // already hvcC-formatted
*header = bmemdup(data, size);
return size;
}
struct array_output_data nals;
struct serializer sn;
array_output_serializer_init(&sn, &nals);
const uint8_t *nal_start, *nal_end;
start = data;
end = data + size;
size = 0; // reset size
nal_start = obs_nal_find_startcode(start, end);
for (;;) {
while (nal_start < end && !*(nal_start++))
;
if (nal_start == end)
break;
nal_end = obs_nal_find_startcode(nal_start, end);
assert(nal_end - nal_start <= INT_MAX);
s_wb32(&sn, (uint32_t)(nal_end - nal_start));
s_write(&sn, nal_start, nal_end - nal_start);
size += 4 + nal_end - nal_start;
nal_start = nal_end;
}
if (size == 0)
goto done;
start = nals.bytes.array;
end = nals.bytes.array + nals.bytes.num;
// HVCC init
HEVCDecoderConfigurationRecord hvcc;
memset(&hvcc, 0, sizeof(HEVCDecoderConfigurationRecord));
hvcc.lengthSizeMinusOne = 3; // 4 bytes
hvcc.general_profile_compatibility_flags = 0xffffffff; // all bits set
hvcc.general_constraint_indicator_flags =
0xffffffffffff; // all bits set
hvcc.min_spatial_segmentation_idc = 4096 + 1; // assume invalid value
for (unsigned i = 0; i < OBS_NB_ARRAYS; i++) {
HVCCNALUnitArray *const array = &hvcc.arrays[i];
array_output_serializer_init(&array->nalUnit,
&array->nalUnitData);
}
uint8_t *buf = (uint8_t *)start;
while (end - buf > 4) {
uint32_t len = rb32(buf);
assert((end - buf - 4) <= INT_MAX);
len = min_u32(len, (uint32_t)(end - buf - 4));
uint8_t type = (buf[4] >> 1) & 0x3f;
buf += 4;
for (unsigned i = 0; i < OBS_NB_ARRAYS; i++) {
static const uint8_t array_idx_to_type[] = {
OBS_HEVC_NAL_VPS, OBS_HEVC_NAL_SPS,
OBS_HEVC_NAL_PPS, OBS_HEVC_NAL_SEI_PREFIX,
OBS_HEVC_NAL_SEI_SUFFIX};
if (type == array_idx_to_type[i]) {
int ps_array_completeness = 0;
int ret = hvcc_add_nal_unit(
buf, len, ps_array_completeness, &hvcc,
i);
if (ret < 0)
goto free;
break;
}
}
buf += len;
}
// write hvcc data
uint16_t vps_count, sps_count, pps_count;
if (hvcc.min_spatial_segmentation_idc > 4096) // invalid?
hvcc.min_spatial_segmentation_idc = 0;
if (!hvcc.min_spatial_segmentation_idc)
hvcc.parallelismType = 0;
hvcc.avgFrameRate = 0;
hvcc.constantFrameRate = 0;
vps_count = hvcc.arrays[OBS_VPS_INDEX].numNalus;
sps_count = hvcc.arrays[OBS_SPS_INDEX].numNalus;
pps_count = hvcc.arrays[OBS_PPS_INDEX].numNalus;
if (!vps_count || vps_count > OBS_HEVC_MAX_VPS_COUNT || !sps_count ||
sps_count > OBS_HEVC_MAX_SPS_COUNT || !pps_count ||
pps_count > OBS_HEVC_MAX_PPS_COUNT)
goto free;
struct array_output_data output;
struct serializer s;
array_output_serializer_init(&s, &output);
s_w8(&s, 1); // configurationVersion, always 1
s_w8(&s, hvcc.general_profile_space << 6 | hvcc.general_tier_flag << 5 |
hvcc.general_profile_idc);
s_wb32(&s, hvcc.general_profile_compatibility_flags);
s_wb32(&s, (uint32_t)(hvcc.general_constraint_indicator_flags >> 16));
s_wb16(&s, (uint16_t)(hvcc.general_constraint_indicator_flags));
s_w8(&s, hvcc.general_level_idc);
s_wb16(&s, hvcc.min_spatial_segmentation_idc | 0xf000);
s_w8(&s, hvcc.parallelismType | 0xfc);
s_w8(&s, hvcc.chromaFormat | 0xfc);
s_w8(&s, hvcc.bitDepthLumaMinus8 | 0xf8);
s_w8(&s, hvcc.bitDepthChromaMinus8 | 0xf8);
s_wb16(&s, hvcc.avgFrameRate);
s_w8(&s, hvcc.constantFrameRate << 6 | hvcc.numTemporalLayers << 3 |
hvcc.temporalIdNested << 2 | hvcc.lengthSizeMinusOne);
s_w8(&s, hvcc.numOfArrays);
for (unsigned i = 0; i < OBS_NB_ARRAYS; i++) {
const HVCCNALUnitArray *const array = &hvcc.arrays[i];
if (!array->numNalus)
continue;
s_w8(&s, (array->array_completeness << 7) |
(array->NAL_unit_type & 0x3f));
s_wb16(&s, array->numNalus);
s_write(&s, array->nalUnitData.bytes.array,
array->nalUnitData.bytes.num);
}
*header = output.bytes.array;
size = output.bytes.num;
free:
for (unsigned i = 0; i < OBS_NB_ARRAYS; i++) {
HVCCNALUnitArray *const array = &hvcc.arrays[i];
array->numNalus = 0;
array_output_serializer_free(&array->nalUnitData);
}
done:
array_output_serializer_free(&nals);
return size;
}

View File

@ -0,0 +1,24 @@
/******************************************************************************
Copyright (C) 2023 by Hugh Bailey <obs.jim@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
#pragma once
#include <stdint.h>
#include <stddef.h>
extern size_t obs_parse_hevc_header(uint8_t **header, const uint8_t *data,
size_t size);

View File

@ -16,6 +16,12 @@
******************************************************************************/
#include "rtmp-stream.h"
#include "rtmp-av1.h"
#include "rtmp-hevc.h"
#include <obs-avc.h>
#include <obs-hevc.h>
#ifdef _WIN32
#include <util/windows/win-version.h>
#endif
@ -415,17 +421,10 @@ retry_send:
return len;
}
static int handle_socket_read(struct rtmp_stream *stream)
{
	int ret = 0;
	int recv_size = 0;
if (!stream->new_socket_loop) {
#ifdef _WIN32
ret = ioctlsocket(stream->rtmp.m_sb.sb_socket, FIONREAD,
@ -439,6 +438,20 @@ static int send_packet(struct rtmp_stream *stream,
return -1;
}
}
return 0;
}
static int send_packet(struct rtmp_stream *stream,
struct encoder_packet *packet, bool is_header,
size_t idx)
{
uint8_t *data;
size_t size;
int ret = 0;
assert(idx < RTMP_MAX_STREAMS);
if (handle_socket_read(stream))
return -1;
if (idx > 0) {
flv_additional_packet_mux(
@ -465,7 +478,46 @@ static int send_packet(struct rtmp_stream *stream,
return ret;
}
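/* Enhanced-RTMP send path: header packets are sent as PACKETTYPE_SEQ_START,
 * footers as PACKETTYPE_SEQ_END, and everything else as coded frames. */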
static int send_packet_ex(struct rtmp_stream *stream,
struct encoder_packet *packet, bool is_header,
bool is_footer)
{
uint8_t *data;
size_t size = 0;
int ret = 0;
if (handle_socket_read(stream))
return -1;
if (is_header) {
flv_packet_start(packet, stream->video_codec,
stream->start_dts_offset, &data, &size);
} else if (is_footer) {
flv_packet_end(packet, stream->video_codec,
stream->start_dts_offset, &data, &size);
} else {
flv_packet_frames(packet, stream->video_codec,
stream->start_dts_offset, &data, &size);
}
#ifdef TEST_FRAMEDROPS
droptest_cap_data_rate(stream, size);
#endif
ret = RTMP_Write(&stream->rtmp, (char *)data, (int)size, 0);
bfree(data);
if (is_header || is_footer) // manually created packets
bfree(packet->data);
else
obs_encoder_packet_release(packet);
stream->total_bytes_sent += size;
return ret;
}
static inline bool send_headers(struct rtmp_stream *stream);
static inline bool send_footers(struct rtmp_stream *stream);
static inline bool can_shutdown_stream(struct rtmp_stream *stream,
struct encoder_packet *packet)
@ -650,7 +702,16 @@ static void *send_thread(void *data)
dbr_frame.size = packet.size;
}
		int sent;
if (packet.type == OBS_ENCODER_VIDEO &&
stream->video_codec != CODEC_H264) {
sent = send_packet_ex(stream, &packet, false, false);
} else {
sent = send_packet(stream, &packet, false,
packet.track_idx);
}
if (sent < 0) {
os_atomic_set_bool(&stream->disconnected, true);
break;
}
@ -670,10 +731,12 @@ static void *send_thread(void *data)
info("Disconnected from %s", stream->path.array);
} else if (encode_error) {
info("Encoder error, disconnecting");
send_footers(stream); // Y2023 spec
} else if (silently_reconnecting(stream)) {
info("Silent reconnect signal received from server");
} else {
info("User stopped the stream");
send_footers(stream); // Y2023 spec
}
#if defined(_WIN32)
@ -803,8 +866,113 @@ static bool send_video_header(struct rtmp_stream *stream)
if (!obs_encoder_get_extra_data(vencoder, &header, &size))
return false;
	switch (stream->video_codec) {
case CODEC_H264:
packet.size = obs_parse_avc_header(&packet.data, header, size);
return send_packet(stream, &packet, true, 0) >= 0;
#ifdef ENABLE_HEVC
case CODEC_HEVC:
packet.size = obs_parse_hevc_header(&packet.data, header, size);
return send_packet_ex(stream, &packet, true, 0) >= 0;
#endif
case CODEC_AV1:
packet.size = obs_parse_av1_header(&packet.data, header, size);
return send_packet_ex(stream, &packet, true, 0) >= 0;
}
return false;
}
static bool send_video_metadata(struct rtmp_stream *stream)
{
if (handle_socket_read(stream))
		return false;
// Y2023 spec
if (stream->video_codec != CODEC_H264) {
uint8_t *data;
size_t size;
video_t *video = obs_get_video();
const struct video_output_info *info =
video_output_get_info(video);
enum video_format format = info->format;
enum video_colorspace colorspace = info->colorspace;
int bits_per_raw_sample;
switch (format) {
case VIDEO_FORMAT_I010:
case VIDEO_FORMAT_P010:
case VIDEO_FORMAT_I210:
bits_per_raw_sample = 10;
break;
case VIDEO_FORMAT_I412:
case VIDEO_FORMAT_YA2L:
bits_per_raw_sample = 12;
break;
default:
bits_per_raw_sample = 8;
}
int pri = 0, trc = 0, spc = 0;
switch (colorspace) {
case VIDEO_CS_601:
pri = OBSCOL_PRI_SMPTE170M;
trc = OBSCOL_PRI_SMPTE170M;
spc = OBSCOL_PRI_SMPTE170M;
break;
case VIDEO_CS_DEFAULT:
case VIDEO_CS_709:
pri = OBSCOL_PRI_BT709;
trc = OBSCOL_PRI_BT709;
spc = OBSCOL_PRI_BT709;
break;
case VIDEO_CS_SRGB:
pri = OBSCOL_PRI_BT709;
trc = OBSCOL_TRC_IEC61966_2_1;
spc = OBSCOL_PRI_BT709;
break;
case VIDEO_CS_2100_PQ:
pri = OBSCOL_PRI_BT2020;
trc = OBSCOL_TRC_SMPTE2084;
spc = OBSCOL_SPC_BT2020_NCL;
break;
case VIDEO_CS_2100_HLG:
pri = OBSCOL_PRI_BT2020;
trc = OBSCOL_TRC_ARIB_STD_B67;
spc = OBSCOL_SPC_BT2020_NCL;
}
int max_luminance = 0;
if (trc == OBSCOL_TRC_ARIB_STD_B67)
max_luminance = 1000;
else if (trc == OBSCOL_TRC_SMPTE2084)
max_luminance =
(int)obs_get_video_hdr_nominal_peak_level();
flv_packet_metadata(stream->video_codec, &data, &size,
bits_per_raw_sample, pri, trc, spc, 0,
max_luminance);
int ret = RTMP_Write(&stream->rtmp, (char *)data, (int)size, 0);
bfree(data);
stream->total_bytes_sent += size;
return ret >= 0;
}
// legacy
return true;
}
static bool send_video_footer(struct rtmp_stream *stream)
{
struct encoder_packet packet = {.type = OBS_ENCODER_VIDEO,
.timebase_den = 1,
.keyframe = false};
packet.size = 0;
return send_packet_ex(stream, &packet, 0, true) >= 0;
}
static inline bool send_headers(struct rtmp_stream *stream)
@ -818,6 +986,14 @@ static inline bool send_headers(struct rtmp_stream *stream)
if (!send_video_header(stream))
return false;
// send metadata only if HDR
video_t *video = obs_get_video();
const struct video_output_info *info = video_output_get_info(video);
enum video_colorspace colorspace = info->colorspace;
if (colorspace == VIDEO_CS_2100_PQ || colorspace == VIDEO_CS_2100_HLG)
if (!send_video_metadata(stream)) // Y2023 spec
return false;
while (next) {
if (!send_audio_header(stream, i++, &next))
return false;
@ -826,6 +1002,15 @@ static inline bool send_headers(struct rtmp_stream *stream)
return true;
}
static inline bool send_footers(struct rtmp_stream *stream)
{
if (stream->video_codec == CODEC_H264)
return false;
// Y2023 spec
return send_video_footer(stream);
}
static inline bool reset_semaphore(struct rtmp_stream *stream)
{
os_sem_destroy(stream->send_sem);
@ -1158,6 +1343,9 @@ static bool init_connect(struct rtmp_stream *stream)
obs_data_t *vsettings = obs_encoder_get_settings(venc);
obs_data_t *asettings = obs_encoder_get_settings(aenc);
const char *codec = obs_encoder_get_codec(venc);
stream->video_codec = to_video_type(codec);
circlebuf_free(&stream->dbr_frames);
stream->audio_bitrate = (long)obs_data_get_int(asettings, "bitrate");
stream->dbr_data_size = 0;
@ -1547,7 +1735,19 @@ static void rtmp_stream_data(void *data, struct encoder_packet *packet)
stream->got_first_video = true;
}
		switch (stream->video_codec) {
case CODEC_H264:
obs_parse_avc_packet(&new_packet, packet);
break;
#ifdef ENABLE_HEVC
case CODEC_HEVC:
obs_parse_hevc_packet(&new_packet, packet);
break;
#endif
case CODEC_AV1:
obs_parse_av1_packet(&new_packet, packet);
break;
}
} else {
obs_encoder_packet_ref(&new_packet, packet);
}
@ -1655,7 +1855,11 @@ struct obs_output_info rtmp_output_info = {
#else
.protocols = "RTMP;RTMPS",
#endif
#ifdef ENABLE_HEVC
.encoded_video_codecs = "h264;hevc;av1",
#else
.encoded_video_codecs = "h264;av1",
#endif
.encoded_audio_codecs = "aac",
.get_name = rtmp_stream_getname,
.create = rtmp_stream_create,

View File

@ -1,5 +1,4 @@
#include <obs-module.h>
#include <util/platform.h>
#include <util/circlebuf.h>
#include <util/dstr.h>
@ -114,6 +113,8 @@ struct rtmp_stream {
long dbr_inc_bitrate;
bool dbr_enabled;
enum video_id_t video_codec;
RTMP rtmp;
bool new_socket_loop;
@ -134,3 +135,110 @@ struct rtmp_stream {
#ifdef _WIN32
void *socket_thread_windows(void *data);
#endif
/* Adapted from FFmpeg's libavutil/pixfmt.h
*
* Renamed to make it apparent that these are not imported as this module does
* not use or link against FFmpeg.
*/
/**
* Chromaticity coordinates of the source primaries.
* These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.1 and ITU-T H.273.
*/
enum OBSColorPrimaries {
OBSCOL_PRI_RESERVED0 = 0,
OBSCOL_PRI_BT709 =
1, ///< also ITU-R BT1361 / IEC 61966-2-4 / SMPTE RP 177 Annex B
OBSCOL_PRI_UNSPECIFIED = 2,
OBSCOL_PRI_RESERVED = 3,
OBSCOL_PRI_BT470M =
4, ///< also FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
OBSCOL_PRI_BT470BG =
5, ///< also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM
OBSCOL_PRI_SMPTE170M =
6, ///< also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC
OBSCOL_PRI_SMPTE240M =
7, ///< identical to above, also called "SMPTE C" even though it uses D65
OBSCOL_PRI_FILM = 8, ///< colour filters using Illuminant C
OBSCOL_PRI_BT2020 = 9, ///< ITU-R BT2020
OBSCOL_PRI_SMPTE428 = 10, ///< SMPTE ST 428-1 (CIE 1931 XYZ)
OBSCOL_PRI_SMPTEST428_1 = OBSCOL_PRI_SMPTE428,
OBSCOL_PRI_SMPTE431 = 11, ///< SMPTE ST 431-2 (2011) / DCI P3
OBSCOL_PRI_SMPTE432 =
12, ///< SMPTE ST 432-1 (2010) / P3 D65 / Display P3
OBSCOL_PRI_EBU3213 =
22, ///< EBU Tech. 3213-E (nothing there) / one of JEDEC P22 group phosphors
OBSCOL_PRI_JEDEC_P22 = OBSCOL_PRI_EBU3213,
OBSCOL_PRI_NB ///< Not part of ABI
};
/**
* Color Transfer Characteristic.
* These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.2.
*/
enum OBSColorTransferCharacteristic {
OBSCOL_TRC_RESERVED0 = 0,
OBSCOL_TRC_BT709 = 1, ///< also ITU-R BT1361
OBSCOL_TRC_UNSPECIFIED = 2,
OBSCOL_TRC_RESERVED = 3,
OBSCOL_TRC_GAMMA22 =
4, ///< also ITU-R BT470M / ITU-R BT1700 625 PAL & SECAM
OBSCOL_TRC_GAMMA28 = 5, ///< also ITU-R BT470BG
OBSCOL_TRC_SMPTE170M =
6, ///< also ITU-R BT601-6 525 or 625 / ITU-R BT1358 525 or 625 / ITU-R BT1700 NTSC
OBSCOL_TRC_SMPTE240M = 7,
OBSCOL_TRC_LINEAR = 8, ///< "Linear transfer characteristics"
OBSCOL_TRC_LOG =
9, ///< "Logarithmic transfer characteristic (100:1 range)"
OBSCOL_TRC_LOG_SQRT =
10, ///< "Logarithmic transfer characteristic (100 * Sqrt(10) : 1 range)"
OBSCOL_TRC_IEC61966_2_4 = 11, ///< IEC 61966-2-4
OBSCOL_TRC_BT1361_ECG = 12, ///< ITU-R BT1361 Extended Colour Gamut
OBSCOL_TRC_IEC61966_2_1 = 13, ///< IEC 61966-2-1 (sRGB or sYCC)
OBSCOL_TRC_BT2020_10 = 14, ///< ITU-R BT2020 for 10-bit system
OBSCOL_TRC_BT2020_12 = 15, ///< ITU-R BT2020 for 12-bit system
OBSCOL_TRC_SMPTE2084 =
16, ///< SMPTE ST 2084 for 10-, 12-, 14- and 16-bit systems
OBSCOL_TRC_SMPTEST2084 = OBSCOL_TRC_SMPTE2084,
OBSCOL_TRC_SMPTE428 = 17, ///< SMPTE ST 428-1
OBSCOL_TRC_SMPTEST428_1 = OBSCOL_TRC_SMPTE428,
OBSCOL_TRC_ARIB_STD_B67 =
18, ///< ARIB STD-B67, known as "Hybrid log-gamma"
OBSCOL_TRC_NB ///< Not part of ABI
};
/**
* YUV colorspace type.
* These values match the ones defined by ISO/IEC 23091-2_2019 subclause 8.3.
*/
enum OBSColorSpace {
OBSCOL_SPC_RGB =
0, ///< order of coefficients is actually GBR, also IEC 61966-2-1 (sRGB), YZX and ST 428-1
OBSCOL_SPC_BT709 =
1, ///< also ITU-R BT1361 / IEC 61966-2-4 xvYCC709 / derived in SMPTE RP 177 Annex B
OBSCOL_SPC_UNSPECIFIED = 2,
OBSCOL_SPC_RESERVED =
3, ///< reserved for future use by ITU-T and ISO/IEC just like 15-255 are
OBSCOL_SPC_FCC =
4, ///< FCC Title 47 Code of Federal Regulations 73.682 (a)(20)
OBSCOL_SPC_BT470BG =
5, ///< also ITU-R BT601-6 625 / ITU-R BT1358 625 / ITU-R BT1700 625 PAL & SECAM / IEC 61966-2-4 xvYCC601
OBSCOL_SPC_SMPTE170M =
6, ///< also ITU-R BT601-6 525 / ITU-R BT1358 525 / ITU-R BT1700 NTSC / functionally identical to above
OBSCOL_SPC_SMPTE240M =
7, ///< derived from 170M primaries and D65 white point, 170M is derived from BT470 System M's primaries
OBSCOL_SPC_YCGCO =
8, ///< used by Dirac / VC-2 and H.264 FRext, see ITU-T SG16
OBSCOL_SPC_YCOCG = OBSCOL_SPC_YCGCO,
OBSCOL_SPC_BT2020_NCL =
9, ///< ITU-R BT2020 non-constant luminance system
OBSCOL_SPC_BT2020_CL = 10, ///< ITU-R BT2020 constant luminance system
OBSCOL_SPC_SMPTE2085 = 11, ///< SMPTE 2085, Y'D'zD'x
OBSCOL_SPC_CHROMA_DERIVED_NCL =
12, ///< Chromaticity-derived non-constant luminance system
OBSCOL_SPC_CHROMA_DERIVED_CL =
13, ///< Chromaticity-derived constant luminance system
OBSCOL_SPC_ICTCP = 14, ///< ITU-R BT.2100-0, ICtCp
OBSCOL_SPC_NB ///< Not part of ABI
};

View File

@ -0,0 +1,72 @@
#pragma once

#include <stdint.h>

#if defined(__GNUC__) || defined(__clang__)
static inline uint32_t clz32(unsigned long val)
{
return __builtin_clz(val);
}
static inline uint32_t ctz32(unsigned long val)
{
return __builtin_ctz(val);
}
#elif defined(_MSC_VER) && _MSC_VER >= 1400
#include <intrin.h>
static inline uint32_t clz32(unsigned long val)
{
/* __lzcnt() / _lzcnt_u32() do not work correctly on older Intel CPUs,
* so use BSR instead for better compatibility. */
uint32_t zeros = 0;
_BitScanReverse(&zeros, val);
return 31 - zeros;
}
static inline uint32_t ctz32(unsigned long val)
{
return _tzcnt_u32(val);
}
#else
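/* Portable fallback: SWAR popcount used to derive clz32/ctz32. */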
static uint32_t popcnt(uint32_t x)
{
x -= ((x >> 1) & 0x55555555);
x = (((x >> 2) & 0x33333333) + (x & 0x33333333));
x = (((x >> 4) + x) & 0x0f0f0f0f);
x += (x >> 8);
x += (x >> 16);
return x & 0x0000003f;
}
static uint32_t clz32(uint32_t x)
{
x |= (x >> 1);
x |= (x >> 2);
x |= (x >> 4);
x |= (x >> 8);
x |= (x >> 16);
return 32 - popcnt(x);
}
static uint32_t ctz32(uint32_t x)
{
return popcnt((x & -x) - 1);
}
#endif
static inline uint32_t min_u32(uint32_t a, uint32_t b)
{
return (a < b) ? a : b;
}
static inline uint16_t min_u16(uint16_t a, uint16_t b)
{
return (a < b) ? a : b;
}
static inline int32_t min_i32(int32_t a, int32_t b)
{
return (a < b) ? a : b;
}
static inline uint8_t max_u8(uint8_t a, uint8_t b)
{
return (a > b) ? a : b;
}