#![allow(non_camel_case_types, non_upper_case_globals, non_snake_case)]
#![allow(
clippy::approx_constant,
clippy::type_complexity,
clippy::unreadable_literal
)]
extern crate glib_sys as glib;
extern crate gobject_sys as gobject;
extern crate gstreamer_base_sys as gst_base;
extern crate gstreamer_sys as gst;
extern crate libc;
#[allow(unused_imports)]
use libc::{
c_char, c_double, c_float, c_int, c_long, c_short, c_uchar, c_uint, c_ulong, c_ushort, c_void,
intptr_t, size_t, ssize_t, time_t, uintptr_t, FILE,
};
#[allow(unused_imports)]
use glib::{gboolean, gconstpointer, gpointer, GType};
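// Binding style used throughout this file: C enumerations become plain
// integer type aliases (`c_int` / `c_uint`) with one `pub const` per member,
// flag types stay as `c_uint` so they combine with bitwise operators, and C
// string defines are NUL-terminated byte literals cast to `*const c_char`,
// ready to be passed to the C API unchanged.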
pub type GstColorBalanceType = c_int;
pub const GST_COLOR_BALANCE_HARDWARE: GstColorBalanceType = 0;
pub const GST_COLOR_BALANCE_SOFTWARE: GstColorBalanceType = 1;
pub type GstNavigationCommand = c_int;
pub const GST_NAVIGATION_COMMAND_INVALID: GstNavigationCommand = 0;
pub const GST_NAVIGATION_COMMAND_MENU1: GstNavigationCommand = 1;
pub const GST_NAVIGATION_COMMAND_MENU2: GstNavigationCommand = 2;
pub const GST_NAVIGATION_COMMAND_MENU3: GstNavigationCommand = 3;
pub const GST_NAVIGATION_COMMAND_MENU4: GstNavigationCommand = 4;
pub const GST_NAVIGATION_COMMAND_MENU5: GstNavigationCommand = 5;
pub const GST_NAVIGATION_COMMAND_MENU6: GstNavigationCommand = 6;
pub const GST_NAVIGATION_COMMAND_MENU7: GstNavigationCommand = 7;
pub const GST_NAVIGATION_COMMAND_LEFT: GstNavigationCommand = 20;
pub const GST_NAVIGATION_COMMAND_RIGHT: GstNavigationCommand = 21;
pub const GST_NAVIGATION_COMMAND_UP: GstNavigationCommand = 22;
pub const GST_NAVIGATION_COMMAND_DOWN: GstNavigationCommand = 23;
pub const GST_NAVIGATION_COMMAND_ACTIVATE: GstNavigationCommand = 24;
pub const GST_NAVIGATION_COMMAND_PREV_ANGLE: GstNavigationCommand = 30;
pub const GST_NAVIGATION_COMMAND_NEXT_ANGLE: GstNavigationCommand = 31;
pub type GstNavigationEventType = c_int;
pub const GST_NAVIGATION_EVENT_INVALID: GstNavigationEventType = 0;
pub const GST_NAVIGATION_EVENT_KEY_PRESS: GstNavigationEventType = 1;
pub const GST_NAVIGATION_EVENT_KEY_RELEASE: GstNavigationEventType = 2;
pub const GST_NAVIGATION_EVENT_MOUSE_BUTTON_PRESS: GstNavigationEventType = 3;
pub const GST_NAVIGATION_EVENT_MOUSE_BUTTON_RELEASE: GstNavigationEventType = 4;
pub const GST_NAVIGATION_EVENT_MOUSE_MOVE: GstNavigationEventType = 5;
pub const GST_NAVIGATION_EVENT_COMMAND: GstNavigationEventType = 6;
pub type GstNavigationMessageType = c_int;
pub const GST_NAVIGATION_MESSAGE_INVALID: GstNavigationMessageType = 0;
pub const GST_NAVIGATION_MESSAGE_MOUSE_OVER: GstNavigationMessageType = 1;
pub const GST_NAVIGATION_MESSAGE_COMMANDS_CHANGED: GstNavigationMessageType = 2;
pub const GST_NAVIGATION_MESSAGE_ANGLES_CHANGED: GstNavigationMessageType = 3;
pub const GST_NAVIGATION_MESSAGE_EVENT: GstNavigationMessageType = 4;
pub type GstNavigationQueryType = c_int;
pub const GST_NAVIGATION_QUERY_INVALID: GstNavigationQueryType = 0;
pub const GST_NAVIGATION_QUERY_COMMANDS: GstNavigationQueryType = 1;
pub const GST_NAVIGATION_QUERY_ANGLES: GstNavigationQueryType = 2;
pub type GstVideoAlphaMode = c_int;
pub const GST_VIDEO_ALPHA_MODE_COPY: GstVideoAlphaMode = 0;
pub const GST_VIDEO_ALPHA_MODE_SET: GstVideoAlphaMode = 1;
pub const GST_VIDEO_ALPHA_MODE_MULT: GstVideoAlphaMode = 2;
pub type GstVideoAncillaryDID = c_int;
pub const GST_VIDEO_ANCILLARY_DID_UNDEFINED: GstVideoAncillaryDID = 0;
pub const GST_VIDEO_ANCILLARY_DID_DELETION: GstVideoAncillaryDID = 128;
pub const GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_FIRST: GstVideoAncillaryDID = 160;
pub const GST_VIDEO_ANCILLARY_DID_HANC_3G_AUDIO_DATA_LAST: GstVideoAncillaryDID = 167;
pub const GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_FIRST: GstVideoAncillaryDID = 224;
pub const GST_VIDEO_ANCILLARY_DID_HANC_HDTV_AUDIO_DATA_LAST: GstVideoAncillaryDID = 231;
pub const GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_FIRST: GstVideoAncillaryDID = 236;
pub const GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_1_LAST: GstVideoAncillaryDID = 239;
pub const GST_VIDEO_ANCILLARY_DID_CAMERA_POSITION: GstVideoAncillaryDID = 240;
pub const GST_VIDEO_ANCILLARY_DID_HANC_ERROR_DETECTION: GstVideoAncillaryDID = 244;
pub const GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_FIRST: GstVideoAncillaryDID = 248;
pub const GST_VIDEO_ANCILLARY_DID_HANC_SDTV_AUDIO_DATA_2_LAST: GstVideoAncillaryDID = 255;
pub type GstVideoAncillaryDID16 = c_int;
pub const GST_VIDEO_ANCILLARY_DID16_S334_EIA_708: GstVideoAncillaryDID16 = 24833;
pub const GST_VIDEO_ANCILLARY_DID16_S334_EIA_608: GstVideoAncillaryDID16 = 24834;
pub const GST_VIDEO_ANCILLARY_DID16_S2016_3_AFD_BAR: GstVideoAncillaryDID16 = 16645;
pub type GstVideoCaptionType = c_int;
pub const GST_VIDEO_CAPTION_TYPE_UNKNOWN: GstVideoCaptionType = 0;
pub const GST_VIDEO_CAPTION_TYPE_CEA608_RAW: GstVideoCaptionType = 1;
pub const GST_VIDEO_CAPTION_TYPE_CEA608_S334_1A: GstVideoCaptionType = 2;
pub const GST_VIDEO_CAPTION_TYPE_CEA708_RAW: GstVideoCaptionType = 3;
pub const GST_VIDEO_CAPTION_TYPE_CEA708_CDP: GstVideoCaptionType = 4;
pub type GstVideoChromaMethod = c_int;
pub const GST_VIDEO_CHROMA_METHOD_NEAREST: GstVideoChromaMethod = 0;
pub const GST_VIDEO_CHROMA_METHOD_LINEAR: GstVideoChromaMethod = 1;
pub type GstVideoChromaMode = c_int;
pub const GST_VIDEO_CHROMA_MODE_FULL: GstVideoChromaMode = 0;
pub const GST_VIDEO_CHROMA_MODE_UPSAMPLE_ONLY: GstVideoChromaMode = 1;
pub const GST_VIDEO_CHROMA_MODE_DOWNSAMPLE_ONLY: GstVideoChromaMode = 2;
pub const GST_VIDEO_CHROMA_MODE_NONE: GstVideoChromaMode = 3;
pub type GstVideoColorMatrix = c_int;
pub const GST_VIDEO_COLOR_MATRIX_UNKNOWN: GstVideoColorMatrix = 0;
pub const GST_VIDEO_COLOR_MATRIX_RGB: GstVideoColorMatrix = 1;
pub const GST_VIDEO_COLOR_MATRIX_FCC: GstVideoColorMatrix = 2;
pub const GST_VIDEO_COLOR_MATRIX_BT709: GstVideoColorMatrix = 3;
pub const GST_VIDEO_COLOR_MATRIX_BT601: GstVideoColorMatrix = 4;
pub const GST_VIDEO_COLOR_MATRIX_SMPTE240M: GstVideoColorMatrix = 5;
pub const GST_VIDEO_COLOR_MATRIX_BT2020: GstVideoColorMatrix = 6;
pub type GstVideoColorPrimaries = c_int;
pub const GST_VIDEO_COLOR_PRIMARIES_UNKNOWN: GstVideoColorPrimaries = 0;
pub const GST_VIDEO_COLOR_PRIMARIES_BT709: GstVideoColorPrimaries = 1;
pub const GST_VIDEO_COLOR_PRIMARIES_BT470M: GstVideoColorPrimaries = 2;
pub const GST_VIDEO_COLOR_PRIMARIES_BT470BG: GstVideoColorPrimaries = 3;
pub const GST_VIDEO_COLOR_PRIMARIES_SMPTE170M: GstVideoColorPrimaries = 4;
pub const GST_VIDEO_COLOR_PRIMARIES_SMPTE240M: GstVideoColorPrimaries = 5;
pub const GST_VIDEO_COLOR_PRIMARIES_FILM: GstVideoColorPrimaries = 6;
pub const GST_VIDEO_COLOR_PRIMARIES_BT2020: GstVideoColorPrimaries = 7;
pub const GST_VIDEO_COLOR_PRIMARIES_ADOBERGB: GstVideoColorPrimaries = 8;
pub const GST_VIDEO_COLOR_PRIMARIES_SMPTEST428: GstVideoColorPrimaries = 9;
pub const GST_VIDEO_COLOR_PRIMARIES_SMPTERP431: GstVideoColorPrimaries = 10;
pub const GST_VIDEO_COLOR_PRIMARIES_SMPTEEG432: GstVideoColorPrimaries = 11;
pub const GST_VIDEO_COLOR_PRIMARIES_EBU3213: GstVideoColorPrimaries = 12;
pub type GstVideoColorRange = c_int;
pub const GST_VIDEO_COLOR_RANGE_UNKNOWN: GstVideoColorRange = 0;
pub const GST_VIDEO_COLOR_RANGE_0_255: GstVideoColorRange = 1;
pub const GST_VIDEO_COLOR_RANGE_16_235: GstVideoColorRange = 2;
pub type GstVideoDitherMethod = c_int;
pub const GST_VIDEO_DITHER_NONE: GstVideoDitherMethod = 0;
pub const GST_VIDEO_DITHER_VERTERR: GstVideoDitherMethod = 1;
pub const GST_VIDEO_DITHER_FLOYD_STEINBERG: GstVideoDitherMethod = 2;
pub const GST_VIDEO_DITHER_SIERRA_LITE: GstVideoDitherMethod = 3;
pub const GST_VIDEO_DITHER_BAYER: GstVideoDitherMethod = 4;
pub type GstVideoFieldOrder = c_int;
pub const GST_VIDEO_FIELD_ORDER_UNKNOWN: GstVideoFieldOrder = 0;
pub const GST_VIDEO_FIELD_ORDER_TOP_FIELD_FIRST: GstVideoFieldOrder = 1;
pub const GST_VIDEO_FIELD_ORDER_BOTTOM_FIELD_FIRST: GstVideoFieldOrder = 2;
pub type GstVideoFormat = c_int;
pub const GST_VIDEO_FORMAT_UNKNOWN: GstVideoFormat = 0;
pub const GST_VIDEO_FORMAT_ENCODED: GstVideoFormat = 1;
pub const GST_VIDEO_FORMAT_I420: GstVideoFormat = 2;
pub const GST_VIDEO_FORMAT_YV12: GstVideoFormat = 3;
pub const GST_VIDEO_FORMAT_YUY2: GstVideoFormat = 4;
pub const GST_VIDEO_FORMAT_UYVY: GstVideoFormat = 5;
pub const GST_VIDEO_FORMAT_AYUV: GstVideoFormat = 6;
pub const GST_VIDEO_FORMAT_RGBx: GstVideoFormat = 7;
pub const GST_VIDEO_FORMAT_BGRx: GstVideoFormat = 8;
pub const GST_VIDEO_FORMAT_xRGB: GstVideoFormat = 9;
pub const GST_VIDEO_FORMAT_xBGR: GstVideoFormat = 10;
pub const GST_VIDEO_FORMAT_RGBA: GstVideoFormat = 11;
pub const GST_VIDEO_FORMAT_BGRA: GstVideoFormat = 12;
pub const GST_VIDEO_FORMAT_ARGB: GstVideoFormat = 13;
pub const GST_VIDEO_FORMAT_ABGR: GstVideoFormat = 14;
pub const GST_VIDEO_FORMAT_RGB: GstVideoFormat = 15;
pub const GST_VIDEO_FORMAT_BGR: GstVideoFormat = 16;
pub const GST_VIDEO_FORMAT_Y41B: GstVideoFormat = 17;
pub const GST_VIDEO_FORMAT_Y42B: GstVideoFormat = 18;
pub const GST_VIDEO_FORMAT_YVYU: GstVideoFormat = 19;
pub const GST_VIDEO_FORMAT_Y444: GstVideoFormat = 20;
pub const GST_VIDEO_FORMAT_v210: GstVideoFormat = 21;
pub const GST_VIDEO_FORMAT_v216: GstVideoFormat = 22;
pub const GST_VIDEO_FORMAT_NV12: GstVideoFormat = 23;
pub const GST_VIDEO_FORMAT_NV21: GstVideoFormat = 24;
pub const GST_VIDEO_FORMAT_GRAY8: GstVideoFormat = 25;
pub const GST_VIDEO_FORMAT_GRAY16_BE: GstVideoFormat = 26;
pub const GST_VIDEO_FORMAT_GRAY16_LE: GstVideoFormat = 27;
pub const GST_VIDEO_FORMAT_v308: GstVideoFormat = 28;
pub const GST_VIDEO_FORMAT_RGB16: GstVideoFormat = 29;
pub const GST_VIDEO_FORMAT_BGR16: GstVideoFormat = 30;
pub const GST_VIDEO_FORMAT_RGB15: GstVideoFormat = 31;
pub const GST_VIDEO_FORMAT_BGR15: GstVideoFormat = 32;
pub const GST_VIDEO_FORMAT_UYVP: GstVideoFormat = 33;
pub const GST_VIDEO_FORMAT_A420: GstVideoFormat = 34;
pub const GST_VIDEO_FORMAT_RGB8P: GstVideoFormat = 35;
pub const GST_VIDEO_FORMAT_YUV9: GstVideoFormat = 36;
pub const GST_VIDEO_FORMAT_YVU9: GstVideoFormat = 37;
pub const GST_VIDEO_FORMAT_IYU1: GstVideoFormat = 38;
pub const GST_VIDEO_FORMAT_ARGB64: GstVideoFormat = 39;
pub const GST_VIDEO_FORMAT_AYUV64: GstVideoFormat = 40;
pub const GST_VIDEO_FORMAT_r210: GstVideoFormat = 41;
pub const GST_VIDEO_FORMAT_I420_10BE: GstVideoFormat = 42;
pub const GST_VIDEO_FORMAT_I420_10LE: GstVideoFormat = 43;
pub const GST_VIDEO_FORMAT_I422_10BE: GstVideoFormat = 44;
pub const GST_VIDEO_FORMAT_I422_10LE: GstVideoFormat = 45;
pub const GST_VIDEO_FORMAT_Y444_10BE: GstVideoFormat = 46;
pub const GST_VIDEO_FORMAT_Y444_10LE: GstVideoFormat = 47;
pub const GST_VIDEO_FORMAT_GBR: GstVideoFormat = 48;
pub const GST_VIDEO_FORMAT_GBR_10BE: GstVideoFormat = 49;
pub const GST_VIDEO_FORMAT_GBR_10LE: GstVideoFormat = 50;
pub const GST_VIDEO_FORMAT_NV16: GstVideoFormat = 51;
pub const GST_VIDEO_FORMAT_NV24: GstVideoFormat = 52;
pub const GST_VIDEO_FORMAT_NV12_64Z32: GstVideoFormat = 53;
pub const GST_VIDEO_FORMAT_A420_10BE: GstVideoFormat = 54;
pub const GST_VIDEO_FORMAT_A420_10LE: GstVideoFormat = 55;
pub const GST_VIDEO_FORMAT_A422_10BE: GstVideoFormat = 56;
pub const GST_VIDEO_FORMAT_A422_10LE: GstVideoFormat = 57;
pub const GST_VIDEO_FORMAT_A444_10BE: GstVideoFormat = 58;
pub const GST_VIDEO_FORMAT_A444_10LE: GstVideoFormat = 59;
pub const GST_VIDEO_FORMAT_NV61: GstVideoFormat = 60;
pub const GST_VIDEO_FORMAT_P010_10BE: GstVideoFormat = 61;
pub const GST_VIDEO_FORMAT_P010_10LE: GstVideoFormat = 62;
pub const GST_VIDEO_FORMAT_IYU2: GstVideoFormat = 63;
pub const GST_VIDEO_FORMAT_VYUY: GstVideoFormat = 64;
pub const GST_VIDEO_FORMAT_GBRA: GstVideoFormat = 65;
pub const GST_VIDEO_FORMAT_GBRA_10BE: GstVideoFormat = 66;
pub const GST_VIDEO_FORMAT_GBRA_10LE: GstVideoFormat = 67;
pub const GST_VIDEO_FORMAT_GBR_12BE: GstVideoFormat = 68;
pub const GST_VIDEO_FORMAT_GBR_12LE: GstVideoFormat = 69;
pub const GST_VIDEO_FORMAT_GBRA_12BE: GstVideoFormat = 70;
pub const GST_VIDEO_FORMAT_GBRA_12LE: GstVideoFormat = 71;
pub const GST_VIDEO_FORMAT_I420_12BE: GstVideoFormat = 72;
pub const GST_VIDEO_FORMAT_I420_12LE: GstVideoFormat = 73;
pub const GST_VIDEO_FORMAT_I422_12BE: GstVideoFormat = 74;
pub const GST_VIDEO_FORMAT_I422_12LE: GstVideoFormat = 75;
pub const GST_VIDEO_FORMAT_Y444_12BE: GstVideoFormat = 76;
pub const GST_VIDEO_FORMAT_Y444_12LE: GstVideoFormat = 77;
pub const GST_VIDEO_FORMAT_GRAY10_LE32: GstVideoFormat = 78;
pub const GST_VIDEO_FORMAT_NV12_10LE32: GstVideoFormat = 79;
pub const GST_VIDEO_FORMAT_NV16_10LE32: GstVideoFormat = 80;
pub const GST_VIDEO_FORMAT_NV12_10LE40: GstVideoFormat = 81;
pub const GST_VIDEO_FORMAT_Y210: GstVideoFormat = 82;
pub const GST_VIDEO_FORMAT_Y410: GstVideoFormat = 83;
pub const GST_VIDEO_FORMAT_VUYA: GstVideoFormat = 84;
pub const GST_VIDEO_FORMAT_BGR10A2_LE: GstVideoFormat = 85;
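// The `GstVideoFormat` constants above mirror the C enum values one-to-one,
// so a raw integer reported by the C API can be compared or matched against
// them directly. A minimal illustrative check (the module and test names are
// ours, not part of the generated bindings):
#[cfg(test)]
mod video_format_example {
    use super::*;

    #[test]
    fn raw_values_match_the_named_constants() {
        // 2 is the value listed above for GST_VIDEO_FORMAT_I420.
        let raw: GstVideoFormat = 2;
        let name = match raw {
            GST_VIDEO_FORMAT_I420 => "I420",
            GST_VIDEO_FORMAT_NV12 => "NV12",
            _ => "other",
        };
        assert_eq!(name, "I420");
    }
}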
pub type GstVideoGLTextureOrientation = c_int;
pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_NORMAL: GstVideoGLTextureOrientation = 0;
pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_NORMAL_Y_FLIP: GstVideoGLTextureOrientation = 1;
pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_NORMAL: GstVideoGLTextureOrientation = 2;
pub const GST_VIDEO_GL_TEXTURE_ORIENTATION_X_FLIP_Y_FLIP: GstVideoGLTextureOrientation = 3;
pub type GstVideoGLTextureType = c_int;
pub const GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE: GstVideoGLTextureType = 0;
pub const GST_VIDEO_GL_TEXTURE_TYPE_LUMINANCE_ALPHA: GstVideoGLTextureType = 1;
pub const GST_VIDEO_GL_TEXTURE_TYPE_RGB16: GstVideoGLTextureType = 2;
pub const GST_VIDEO_GL_TEXTURE_TYPE_RGB: GstVideoGLTextureType = 3;
pub const GST_VIDEO_GL_TEXTURE_TYPE_RGBA: GstVideoGLTextureType = 4;
pub const GST_VIDEO_GL_TEXTURE_TYPE_R: GstVideoGLTextureType = 5;
pub const GST_VIDEO_GL_TEXTURE_TYPE_RG: GstVideoGLTextureType = 6;
pub type GstVideoGammaMode = c_int;
pub const GST_VIDEO_GAMMA_MODE_NONE: GstVideoGammaMode = 0;
pub const GST_VIDEO_GAMMA_MODE_REMAP: GstVideoGammaMode = 1;
pub type GstVideoInterlaceMode = c_int;
pub const GST_VIDEO_INTERLACE_MODE_PROGRESSIVE: GstVideoInterlaceMode = 0;
pub const GST_VIDEO_INTERLACE_MODE_INTERLEAVED: GstVideoInterlaceMode = 1;
pub const GST_VIDEO_INTERLACE_MODE_MIXED: GstVideoInterlaceMode = 2;
pub const GST_VIDEO_INTERLACE_MODE_FIELDS: GstVideoInterlaceMode = 3;
pub const GST_VIDEO_INTERLACE_MODE_ALTERNATE: GstVideoInterlaceMode = 4;
pub type GstVideoMatrixMode = c_int;
pub const GST_VIDEO_MATRIX_MODE_FULL: GstVideoMatrixMode = 0;
pub const GST_VIDEO_MATRIX_MODE_INPUT_ONLY: GstVideoMatrixMode = 1;
pub const GST_VIDEO_MATRIX_MODE_OUTPUT_ONLY: GstVideoMatrixMode = 2;
pub const GST_VIDEO_MATRIX_MODE_NONE: GstVideoMatrixMode = 3;
pub type GstVideoMultiviewFramePacking = c_int;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_NONE: GstVideoMultiviewFramePacking = -1;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_MONO: GstVideoMultiviewFramePacking = 0;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_LEFT: GstVideoMultiviewFramePacking = 1;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_RIGHT: GstVideoMultiviewFramePacking = 2;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE: GstVideoMultiviewFramePacking = 3;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_SIDE_BY_SIDE_QUINCUNX: GstVideoMultiviewFramePacking =
4;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_COLUMN_INTERLEAVED: GstVideoMultiviewFramePacking = 5;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_ROW_INTERLEAVED: GstVideoMultiviewFramePacking = 6;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_TOP_BOTTOM: GstVideoMultiviewFramePacking = 7;
pub const GST_VIDEO_MULTIVIEW_FRAME_PACKING_CHECKERBOARD: GstVideoMultiviewFramePacking = 8;
pub type GstVideoMultiviewMode = c_int;
pub const GST_VIDEO_MULTIVIEW_MODE_NONE: GstVideoMultiviewMode = -1;
pub const GST_VIDEO_MULTIVIEW_MODE_MONO: GstVideoMultiviewMode = 0;
pub const GST_VIDEO_MULTIVIEW_MODE_LEFT: GstVideoMultiviewMode = 1;
pub const GST_VIDEO_MULTIVIEW_MODE_RIGHT: GstVideoMultiviewMode = 2;
pub const GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE: GstVideoMultiviewMode = 3;
pub const GST_VIDEO_MULTIVIEW_MODE_SIDE_BY_SIDE_QUINCUNX: GstVideoMultiviewMode = 4;
pub const GST_VIDEO_MULTIVIEW_MODE_COLUMN_INTERLEAVED: GstVideoMultiviewMode = 5;
pub const GST_VIDEO_MULTIVIEW_MODE_ROW_INTERLEAVED: GstVideoMultiviewMode = 6;
pub const GST_VIDEO_MULTIVIEW_MODE_TOP_BOTTOM: GstVideoMultiviewMode = 7;
pub const GST_VIDEO_MULTIVIEW_MODE_CHECKERBOARD: GstVideoMultiviewMode = 8;
pub const GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME: GstVideoMultiviewMode = 32;
pub const GST_VIDEO_MULTIVIEW_MODE_MULTIVIEW_FRAME_BY_FRAME: GstVideoMultiviewMode = 33;
pub const GST_VIDEO_MULTIVIEW_MODE_SEPARATED: GstVideoMultiviewMode = 34;
pub type GstVideoOrientationMethod = c_int;
pub const GST_VIDEO_ORIENTATION_IDENTITY: GstVideoOrientationMethod = 0;
pub const GST_VIDEO_ORIENTATION_90R: GstVideoOrientationMethod = 1;
pub const GST_VIDEO_ORIENTATION_180: GstVideoOrientationMethod = 2;
pub const GST_VIDEO_ORIENTATION_90L: GstVideoOrientationMethod = 3;
pub const GST_VIDEO_ORIENTATION_HORIZ: GstVideoOrientationMethod = 4;
pub const GST_VIDEO_ORIENTATION_VERT: GstVideoOrientationMethod = 5;
pub const GST_VIDEO_ORIENTATION_UL_LR: GstVideoOrientationMethod = 6;
pub const GST_VIDEO_ORIENTATION_UR_LL: GstVideoOrientationMethod = 7;
pub const GST_VIDEO_ORIENTATION_AUTO: GstVideoOrientationMethod = 8;
pub const GST_VIDEO_ORIENTATION_CUSTOM: GstVideoOrientationMethod = 9;
pub type GstVideoPrimariesMode = c_int;
pub const GST_VIDEO_PRIMARIES_MODE_NONE: GstVideoPrimariesMode = 0;
pub const GST_VIDEO_PRIMARIES_MODE_MERGE_ONLY: GstVideoPrimariesMode = 1;
pub const GST_VIDEO_PRIMARIES_MODE_FAST: GstVideoPrimariesMode = 2;
pub type GstVideoResamplerMethod = c_int;
pub const GST_VIDEO_RESAMPLER_METHOD_NEAREST: GstVideoResamplerMethod = 0;
pub const GST_VIDEO_RESAMPLER_METHOD_LINEAR: GstVideoResamplerMethod = 1;
pub const GST_VIDEO_RESAMPLER_METHOD_CUBIC: GstVideoResamplerMethod = 2;
pub const GST_VIDEO_RESAMPLER_METHOD_SINC: GstVideoResamplerMethod = 3;
pub const GST_VIDEO_RESAMPLER_METHOD_LANCZOS: GstVideoResamplerMethod = 4;
pub type GstVideoTileMode = c_int;
pub const GST_VIDEO_TILE_MODE_UNKNOWN: GstVideoTileMode = 0;
pub const GST_VIDEO_TILE_MODE_ZFLIPZ_2X2: GstVideoTileMode = 65536;
pub type GstVideoTileType = c_int;
pub const GST_VIDEO_TILE_TYPE_INDEXED: GstVideoTileType = 0;
pub type GstVideoTransferFunction = c_int;
pub const GST_VIDEO_TRANSFER_UNKNOWN: GstVideoTransferFunction = 0;
pub const GST_VIDEO_TRANSFER_GAMMA10: GstVideoTransferFunction = 1;
pub const GST_VIDEO_TRANSFER_GAMMA18: GstVideoTransferFunction = 2;
pub const GST_VIDEO_TRANSFER_GAMMA20: GstVideoTransferFunction = 3;
pub const GST_VIDEO_TRANSFER_GAMMA22: GstVideoTransferFunction = 4;
pub const GST_VIDEO_TRANSFER_BT709: GstVideoTransferFunction = 5;
pub const GST_VIDEO_TRANSFER_SMPTE240M: GstVideoTransferFunction = 6;
pub const GST_VIDEO_TRANSFER_SRGB: GstVideoTransferFunction = 7;
pub const GST_VIDEO_TRANSFER_GAMMA28: GstVideoTransferFunction = 8;
pub const GST_VIDEO_TRANSFER_LOG100: GstVideoTransferFunction = 9;
pub const GST_VIDEO_TRANSFER_LOG316: GstVideoTransferFunction = 10;
pub const GST_VIDEO_TRANSFER_BT2020_12: GstVideoTransferFunction = 11;
pub const GST_VIDEO_TRANSFER_ADOBERGB: GstVideoTransferFunction = 12;
pub type GstVideoVBIParserResult = c_int;
pub const GST_VIDEO_VBI_PARSER_RESULT_DONE: GstVideoVBIParserResult = 0;
pub const GST_VIDEO_VBI_PARSER_RESULT_OK: GstVideoVBIParserResult = 1;
pub const GST_VIDEO_VBI_PARSER_RESULT_ERROR: GstVideoVBIParserResult = 2;
pub const GST_BUFFER_POOL_OPTION_VIDEO_AFFINE_TRANSFORMATION_META: *const c_char =
b"GstBufferPoolOptionVideoAffineTransformation\0" as *const u8 as *const c_char;
pub const GST_BUFFER_POOL_OPTION_VIDEO_ALIGNMENT: *const c_char =
b"GstBufferPoolOptionVideoAlignment\0" as *const u8 as *const c_char;
pub const GST_BUFFER_POOL_OPTION_VIDEO_GL_TEXTURE_UPLOAD_META: *const c_char =
b"GstBufferPoolOptionVideoGLTextureUploadMeta\0" as *const u8 as *const c_char;
pub const GST_BUFFER_POOL_OPTION_VIDEO_META: *const c_char =
b"GstBufferPoolOptionVideoMeta\0" as *const u8 as *const c_char;
pub const GST_CAPS_FEATURE_FORMAT_INTERLACED: *const c_char =
b"format:Interlaced\0" as *const u8 as *const c_char;
pub const GST_CAPS_FEATURE_META_GST_VIDEO_AFFINE_TRANSFORMATION_META: *const c_char =
b"meta:GstVideoAffineTransformation\0" as *const u8 as *const c_char;
pub const GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META: *const c_char =
b"meta:GstVideoGLTextureUploadMeta\0" as *const u8 as *const c_char;
pub const GST_CAPS_FEATURE_META_GST_VIDEO_META: *const c_char =
b"meta:GstVideoMeta\0" as *const u8 as *const c_char;
pub const GST_CAPS_FEATURE_META_GST_VIDEO_OVERLAY_COMPOSITION: *const c_char =
b"meta:GstVideoOverlayComposition\0" as *const u8 as *const c_char;
pub const GST_META_TAG_VIDEO_COLORSPACE_STR: *const c_char =
b"colorspace\0" as *const u8 as *const c_char;
pub const GST_META_TAG_VIDEO_ORIENTATION_STR: *const c_char =
b"orientation\0" as *const u8 as *const c_char;
pub const GST_META_TAG_VIDEO_SIZE_STR: *const c_char = b"size\0" as *const u8 as *const c_char;
pub const GST_META_TAG_VIDEO_STR: *const c_char = b"video\0" as *const u8 as *const c_char;
pub const GST_VIDEO_COLORIMETRY_BT2020: *const c_char = b"bt2020\0" as *const u8 as *const c_char;
pub const GST_VIDEO_COLORIMETRY_BT601: *const c_char = b"bt601\0" as *const u8 as *const c_char;
pub const GST_VIDEO_COLORIMETRY_BT709: *const c_char = b"bt709\0" as *const u8 as *const c_char;
pub const GST_VIDEO_COLORIMETRY_SMPTE240M: *const c_char =
b"smpte240m\0" as *const u8 as *const c_char;
pub const GST_VIDEO_COLORIMETRY_SRGB: *const c_char = b"sRGB\0" as *const u8 as *const c_char;
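// The string constants in this file are static NUL-terminated byte literals
// cast to `*const c_char`; they can be handed to C as-is or viewed from Rust
// through `std::ffi::CStr`. A minimal sanity-check sketch (illustrative only,
// not part of the generated bindings):
#[cfg(test)]
mod colorimetry_string_example {
    use super::*;
    use std::ffi::CStr;

    #[test]
    fn colorimetry_constant_is_a_valid_c_string() {
        // Safe because the pointer comes from a static NUL-terminated literal.
        let s = unsafe { CStr::from_ptr(GST_VIDEO_COLORIMETRY_BT709) };
        assert_eq!(s.to_str(), Ok("bt709"));
    }
}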
pub const GST_VIDEO_COMP_A: c_int = 3;
pub const GST_VIDEO_COMP_B: c_int = 2;
pub const GST_VIDEO_COMP_G: c_int = 1;
pub const GST_VIDEO_COMP_INDEX: c_int = 0;
pub const GST_VIDEO_COMP_PALETTE: c_int = 1;
pub const GST_VIDEO_COMP_R: c_int = 0;
pub const GST_VIDEO_COMP_U: c_int = 1;
pub const GST_VIDEO_COMP_V: c_int = 2;
pub const GST_VIDEO_COMP_Y: c_int = 0;
pub const GST_VIDEO_CONVERTER_OPT_ALPHA_MODE: *const c_char =
b"GstVideoConverter.alpha-mode\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_ALPHA_VALUE: *const c_char =
b"GstVideoConverter.alpha-value\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_BORDER_ARGB: *const c_char =
b"GstVideoConverter.border-argb\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_CHROMA_MODE: *const c_char =
b"GstVideoConverter.chroma-mode\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_CHROMA_RESAMPLER_METHOD: *const c_char =
b"GstVideoConverter.chroma-resampler-method\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DEST_HEIGHT: *const c_char =
b"GstVideoConverter.dest-height\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DEST_WIDTH: *const c_char =
b"GstVideoConverter.dest-width\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DEST_X: *const c_char =
b"GstVideoConverter.dest-x\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DEST_Y: *const c_char =
b"GstVideoConverter.dest-y\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DITHER_METHOD: *const c_char =
b"GstVideoConverter.dither-method\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_DITHER_QUANTIZATION: *const c_char =
b"GstVideoConverter.dither-quantization\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_FILL_BORDER: *const c_char =
b"GstVideoConverter.fill-border\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_GAMMA_MODE: *const c_char =
b"GstVideoConverter.gamma-mode\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_MATRIX_MODE: *const c_char =
b"GstVideoConverter.matrix-mode\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_PRIMARIES_MODE: *const c_char =
b"GstVideoConverter.primaries-mode\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_RESAMPLER_METHOD: *const c_char =
b"GstVideoConverter.resampler-method\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_RESAMPLER_TAPS: *const c_char =
b"GstVideoConverter.resampler-taps\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_SRC_HEIGHT: *const c_char =
b"GstVideoConverter.src-height\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_SRC_WIDTH: *const c_char =
b"GstVideoConverter.src-width\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_SRC_X: *const c_char =
b"GstVideoConverter.src-x\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_SRC_Y: *const c_char =
b"GstVideoConverter.src-y\0" as *const u8 as *const c_char;
pub const GST_VIDEO_CONVERTER_OPT_THREADS: *const c_char =
b"GstVideoConverter.threads\0" as *const u8 as *const c_char;
pub const GST_VIDEO_DECODER_MAX_ERRORS: c_int = 10;
pub const GST_VIDEO_DECODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
pub const GST_VIDEO_DECODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
pub const GST_VIDEO_ENCODER_SINK_NAME: *const c_char = b"sink\0" as *const u8 as *const c_char;
pub const GST_VIDEO_ENCODER_SRC_NAME: *const c_char = b"src\0" as *const u8 as *const c_char;
pub const GST_VIDEO_FORMATS_ALL: *const c_char = b"{ I420, YV12, YUY2, UYVY, AYUV, VUYA, RGBx, BGRx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, Y41B, Y42B, YVYU, Y444, v210, v216, Y210, Y410, NV12, NV21, GRAY8, GRAY16_BE, GRAY16_LE, v308, RGB16, BGR16, RGB15, BGR15, UYVP, A420, RGB8P, YUV9, YVU9, IYU1, ARGB64, AYUV64, r210, I420_10BE, I420_10LE, I422_10BE, I422_10LE, Y444_10BE, Y444_10LE, GBR, GBR_10BE, GBR_10LE, NV16, NV24, NV12_64Z32, A420_10BE, A420_10LE, A422_10BE, A422_10LE, A444_10BE, A444_10LE, NV61, P010_10BE, P010_10LE, IYU2, VYUY, GBRA, GBRA_10BE, GBRA_10LE, BGR10A2_LE, GBR_12BE, GBR_12LE, GBRA_12BE, GBRA_12LE, I420_12BE, I420_12LE, I422_12BE, I422_12LE, Y444_12BE, Y444_12LE, GRAY10_LE32, NV12_10LE32, NV16_10LE32, NV12_10LE40 }\0" as *const u8 as *const c_char;
pub const GST_VIDEO_FPS_RANGE: *const c_char =
b"(fraction) [ 0, max ]\0" as *const u8 as *const c_char;
pub const GST_VIDEO_MAX_COMPONENTS: c_int = 4;
pub const GST_VIDEO_MAX_PLANES: c_int = 4;
pub const GST_VIDEO_OVERLAY_COMPOSITION_BLEND_FORMATS: *const c_char = b"{ BGRx, RGBx, xRGB, xBGR, RGBA, BGRA, ARGB, ABGR, RGB, BGR, I420, YV12, AYUV, YUY2, UYVY, v308, Y41B, Y42B, Y444, NV12, NV21, A420, YUV9, YVU9, IYU1, GRAY8 }\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_CUBIC_B: *const c_char =
b"GstVideoResampler.cubic-b\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_CUBIC_C: *const c_char =
b"GstVideoResampler.cubic-c\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_ENVELOPE: *const c_char =
b"GstVideoResampler.envelope\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_MAX_TAPS: *const c_char =
b"GstVideoResampler.max-taps\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_SHARPEN: *const c_char =
b"GstVideoResampler.sharpen\0" as *const u8 as *const c_char;
pub const GST_VIDEO_RESAMPLER_OPT_SHARPNESS: *const c_char =
b"GstVideoResampler.sharpness\0" as *const u8 as *const c_char;
pub const GST_VIDEO_SCALER_OPT_DITHER_METHOD: *const c_char =
b"GstVideoScaler.dither-method\0" as *const u8 as *const c_char;
pub const GST_VIDEO_SIZE_RANGE: *const c_char = b"(int) [ 1, max ]\0" as *const u8 as *const c_char;
pub const GST_VIDEO_TILE_TYPE_MASK: c_int = 65535;
pub const GST_VIDEO_TILE_TYPE_SHIFT: c_int = 16;
pub const GST_VIDEO_TILE_X_TILES_MASK: c_int = 65535;
pub const GST_VIDEO_TILE_Y_TILES_SHIFT: c_int = 16;
pub type GstVideoBufferFlags = c_uint;
pub const GST_VIDEO_BUFFER_FLAG_INTERLACED: GstVideoBufferFlags = 1048576;
pub const GST_VIDEO_BUFFER_FLAG_TFF: GstVideoBufferFlags = 2097152;
pub const GST_VIDEO_BUFFER_FLAG_RFF: GstVideoBufferFlags = 4194304;
pub const GST_VIDEO_BUFFER_FLAG_ONEFIELD: GstVideoBufferFlags = 8388608;
pub const GST_VIDEO_BUFFER_FLAG_MULTIPLE_VIEW: GstVideoBufferFlags = 16777216;
pub const GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE: GstVideoBufferFlags = 33554432;
pub const GST_VIDEO_BUFFER_FLAG_TOP_FIELD: GstVideoBufferFlags = 10485760;
pub const GST_VIDEO_BUFFER_FLAG_BOTTOM_FIELD: GstVideoBufferFlags = 8388608;
pub const GST_VIDEO_BUFFER_FLAG_LAST: GstVideoBufferFlags = 268435456;
pub type GstVideoChromaFlags = c_uint;
pub const GST_VIDEO_CHROMA_FLAG_NONE: GstVideoChromaFlags = 0;
pub const GST_VIDEO_CHROMA_FLAG_INTERLACED: GstVideoChromaFlags = 1;
pub type GstVideoChromaSite = c_uint;
pub const GST_VIDEO_CHROMA_SITE_UNKNOWN: GstVideoChromaSite = 0;
pub const GST_VIDEO_CHROMA_SITE_NONE: GstVideoChromaSite = 1;
pub const GST_VIDEO_CHROMA_SITE_H_COSITED: GstVideoChromaSite = 2;
pub const GST_VIDEO_CHROMA_SITE_V_COSITED: GstVideoChromaSite = 4;
pub const GST_VIDEO_CHROMA_SITE_ALT_LINE: GstVideoChromaSite = 8;
pub const GST_VIDEO_CHROMA_SITE_COSITED: GstVideoChromaSite = 6;
pub const GST_VIDEO_CHROMA_SITE_JPEG: GstVideoChromaSite = 1;
pub const GST_VIDEO_CHROMA_SITE_MPEG2: GstVideoChromaSite = 2;
pub const GST_VIDEO_CHROMA_SITE_DV: GstVideoChromaSite = 14;
pub type GstVideoCodecFrameFlags = c_uint;
pub const GST_VIDEO_CODEC_FRAME_FLAG_DECODE_ONLY: GstVideoCodecFrameFlags = 1;
pub const GST_VIDEO_CODEC_FRAME_FLAG_SYNC_POINT: GstVideoCodecFrameFlags = 2;
pub const GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME: GstVideoCodecFrameFlags = 4;
pub const GST_VIDEO_CODEC_FRAME_FLAG_FORCE_KEYFRAME_HEADERS: GstVideoCodecFrameFlags = 8;
pub type GstVideoDitherFlags = c_uint;
pub const GST_VIDEO_DITHER_FLAG_NONE: GstVideoDitherFlags = 0;
pub const GST_VIDEO_DITHER_FLAG_INTERLACED: GstVideoDitherFlags = 1;
pub const GST_VIDEO_DITHER_FLAG_QUANTIZE: GstVideoDitherFlags = 2;
pub type GstVideoFlags = c_uint;
pub const GST_VIDEO_FLAG_NONE: GstVideoFlags = 0;
pub const GST_VIDEO_FLAG_VARIABLE_FPS: GstVideoFlags = 1;
pub const GST_VIDEO_FLAG_PREMULTIPLIED_ALPHA: GstVideoFlags = 2;
pub type GstVideoFormatFlags = c_uint;
pub const GST_VIDEO_FORMAT_FLAG_YUV: GstVideoFormatFlags = 1;
pub const GST_VIDEO_FORMAT_FLAG_RGB: GstVideoFormatFlags = 2;
pub const GST_VIDEO_FORMAT_FLAG_GRAY: GstVideoFormatFlags = 4;
pub const GST_VIDEO_FORMAT_FLAG_ALPHA: GstVideoFormatFlags = 8;
pub const GST_VIDEO_FORMAT_FLAG_LE: GstVideoFormatFlags = 16;
pub const GST_VIDEO_FORMAT_FLAG_PALETTE: GstVideoFormatFlags = 32;
pub const GST_VIDEO_FORMAT_FLAG_COMPLEX: GstVideoFormatFlags = 64;
pub const GST_VIDEO_FORMAT_FLAG_UNPACK: GstVideoFormatFlags = 128;
pub const GST_VIDEO_FORMAT_FLAG_TILED: GstVideoFormatFlags = 256;
pub type GstVideoFrameFlags = c_uint;
pub const GST_VIDEO_FRAME_FLAG_NONE: GstVideoFrameFlags = 0;
pub const GST_VIDEO_FRAME_FLAG_INTERLACED: GstVideoFrameFlags = 1;
pub const GST_VIDEO_FRAME_FLAG_TFF: GstVideoFrameFlags = 2;
pub const GST_VIDEO_FRAME_FLAG_RFF: GstVideoFrameFlags = 4;
pub const GST_VIDEO_FRAME_FLAG_ONEFIELD: GstVideoFrameFlags = 8;
pub const GST_VIDEO_FRAME_FLAG_MULTIPLE_VIEW: GstVideoFrameFlags = 16;
pub const GST_VIDEO_FRAME_FLAG_FIRST_IN_BUNDLE: GstVideoFrameFlags = 32;
pub const GST_VIDEO_FRAME_FLAG_TOP_FIELD: GstVideoFrameFlags = 10;
pub const GST_VIDEO_FRAME_FLAG_BOTTOM_FIELD: GstVideoFrameFlags = 8;
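// Flag types such as `GstVideoFrameFlags` are plain `c_uint` aliases, so
// values combine with ordinary bitwise OR; the constant values above encode
// TOP_FIELD as exactly TFF | ONEFIELD. A small illustrative check (not part
// of the generated bindings):
#[cfg(test)]
mod frame_flags_example {
    use super::*;

    #[test]
    fn frame_flags_combine_with_bitwise_or() {
        let flags: GstVideoFrameFlags =
            GST_VIDEO_FRAME_FLAG_TFF | GST_VIDEO_FRAME_FLAG_ONEFIELD;
        assert_eq!(flags, GST_VIDEO_FRAME_FLAG_TOP_FIELD);
    }
}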
pub type GstVideoFrameMapFlags = c_uint;
pub const GST_VIDEO_FRAME_MAP_FLAG_NO_REF: GstVideoFrameMapFlags = 65536;
pub const GST_VIDEO_FRAME_MAP_FLAG_LAST: GstVideoFrameMapFlags = 16777216;
pub type GstVideoMultiviewFlags = c_uint;
pub const GST_VIDEO_MULTIVIEW_FLAGS_NONE: GstVideoMultiviewFlags = 0;
pub const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_VIEW_FIRST: GstVideoMultiviewFlags = 1;
pub const GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED: GstVideoMultiviewFlags = 2;
pub const GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED: GstVideoMultiviewFlags = 4;
pub const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED: GstVideoMultiviewFlags = 8;
pub const GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED: GstVideoMultiviewFlags = 16;
pub const GST_VIDEO_MULTIVIEW_FLAGS_HALF_ASPECT: GstVideoMultiviewFlags = 16384;
pub const GST_VIDEO_MULTIVIEW_FLAGS_MIXED_MONO: GstVideoMultiviewFlags = 32768;
pub type GstVideoOverlayFormatFlags = c_uint;
pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE: GstVideoOverlayFormatFlags = 0;
pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA: GstVideoOverlayFormatFlags = 1;
pub const GST_VIDEO_OVERLAY_FORMAT_FLAG_GLOBAL_ALPHA: GstVideoOverlayFormatFlags = 2;
pub type GstVideoPackFlags = c_uint;
pub const GST_VIDEO_PACK_FLAG_NONE: GstVideoPackFlags = 0;
pub const GST_VIDEO_PACK_FLAG_TRUNCATE_RANGE: GstVideoPackFlags = 1;
pub const GST_VIDEO_PACK_FLAG_INTERLACED: GstVideoPackFlags = 2;
pub type GstVideoResamplerFlags = c_uint;
pub const GST_VIDEO_RESAMPLER_FLAG_NONE: GstVideoResamplerFlags = 0;
pub const GST_VIDEO_RESAMPLER_FLAG_HALF_TAPS: GstVideoResamplerFlags = 1;
pub type GstVideoScalerFlags = c_uint;
pub const GST_VIDEO_SCALER_FLAG_NONE: GstVideoScalerFlags = 0;
pub const GST_VIDEO_SCALER_FLAG_INTERLACED: GstVideoScalerFlags = 1;
pub type GstVideoTimeCodeFlags = c_uint;
pub const GST_VIDEO_TIME_CODE_FLAGS_NONE: GstVideoTimeCodeFlags = 0;
pub const GST_VIDEO_TIME_CODE_FLAGS_DROP_FRAME: GstVideoTimeCodeFlags = 1;
pub const GST_VIDEO_TIME_CODE_FLAGS_INTERLACED: GstVideoTimeCodeFlags = 2;
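// The unions below (`GstVideoCodecFrame_abidata`, `GstVideoInfo_ABI`) overlay
// the structured fields with pointer-sized padding arrays, matching the unions
// the C headers use to reserve space for ABI-compatible additions; their
// hand-written `Debug` impls therefore read the union fields inside `unsafe`
// blocks.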
#[repr(C)]
#[derive(Copy, Clone)]
pub union GstVideoCodecFrame_abidata {
pub ABI: GstVideoCodecFrame_abidata_ABI,
pub padding: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoCodecFrame_abidata {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoCodecFrame_abidata @ {:?}",
self as *const _
))
.field("ABI", unsafe { &self.ABI })
.field("padding", unsafe { &self.padding })
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub union GstVideoInfo_ABI {
pub abi: GstVideoInfo_ABI_abi,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoInfo_ABI {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoInfo_ABI @ {:?}", self as *const _))
.field("abi", unsafe { &self.abi })
.finish()
}
}
pub type GstVideoAffineTransformationGetMatrix =
Option<unsafe extern "C" fn(*mut GstVideoAffineTransformationMeta, *mut c_float) -> gboolean>;
pub type GstVideoConvertSampleCallback =
Option<unsafe extern "C" fn(*mut gst::GstSample, *mut glib::GError, gpointer)>;
pub type GstVideoFormatPack = Option<
unsafe extern "C" fn(
*const GstVideoFormatInfo,
GstVideoPackFlags,
gpointer,
c_int,
*mut gpointer,
*const c_int,
GstVideoChromaSite,
c_int,
c_int,
),
>;
pub type GstVideoFormatUnpack = Option<
unsafe extern "C" fn(
*const GstVideoFormatInfo,
GstVideoPackFlags,
gpointer,
*const gpointer,
*const c_int,
c_int,
c_int,
c_int,
),
>;
pub type GstVideoGLTextureUpload =
Option<unsafe extern "C" fn(*mut GstVideoGLTextureUploadMeta, *mut c_uint) -> gboolean>;
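// Callback type aliases are `Option<unsafe extern "C" fn(...)>`: the C side
// may pass or store a NULL function pointer, and Rust guarantees that such an
// `Option` has the same representation as the nullable C function pointer, so
// `None` corresponds to NULL.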
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstColorBalanceChannelClass {
pub parent: gobject::GObjectClass,
pub value_changed: Option<unsafe extern "C" fn(*mut GstColorBalanceChannel, c_int)>,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstColorBalanceChannelClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstColorBalanceChannelClass @ {:?}",
self as *const _
))
.field("parent", &self.parent)
.field("value_changed", &self.value_changed)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstColorBalanceInterface {
pub iface: gobject::GTypeInterface,
pub list_channels: Option<unsafe extern "C" fn(*mut GstColorBalance) -> *const glib::GList>,
pub set_value:
Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel, c_int)>,
pub get_value:
Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel) -> c_int>,
pub get_balance_type: Option<unsafe extern "C" fn(*mut GstColorBalance) -> GstColorBalanceType>,
pub value_changed:
Option<unsafe extern "C" fn(*mut GstColorBalance, *mut GstColorBalanceChannel, c_int)>,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstColorBalanceInterface {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstColorBalanceInterface @ {:?}",
self as *const _
))
.field("iface", &self.iface)
.field("list_channels", &self.list_channels)
.field("set_value", &self.set_value)
.field("get_value", &self.get_value)
.field("get_balance_type", &self.get_balance_type)
.field("value_changed", &self.value_changed)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstNavigationInterface {
pub iface: gobject::GTypeInterface,
pub send_event: Option<unsafe extern "C" fn(*mut GstNavigation, *mut gst::GstStructure)>,
}
impl ::std::fmt::Debug for GstNavigationInterface {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstNavigationInterface @ {:?}", self as *const _))
.field("iface", &self.iface)
.field("send_event", &self.send_event)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAffineTransformationMeta {
pub meta: gst::GstMeta,
pub matrix: [c_float; 16],
}
impl ::std::fmt::Debug for GstVideoAffineTransformationMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoAffineTransformationMeta @ {:?}",
self as *const _
))
.field("meta", &self.meta)
.field("matrix", &self.matrix)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregatorClass {
pub parent_class: gst_base::GstAggregatorClass,
pub update_caps: Option<
unsafe extern "C" fn(*mut GstVideoAggregator, *mut gst::GstCaps) -> *mut gst::GstCaps,
>,
pub aggregate_frames: Option<
unsafe extern "C" fn(
*mut GstVideoAggregator,
*mut *mut gst::GstBuffer,
) -> gst::GstFlowReturn,
>,
pub create_output_buffer: Option<
unsafe extern "C" fn(
*mut GstVideoAggregator,
*mut *mut gst::GstBuffer,
) -> gst::GstFlowReturn,
>,
pub find_best_format: Option<
unsafe extern "C" fn(
*mut GstVideoAggregator,
*mut gst::GstCaps,
*mut GstVideoInfo,
*mut gboolean,
),
>,
pub _gst_reserved: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoAggregatorClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoAggregatorClass @ {:?}", self as *const _))
.field("update_caps", &self.update_caps)
.field("aggregate_frames", &self.aggregate_frames)
.field("create_output_buffer", &self.create_output_buffer)
.field("find_best_format", &self.find_best_format)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregatorConvertPadClass {
pub parent_class: GstVideoAggregatorPadClass,
pub create_conversion_info: Option<
unsafe extern "C" fn(
*mut GstVideoAggregatorConvertPad,
*mut GstVideoAggregator,
*mut GstVideoInfo,
),
>,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoAggregatorConvertPadClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoAggregatorConvertPadClass @ {:?}",
self as *const _
))
.field("parent_class", &self.parent_class)
.field("create_conversion_info", &self.create_conversion_info)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoAggregatorConvertPadPrivate(c_void);
pub type GstVideoAggregatorConvertPadPrivate = *mut _GstVideoAggregatorConvertPadPrivate;
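// The `*Private` types here and below are opaque to these bindings: each is a
// zero-information newtype around `c_void` that is only ever handled behind a
// raw pointer, so its layout never needs to be known on the Rust side.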
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregatorPadClass {
pub parent_class: gst_base::GstAggregatorPadClass,
pub update_conversion_info: Option<unsafe extern "C" fn(*mut GstVideoAggregatorPad)>,
pub prepare_frame: Option<
unsafe extern "C" fn(
*mut GstVideoAggregatorPad,
*mut GstVideoAggregator,
*mut gst::GstBuffer,
*mut GstVideoFrame,
) -> gboolean,
>,
pub clean_frame: Option<
unsafe extern "C" fn(
*mut GstVideoAggregatorPad,
*mut GstVideoAggregator,
*mut GstVideoFrame,
),
>,
pub _gst_reserved: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoAggregatorPadClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoAggregatorPadClass @ {:?}",
self as *const _
))
.field("parent_class", &self.parent_class)
.field("update_conversion_info", &self.update_conversion_info)
.field("prepare_frame", &self.prepare_frame)
.field("clean_frame", &self.clean_frame)
.field("_gst_reserved", &self._gst_reserved)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoAggregatorPadPrivate(c_void);
pub type GstVideoAggregatorPadPrivate = *mut _GstVideoAggregatorPadPrivate;
#[repr(C)]
pub struct _GstVideoAggregatorPrivate(c_void);
pub type GstVideoAggregatorPrivate = *mut _GstVideoAggregatorPrivate;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAlignment {
pub padding_top: c_uint,
pub padding_bottom: c_uint,
pub padding_left: c_uint,
pub padding_right: c_uint,
pub stride_align: [c_uint; 4],
}
impl ::std::fmt::Debug for GstVideoAlignment {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoAlignment @ {:?}", self as *const _))
.field("padding_top", &self.padding_top)
.field("padding_bottom", &self.padding_bottom)
.field("padding_left", &self.padding_left)
.field("padding_right", &self.padding_right)
.field("stride_align", &self.stride_align)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAncillary {
pub DID: u8,
pub SDID_block_number: u8,
pub data_count: u8,
pub data: [u8; 256],
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoAncillary {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoAncillary @ {:?}", self as *const _))
.field("DID", &self.DID)
.field("SDID_block_number", &self.SDID_block_number)
.field("data_count", &self.data_count)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoBufferPoolClass {
pub parent_class: gst::GstBufferPoolClass,
}
impl ::std::fmt::Debug for GstVideoBufferPoolClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoBufferPoolClass @ {:?}", self as *const _))
.field("parent_class", &self.parent_class)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoBufferPoolPrivate(c_void);
pub type GstVideoBufferPoolPrivate = *mut _GstVideoBufferPoolPrivate;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoCaptionMeta {
pub meta: gst::GstMeta,
pub caption_type: GstVideoCaptionType,
pub data: *mut u8,
pub size: size_t,
}
impl ::std::fmt::Debug for GstVideoCaptionMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoCaptionMeta @ {:?}", self as *const _))
.field("meta", &self.meta)
.field("caption_type", &self.caption_type)
.field("data", &self.data)
.field("size", &self.size)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoChromaResample(c_void);
pub type GstVideoChromaResample = *mut _GstVideoChromaResample;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoCodecFrame {
pub ref_count: c_int,
pub flags: u32,
pub system_frame_number: u32,
pub decode_frame_number: u32,
pub presentation_frame_number: u32,
pub dts: gst::GstClockTime,
pub pts: gst::GstClockTime,
pub duration: gst::GstClockTime,
pub distance_from_sync: c_int,
pub input_buffer: *mut gst::GstBuffer,
pub output_buffer: *mut gst::GstBuffer,
pub deadline: gst::GstClockTime,
pub events: *mut glib::GList,
pub user_data: gpointer,
pub user_data_destroy_notify: glib::GDestroyNotify,
pub abidata: GstVideoCodecFrame_abidata,
}
impl ::std::fmt::Debug for GstVideoCodecFrame {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoCodecFrame @ {:?}", self as *const _))
.field("system_frame_number", &self.system_frame_number)
.field("dts", &self.dts)
.field("pts", &self.pts)
.field("duration", &self.duration)
.field("distance_from_sync", &self.distance_from_sync)
.field("input_buffer", &self.input_buffer)
.field("output_buffer", &self.output_buffer)
.field("deadline", &self.deadline)
.field("abidata", &self.abidata)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoCodecFrame_abidata_ABI {
pub ts: gst::GstClockTime,
pub ts2: gst::GstClockTime,
}
impl ::std::fmt::Debug for GstVideoCodecFrame_abidata_ABI {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoCodecFrame_abidata_ABI @ {:?}",
self as *const _
))
.field("ts", &self.ts)
.field("ts2", &self.ts2)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoCodecState {
pub ref_count: c_int,
pub info: GstVideoInfo,
pub caps: *mut gst::GstCaps,
pub codec_data: *mut gst::GstBuffer,
pub allocation_caps: *mut gst::GstCaps,
pub padding: [gpointer; 19],
}
impl ::std::fmt::Debug for GstVideoCodecState {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoCodecState @ {:?}", self as *const _))
.field("info", &self.info)
.field("caps", &self.caps)
.field("codec_data", &self.codec_data)
.field("allocation_caps", &self.allocation_caps)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoColorPrimariesInfo {
pub primaries: GstVideoColorPrimaries,
pub Wx: c_double,
pub Wy: c_double,
pub Rx: c_double,
pub Ry: c_double,
pub Gx: c_double,
pub Gy: c_double,
pub Bx: c_double,
pub By: c_double,
}
impl ::std::fmt::Debug for GstVideoColorPrimariesInfo {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoColorPrimariesInfo @ {:?}",
self as *const _
))
.field("primaries", &self.primaries)
.field("Wx", &self.Wx)
.field("Wy", &self.Wy)
.field("Rx", &self.Rx)
.field("Ry", &self.Ry)
.field("Gx", &self.Gx)
.field("Gy", &self.Gy)
.field("Bx", &self.Bx)
.field("By", &self.By)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoColorimetry {
pub range: GstVideoColorRange,
pub matrix: GstVideoColorMatrix,
pub transfer: GstVideoTransferFunction,
pub primaries: GstVideoColorPrimaries,
}
impl ::std::fmt::Debug for GstVideoColorimetry {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoColorimetry @ {:?}", self as *const _))
.field("range", &self.range)
.field("matrix", &self.matrix)
.field("transfer", &self.transfer)
.field("primaries", &self.primaries)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoConverter(c_void);
pub type GstVideoConverter = *mut _GstVideoConverter;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoCropMeta {
pub meta: gst::GstMeta,
pub x: c_uint,
pub y: c_uint,
pub width: c_uint,
pub height: c_uint,
}
impl ::std::fmt::Debug for GstVideoCropMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoCropMeta @ {:?}", self as *const _))
.field("meta", &self.meta)
.field("x", &self.x)
.field("y", &self.y)
.field("width", &self.width)
.field("height", &self.height)
.finish()
}
}
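// `GstVideoDecoderClass` below (and `GstVideoEncoderClass` further down) are
// GObject class vtables: a subclass implemented through these bindings fills
// in the function pointers it overrides and leaves the remaining entries as
// `None`, which the C side treats as "not overridden".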
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoDecoderClass {
pub element_class: gst::GstElementClass,
pub open: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub close: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub start: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub stop: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub parse: Option<
unsafe extern "C" fn(
*mut GstVideoDecoder,
*mut GstVideoCodecFrame,
*mut gst_base::GstAdapter,
gboolean,
) -> gst::GstFlowReturn,
>,
pub set_format:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecState) -> gboolean>,
pub reset: Option<unsafe extern "C" fn(*mut GstVideoDecoder, gboolean) -> gboolean>,
pub finish: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gst::GstFlowReturn>,
pub handle_frame: Option<
unsafe extern "C" fn(*mut GstVideoDecoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn,
>,
pub sink_event:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstEvent) -> gboolean>,
pub src_event:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstEvent) -> gboolean>,
pub negotiate: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub decide_allocation:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
pub propose_allocation:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
pub flush: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gboolean>,
pub sink_query:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
pub src_query:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstQuery) -> gboolean>,
pub getcaps:
Option<unsafe extern "C" fn(*mut GstVideoDecoder, *mut gst::GstCaps) -> *mut gst::GstCaps>,
pub drain: Option<unsafe extern "C" fn(*mut GstVideoDecoder) -> gst::GstFlowReturn>,
pub transform_meta: Option<
unsafe extern "C" fn(
*mut GstVideoDecoder,
*mut GstVideoCodecFrame,
*mut gst::GstMeta,
) -> gboolean,
>,
pub padding: [gpointer; 14],
}
impl ::std::fmt::Debug for GstVideoDecoderClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoDecoderClass @ {:?}", self as *const _))
.field("open", &self.open)
.field("close", &self.close)
.field("start", &self.start)
.field("stop", &self.stop)
.field("parse", &self.parse)
.field("set_format", &self.set_format)
.field("reset", &self.reset)
.field("finish", &self.finish)
.field("handle_frame", &self.handle_frame)
.field("sink_event", &self.sink_event)
.field("src_event", &self.src_event)
.field("negotiate", &self.negotiate)
.field("decide_allocation", &self.decide_allocation)
.field("propose_allocation", &self.propose_allocation)
.field("flush", &self.flush)
.field("sink_query", &self.sink_query)
.field("src_query", &self.src_query)
.field("getcaps", &self.getcaps)
.field("drain", &self.drain)
.field("transform_meta", &self.transform_meta)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoDecoderPrivate(c_void);
pub type GstVideoDecoderPrivate = *mut _GstVideoDecoderPrivate;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoDirectionInterface {
pub iface: gobject::GTypeInterface,
}
impl ::std::fmt::Debug for GstVideoDirectionInterface {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoDirectionInterface @ {:?}",
self as *const _
))
.field("iface", &self.iface)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoDither(c_void);
pub type GstVideoDither = *mut _GstVideoDither;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoEncoderClass {
pub element_class: gst::GstElementClass,
pub open: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub close: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub start: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub stop: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub set_format:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecState) -> gboolean>,
pub handle_frame: Option<
unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn,
>,
pub reset: Option<unsafe extern "C" fn(*mut GstVideoEncoder, gboolean) -> gboolean>,
pub finish: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gst::GstFlowReturn>,
pub pre_push: Option<
unsafe extern "C" fn(*mut GstVideoEncoder, *mut GstVideoCodecFrame) -> gst::GstFlowReturn,
>,
pub getcaps:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstCaps) -> *mut gst::GstCaps>,
pub sink_event:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstEvent) -> gboolean>,
pub src_event:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstEvent) -> gboolean>,
pub negotiate: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub decide_allocation:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
pub propose_allocation:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
pub flush: Option<unsafe extern "C" fn(*mut GstVideoEncoder) -> gboolean>,
pub sink_query:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
pub src_query:
Option<unsafe extern "C" fn(*mut GstVideoEncoder, *mut gst::GstQuery) -> gboolean>,
pub transform_meta: Option<
unsafe extern "C" fn(
*mut GstVideoEncoder,
*mut GstVideoCodecFrame,
*mut gst::GstMeta,
) -> gboolean,
>,
pub _gst_reserved: [gpointer; 16],
}
impl ::std::fmt::Debug for GstVideoEncoderClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoEncoderClass @ {:?}", self as *const _))
.field("open", &self.open)
.field("close", &self.close)
.field("start", &self.start)
.field("stop", &self.stop)
.field("set_format", &self.set_format)
.field("handle_frame", &self.handle_frame)
.field("reset", &self.reset)
.field("finish", &self.finish)
.field("pre_push", &self.pre_push)
.field("getcaps", &self.getcaps)
.field("sink_event", &self.sink_event)
.field("src_event", &self.src_event)
.field("negotiate", &self.negotiate)
.field("decide_allocation", &self.decide_allocation)
.field("propose_allocation", &self.propose_allocation)
.field("flush", &self.flush)
.field("sink_query", &self.sink_query)
.field("src_query", &self.src_query)
.field("transform_meta", &self.transform_meta)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoEncoderPrivate(c_void);
pub type GstVideoEncoderPrivate = *mut _GstVideoEncoderPrivate;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoFilterClass {
pub parent_class: gst_base::GstBaseTransformClass,
pub set_info: Option<
unsafe extern "C" fn(
*mut GstVideoFilter,
*mut gst::GstCaps,
*mut GstVideoInfo,
*mut gst::GstCaps,
*mut GstVideoInfo,
) -> gboolean,
>,
pub transform_frame: Option<
unsafe extern "C" fn(
*mut GstVideoFilter,
*mut GstVideoFrame,
*mut GstVideoFrame,
) -> gst::GstFlowReturn,
>,
pub transform_frame_ip:
Option<unsafe extern "C" fn(*mut GstVideoFilter, *mut GstVideoFrame) -> gst::GstFlowReturn>,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoFilterClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoFilterClass @ {:?}", self as *const _))
.field("parent_class", &self.parent_class)
.field("set_info", &self.set_info)
.field("transform_frame", &self.transform_frame)
.field("transform_frame_ip", &self.transform_frame_ip)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoFormatInfo {
pub format: GstVideoFormat,
pub name: *const c_char,
pub description: *const c_char,
pub flags: GstVideoFormatFlags,
pub bits: c_uint,
pub n_components: c_uint,
pub shift: [c_uint; 4],
pub depth: [c_uint; 4],
pub pixel_stride: [c_int; 4],
pub n_planes: c_uint,
pub plane: [c_uint; 4],
pub poffset: [c_uint; 4],
pub w_sub: [c_uint; 4],
pub h_sub: [c_uint; 4],
pub unpack_format: GstVideoFormat,
pub unpack_func: GstVideoFormatUnpack,
pub pack_lines: c_int,
pub pack_func: GstVideoFormatPack,
pub tile_mode: GstVideoTileMode,
pub tile_ws: c_uint,
pub tile_hs: c_uint,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoFormatInfo {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoFormatInfo @ {:?}", self as *const _))
.field("format", &self.format)
.field("name", &self.name)
.field("description", &self.description)
.field("flags", &self.flags)
.field("bits", &self.bits)
.field("n_components", &self.n_components)
.field("shift", &self.shift)
.field("depth", &self.depth)
.field("pixel_stride", &self.pixel_stride)
.field("n_planes", &self.n_planes)
.field("plane", &self.plane)
.field("poffset", &self.poffset)
.field("w_sub", &self.w_sub)
.field("h_sub", &self.h_sub)
.field("unpack_format", &self.unpack_format)
.field("unpack_func", &self.unpack_func)
.field("pack_lines", &self.pack_lines)
.field("pack_func", &self.pack_func)
.field("tile_mode", &self.tile_mode)
.field("tile_ws", &self.tile_ws)
.field("tile_hs", &self.tile_hs)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoFrame {
pub info: GstVideoInfo,
pub flags: GstVideoFrameFlags,
pub buffer: *mut gst::GstBuffer,
pub meta: gpointer,
pub id: c_int,
pub data: [gpointer; 4],
pub map: [gst::GstMapInfo; 4],
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoFrame {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoFrame @ {:?}", self as *const _))
.field("info", &self.info)
.field("flags", &self.flags)
.field("buffer", &self.buffer)
.field("meta", &self.meta)
.field("id", &self.id)
.field("data", &self.data)
.field("map", &self.map)
.finish()
}
}
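// `GstVideoFrame` above carries at most GST_VIDEO_MAX_PLANES (4) planes:
// `data` holds one pointer per mapped plane and `map` the matching
// `GstMapInfo`, while the number of entries actually in use comes from the
// format description reachable through `info`.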
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoGLTextureUploadMeta {
pub meta: gst::GstMeta,
pub texture_orientation: GstVideoGLTextureOrientation,
pub n_textures: c_uint,
pub texture_type: [GstVideoGLTextureType; 4],
pub buffer: *mut gst::GstBuffer,
pub upload: GstVideoGLTextureUpload,
pub user_data: gpointer,
pub user_data_copy: gobject::GBoxedCopyFunc,
pub user_data_free: gobject::GBoxedFreeFunc,
}
impl ::std::fmt::Debug for GstVideoGLTextureUploadMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoGLTextureUploadMeta @ {:?}",
self as *const _
))
.field("meta", &self.meta)
.field("texture_orientation", &self.texture_orientation)
.field("n_textures", &self.n_textures)
.field("texture_type", &self.texture_type)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoInfo {
pub finfo: *const GstVideoFormatInfo,
pub interlace_mode: GstVideoInterlaceMode,
pub flags: GstVideoFlags,
pub width: c_int,
pub height: c_int,
pub size: size_t,
pub views: c_int,
pub chroma_site: GstVideoChromaSite,
pub colorimetry: GstVideoColorimetry,
pub par_n: c_int,
pub par_d: c_int,
pub fps_n: c_int,
pub fps_d: c_int,
pub offset: [size_t; 4],
pub stride: [c_int; 4],
pub ABI: GstVideoInfo_ABI,
}
impl ::std::fmt::Debug for GstVideoInfo {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoInfo @ {:?}", self as *const _))
.field("finfo", &self.finfo)
.field("interlace_mode", &self.interlace_mode)
.field("flags", &self.flags)
.field("width", &self.width)
.field("height", &self.height)
.field("size", &self.size)
.field("views", &self.views)
.field("chroma_site", &self.chroma_site)
.field("colorimetry", &self.colorimetry)
.field("par_n", &self.par_n)
.field("par_d", &self.par_d)
.field("fps_n", &self.fps_n)
.field("fps_d", &self.fps_d)
.field("offset", &self.offset)
.field("stride", &self.stride)
.field("ABI", &self.ABI)
.finish()
}
}
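// Illustrative usage sketch, not produced by gir: fills a zeroed GstVideoInfo
// for a 320x240 I420 frame using the raw gst_video_info_* declarations further
// below. GST_VIDEO_FORMAT_I420 is the planar 4:2:0 constant defined earlier in
// this file; the helper name is an assumption made for the example.
#[allow(dead_code)]
unsafe fn example_i420_video_info() -> GstVideoInfo {
    let mut info: GstVideoInfo = ::std::mem::zeroed();
    // Reset every field to its defaults before describing the format.
    gst_video_info_init(&mut info);
    let _ok = gst_video_info_set_format(&mut info, GST_VIDEO_FORMAT_I420, 320, 240);
    info
}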
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoInfo_ABI_abi {
pub multiview_mode: GstVideoMultiviewMode,
pub multiview_flags: GstVideoMultiviewFlags,
pub field_order: GstVideoFieldOrder,
}
impl ::std::fmt::Debug for GstVideoInfo_ABI_abi {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoInfo_ABI_abi @ {:?}", self as *const _))
.field("multiview_mode", &self.multiview_mode)
.field("multiview_flags", &self.multiview_flags)
.field("field_order", &self.field_order)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoMeta {
pub meta: gst::GstMeta,
pub buffer: *mut gst::GstBuffer,
pub flags: GstVideoFrameFlags,
pub format: GstVideoFormat,
pub id: c_int,
pub width: c_uint,
pub height: c_uint,
pub n_planes: c_uint,
pub offset: [size_t; 4],
pub stride: [c_int; 4],
pub map: Option<
unsafe extern "C" fn(
*mut GstVideoMeta,
c_uint,
*mut gst::GstMapInfo,
*mut gpointer,
*mut c_int,
gst::GstMapFlags,
) -> gboolean,
>,
pub unmap:
Option<unsafe extern "C" fn(*mut GstVideoMeta, c_uint, *mut gst::GstMapInfo) -> gboolean>,
}
impl ::std::fmt::Debug for GstVideoMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoMeta @ {:?}", self as *const _))
.field("meta", &self.meta)
.field("buffer", &self.buffer)
.field("flags", &self.flags)
.field("format", &self.format)
.field("id", &self.id)
.field("width", &self.width)
.field("height", &self.height)
.field("n_planes", &self.n_planes)
.field("offset", &self.offset)
.field("stride", &self.stride)
.field("map", &self.map)
.field("unmap", &self.unmap)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoMetaTransform {
pub in_info: *mut GstVideoInfo,
pub out_info: *mut GstVideoInfo,
}
impl ::std::fmt::Debug for GstVideoMetaTransform {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoMetaTransform @ {:?}", self as *const _))
.field("in_info", &self.in_info)
.field("out_info", &self.out_info)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoOrientationInterface {
pub iface: gobject::GTypeInterface,
    pub get_hflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut gboolean) -> gboolean>,
    pub get_vflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut gboolean) -> gboolean>,
    pub get_hcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut c_int) -> gboolean>,
    pub get_vcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, *mut c_int) -> gboolean>,
pub set_hflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, gboolean) -> gboolean>,
pub set_vflip: Option<unsafe extern "C" fn(*mut GstVideoOrientation, gboolean) -> gboolean>,
pub set_hcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, c_int) -> gboolean>,
pub set_vcenter: Option<unsafe extern "C" fn(*mut GstVideoOrientation, c_int) -> gboolean>,
}
impl ::std::fmt::Debug for GstVideoOrientationInterface {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoOrientationInterface @ {:?}",
self as *const _
))
.field("iface", &self.iface)
.field("get_hflip", &self.get_hflip)
.field("get_vflip", &self.get_vflip)
.field("get_hcenter", &self.get_hcenter)
.field("get_vcenter", &self.get_vcenter)
.field("set_hflip", &self.set_hflip)
.field("set_vflip", &self.set_vflip)
.field("set_hcenter", &self.set_hcenter)
.field("set_vcenter", &self.set_vcenter)
.finish()
}
}
#[repr(C)]
pub struct GstVideoOverlayComposition(c_void);
impl ::std::fmt::Debug for GstVideoOverlayComposition {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoOverlayComposition @ {:?}",
self as *const _
))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoOverlayCompositionMeta {
pub meta: gst::GstMeta,
pub overlay: *mut GstVideoOverlayComposition,
}
impl ::std::fmt::Debug for GstVideoOverlayCompositionMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoOverlayCompositionMeta @ {:?}",
self as *const _
))
.field("meta", &self.meta)
.field("overlay", &self.overlay)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoOverlayInterface {
pub iface: gobject::GTypeInterface,
pub expose: Option<unsafe extern "C" fn(*mut GstVideoOverlay)>,
pub handle_events: Option<unsafe extern "C" fn(*mut GstVideoOverlay, gboolean)>,
pub set_render_rectangle:
Option<unsafe extern "C" fn(*mut GstVideoOverlay, c_int, c_int, c_int, c_int)>,
pub set_window_handle: Option<unsafe extern "C" fn(*mut GstVideoOverlay, uintptr_t)>,
}
impl ::std::fmt::Debug for GstVideoOverlayInterface {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoOverlayInterface @ {:?}",
self as *const _
))
.field("iface", &self.iface)
.field("expose", &self.expose)
.field("handle_events", &self.handle_events)
.field("set_render_rectangle", &self.set_render_rectangle)
.field("set_window_handle", &self.set_window_handle)
.finish()
}
}
#[repr(C)]
pub struct GstVideoOverlayRectangle(c_void);
impl ::std::fmt::Debug for GstVideoOverlayRectangle {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoOverlayRectangle @ {:?}",
self as *const _
))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoRectangle {
pub x: c_int,
pub y: c_int,
pub w: c_int,
pub h: c_int,
}
impl ::std::fmt::Debug for GstVideoRectangle {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoRectangle @ {:?}", self as *const _))
.field("x", &self.x)
.field("y", &self.y)
.field("w", &self.w)
.field("h", &self.h)
.finish()
}
}
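// Illustrative usage sketch, not produced by gir: centres a source rectangle
// inside a window rectangle with gst_video_sink_center_rect(), declared further
// below. The helper name is an assumption made for the example.
#[allow(dead_code)]
unsafe fn example_center_rect(
    src: GstVideoRectangle,
    window: GstVideoRectangle,
) -> GstVideoRectangle {
    let mut result = GstVideoRectangle { x: 0, y: 0, w: 0, h: 0 };
    // With scaling enabled the source keeps its aspect ratio inside `window`.
    gst_video_sink_center_rect(src, window, &mut result, glib::GTRUE);
    result
}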
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoRegionOfInterestMeta {
pub meta: gst::GstMeta,
pub roi_type: glib::GQuark,
pub id: c_int,
pub parent_id: c_int,
pub x: c_uint,
pub y: c_uint,
pub w: c_uint,
pub h: c_uint,
pub params: *mut glib::GList,
}
impl ::std::fmt::Debug for GstVideoRegionOfInterestMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoRegionOfInterestMeta @ {:?}",
self as *const _
))
.field("meta", &self.meta)
.field("roi_type", &self.roi_type)
.field("id", &self.id)
.field("parent_id", &self.parent_id)
.field("x", &self.x)
.field("y", &self.y)
.field("w", &self.w)
.field("h", &self.h)
.field("params", &self.params)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoResampler {
pub in_size: c_int,
pub out_size: c_int,
pub max_taps: c_uint,
pub n_phases: c_uint,
pub offset: *mut u32,
pub phase: *mut u32,
pub n_taps: *mut u32,
pub taps: *mut c_double,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoResampler {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoResampler @ {:?}", self as *const _))
.field("in_size", &self.in_size)
.field("out_size", &self.out_size)
.field("max_taps", &self.max_taps)
.field("n_phases", &self.n_phases)
.field("offset", &self.offset)
.field("phase", &self.phase)
.field("n_taps", &self.n_taps)
.field("taps", &self.taps)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoScaler(c_void);
pub type GstVideoScaler = *mut _GstVideoScaler;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoSinkClass {
pub parent_class: gst_base::GstBaseSinkClass,
pub show_frame:
Option<unsafe extern "C" fn(*mut GstVideoSink, *mut gst::GstBuffer) -> gst::GstFlowReturn>,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoSinkClass {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoSinkClass @ {:?}", self as *const _))
.field("parent_class", &self.parent_class)
.field("show_frame", &self.show_frame)
.finish()
}
}
#[repr(C)]
pub struct _GstVideoSinkPrivate(c_void);
pub type GstVideoSinkPrivate = *mut _GstVideoSinkPrivate;
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoTimeCode {
pub config: GstVideoTimeCodeConfig,
pub hours: c_uint,
pub minutes: c_uint,
pub seconds: c_uint,
pub frames: c_uint,
pub field_count: c_uint,
}
impl ::std::fmt::Debug for GstVideoTimeCode {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoTimeCode @ {:?}", self as *const _))
.field("config", &self.config)
.field("hours", &self.hours)
.field("minutes", &self.minutes)
.field("seconds", &self.seconds)
.field("frames", &self.frames)
.field("field_count", &self.field_count)
.finish()
}
}
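// Illustrative usage sketch, not produced by gir: builds a 25 fps, 10:30:15:12
// timecode with the v1_10 constructors declared further below and renders it as
// a string. The caller owns the returned string (release it with g_free); the
// helper name is an assumption made for the example.
#[cfg(any(feature = "v1_10", feature = "dox"))]
#[allow(dead_code)]
unsafe fn example_timecode_to_string() -> *mut c_char {
    let tc = gst_video_time_code_new(
        25,
        1,
        ::std::ptr::null_mut(), // no daily jam
        0,                      // no GstVideoTimeCodeFlags set
        10,
        30,
        15,
        12,
        0, // field_count
    );
    let s = gst_video_time_code_to_string(tc);
    gst_video_time_code_free(tc);
    s
}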
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoTimeCodeConfig {
pub fps_n: c_uint,
pub fps_d: c_uint,
pub flags: GstVideoTimeCodeFlags,
pub latest_daily_jam: *mut glib::GDateTime,
}
impl ::std::fmt::Debug for GstVideoTimeCodeConfig {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoTimeCodeConfig @ {:?}", self as *const _))
.field("fps_n", &self.fps_n)
.field("fps_d", &self.fps_d)
.field("flags", &self.flags)
.field("latest_daily_jam", &self.latest_daily_jam)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoTimeCodeInterval {
pub hours: c_uint,
pub minutes: c_uint,
pub seconds: c_uint,
pub frames: c_uint,
}
impl ::std::fmt::Debug for GstVideoTimeCodeInterval {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoTimeCodeInterval @ {:?}",
self as *const _
))
.field("hours", &self.hours)
.field("minutes", &self.minutes)
.field("seconds", &self.seconds)
.field("frames", &self.frames)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoTimeCodeMeta {
pub meta: gst::GstMeta,
pub tc: GstVideoTimeCode,
}
impl ::std::fmt::Debug for GstVideoTimeCodeMeta {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoTimeCodeMeta @ {:?}", self as *const _))
.field("meta", &self.meta)
.field("tc", &self.tc)
.finish()
}
}
#[repr(C)]
pub struct GstVideoVBIEncoder(c_void);
impl ::std::fmt::Debug for GstVideoVBIEncoder {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoVBIEncoder @ {:?}", self as *const _))
.finish()
}
}
#[repr(C)]
pub struct GstVideoVBIParser(c_void);
impl ::std::fmt::Debug for GstVideoVBIParser {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoVBIParser @ {:?}", self as *const _))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstColorBalanceChannel {
pub parent: gobject::GObject,
pub label: *mut c_char,
pub min_value: c_int,
pub max_value: c_int,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstColorBalanceChannel {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstColorBalanceChannel @ {:?}", self as *const _))
.field("parent", &self.parent)
.field("label", &self.label)
.field("min_value", &self.min_value)
.field("max_value", &self.max_value)
.finish()
}
}
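// Illustrative usage sketch, not produced by gir: clamps a requested value to
// the channel's advertised range before handing it to
// gst_color_balance_set_value(), declared further below. The helper name is an
// assumption made for the example; both pointers must be valid.
#[allow(dead_code)]
unsafe fn example_set_balance_clamped(
    balance: *mut GstColorBalance,
    channel: *mut GstColorBalanceChannel,
    value: c_int,
) {
    let clamped = value.max((*channel).min_value).min((*channel).max_value);
    gst_color_balance_set_value(balance, channel, clamped);
}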
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregator {
pub aggregator: gst_base::GstAggregator,
pub info: GstVideoInfo,
pub priv_: *mut GstVideoAggregatorPrivate,
pub _gst_reserved: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoAggregator {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoAggregator @ {:?}", self as *const _))
.field("aggregator", &self.aggregator)
.field("info", &self.info)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregatorConvertPad {
pub parent: GstVideoAggregatorPad,
pub priv_: *mut GstVideoAggregatorConvertPadPrivate,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoAggregatorConvertPad {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoAggregatorConvertPad @ {:?}",
self as *const _
))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoAggregatorPad {
pub parent: gst_base::GstAggregatorPad,
pub info: GstVideoInfo,
pub priv_: *mut GstVideoAggregatorPadPrivate,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoAggregatorPad {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoAggregatorPad @ {:?}", self as *const _))
.field("parent", &self.parent)
.field("info", &self.info)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoBufferPool {
pub bufferpool: gst::GstBufferPool,
pub priv_: *mut GstVideoBufferPoolPrivate,
}
impl ::std::fmt::Debug for GstVideoBufferPool {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoBufferPool @ {:?}", self as *const _))
.field("bufferpool", &self.bufferpool)
.field("priv_", &self.priv_)
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoDecoder {
pub element: gst::GstElement,
pub sinkpad: *mut gst::GstPad,
pub srcpad: *mut gst::GstPad,
pub stream_lock: glib::GRecMutex,
pub input_segment: gst::GstSegment,
pub output_segment: gst::GstSegment,
pub priv_: *mut GstVideoDecoderPrivate,
pub padding: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoDecoder {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoDecoder @ {:?}", self as *const _))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoEncoder {
pub element: gst::GstElement,
pub sinkpad: *mut gst::GstPad,
pub srcpad: *mut gst::GstPad,
pub stream_lock: glib::GRecMutex,
pub input_segment: gst::GstSegment,
pub output_segment: gst::GstSegment,
pub priv_: *mut GstVideoEncoderPrivate,
pub padding: [gpointer; 20],
}
impl ::std::fmt::Debug for GstVideoEncoder {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoEncoder @ {:?}", self as *const _))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoFilter {
pub element: gst_base::GstBaseTransform,
pub negotiated: gboolean,
pub in_info: GstVideoInfo,
pub out_info: GstVideoInfo,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoFilter {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoFilter @ {:?}", self as *const _))
.field("element", &self.element)
.field("negotiated", &self.negotiated)
.field("in_info", &self.in_info)
.field("out_info", &self.out_info)
.finish()
}
}
#[repr(C)]
pub struct GstVideoMultiviewFlagsSet(c_void);
impl ::std::fmt::Debug for GstVideoMultiviewFlagsSet {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!(
"GstVideoMultiviewFlagsSet @ {:?}",
self as *const _
))
.finish()
}
}
#[repr(C)]
#[derive(Copy, Clone)]
pub struct GstVideoSink {
pub element: gst_base::GstBaseSink,
pub width: c_int,
pub height: c_int,
pub priv_: *mut GstVideoSinkPrivate,
pub _gst_reserved: [gpointer; 4],
}
impl ::std::fmt::Debug for GstVideoSink {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
f.debug_struct(&format!("GstVideoSink @ {:?}", self as *const _))
.field("element", &self.element)
.field("width", &self.width)
.field("height", &self.height)
.finish()
}
}
#[repr(C)]
pub struct GstColorBalance(c_void);
impl ::std::fmt::Debug for GstColorBalance {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "GstColorBalance @ {:?}", self as *const _)
}
}
#[repr(C)]
pub struct GstNavigation(c_void);
impl ::std::fmt::Debug for GstNavigation {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "GstNavigation @ {:?}", self as *const _)
}
}
#[repr(C)]
pub struct GstVideoDirection(c_void);
impl ::std::fmt::Debug for GstVideoDirection {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "GstVideoDirection @ {:?}", self as *const _)
}
}
#[repr(C)]
pub struct GstVideoOrientation(c_void);
impl ::std::fmt::Debug for GstVideoOrientation {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "GstVideoOrientation @ {:?}", self as *const _)
}
}
#[repr(C)]
pub struct GstVideoOverlay(c_void);
impl ::std::fmt::Debug for GstVideoOverlay {
fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
write!(f, "GstVideoOverlay @ {:?}", self as *const _)
}
}
extern "C" {
pub fn gst_color_balance_type_get_type() -> GType;
pub fn gst_navigation_command_get_type() -> GType;
pub fn gst_navigation_event_type_get_type() -> GType;
pub fn gst_navigation_message_type_get_type() -> GType;
pub fn gst_navigation_query_type_get_type() -> GType;
pub fn gst_video_alpha_mode_get_type() -> GType;
pub fn gst_video_ancillary_did_get_type() -> GType;
pub fn gst_video_ancillary_di_d16_get_type() -> GType;
pub fn gst_video_caption_type_get_type() -> GType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_caption_type_from_caps(caps: *const gst::GstCaps) -> GstVideoCaptionType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_caption_type_to_caps(type_: GstVideoCaptionType) -> *mut gst::GstCaps;
pub fn gst_video_chroma_method_get_type() -> GType;
pub fn gst_video_chroma_mode_get_type() -> GType;
pub fn gst_video_color_matrix_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_color_matrix_get_Kr_Kb(
matrix: GstVideoColorMatrix,
Kr: *mut c_double,
Kb: *mut c_double,
) -> gboolean;
pub fn gst_video_color_primaries_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_color_primaries_get_info(
primaries: GstVideoColorPrimaries,
) -> *const GstVideoColorPrimariesInfo;
pub fn gst_video_color_range_get_type() -> GType;
pub fn gst_video_color_range_offsets(
range: GstVideoColorRange,
info: *const GstVideoFormatInfo,
offset: *mut [c_int; 4],
scale: *mut [c_int; 4],
);
pub fn gst_video_dither_method_get_type() -> GType;
pub fn gst_video_field_order_get_type() -> GType;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_field_order_from_string(order: *const c_char) -> GstVideoFieldOrder;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_field_order_to_string(order: GstVideoFieldOrder) -> *const c_char;
pub fn gst_video_format_get_type() -> GType;
pub fn gst_video_format_from_fourcc(fourcc: u32) -> GstVideoFormat;
pub fn gst_video_format_from_masks(
depth: c_int,
bpp: c_int,
endianness: c_int,
red_mask: c_uint,
green_mask: c_uint,
blue_mask: c_uint,
alpha_mask: c_uint,
) -> GstVideoFormat;
pub fn gst_video_format_from_string(format: *const c_char) -> GstVideoFormat;
pub fn gst_video_format_get_info(format: GstVideoFormat) -> *const GstVideoFormatInfo;
#[cfg(any(feature = "v1_2", feature = "dox"))]
pub fn gst_video_format_get_palette(format: GstVideoFormat, size: *mut size_t)
-> gconstpointer;
pub fn gst_video_format_to_fourcc(format: GstVideoFormat) -> u32;
pub fn gst_video_format_to_string(format: GstVideoFormat) -> *const c_char;
pub fn gst_video_gamma_mode_get_type() -> GType;
pub fn gst_video_interlace_mode_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_interlace_mode_from_string(mode: *const c_char) -> GstVideoInterlaceMode;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_interlace_mode_to_string(mode: GstVideoInterlaceMode) -> *const c_char;
pub fn gst_video_matrix_mode_get_type() -> GType;
pub fn gst_video_multiview_frame_packing_get_type() -> GType;
pub fn gst_video_multiview_mode_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_mode_from_caps_string(
caps_mview_mode: *const c_char,
) -> GstVideoMultiviewMode;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_mode_to_caps_string(
mview_mode: GstVideoMultiviewMode,
) -> *const c_char;
pub fn gst_video_orientation_method_get_type() -> GType;
pub fn gst_video_primaries_mode_get_type() -> GType;
pub fn gst_video_resampler_method_get_type() -> GType;
pub fn gst_video_tile_mode_get_type() -> GType;
pub fn gst_video_tile_type_get_type() -> GType;
pub fn gst_video_transfer_function_get_type() -> GType;
pub fn gst_video_vbi_parser_result_get_type() -> GType;
pub fn gst_video_buffer_flags_get_type() -> GType;
pub fn gst_video_chroma_flags_get_type() -> GType;
pub fn gst_video_chroma_site_get_type() -> GType;
pub fn gst_video_dither_flags_get_type() -> GType;
pub fn gst_video_flags_get_type() -> GType;
pub fn gst_video_format_flags_get_type() -> GType;
pub fn gst_video_frame_flags_get_type() -> GType;
pub fn gst_video_frame_map_flags_get_type() -> GType;
pub fn gst_video_multiview_flags_get_type() -> GType;
pub fn gst_video_overlay_format_flags_get_type() -> GType;
pub fn gst_video_pack_flags_get_type() -> GType;
pub fn gst_video_resampler_flags_get_type() -> GType;
pub fn gst_video_scaler_flags_get_type() -> GType;
#[cfg(any(feature = "v1_8", feature = "dox"))]
pub fn gst_video_affine_transformation_meta_apply_matrix(
meta: *mut GstVideoAffineTransformationMeta,
matrix: *const [c_float; 16],
);
pub fn gst_video_affine_transformation_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_alignment_reset(align: *mut GstVideoAlignment);
pub fn gst_video_caption_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_chroma_resample_free(resample: *mut GstVideoChromaResample);
pub fn gst_video_chroma_resample_get_info(
resample: *mut GstVideoChromaResample,
n_lines: *mut c_uint,
offset: *mut c_int,
);
pub fn gst_video_chroma_resample_new(
method: GstVideoChromaMethod,
site: GstVideoChromaSite,
flags: GstVideoChromaFlags,
format: GstVideoFormat,
h_factor: c_int,
v_factor: c_int,
) -> *mut GstVideoChromaResample;
pub fn gst_video_codec_frame_get_type() -> GType;
pub fn gst_video_codec_frame_get_user_data(frame: *mut GstVideoCodecFrame) -> gpointer;
pub fn gst_video_codec_frame_ref(frame: *mut GstVideoCodecFrame) -> *mut GstVideoCodecFrame;
pub fn gst_video_codec_frame_set_user_data(
frame: *mut GstVideoCodecFrame,
user_data: gpointer,
notify: glib::GDestroyNotify,
);
pub fn gst_video_codec_frame_unref(frame: *mut GstVideoCodecFrame);
pub fn gst_video_codec_state_get_type() -> GType;
pub fn gst_video_codec_state_ref(state: *mut GstVideoCodecState) -> *mut GstVideoCodecState;
pub fn gst_video_codec_state_unref(state: *mut GstVideoCodecState);
pub fn gst_video_colorimetry_from_string(
cinfo: *mut GstVideoColorimetry,
color: *const c_char,
) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_colorimetry_is_equal(
cinfo: *const GstVideoColorimetry,
other: *const GstVideoColorimetry,
) -> gboolean;
pub fn gst_video_colorimetry_matches(
cinfo: *const GstVideoColorimetry,
color: *const c_char,
) -> gboolean;
pub fn gst_video_colorimetry_to_string(cinfo: *const GstVideoColorimetry) -> *mut c_char;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_converter_frame(
convert: *mut GstVideoConverter,
src: *const GstVideoFrame,
dest: *mut GstVideoFrame,
);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_converter_free(convert: *mut GstVideoConverter);
    #[cfg(any(feature = "v1_6", feature = "dox"))]
    pub fn gst_video_converter_get_config(
convert: *mut GstVideoConverter,
) -> *const gst::GstStructure;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_converter_set_config(
convert: *mut GstVideoConverter,
config: *mut gst::GstStructure,
) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_converter_new(
in_info: *mut GstVideoInfo,
out_info: *mut GstVideoInfo,
config: *mut gst::GstStructure,
) -> *mut GstVideoConverter;
pub fn gst_video_crop_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_dither_free(dither: *mut GstVideoDither);
pub fn gst_video_dither_line(
dither: *mut GstVideoDither,
line: gpointer,
x: c_uint,
y: c_uint,
width: c_uint,
);
pub fn gst_video_dither_new(
method: GstVideoDitherMethod,
flags: GstVideoDitherFlags,
format: GstVideoFormat,
quantizer: *mut c_uint,
width: c_uint,
) -> *mut GstVideoDither;
pub fn gst_video_frame_copy(dest: *mut GstVideoFrame, src: *const GstVideoFrame) -> gboolean;
pub fn gst_video_frame_copy_plane(
dest: *mut GstVideoFrame,
src: *const GstVideoFrame,
plane: c_uint,
) -> gboolean;
pub fn gst_video_frame_map(
frame: *mut GstVideoFrame,
info: *mut GstVideoInfo,
buffer: *mut gst::GstBuffer,
flags: gst::GstMapFlags,
) -> gboolean;
pub fn gst_video_frame_map_id(
frame: *mut GstVideoFrame,
info: *mut GstVideoInfo,
buffer: *mut gst::GstBuffer,
id: c_int,
flags: gst::GstMapFlags,
) -> gboolean;
pub fn gst_video_frame_unmap(frame: *mut GstVideoFrame);
pub fn gst_video_gl_texture_upload_meta_upload(
meta: *mut GstVideoGLTextureUploadMeta,
texture_id: *mut c_uint,
) -> gboolean;
pub fn gst_video_gl_texture_upload_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_info_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_info_new() -> *mut GstVideoInfo;
pub fn gst_video_info_align(info: *mut GstVideoInfo, align: *mut GstVideoAlignment)
-> gboolean;
pub fn gst_video_info_convert(
info: *mut GstVideoInfo,
src_format: gst::GstFormat,
src_value: i64,
dest_format: gst::GstFormat,
dest_value: *mut i64,
) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_info_copy(info: *const GstVideoInfo) -> *mut GstVideoInfo;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_info_free(info: *mut GstVideoInfo);
pub fn gst_video_info_from_caps(info: *mut GstVideoInfo, caps: *const gst::GstCaps)
-> gboolean;
pub fn gst_video_info_init(info: *mut GstVideoInfo);
pub fn gst_video_info_is_equal(
info: *const GstVideoInfo,
other: *const GstVideoInfo,
) -> gboolean;
pub fn gst_video_info_set_format(
info: *mut GstVideoInfo,
format: GstVideoFormat,
width: c_uint,
height: c_uint,
) -> gboolean;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_info_set_interlaced_format(
info: *mut GstVideoInfo,
format: GstVideoFormat,
mode: GstVideoInterlaceMode,
width: c_uint,
height: c_uint,
) -> gboolean;
pub fn gst_video_info_to_caps(info: *mut GstVideoInfo) -> *mut gst::GstCaps;
pub fn gst_video_meta_map(
meta: *mut GstVideoMeta,
plane: c_uint,
info: *mut gst::GstMapInfo,
data: *mut gpointer,
stride: *mut c_int,
flags: gst::GstMapFlags,
) -> gboolean;
pub fn gst_video_meta_unmap(
meta: *mut GstVideoMeta,
plane: c_uint,
info: *mut gst::GstMapInfo,
) -> gboolean;
pub fn gst_video_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_meta_transform_scale_get_quark() -> glib::GQuark;
pub fn gst_video_overlay_composition_get_type() -> GType;
pub fn gst_video_overlay_composition_new(
rectangle: *mut GstVideoOverlayRectangle,
) -> *mut GstVideoOverlayComposition;
pub fn gst_video_overlay_composition_add_rectangle(
comp: *mut GstVideoOverlayComposition,
rectangle: *mut GstVideoOverlayRectangle,
);
pub fn gst_video_overlay_composition_blend(
comp: *mut GstVideoOverlayComposition,
video_buf: *mut GstVideoFrame,
) -> gboolean;
pub fn gst_video_overlay_composition_copy(
comp: *mut GstVideoOverlayComposition,
) -> *mut GstVideoOverlayComposition;
pub fn gst_video_overlay_composition_get_rectangle(
comp: *mut GstVideoOverlayComposition,
n: c_uint,
) -> *mut GstVideoOverlayRectangle;
pub fn gst_video_overlay_composition_get_seqnum(
comp: *mut GstVideoOverlayComposition,
) -> c_uint;
pub fn gst_video_overlay_composition_make_writable(
comp: *mut GstVideoOverlayComposition,
) -> *mut GstVideoOverlayComposition;
pub fn gst_video_overlay_composition_n_rectangles(
comp: *mut GstVideoOverlayComposition,
) -> c_uint;
pub fn gst_video_overlay_composition_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_overlay_rectangle_get_type() -> GType;
pub fn gst_video_overlay_rectangle_new_raw(
pixels: *mut gst::GstBuffer,
render_x: c_int,
render_y: c_int,
render_width: c_uint,
render_height: c_uint,
flags: GstVideoOverlayFormatFlags,
) -> *mut GstVideoOverlayRectangle;
pub fn gst_video_overlay_rectangle_copy(
rectangle: *mut GstVideoOverlayRectangle,
) -> *mut GstVideoOverlayRectangle;
pub fn gst_video_overlay_rectangle_get_flags(
rectangle: *mut GstVideoOverlayRectangle,
) -> GstVideoOverlayFormatFlags;
pub fn gst_video_overlay_rectangle_get_global_alpha(
rectangle: *mut GstVideoOverlayRectangle,
) -> c_float;
pub fn gst_video_overlay_rectangle_get_pixels_argb(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_pixels_ayuv(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_pixels_raw(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_pixels_unscaled_argb(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_pixels_unscaled_ayuv(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_pixels_unscaled_raw(
rectangle: *mut GstVideoOverlayRectangle,
flags: GstVideoOverlayFormatFlags,
) -> *mut gst::GstBuffer;
pub fn gst_video_overlay_rectangle_get_render_rectangle(
rectangle: *mut GstVideoOverlayRectangle,
render_x: *mut c_int,
render_y: *mut c_int,
render_width: *mut c_uint,
render_height: *mut c_uint,
) -> gboolean;
pub fn gst_video_overlay_rectangle_get_seqnum(
rectangle: *mut GstVideoOverlayRectangle,
) -> c_uint;
pub fn gst_video_overlay_rectangle_set_global_alpha(
rectangle: *mut GstVideoOverlayRectangle,
global_alpha: c_float,
);
pub fn gst_video_overlay_rectangle_set_render_rectangle(
rectangle: *mut GstVideoOverlayRectangle,
render_x: c_int,
render_y: c_int,
render_width: c_uint,
render_height: c_uint,
);
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_region_of_interest_meta_add_param(
meta: *mut GstVideoRegionOfInterestMeta,
s: *mut gst::GstStructure,
);
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_region_of_interest_meta_get_param(
meta: *mut GstVideoRegionOfInterestMeta,
name: *const c_char,
) -> *mut gst::GstStructure;
pub fn gst_video_region_of_interest_meta_get_info() -> *const gst::GstMetaInfo;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_resampler_clear(resampler: *mut GstVideoResampler);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_resampler_init(
resampler: *mut GstVideoResampler,
method: GstVideoResamplerMethod,
flags: GstVideoResamplerFlags,
n_phases: c_uint,
n_taps: c_uint,
shift: c_double,
in_size: c_uint,
out_size: c_uint,
options: *mut gst::GstStructure,
) -> gboolean;
pub fn gst_video_scaler_2d(
hscale: *mut GstVideoScaler,
vscale: *mut GstVideoScaler,
format: GstVideoFormat,
src: gpointer,
src_stride: c_int,
dest: gpointer,
dest_stride: c_int,
x: c_uint,
y: c_uint,
width: c_uint,
height: c_uint,
);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_scaler_combine_packed_YUV(
y_scale: *mut GstVideoScaler,
uv_scale: *mut GstVideoScaler,
in_format: GstVideoFormat,
out_format: GstVideoFormat,
) -> *mut GstVideoScaler;
pub fn gst_video_scaler_free(scale: *mut GstVideoScaler);
pub fn gst_video_scaler_get_coeff(
scale: *mut GstVideoScaler,
out_offset: c_uint,
in_offset: *mut c_uint,
n_taps: *mut c_uint,
) -> *const c_double;
pub fn gst_video_scaler_get_max_taps(scale: *mut GstVideoScaler) -> c_uint;
pub fn gst_video_scaler_horizontal(
scale: *mut GstVideoScaler,
format: GstVideoFormat,
src: gpointer,
dest: gpointer,
dest_offset: c_uint,
width: c_uint,
);
pub fn gst_video_scaler_vertical(
scale: *mut GstVideoScaler,
format: GstVideoFormat,
src_lines: *mut gpointer,
dest: gpointer,
dest_offset: c_uint,
width: c_uint,
);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_scaler_new(
method: GstVideoResamplerMethod,
flags: GstVideoScalerFlags,
n_taps: c_uint,
in_size: c_uint,
out_size: c_uint,
options: *mut gst::GstStructure,
) -> *mut GstVideoScaler;
pub fn gst_video_time_code_get_type() -> GType;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_new(
fps_n: c_uint,
fps_d: c_uint,
latest_daily_jam: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
hours: c_uint,
minutes: c_uint,
seconds: c_uint,
frames: c_uint,
field_count: c_uint,
) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_new_empty() -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_new_from_date_time(
fps_n: c_uint,
fps_d: c_uint,
dt: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
field_count: c_uint,
) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_time_code_new_from_date_time_full(
fps_n: c_uint,
fps_d: c_uint,
dt: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
field_count: c_uint,
) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_new_from_string(tc_str: *const c_char) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_add_frames(tc: *mut GstVideoTimeCode, frames: i64);
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_add_interval(
tc: *const GstVideoTimeCode,
tc_inter: *const GstVideoTimeCodeInterval,
) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_clear(tc: *mut GstVideoTimeCode);
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_compare(
tc1: *const GstVideoTimeCode,
tc2: *const GstVideoTimeCode,
) -> c_int;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_copy(tc: *const GstVideoTimeCode) -> *mut GstVideoTimeCode;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_frames_since_daily_jam(tc: *const GstVideoTimeCode) -> u64;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_free(tc: *mut GstVideoTimeCode);
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_increment_frame(tc: *mut GstVideoTimeCode);
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_init(
tc: *mut GstVideoTimeCode,
fps_n: c_uint,
fps_d: c_uint,
latest_daily_jam: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
hours: c_uint,
minutes: c_uint,
seconds: c_uint,
frames: c_uint,
field_count: c_uint,
);
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_init_from_date_time(
tc: *mut GstVideoTimeCode,
fps_n: c_uint,
fps_d: c_uint,
dt: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
field_count: c_uint,
);
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_time_code_init_from_date_time_full(
tc: *mut GstVideoTimeCode,
fps_n: c_uint,
fps_d: c_uint,
dt: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
field_count: c_uint,
) -> gboolean;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_is_valid(tc: *const GstVideoTimeCode) -> gboolean;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_nsec_since_daily_jam(tc: *const GstVideoTimeCode) -> u64;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_to_date_time(tc: *const GstVideoTimeCode) -> *mut glib::GDateTime;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_video_time_code_to_string(tc: *const GstVideoTimeCode) -> *mut c_char;
pub fn gst_video_time_code_interval_get_type() -> GType;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_new(
hours: c_uint,
minutes: c_uint,
seconds: c_uint,
frames: c_uint,
) -> *mut GstVideoTimeCodeInterval;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_new_from_string(
tc_inter_str: *const c_char,
) -> *mut GstVideoTimeCodeInterval;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_clear(tc: *mut GstVideoTimeCodeInterval);
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_copy(
tc: *const GstVideoTimeCodeInterval,
) -> *mut GstVideoTimeCodeInterval;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_free(tc: *mut GstVideoTimeCodeInterval);
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_time_code_interval_init(
tc: *mut GstVideoTimeCodeInterval,
hours: c_uint,
minutes: c_uint,
seconds: c_uint,
frames: c_uint,
);
pub fn gst_video_time_code_meta_get_info() -> *const gst::GstMetaInfo;
pub fn gst_video_vbi_encoder_get_type() -> GType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_encoder_new(
format: GstVideoFormat,
pixel_width: u32,
) -> *mut GstVideoVBIEncoder;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_encoder_add_ancillary(
encoder: *mut GstVideoVBIEncoder,
composite: gboolean,
DID: u8,
SDID_block_number: u8,
data: *const u8,
data_count: c_uint,
) -> gboolean;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_encoder_copy(
encoder: *const GstVideoVBIEncoder,
) -> *mut GstVideoVBIEncoder;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_encoder_free(encoder: *mut GstVideoVBIEncoder);
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_encoder_write_line(encoder: *mut GstVideoVBIEncoder, data: *mut u8);
pub fn gst_video_vbi_parser_get_type() -> GType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_parser_new(
format: GstVideoFormat,
pixel_width: u32,
) -> *mut GstVideoVBIParser;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_parser_add_line(parser: *mut GstVideoVBIParser, data: *const u8);
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_parser_copy(parser: *const GstVideoVBIParser) -> *mut GstVideoVBIParser;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_parser_free(parser: *mut GstVideoVBIParser);
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_vbi_parser_get_ancillary(
parser: *mut GstVideoVBIParser,
anc: *mut GstVideoAncillary,
) -> GstVideoVBIParserResult;
pub fn gst_color_balance_channel_get_type() -> GType;
pub fn gst_video_aggregator_get_type() -> GType;
pub fn gst_video_aggregator_convert_pad_get_type() -> GType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_aggregator_convert_pad_update_conversion_info(
pad: *mut GstVideoAggregatorConvertPad,
);
pub fn gst_video_aggregator_pad_get_type() -> GType;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_aggregator_pad_get_current_buffer(
pad: *mut GstVideoAggregatorPad,
) -> *mut gst::GstBuffer;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_aggregator_pad_get_prepared_frame(
pad: *mut GstVideoAggregatorPad,
) -> *mut GstVideoFrame;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_aggregator_pad_has_current_buffer(pad: *mut GstVideoAggregatorPad)
-> gboolean;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_aggregator_pad_set_needs_alpha(
pad: *mut GstVideoAggregatorPad,
needs_alpha: gboolean,
);
pub fn gst_video_buffer_pool_get_type() -> GType;
pub fn gst_video_buffer_pool_new() -> *mut gst::GstBufferPool;
pub fn gst_video_decoder_get_type() -> GType;
pub fn gst_video_decoder_add_to_frame(decoder: *mut GstVideoDecoder, n_bytes: c_int);
pub fn gst_video_decoder_allocate_output_buffer(
decoder: *mut GstVideoDecoder,
) -> *mut gst::GstBuffer;
pub fn gst_video_decoder_allocate_output_frame(
decoder: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstFlowReturn;
#[cfg(any(feature = "v1_12", feature = "dox"))]
pub fn gst_video_decoder_allocate_output_frame_with_params(
decoder: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
params: *mut gst::GstBufferPoolAcquireParams,
) -> gst::GstFlowReturn;
pub fn gst_video_decoder_drop_frame(
dec: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstFlowReturn;
pub fn gst_video_decoder_finish_frame(
decoder: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstFlowReturn;
pub fn gst_video_decoder_get_allocator(
decoder: *mut GstVideoDecoder,
allocator: *mut *mut gst::GstAllocator,
params: *mut gst::GstAllocationParams,
);
pub fn gst_video_decoder_get_buffer_pool(
decoder: *mut GstVideoDecoder,
) -> *mut gst::GstBufferPool;
pub fn gst_video_decoder_get_estimate_rate(dec: *mut GstVideoDecoder) -> c_int;
pub fn gst_video_decoder_get_frame(
decoder: *mut GstVideoDecoder,
frame_number: c_int,
) -> *mut GstVideoCodecFrame;
pub fn gst_video_decoder_get_frames(decoder: *mut GstVideoDecoder) -> *mut glib::GList;
pub fn gst_video_decoder_get_latency(
decoder: *mut GstVideoDecoder,
min_latency: *mut gst::GstClockTime,
max_latency: *mut gst::GstClockTime,
);
pub fn gst_video_decoder_get_max_decode_time(
decoder: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstClockTimeDiff;
pub fn gst_video_decoder_get_max_errors(dec: *mut GstVideoDecoder) -> c_int;
#[cfg(any(feature = "v1_4", feature = "dox"))]
pub fn gst_video_decoder_get_needs_format(dec: *mut GstVideoDecoder) -> gboolean;
pub fn gst_video_decoder_get_oldest_frame(
decoder: *mut GstVideoDecoder,
) -> *mut GstVideoCodecFrame;
pub fn gst_video_decoder_get_output_state(
decoder: *mut GstVideoDecoder,
) -> *mut GstVideoCodecState;
pub fn gst_video_decoder_get_packetized(decoder: *mut GstVideoDecoder) -> gboolean;
#[cfg(any(feature = "v1_4", feature = "dox"))]
pub fn gst_video_decoder_get_pending_frame_size(decoder: *mut GstVideoDecoder) -> size_t;
#[cfg(any(feature = "v1_0_3", feature = "dox"))]
pub fn gst_video_decoder_get_qos_proportion(decoder: *mut GstVideoDecoder) -> c_double;
pub fn gst_video_decoder_have_frame(decoder: *mut GstVideoDecoder) -> gst::GstFlowReturn;
pub fn gst_video_decoder_merge_tags(
decoder: *mut GstVideoDecoder,
tags: *const gst::GstTagList,
mode: gst::GstTagMergeMode,
);
pub fn gst_video_decoder_negotiate(decoder: *mut GstVideoDecoder) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_decoder_proxy_getcaps(
decoder: *mut GstVideoDecoder,
caps: *mut gst::GstCaps,
filter: *mut gst::GstCaps,
) -> *mut gst::GstCaps;
#[cfg(any(feature = "v1_2_2", feature = "dox"))]
pub fn gst_video_decoder_release_frame(
dec: *mut GstVideoDecoder,
frame: *mut GstVideoCodecFrame,
);
pub fn gst_video_decoder_set_estimate_rate(dec: *mut GstVideoDecoder, enabled: gboolean);
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_video_decoder_set_interlaced_output_state(
decoder: *mut GstVideoDecoder,
fmt: GstVideoFormat,
mode: GstVideoInterlaceMode,
width: c_uint,
height: c_uint,
reference: *mut GstVideoCodecState,
) -> *mut GstVideoCodecState;
pub fn gst_video_decoder_set_latency(
decoder: *mut GstVideoDecoder,
min_latency: gst::GstClockTime,
max_latency: gst::GstClockTime,
);
pub fn gst_video_decoder_set_max_errors(dec: *mut GstVideoDecoder, num: c_int);
#[cfg(any(feature = "v1_4", feature = "dox"))]
pub fn gst_video_decoder_set_needs_format(dec: *mut GstVideoDecoder, enabled: gboolean);
pub fn gst_video_decoder_set_output_state(
decoder: *mut GstVideoDecoder,
fmt: GstVideoFormat,
width: c_uint,
height: c_uint,
reference: *mut GstVideoCodecState,
) -> *mut GstVideoCodecState;
pub fn gst_video_decoder_set_packetized(decoder: *mut GstVideoDecoder, packetized: gboolean);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_decoder_set_use_default_pad_acceptcaps(
decoder: *mut GstVideoDecoder,
use_: gboolean,
);
pub fn gst_video_encoder_get_type() -> GType;
pub fn gst_video_encoder_allocate_output_buffer(
encoder: *mut GstVideoEncoder,
size: size_t,
) -> *mut gst::GstBuffer;
pub fn gst_video_encoder_allocate_output_frame(
encoder: *mut GstVideoEncoder,
frame: *mut GstVideoCodecFrame,
size: size_t,
) -> gst::GstFlowReturn;
pub fn gst_video_encoder_finish_frame(
encoder: *mut GstVideoEncoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstFlowReturn;
pub fn gst_video_encoder_get_allocator(
encoder: *mut GstVideoEncoder,
allocator: *mut *mut gst::GstAllocator,
params: *mut gst::GstAllocationParams,
);
pub fn gst_video_encoder_get_frame(
encoder: *mut GstVideoEncoder,
frame_number: c_int,
) -> *mut GstVideoCodecFrame;
pub fn gst_video_encoder_get_frames(encoder: *mut GstVideoEncoder) -> *mut glib::GList;
pub fn gst_video_encoder_get_latency(
encoder: *mut GstVideoEncoder,
min_latency: *mut gst::GstClockTime,
max_latency: *mut gst::GstClockTime,
);
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_encoder_get_max_encode_time(
encoder: *mut GstVideoEncoder,
frame: *mut GstVideoCodecFrame,
) -> gst::GstClockTimeDiff;
pub fn gst_video_encoder_get_oldest_frame(
encoder: *mut GstVideoEncoder,
) -> *mut GstVideoCodecFrame;
pub fn gst_video_encoder_get_output_state(
encoder: *mut GstVideoEncoder,
) -> *mut GstVideoCodecState;
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_encoder_is_qos_enabled(encoder: *mut GstVideoEncoder) -> gboolean;
pub fn gst_video_encoder_merge_tags(
encoder: *mut GstVideoEncoder,
tags: *const gst::GstTagList,
mode: gst::GstTagMergeMode,
);
pub fn gst_video_encoder_negotiate(encoder: *mut GstVideoEncoder) -> gboolean;
pub fn gst_video_encoder_proxy_getcaps(
enc: *mut GstVideoEncoder,
caps: *mut gst::GstCaps,
filter: *mut gst::GstCaps,
) -> *mut gst::GstCaps;
pub fn gst_video_encoder_set_headers(encoder: *mut GstVideoEncoder, headers: *mut glib::GList);
pub fn gst_video_encoder_set_latency(
encoder: *mut GstVideoEncoder,
min_latency: gst::GstClockTime,
max_latency: gst::GstClockTime,
);
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_encoder_set_min_pts(encoder: *mut GstVideoEncoder, min_pts: gst::GstClockTime);
pub fn gst_video_encoder_set_output_state(
encoder: *mut GstVideoEncoder,
caps: *mut gst::GstCaps,
reference: *mut GstVideoCodecState,
) -> *mut GstVideoCodecState;
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_encoder_set_qos_enabled(encoder: *mut GstVideoEncoder, enabled: gboolean);
pub fn gst_video_filter_get_type() -> GType;
pub fn gst_video_multiview_flagset_get_type() -> GType;
pub fn gst_video_sink_get_type() -> GType;
pub fn gst_video_sink_center_rect(
src: GstVideoRectangle,
dst: GstVideoRectangle,
result: *mut GstVideoRectangle,
scaling: gboolean,
);
pub fn gst_color_balance_get_type() -> GType;
pub fn gst_color_balance_get_balance_type(balance: *mut GstColorBalance)
-> GstColorBalanceType;
pub fn gst_color_balance_get_value(
balance: *mut GstColorBalance,
channel: *mut GstColorBalanceChannel,
) -> c_int;
pub fn gst_color_balance_list_channels(balance: *mut GstColorBalance) -> *const glib::GList;
pub fn gst_color_balance_set_value(
balance: *mut GstColorBalance,
channel: *mut GstColorBalanceChannel,
value: c_int,
);
pub fn gst_color_balance_value_changed(
balance: *mut GstColorBalance,
channel: *mut GstColorBalanceChannel,
value: c_int,
);
pub fn gst_navigation_get_type() -> GType;
pub fn gst_navigation_event_get_type(event: *mut gst::GstEvent) -> GstNavigationEventType;
pub fn gst_navigation_event_parse_command(
event: *mut gst::GstEvent,
command: *mut GstNavigationCommand,
) -> gboolean;
pub fn gst_navigation_event_parse_key_event(
event: *mut gst::GstEvent,
key: *mut *const c_char,
) -> gboolean;
pub fn gst_navigation_event_parse_mouse_button_event(
event: *mut gst::GstEvent,
button: *mut c_int,
x: *mut c_double,
y: *mut c_double,
) -> gboolean;
pub fn gst_navigation_event_parse_mouse_move_event(
event: *mut gst::GstEvent,
x: *mut c_double,
y: *mut c_double,
) -> gboolean;
pub fn gst_navigation_message_get_type(
message: *mut gst::GstMessage,
) -> GstNavigationMessageType;
pub fn gst_navigation_message_new_angles_changed(
src: *mut gst::GstObject,
cur_angle: c_uint,
n_angles: c_uint,
) -> *mut gst::GstMessage;
pub fn gst_navigation_message_new_commands_changed(
src: *mut gst::GstObject,
) -> *mut gst::GstMessage;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_navigation_message_new_event(
src: *mut gst::GstObject,
event: *mut gst::GstEvent,
) -> *mut gst::GstMessage;
pub fn gst_navigation_message_new_mouse_over(
src: *mut gst::GstObject,
active: gboolean,
) -> *mut gst::GstMessage;
pub fn gst_navigation_message_parse_angles_changed(
message: *mut gst::GstMessage,
cur_angle: *mut c_uint,
n_angles: *mut c_uint,
) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_navigation_message_parse_event(
message: *mut gst::GstMessage,
event: *mut *mut gst::GstEvent,
) -> gboolean;
pub fn gst_navigation_message_parse_mouse_over(
message: *mut gst::GstMessage,
active: *mut gboolean,
) -> gboolean;
pub fn gst_navigation_query_get_type(query: *mut gst::GstQuery) -> GstNavigationQueryType;
pub fn gst_navigation_query_new_angles() -> *mut gst::GstQuery;
pub fn gst_navigation_query_new_commands() -> *mut gst::GstQuery;
pub fn gst_navigation_query_parse_angles(
query: *mut gst::GstQuery,
cur_angle: *mut c_uint,
n_angles: *mut c_uint,
) -> gboolean;
pub fn gst_navigation_query_parse_commands_length(
query: *mut gst::GstQuery,
n_cmds: *mut c_uint,
) -> gboolean;
pub fn gst_navigation_query_parse_commands_nth(
query: *mut gst::GstQuery,
nth: c_uint,
cmd: *mut GstNavigationCommand,
) -> gboolean;
pub fn gst_navigation_query_set_angles(
query: *mut gst::GstQuery,
cur_angle: c_uint,
n_angles: c_uint,
);
pub fn gst_navigation_query_set_commands(query: *mut gst::GstQuery, n_cmds: c_int, ...);
pub fn gst_navigation_query_set_commandsv(
query: *mut gst::GstQuery,
n_cmds: c_int,
cmds: *mut GstNavigationCommand,
);
pub fn gst_navigation_send_command(
navigation: *mut GstNavigation,
command: GstNavigationCommand,
);
pub fn gst_navigation_send_event(
navigation: *mut GstNavigation,
structure: *mut gst::GstStructure,
);
pub fn gst_navigation_send_key_event(
navigation: *mut GstNavigation,
event: *const c_char,
key: *const c_char,
);
pub fn gst_navigation_send_mouse_event(
navigation: *mut GstNavigation,
event: *const c_char,
button: c_int,
x: c_double,
y: c_double,
);
pub fn gst_video_direction_get_type() -> GType;
pub fn gst_video_orientation_get_type() -> GType;
pub fn gst_video_orientation_get_hcenter(
video_orientation: *mut GstVideoOrientation,
center: *mut c_int,
) -> gboolean;
pub fn gst_video_orientation_get_hflip(
video_orientation: *mut GstVideoOrientation,
flip: *mut gboolean,
) -> gboolean;
pub fn gst_video_orientation_get_vcenter(
video_orientation: *mut GstVideoOrientation,
center: *mut c_int,
) -> gboolean;
pub fn gst_video_orientation_get_vflip(
video_orientation: *mut GstVideoOrientation,
flip: *mut gboolean,
) -> gboolean;
pub fn gst_video_orientation_set_hcenter(
video_orientation: *mut GstVideoOrientation,
center: c_int,
) -> gboolean;
pub fn gst_video_orientation_set_hflip(
video_orientation: *mut GstVideoOrientation,
flip: gboolean,
) -> gboolean;
pub fn gst_video_orientation_set_vcenter(
video_orientation: *mut GstVideoOrientation,
center: c_int,
) -> gboolean;
pub fn gst_video_orientation_set_vflip(
video_orientation: *mut GstVideoOrientation,
flip: gboolean,
) -> gboolean;
pub fn gst_video_overlay_get_type() -> GType;
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_overlay_install_properties(
oclass: *mut gobject::GObjectClass,
last_prop_id: c_int,
);
#[cfg(any(feature = "v1_14", feature = "dox"))]
pub fn gst_video_overlay_set_property(
object: *mut gobject::GObject,
last_prop_id: c_int,
property_id: c_uint,
value: *const gobject::GValue,
) -> gboolean;
pub fn gst_video_overlay_expose(overlay: *mut GstVideoOverlay);
pub fn gst_video_overlay_got_window_handle(overlay: *mut GstVideoOverlay, handle: uintptr_t);
pub fn gst_video_overlay_handle_events(overlay: *mut GstVideoOverlay, handle_events: gboolean);
pub fn gst_video_overlay_prepare_window_handle(overlay: *mut GstVideoOverlay);
pub fn gst_video_overlay_set_render_rectangle(
overlay: *mut GstVideoOverlay,
x: c_int,
y: c_int,
width: c_int,
height: c_int,
) -> gboolean;
pub fn gst_video_overlay_set_window_handle(overlay: *mut GstVideoOverlay, handle: uintptr_t);
#[cfg(any(feature = "v1_8", feature = "dox"))]
pub fn gst_buffer_add_video_affine_transformation_meta(
buffer: *mut gst::GstBuffer,
) -> *mut GstVideoAffineTransformationMeta;
#[cfg(any(feature = "v1_16", feature = "dox"))]
pub fn gst_buffer_add_video_caption_meta(
buffer: *mut gst::GstBuffer,
caption_type: GstVideoCaptionType,
data: *const u8,
size: size_t,
) -> *mut GstVideoCaptionMeta;
pub fn gst_buffer_add_video_gl_texture_upload_meta(
buffer: *mut gst::GstBuffer,
texture_orientation: GstVideoGLTextureOrientation,
n_textures: c_uint,
texture_type: *mut GstVideoGLTextureType,
upload: GstVideoGLTextureUpload,
user_data: gpointer,
user_data_copy: gobject::GBoxedCopyFunc,
user_data_free: gobject::GBoxedFreeFunc,
) -> *mut GstVideoGLTextureUploadMeta;
pub fn gst_buffer_add_video_meta(
buffer: *mut gst::GstBuffer,
flags: GstVideoFrameFlags,
format: GstVideoFormat,
width: c_uint,
height: c_uint,
) -> *mut GstVideoMeta;
pub fn gst_buffer_add_video_meta_full(
buffer: *mut gst::GstBuffer,
flags: GstVideoFrameFlags,
format: GstVideoFormat,
width: c_uint,
height: c_uint,
n_planes: c_uint,
offset: *mut [size_t; 4],
stride: *mut [c_int; 4],
) -> *mut GstVideoMeta;
pub fn gst_buffer_add_video_overlay_composition_meta(
buf: *mut gst::GstBuffer,
comp: *mut GstVideoOverlayComposition,
) -> *mut GstVideoOverlayCompositionMeta;
pub fn gst_buffer_add_video_region_of_interest_meta(
buffer: *mut gst::GstBuffer,
roi_type: *const c_char,
x: c_uint,
y: c_uint,
w: c_uint,
h: c_uint,
) -> *mut GstVideoRegionOfInterestMeta;
pub fn gst_buffer_add_video_region_of_interest_meta_id(
buffer: *mut gst::GstBuffer,
roi_type: glib::GQuark,
x: c_uint,
y: c_uint,
w: c_uint,
h: c_uint,
) -> *mut GstVideoRegionOfInterestMeta;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_buffer_add_video_time_code_meta(
buffer: *mut gst::GstBuffer,
tc: *mut GstVideoTimeCode,
) -> *mut GstVideoTimeCodeMeta;
#[cfg(any(feature = "v1_10", feature = "dox"))]
pub fn gst_buffer_add_video_time_code_meta_full(
buffer: *mut gst::GstBuffer,
fps_n: c_uint,
fps_d: c_uint,
latest_daily_jam: *mut glib::GDateTime,
flags: GstVideoTimeCodeFlags,
hours: c_uint,
minutes: c_uint,
seconds: c_uint,
frames: c_uint,
field_count: c_uint,
) -> *mut GstVideoTimeCodeMeta;
pub fn gst_buffer_get_video_meta(buffer: *mut gst::GstBuffer) -> *mut GstVideoMeta;
pub fn gst_buffer_get_video_meta_id(
buffer: *mut gst::GstBuffer,
id: c_int,
) -> *mut GstVideoMeta;
pub fn gst_buffer_get_video_region_of_interest_meta_id(
buffer: *mut gst::GstBuffer,
id: c_int,
) -> *mut GstVideoRegionOfInterestMeta;
pub fn gst_buffer_pool_config_get_video_alignment(
config: *mut gst::GstStructure,
align: *mut GstVideoAlignment,
) -> gboolean;
pub fn gst_buffer_pool_config_set_video_alignment(
config: *mut gst::GstStructure,
align: *mut GstVideoAlignment,
);
pub fn gst_is_video_overlay_prepare_window_handle_message(
msg: *mut gst::GstMessage,
) -> gboolean;
pub fn gst_video_affine_transformation_meta_api_get_type() -> GType;
pub fn gst_video_blend(
dest: *mut GstVideoFrame,
src: *mut GstVideoFrame,
x: c_int,
y: c_int,
global_alpha: c_float,
) -> gboolean;
pub fn gst_video_blend_scale_linear_RGBA(
src: *mut GstVideoInfo,
src_buffer: *mut gst::GstBuffer,
dest_height: c_int,
dest_width: c_int,
dest: *mut GstVideoInfo,
dest_buffer: *mut *mut gst::GstBuffer,
);
pub fn gst_video_calculate_display_ratio(
dar_n: *mut c_uint,
dar_d: *mut c_uint,
video_width: c_uint,
video_height: c_uint,
video_par_n: c_uint,
video_par_d: c_uint,
display_par_n: c_uint,
display_par_d: c_uint,
) -> gboolean;
pub fn gst_video_caption_meta_api_get_type() -> GType;
pub fn gst_video_chroma_from_string(s: *const c_char) -> GstVideoChromaSite;
pub fn gst_video_chroma_resample(
resample: *mut GstVideoChromaResample,
lines: *mut gpointer,
width: c_int,
);
pub fn gst_video_chroma_to_string(site: GstVideoChromaSite) -> *const c_char;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_color_transfer_decode(
func: GstVideoTransferFunction,
val: c_double,
) -> c_double;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_color_transfer_encode(
func: GstVideoTransferFunction,
val: c_double,
) -> c_double;
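// gst_video_convert_sample() converts a sample to the requested caps, blocking
// for at most `timeout` and reporting failure through the GError out-parameter;
// the _async variant delivers the result through the supplied callback instead.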
pub fn gst_video_convert_sample(
sample: *mut gst::GstSample,
to_caps: *const gst::GstCaps,
timeout: gst::GstClockTime,
error: *mut *mut glib::GError,
) -> *mut gst::GstSample;
pub fn gst_video_convert_sample_async(
sample: *mut gst::GstSample,
to_caps: *const gst::GstCaps,
timeout: gst::GstClockTime,
callback: GstVideoConvertSampleCallback,
user_data: gpointer,
destroy_notify: glib::GDestroyNotify,
);
pub fn gst_video_crop_meta_api_get_type() -> GType;
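// Event helpers: the constructors below build force-key-unit and still-frame
// custom events, and the matching gst_video_event_parse_* functions extract
// their fields again (a still-frame round trip is sketched in the test after
// this extern block).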
pub fn gst_video_event_is_force_key_unit(event: *mut gst::GstEvent) -> gboolean;
pub fn gst_video_event_new_downstream_force_key_unit(
timestamp: gst::GstClockTime,
stream_time: gst::GstClockTime,
running_time: gst::GstClockTime,
all_headers: gboolean,
count: c_uint,
) -> *mut gst::GstEvent;
pub fn gst_video_event_new_still_frame(in_still: gboolean) -> *mut gst::GstEvent;
pub fn gst_video_event_new_upstream_force_key_unit(
running_time: gst::GstClockTime,
all_headers: gboolean,
count: c_uint,
) -> *mut gst::GstEvent;
pub fn gst_video_event_parse_downstream_force_key_unit(
event: *mut gst::GstEvent,
timestamp: *mut gst::GstClockTime,
stream_time: *mut gst::GstClockTime,
running_time: *mut gst::GstClockTime,
all_headers: *mut gboolean,
count: *mut c_uint,
) -> gboolean;
pub fn gst_video_event_parse_still_frame(
event: *mut gst::GstEvent,
in_still: *mut gboolean,
) -> gboolean;
pub fn gst_video_event_parse_upstream_force_key_unit(
event: *mut gst::GstEvent,
running_time: *mut gst::GstClockTime,
all_headers: *mut gboolean,
count: *mut c_uint,
) -> gboolean;
pub fn gst_video_gl_texture_upload_meta_api_get_type() -> GType;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_guess_framerate(
duration: gst::GstClockTime,
dest_n: *mut c_int,
dest_d: *mut c_int,
) -> gboolean;
pub fn gst_video_meta_api_get_type() -> GType;
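// The gst_video_multiview_get_*_modes() getters (1.6+) return a constant GValue
// holding the list of multiview modes in each category, typically used to
// constrain the "multiview-mode" field of video caps.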
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_get_doubled_height_modes() -> *const gobject::GValue;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_get_doubled_size_modes() -> *const gobject::GValue;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_get_doubled_width_modes() -> *const gobject::GValue;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_get_mono_modes() -> *const gobject::GValue;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_get_unpacked_modes() -> *const gobject::GValue;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_guess_half_aspect(
mv_mode: GstVideoMultiviewMode,
width: c_uint,
height: c_uint,
par_n: c_uint,
par_d: c_uint,
) -> gboolean;
#[cfg(any(feature = "v1_6", feature = "dox"))]
pub fn gst_video_multiview_video_info_change_mode(
info: *mut GstVideoInfo,
out_mview_mode: GstVideoMultiviewMode,
out_mview_flags: GstVideoMultiviewFlags,
);
pub fn gst_video_overlay_composition_meta_api_get_type() -> GType;
pub fn gst_video_region_of_interest_meta_api_get_type() -> GType;
#[cfg(any(feature = "v1_4", feature = "dox"))]
pub fn gst_video_tile_get_index(
mode: GstVideoTileMode,
x: c_int,
y: c_int,
x_tiles: c_int,
y_tiles: c_int,
) -> c_uint;
pub fn gst_video_time_code_meta_api_get_type() -> GType;
}
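// Illustrative usage sketch, not part of the generated bindings: a minimal test
// exercising two of the simpler declarations above, assuming the system
// GStreamer libraries are available at test time, that initializing with null
// argc/argv is acceptable, and that gstreamer-sys/glib-sys expose gst_init(),
// gst_mini_object_unref(), GTRUE and GFALSE as usual.
#[test]
fn video_sys_usage_sketch() {
    unsafe {
        // GStreamer must be initialized before events can be created.
        gst::gst_init(std::ptr::null_mut(), std::ptr::null_mut());

        // 1280x720 with 1:1 pixel aspect ratios on both sides reduces to 16:9.
        let mut dar_n: c_uint = 0;
        let mut dar_d: c_uint = 0;
        let ok = gst_video_calculate_display_ratio(
            &mut dar_n, &mut dar_d, // resulting display aspect ratio
            1280, 720, // video width / height
            1, 1, // video pixel aspect ratio
            1, 1, // display pixel aspect ratio
        );
        assert_ne!(ok, glib::GFALSE);
        assert_eq!((dar_n, dar_d), (16, 9));

        // Build a "still frame starts" event and parse the flag back out.
        let event = gst_video_event_new_still_frame(glib::GTRUE);
        assert!(!event.is_null());
        let mut in_still: gboolean = glib::GFALSE;
        assert_ne!(
            gst_video_event_parse_still_frame(event, &mut in_still),
            glib::GFALSE
        );
        assert_eq!(in_still, glib::GTRUE);
        gst::gst_mini_object_unref(event as *mut gst::GstMiniObject);
    }
}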