VideoFrame
视频帧的属性设置。
Java
/**
 * Represents a single video frame: a reference-counted pixel {@link Buffer} plus per-frame
 * attributes — clockwise rotation, capture timestamp, color space, optional alpha-channel
 * data, pixel aspect ratio and capture source type.
 *
 * <p>The frame itself carries no pixel data; {@link #retain()} and {@link #release()} simply
 * delegate to the underlying buffer's reference count.
 */
public class VideoFrame implements RefCounted {
  /** Reference-counted pixel storage backing a frame. */
  public interface Buffer extends RefCounted {
    /** Width of the buffer in pixels (before rotation is applied). */
    @CalledByNative("Buffer") int getWidth();
    /** Height of the buffer in pixels (before rotation is applied). */
    @CalledByNative("Buffer") int getHeight();
    /** Converts this buffer to I420 planar layout; may allocate and copy. */
    @CalledByNative("Buffer") I420Buffer toI420();
    @Override @CalledByNative("Buffer") void release();
    @Override @CalledByNative("Buffer") void retain();
    /** Crops the given sub-rectangle and scales the result to scaleWidth x scaleHeight. */
    @CalledByNative("Buffer")
    Buffer cropAndScale(
        int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth, int scaleHeight);
    /** Returns a mirrored copy for the given rotation (degrees), or null on failure. */
    @CalledByNative("Buffer") @Nullable Buffer mirror(int frameRotation);
    /** Returns a rotated copy (rotation in degrees), or null on failure. */
    @CalledByNative("Buffer") @Nullable Buffer rotate(int frameRotation);
    /** Crop, scale and rotate in one pass; returns null on failure. */
    @CalledByNative("Buffer")
    @Nullable
    Buffer transform(int cropX, int cropY, int cropWidth, int cropHeight, int scaleWidth,
        int scaleHeight, int frameRotation);
  }

  /** Planar YUV 4:2:0 buffer: chroma planes subsampled 2x both horizontally and vertically. */
  public interface I420Buffer extends Buffer {
    @CalledByNative("I420Buffer") ByteBuffer getDataY();
    @CalledByNative("I420Buffer") ByteBuffer getDataU();
    @CalledByNative("I420Buffer") ByteBuffer getDataV();
    @CalledByNative("I420Buffer") int getStrideY();
    @CalledByNative("I420Buffer") int getStrideU();
    @CalledByNative("I420Buffer") int getStrideV();
  }

  /** Planar YUV 4:2:2 buffer: chroma planes subsampled 2x horizontally only. */
  public interface I422Buffer extends Buffer {
    @CalledByNative("I422Buffer") ByteBuffer getDataY();
    @CalledByNative("I422Buffer") ByteBuffer getDataU();
    @CalledByNative("I422Buffer") ByteBuffer getDataV();
    @CalledByNative("I422Buffer") int getStrideY();
    @CalledByNative("I422Buffer") int getStrideU();
    @CalledByNative("I422Buffer") int getStrideV();
  }

  /** Packed RGBA buffer. */
  public interface RgbaBuffer extends Buffer {
    @CalledByNative("RgbaBuffer") ByteBuffer getData();
  }

  /** Buffer backed by an OpenGL texture. */
  public interface TextureBuffer extends Buffer {
    /** OpenGL texture target of the backing texture. */
    enum Type {
      OES(GLES11Ext.GL_TEXTURE_EXTERNAL_OES),
      RGB(GLES20.GL_TEXTURE_2D);

      private final int glTarget;

      Type(final int glTarget) {
        this.glTarget = glTarget;
      }

      /** The GL enum value to bind this texture with. */
      public int getGlTarget() {
        return glTarget;
      }
    }

    /** EGL API generation that owns the texture's context. */
    enum ContextType {
      EGL_CONTEXT_10,
      EGL_CONTEXT_14
    }

    Type getType();
    @CalledByNative("TextureBuffer") int getTextureId();
    /** Texture-coordinate transform to apply when sampling the texture. */
    Matrix getTransformMatrix();
    @CalledByNative("TextureBuffer") EglBase.Context getEglBaseContext();
    @CalledByNative("TextureBuffer") Object getSourceTexturePool();
    @CalledByNative("TextureBuffer") long getNativeEglContext();
    @CalledByNative("TextureBuffer") int getEglContextType();
    /** The transform matrix flattened to a float array for the native layer. */
    @CalledByNative("TextureBuffer") float[] getTransformMatrixArray();
    @CalledByNative("TextureBuffer") int getSequence();
    @CalledByNative("TextureBuffer") boolean is10BitTexture();
  }

  /**
   * Color-space description of a frame. The integer codes appear to mirror the native
   * color-space enums — confirm against the native layer before changing any value.
   */
  public interface ColorSpace {
    /** Value range of the pixel samples (limited/studio swing vs. full swing). */
    enum Range {
      Invalid(0),
      Limited(1),
      Full(2),
      Derived(3);

      private final int range;

      Range(int range) {
        this.range = range;
      }

      public int getRange() {
        return range;
      }
    }

    /** YUV<->RGB conversion matrix identifier. */
    enum Matrix {
      RGB(0),
      BT709(1),
      Unspecified(2),
      FCC(4),
      BT470BG(5),
      SMPTE170M(6),
      SMPTE240M(7),
      YCOCG(8),
      BT2020_NCL(9),
      BT2020_CL(10),
      SMPTE2085(11),
      CDNCLS(12),
      CDCLS(13),
      BT2100_ICTCP(14);

      private final int matrix;

      Matrix(int matrix) {
        this.matrix = matrix;
      }

      public int getMatrix() {
        return matrix;
      }
    }

    /** Opto-electronic transfer characteristic identifier. */
    enum Transfer {
      BT709(1),
      Unspecified(2),
      GAMMA22(4),
      GAMMA28(5),
      SMPTE170M(6),
      SMPTE240M(7),
      LINEAR(8),
      LOG(9),
      LOG_SQRT(10),
      IEC61966_2_4(11),
      BT1361_ECG(12),
      IEC61966_2_1(13),
      BT2020_10(14),
      BT2020_12(15),
      SMPTEST2084(16),
      SMPTEST428(17),
      ARIB_STD_B67(18);

      private final int transfer;

      Transfer(int transfer) {
        this.transfer = transfer;
      }

      public int getTransfer() {
        return transfer;
      }
    }

    /** Color-primaries identifier. */
    enum Primary {
      BT709(1),
      Unspecified(2),
      BT470M(4),
      BT470BG(5),
      kSMPTE170M(6),
      kSMPTE240M(7),
      kFILM(8),
      kBT2020(9),
      kSMPTEST428(10),
      kSMPTEST431(11),
      kSMPTEST432(12),
      kJEDECP22(22);

      private final int primary;

      Primary(int primary) {
        this.primary = primary;
      }

      public int getPrimary() {
        return primary;
      }
    }

    Range getRange();
    Matrix getMatrix();
    Transfer getTransfer();
    Primary getPrimary();
  }

  /** Capture source of the frame; order must match the native enum (ordinal is marshalled). */
  public enum SourceType {
    kFrontCamera,
    kBackCamera,
    kUnspecified,
  }

  private Buffer buffer;
  private int rotation;
  private long timestampNs;
  private ColorSpace colorSpace;
  private SourceType sourceType;
  private float sampleAspectRatio;
  private VideoFrameMetaInfo metaInfo = new VideoFrameMetaInfo();
  private @Nullable ByteBuffer alphaBuffer;
  private int alphaStitchMode;

  /**
   * Convenience constructor using defaults: wrapped native color space, no alpha data,
   * square pixels (aspect ratio 1.0) and unspecified source type.
   *
   * <p>Fix: the previous delegation passed a stray extra {@code 0L} argument that matched no
   * constructor; it has been removed so the call resolves to the seven-argument constructor.
   */
  public VideoFrame(Buffer buffer, int rotation, long timestampNs) {
    this(buffer, rotation, timestampNs, new WrappedNativeColorSpace(), null, 1.0f,
        SourceType.kUnspecified.ordinal());
  }

  /**
   * Full constructor, also invoked from the native layer.
   *
   * @param buffer pixel buffer; must not be null
   * @param rotation clockwise rotation in degrees; must be a multiple of 90
   * @param timestampNs capture timestamp in nanoseconds
   * @param colorSpace color space of the frame
   * @param alphaBuffer optional alpha-channel data matching the frame dimensions; may be null
   * @param sampleAspectRatio width/height ratio of a single pixel
   * @param sourceType ordinal of {@link SourceType}; an out-of-range value throws
   *     {@link ArrayIndexOutOfBoundsException}
   * @throws IllegalArgumentException if buffer is null or rotation is not a multiple of 90
   */
  @CalledByNative
  public VideoFrame(Buffer buffer, int rotation, long timestampNs, ColorSpace colorSpace,
      ByteBuffer alphaBuffer, float sampleAspectRatio, int sourceType) {
    if (buffer == null) {
      throw new IllegalArgumentException("buffer not allowed to be null");
    }
    if (rotation % 90 != 0) {
      throw new IllegalArgumentException("rotation must be a multiple of 90");
    }
    this.buffer = buffer;
    this.rotation = rotation;
    this.timestampNs = timestampNs;
    this.colorSpace = colorSpace;
    this.alphaBuffer = alphaBuffer;
    this.sampleAspectRatio = sampleAspectRatio;
    this.sourceType = SourceType.values()[sourceType];
  }

  @CalledByNative
  public SourceType getSourceType() {
    return sourceType;
  }

  public float getSampleAspectRatio() {
    return sampleAspectRatio;
  }

  @CalledByNative
  public Buffer getBuffer() {
    return buffer;
  }

  /** Clockwise rotation in degrees (0, 90, 180 or 270) to apply before rendering. */
  @CalledByNative
  public int getRotation() {
    return rotation;
  }

  /** Position of the alpha data relative to the frame; 0 means no stitching. */
  @CalledByNative
  public int getAlphaStitchMode() {
    return alphaStitchMode;
  }

  @CalledByNative
  public long getTimestampNs() {
    return timestampNs;
  }

  @CalledByNative
  public VideoFrameMetaInfo getMetaInfo() {
    return metaInfo;
  }

  /** Width of the frame as displayed, i.e. after {@link #getRotation()} is applied. */
  public int getRotatedWidth() {
    if (rotation % 180 == 0) {
      return buffer.getWidth();
    }
    return buffer.getHeight();
  }

  /** Height of the frame as displayed, i.e. after {@link #getRotation()} is applied. */
  public int getRotatedHeight() {
    if (rotation % 180 == 0) {
      return buffer.getHeight();
    }
    return buffer.getWidth();
  }

  /**
   * Releases the current buffer and adopts the given one. Does NOT retain the new buffer —
   * ownership of one reference transfers from the caller to this frame.
   */
  public void replaceBuffer(Buffer buffer, int rotation, long timestampNs) {
    release();
    this.buffer = buffer;
    this.rotation = rotation;
    this.timestampNs = timestampNs;
  }

  public ColorSpace getColorSpace() {
    return colorSpace;
  }

  @CalledByNative
  public ByteBuffer getAlphaBuffer() {
    return alphaBuffer;
  }

  /** Sets the alpha-channel data; must match the frame dimensions (width x height). */
  public void fillAlphaData(ByteBuffer buffer) {
    alphaBuffer = buffer;
  }

  public void setAlphaStitchMode(int mode) {
    this.alphaStitchMode = mode;
  }

  @Override
  public void retain() {
    buffer.retain();
  }

  @Override
  @CalledByNative
  public void release() {
    buffer.release();
  }
}
缓冲区给出的是指向指针的指针,该接口不能修改缓冲区的指针,只能修改缓冲区的内容。
buffer
注意
该参数不可为空,否则会发生异常。
- getRotatedWidth:获取旋转后的视频帧宽度。
- getRotatedHeight:获取旋转后的视频帧高度。
- replaceBuffer:将缓冲区中的数据替换为新的视频帧。
- retain:将缓冲区的引用计数加 1。
- release:将缓冲区的引用计数减 1。当计数为 0 时,缓冲区的资源会被释放。
rotation
在渲染视频前设置该帧的顺时针旋转角度,目前支持 0 度、90 度、180 度和 270 度。
timestampNs
视频帧的时间戳(纳秒)。
colorSpace
表示视频帧的色彩空间。详见 VideoColorSpace。
sourceType
在使用 SDK 采集视频时,表示该视频源的类型。
- kFrontCamera:前置摄像头。
- kBackCamera:后置摄像头。
- kUnspecified:(默认) 视频源类型未知。
sampleAspectRatio
单个像素的宽高比,即每个像素宽度与高度的比值。
alphaBuffer
采用人像分割算法输出的 Alpha 通道数据。该数据跟视频帧的尺寸一致,每个像素点的取值范围为 [0,255],其中 0 代表背景;255 代表前景(人像)。
你可以通过设置该参数,实现将视频背景自渲染为各种效果,例如:透明、纯色、图片、视频等。
注意
- 在自定义视频渲染场景下,需确保传入的视频帧和 alphaBuffer 均为 Full Range 类型;其他类型可能导致 Alpha 数据渲染不正常。
- 请务必确保 alphaBuffer 跟视频帧的尺寸 (width × height) 完全一致,否则可能会导致 App 崩溃。
alphaStitchMode
当视频帧中包含 Alpha 通道数据时,设置 alphaBuffer 和视频帧的相对位置。
- VIDEO_NO_ALPHA_STITCH (0):(默认)仅视频帧,即 alphaBuffer 不和视频帧拼接。
- VIDEO_ALPHA_STITCH_UP (1):alphaBuffer 位于视频帧的上方。
- VIDEO_ALPHA_STITCH_BELOW (2):alphaBuffer 位于视频帧的下方。
- VIDEO_ALPHA_STITCH_LEFT (3):alphaBuffer 位于视频帧的左侧。
- VIDEO_ALPHA_STITCH_RIGHT (4):alphaBuffer 位于视频帧的右侧。
metaInfo
视频帧中的元信息。该参数需要联系技术支持使用。