enableCustomVideoCapture API of TRTCCloud. Then, the TRTC SDK's camera capturing and image processing logic will be skipped, and only its encoding and transfer capabilities will be retained. Below is the sample code:TRTCCloud mTRTCCloud = TRTCCloud.shareInstance();mTRTCCloud.enableCustomVideoCapture(TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, true);
self.trtcCloud = [TRTCCloud sharedInstance];[self.trtcCloud enableCustomVideoCapture:TRTCVideoStreamTypeBig enable:YES];
liteav::ITRTCCloud* trtc_cloud = liteav::ITRTCCloud::getTRTCShareInstance();trtc_cloud->enableCustomVideoCapture(TRTCVideoStreamType::TRTCVideoStreamTypeBig, true);
sendCustomVideoData API of TRTCCloud to populate the TRTC SDK with your own video data. Below is the sample code:// Two schemes are available for Android: Texture (recommended) and Buffer. Texture is used as an example here.TRTCCloudDef.TRTCVideoFrame videoFrame = new TRTCCloudDef.TRTCVideoFrame();videoFrame.texture = new TRTCCloudDef.TRTCTexture();videoFrame.texture.textureId = textureId;videoFrame.texture.eglContext14 = eglContext;videoFrame.width = width;videoFrame.height = height;videoFrame.timestamp = timestamp;videoFrame.pixelFormat = TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_Texture_2D;videoFrame.bufferType = TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_TEXTURE;mTRTCCloud.sendCustomVideoData(TRTCCloudDef.TRTC_VIDEO_STREAM_TYPE_BIG, videoFrame);
// On iOS and macOS, the video captured by the camera is in NV12 format. The natively supported and best-performing video frame format is CVPixelBufferRef, and I420 and OpenGL 2D texture formats are also supported. CVPixelBufferRef is used as an example here, which is recommended.TRTCVideoFrame *videoFrame = [[TRTCVideoFrame alloc] init];videoFrame.pixelFormat = TRTCVideoPixelFormat_NV12;videoFrame.bufferType = TRTCVideoBufferType_PixelBuffer;videoFrame.pixelBuffer = imageBuffer;videoFrame.timestamp = timeStamp;[[TRTCCloud sharedInstance] sendCustomVideoData:TRTCVideoStreamTypeBig frame:videoFrame];
// Only the Buffer scheme is available for Windows currently and is recommended for feature implementation.liteav::TRTCVideoFrame frame;frame.timestamp = getTRTCShareInstance()->generateCustomPTS();frame.videoFormat = liteav::TRTCVideoPixelFormat_I420;frame.bufferType = liteav::TRTCVideoBufferType_Buffer;frame.length = buffer_size;frame.data = array.data();frame.width = YUV_WIDTH;frame.height = YUV_HEIGHT;getTRTCShareInstance()->sendCustomVideoData(&frame);
TRTCVideoFrame) through the callback function onRenderVideoFrame. Then, you can customize the rendering of the received video frames. This process requires certain knowledge of OpenGL. We also provide API examples for different platforms:mTRTCCloud.setLocalVideoRenderListener(TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_Texture_2D, TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_TEXTURE, new TRTCCloudListener.TRTCVideoRenderListener() {@Overridepublic void onRenderVideoFrame(String userId, int streamType, TRTCCloudDef.TRTCVideoFrame frame) {// For more information, see the custom rendering tool class `com.tencent.trtc.mediashare.helper.CustomFrameRender` in `TRTC-API-Example`}});
self.trtcCloud = [TRTCCloud sharedInstance];[self.trtcCloud setLocalVideoRenderDelegate:self pixelFormat:TRTCVideoPixelFormat_NV12 bufferType:TRTCVideoBufferType_PixelBuffer];
```// For specific implementation, see `test_custom_render.cpp` in `TRTC-API-Example-Qt`void TestCustomRender::onRenderVideoFrame(const char* userId,liteav::TRTCVideoStreamType streamType,liteav::TRTCVideoFrame* frame) {if (gl_yuv_widget_ == nullptr) {return;}if (streamType == liteav::TRTCVideoStreamType::TRTCVideoStreamTypeBig) {// Adjust the rendering windowemit renderViewSize(frame->width, frame->height);// Draw video framesgl_yuv_widget_->slotShowYuv(reinterpret_cast<uchar*>(frame->data),frame->width, frame->height);}}```
mTRTCCloud.setRemoteVideoRenderListener(userId, TRTCCloudDef.TRTC_VIDEO_PIXEL_FORMAT_I420, TRTCCloudDef.TRTC_VIDEO_BUFFER_TYPE_BYTE_ARRAY, new TRTCCloudListener.TRTCVideoRenderListener() {@Overridepublic void onRenderVideoFrame(String userId, int streamType, TRTCCloudDef.TRTCVideoFrame frame) {// For more information, see the custom rendering tool class `com.tencent.trtc.mediashare.helper.CustomFrameRender` in `TRTC-API-Example`}});
- (void)onRenderVideoFrame:(TRTCVideoFrame *)frame userId:(NSString *)userId streamType:(TRTCVideoStreamType)streamType{// If `userId` is `nil`, the image rendered is the local image; otherwise it is a remote image.CFRetain(frame.pixelBuffer);__weak __typeof(self) weakSelf = self;dispatch_async(dispatch_get_main_queue(), ^{TestRenderVideoFrame *strongSelf = weakSelf;UIImageView* videoView = nil;if (userId) {videoView = [strongSelf.userVideoViews objectForKey:userId];}else {videoView = strongSelf.localVideoView;}videoView.image = [UIImage imageWithCIImage:[CIImage imageWithCVImageBuffer:frame.pixelBuffer]];videoView.contentMode = UIViewContentModeScaleAspectFit;CFRelease(frame.pixelBuffer);});}
```// For specific implementation, see `test_custom_render.cpp` in `TRTC-API-Example-Qt`void TestCustomRender::onRenderVideoFrame(const char* userId,liteav::TRTCVideoStreamType streamType,liteav::TRTCVideoFrame* frame) {if (gl_yuv_widget_ == nullptr) {return;}if (streamType == liteav::TRTCVideoStreamType::TRTCVideoStreamTypeBig) {// Adjust the rendering windowemit renderViewSize(frame->width, frame->height);// Draw video framesgl_yuv_widget_->slotShowYuv(reinterpret_cast<uchar*>(frame->data),frame->width, frame->height);}}```
Feedback