Commit 96982ae8 authored by Kateryna Kostiuk's avatar Kateryna Kostiuk

iOS: software decoding/encoding callback

- When a call starts, check whether the chosen codec supports hardware
encoding/decoding on iOS (only H264 does). For other codecs, fall back
to software encoding/decoding.
- Modify avtarget so it can convert software-decoded frames to
images.

Change-Id: I72086fb06a970b2d695d20f41730eb711f8cafac
parent 3b49d335
......@@ -148,25 +148,102 @@ extern "C" {
}
/// Converts a decoded AVFrame (hardware or software decoded) into a UIImage,
/// applying any display-rotation side data attached to the frame.
/// @param frame The decoded frame. For hardware-decoded frames, data[3] holds a CVPixelBufferRef.
/// @return The rendered image, or an empty UIImage if conversion fails.
+ (UIImage*)convertHardwareDecodedFrameToImage:(const AVFrame*)frame {
    CIImage *image;
    if ((CVPixelBufferRef)frame->data[3]) {
        // Hardware-decoded frame: data[3] already carries a CVPixelBufferRef.
        image = [CIImage imageWithCVPixelBuffer: (CVPixelBufferRef)frame->data[3]];
    } else {
        // Software-decoded frame: build a pixel buffer from the raw planes.
        auto buffer = [Utils converCVPixelBufferRefFromAVFrame: frame];
        if (!buffer) {
            // Conversion can fail (unsupported format, allocation failure);
            // calling CFRelease/imageWithCVPixelBuffer: on NULL would crash.
            return [[UIImage alloc] init];
        }
        image = [CIImage imageWithCVPixelBuffer: buffer];
        // CVPixelBufferCreate returned a +1 reference; CIImage retains its own.
        CFRelease(buffer);
    }
    if (!image) {
        return [[UIImage alloc] init];
    }
    if (auto matrix = av_frame_get_side_data(frame, AV_FRAME_DATA_DISPLAYMATRIX)) {
        // The stream carries a display matrix: rotate the image accordingly.
        const int32_t* data = reinterpret_cast<int32_t*>(matrix->data);
        auto rotation = av_display_rotation_get(data);
        auto uiImageOrientation = [Utils uimageOrientationFromRotation:rotation];
        auto ciImageOrientation = [Utils ciimageOrientationFromRotation:rotation];
        if (@available(iOS 11.0, *)) {
            image = [image imageByApplyingCGOrientation: ciImageOrientation];
        } else if (@available(iOS 10.0, *)) {
            image = [image imageByApplyingOrientation:static_cast<int>(ciImageOrientation)];
        }
        return [UIImage imageWithCIImage:image scale:1 orientation: uiImageOrientation];
    }
    return [UIImage imageWithCIImage:image];
}
/// Builds an NV12 (420v) CVPixelBuffer from a software-decoded AVFrame.
/// Handles AV_PIX_FMT_NV12 input (plane copy) and planar 4:2:0 input
/// (U and V interleaved on the fly).
/// @param frame The decoded frame; data[0] must be valid.
/// @return A +1-retained pixel buffer the caller must CFRelease, or NULL on failure.
+(CVPixelBufferRef)converCVPixelBufferRefFromAVFrame:(const AVFrame *)frame {
    if (!frame || !frame->data[0]) {
        return NULL;
    }
    CVPixelBufferRef pixelBuffer = NULL;
    NSDictionary *options = @{
        (NSString *)kCVPixelBufferBytesPerRowAlignmentKey: @(frame->linesize[0]),
        (NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{}
    };
    CVReturn ret = CVPixelBufferCreate(kCFAllocatorDefault,
                                       frame->width,
                                       frame->height,
                                       kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange,
                                       (__bridge CFDictionaryRef)(options),
                                       &pixelBuffer);
    if (ret != kCVReturnSuccess) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t bytesPerRowY = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
    size_t bytesPerRowUV = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
    size_t height = static_cast<size_t>(frame->height);
    uint8_t* base = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0));
    // Luma plane: bulk copy when strides match, otherwise row by row.
    if (bytesPerRowY == static_cast<size_t>(frame->linesize[0])) {
        memcpy(base, frame->data[0], bytesPerRowY * height);
    } else {
        [Utils copyLineByLineSrc: frame->data[0]
                          toDest: base
                     srcLinesize: frame->linesize[0]
                    destLinesize: bytesPerRowY
                          height: height];
    }
    base = static_cast<uint8_t*>(CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1));
    if ((AVPixelFormat)frame->format == AV_PIX_FMT_NV12) {
        // Chroma is already interleaved. Use the chroma stride (linesize[1]),
        // not the luma stride (linesize[0]), when comparing and copying.
        if (bytesPerRowUV == static_cast<size_t>(frame->linesize[1])) {
            memcpy(base, frame->data[1], bytesPerRowUV * (height / 2));
        } else {
            [Utils copyLineByLineSrc: frame->data[1]
                              toDest: base
                         srcLinesize: frame->linesize[1]
                        destLinesize: bytesPerRowUV
                              height: height / 2];
        }
    } else {
        // Planar 4:2:0 input: interleave U and V row by row, honouring each
        // plane's own stride (planes are commonly padded past width/2).
        size_t chromaWidth = static_cast<size_t>(frame->width) / 2;
        for (size_t row = 0; row < height / 2; row++) {
            uint8_t* dest = base + row * bytesPerRowUV;
            const uint8_t* srcU = frame->data[1] + row * frame->linesize[1];
            const uint8_t* srcV = frame->data[2] + row * frame->linesize[2];
            for (size_t col = 0; col < chromaWidth; col++) {
                *dest++ = srcU[col];
                *dest++ = srcV[col];
            }
        }
    }
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    return pixelBuffer;
}
/// Copies `height` rows from src to dest when the two buffers have different
/// row strides, advancing each pointer by its own stride per row.
/// @param src          Source plane base address.
/// @param dest         Destination plane base address.
/// @param srcLinesize  Source bytes per row (stride).
/// @param destLinesize Destination bytes per row (stride).
/// @param height       Number of rows to copy.
+ (void)copyLineByLineSrc:(uint8_t*)src
                   toDest:(uint8_t*)dest
              srcLinesize:(size_t)srcLinesize
             destLinesize:(size_t)destLinesize
                   height:(size_t)height {
    // Copy only the smaller of the two strides per row: copying srcLinesize
    // bytes when destLinesize < srcLinesize would overrun the destination row.
    size_t bytesPerRow = MIN(srcLinesize, destLinesize);
    for (size_t i = 0; i < height; i++) {
        memcpy(dest, src, bytesPerRow);
        dest += destLinesize;
        src += srcLinesize;
    }
}
+ (AVFrame*)configureHardwareDecodedFrame:(AVFrame*)frame
......
......@@ -30,7 +30,10 @@
- (void)addVideoDeviceWithName:(NSString*)deviceName withDevInfo:(NSDictionary*)deviceInfoDict;
- (void)setDefaultDevice:(NSString*)deviceName;
- (void)registerSinkTargetWithSinkId:sinkId withWidth:(NSInteger)w withHeight:(NSInteger)h;
- (void)registerSinkTargetWithSinkId:sinkId
withWidth:(NSInteger)w
withHeight:(NSInteger)h
withHardwareSupport:(BOOL)hardwareSupport;
- (void)removeSinkTargetWithSinkId:(NSString*)sinkId;
- (void)writeOutgoingFrameWithBuffer:(CVImageBufferRef)image
angle:(int)angle
......
......@@ -126,8 +126,10 @@ static id <VideoAdapterDelegate> _delegate;
int h,
bool is_mixer) {
if(VideoAdapter.delegate) {
NSString* rendererId = [NSString stringWithUTF8String:renderer_id.c_str()];;
[VideoAdapter.delegate decodingStartedWithRendererId:rendererId withWidth:(NSInteger)w withHeight:(NSInteger)h];
NSString* rendererId = [NSString stringWithUTF8String:renderer_id.c_str()];
std::map<std::string, std::string> callDetails = getCallDetails(renderer_id);
NSString* codecName = [NSString stringWithUTF8String: callDetails["VIDEO_CODEC"].c_str()];
[VideoAdapter.delegate decodingStartedWithRendererId:rendererId withWidth:(NSInteger)w withHeight:(NSInteger)h withCodec: codecName];
}
}));
......@@ -158,13 +160,16 @@ static id <VideoAdapterDelegate> _delegate;
#pragma mark -
- (void)registerSinkTargetWithSinkId:sinkId withWidth:(NSInteger)w withHeight:(NSInteger)h {
- (void)registerSinkTargetWithSinkId:sinkId
withWidth:(NSInteger)w
withHeight:(NSInteger)h
withHardwareSupport:(BOOL)hardwareSupport {
auto _sinkId = std::string([sinkId UTF8String]);
auto renderer = std::make_shared<Renderer>();
renderer->width = static_cast<int>(w);
renderer->height = static_cast<int>(h);
renderer->rendererId = sinkId;
if(self.getDecodingAccelerated) {
if(self.getDecodingAccelerated && hardwareSupport) {
renderer->bindAVSinkFunctions();
DRing::registerAVSinkTarget(_sinkId, renderer->avtarget);
} else {
......
......@@ -19,7 +19,10 @@
*/
@objc protocol VideoAdapterDelegate {
func decodingStarted(withRendererId rendererId: String, withWidth width: Int, withHeight height: Int)
func decodingStarted(withRendererId rendererId: String,
withWidth width: Int,
withHeight height: Int,
withCodec codec: String?)
func decodingStopped(withRendererId rendererId: String)
func startCapture(withDevice device: String)
func stopCapture()
......
......@@ -41,6 +41,11 @@ enum VideoError: Error {
case switchCameraFailed
}
/// Video codecs the app distinguishes; raw values are derived from the case
/// names and must match the codec names reported in the daemon's call details.
enum VideoCodecs: String {
    case H264, VP8
}
protocol FrameExtractorDelegate: class {
func captured(imageBuffer: CVImageBuffer?, image: UIImage)
func updateDevicePisition(position: AVCaptureDevice.Position)
......@@ -306,6 +311,8 @@ class VideoService: FrameExtractorDelegate {
var recording = false
var codec = VideoCodecs.H264
init(withVideoAdapter videoAdapter: VideoAdapter) {
self.videoAdapter = videoAdapter
currentOrientation = camera.getOrientation
......@@ -454,14 +461,22 @@ extension VideoService: VideoAdapterDelegate {
return videoAdapter.getEncodingAccelerated()
}
/// Called by the daemon when decoding of a remote video stream starts.
/// Records the negotiated codec (falling back to H264 when unknown) so the
/// sink can be registered with or without hardware-decoding support.
func decodingStarted(withRendererId rendererId: String, withWidth width: Int, withHeight height: Int, withCodec codec: String?) {
    if let codecName = codec {
        // Unknown codec names fall back to H264 rather than crashing.
        self.codec = VideoCodecs(rawValue: codecName) ?? VideoCodecs.H264
    }
    self.log.debug("Decoding started...")
    videoAdapter.registerSinkTarget(withSinkId: rendererId, withWidth: width, withHeight: height, withHardwareSupport: supportHardware())
}
/// Whether the currently negotiated codec can be decoded in hardware on iOS
/// (only H264 can; every other codec uses the software path).
func supportHardware() -> Bool {
    switch self.codec {
    case .H264:
        return true
    case .VP8:
        return false
    }
}
/// Called by the daemon when decoding of a remote video stream stops.
/// Unregisters the sink and restores the default codec for the next call.
func decodingStopped(withRendererId rendererId: String) {
    self.log.debug("Decoding stopped...")
    videoAdapter.removeSinkTarget(withSinkId: rendererId)
    // Reset so the next call starts from the hardware-capable default.
    self.codec = .H264
}
func startCapture(withDevice device: String) {
......@@ -525,7 +540,7 @@ extension VideoService: VideoAdapterDelegate {
}
videoAdapter.writeOutgoingFrame(with: imageBuffer,
angle: Int32(self.angle),
useHardwareAcceleration: self.hardwareAccelerated,
useHardwareAcceleration: (self.hardwareAccelerated && supportHardware()),
recording: self.recording)
}
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment