Commit af148d1b authored by Kateryna Kostiuk's avatar Kateryna Kostiuk Committed by Adrien Béraud

video: frame rotation

Add rotation to the frame instead of switching the input device.
Keep the old way as a backup in case the device does not support a
format compatible with av_pixel_format.

Change-Id: I2c02e6c958f8f671ee0a762b10a2774ee8dfc074
parent 775b38d6
......@@ -37,5 +37,10 @@ struct AVFrame;
+ (std::vector<uint8_t>)vectorOfUInt8FromData:(NSData*)data;
+ (std::vector<std::map<std::string, std::string>>)arrayOfDictionnarisToVectorOfMap:(NSArray*)dictionaries;
+ (UIImage*)convertHardwareDecodedFrameToImage:(const AVFrame*)frame;
+ (AVFrame*)configureHardwareDecodedFrame:(AVFrame*)frame fromImageBuffer: (CVImageBufferRef) image;
+ (AVFrame*)configureHardwareDecodedFrame:(AVFrame*)frame
fromImageBuffer: (CVImageBufferRef)image
angle:(int) angle;
+ (AVFrame*)configureFrame:(AVFrame*)frame
fromImageBuffer: (CVImageBufferRef)image
angle:(int)angle;
@end
......@@ -22,6 +22,7 @@
#import "Utils.h"
extern "C" {
#include <libavutil/frame.h>
#include <libavutil/display.h>
}
@implementation Utils
......@@ -117,8 +118,9 @@ extern "C" {
return [[UIImage alloc] init];
}
+ (AVFrame*)configureHardwareDecodedFrame:(AVFrame*)frame fromImageBuffer:(CVImageBufferRef) image {
//get dimensions
+ (AVFrame*)configureHardwareDecodedFrame:(AVFrame*)frame
fromImageBuffer:(CVImageBufferRef)image
angle:(int)angle {
CVPixelBufferLockBaseAddress(image,0);
size_t width = CVPixelBufferGetWidth(image);
size_t height = CVPixelBufferGetHeight(image);
......@@ -127,6 +129,41 @@ extern "C" {
frame->format = AV_PIX_FMT_VIDEOTOOLBOX;
frame->width = static_cast<int>(width);
frame->height = static_cast<int>(height);
AVBufferRef* localFrameDataBuffer = angle == 0 ? nullptr : av_buffer_alloc(sizeof(int32_t) * 9);
if (!localFrameDataBuffer) {
return frame;
}
av_display_rotation_set(reinterpret_cast<int32_t*>(localFrameDataBuffer->data), angle);
av_frame_new_side_data_from_buf(frame, AV_FRAME_DATA_DISPLAYMATRIX, localFrameDataBuffer);
return frame;
}
/**
 * Fill an AVFrame with pixel data taken directly from a CoreVideo pixel
 * buffer (software path, NV12 layout) and, when a non-zero angle is
 * requested, attach an AV_FRAME_DATA_DISPLAYMATRIX side-data entry
 * describing the rotation.
 *
 * NOTE(review): frame->data is left pointing into the pixel buffer's
 * planes after the buffer is unlocked — assumes the caller keeps the
 * buffer alive (and pinned) until the frame is published; confirm.
 *
 * @param frame  the frame to configure; its data/linesize fields are set
 *               to reference the pixel buffer's planes (no copy is made).
 * @param image  the captured pixel buffer.
 * @param angle  rotation in degrees; 0 means no rotation side data.
 * @return the same frame pointer, configured.
 */
+ (AVFrame*)configureFrame:(AVFrame*)frame
           fromImageBuffer: (CVImageBufferRef)image
                     angle:(int)angle {
    CVPixelBufferLockBaseAddress(image, 0);
    frame->width = static_cast<int>(CVPixelBufferGetWidth(image));
    frame->height = static_cast<int>(CVPixelBufferGetHeight(image));
    frame->format = AV_PIX_FMT_NV12;
    if (CVPixelBufferIsPlanar(image)) {
        int planeCount = static_cast<int>(CVPixelBufferGetPlaneCount(image));
        for (int plane = 0; plane < planeCount; plane++) {
            frame->data[plane] = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(image, plane);
            frame->linesize[plane] = static_cast<int>(CVPixelBufferGetBytesPerRowOfPlane(image, plane));
        }
    } else {
        frame->data[0] = (uint8_t*)CVPixelBufferGetBaseAddress(image);
        frame->linesize[0] = static_cast<int>(CVPixelBufferGetBytesPerRow(image));
    }
    CVPixelBufferUnlockBaseAddress(image, 0);
    // Only attach the display matrix for a real rotation; keep the
    // no-rotation case distinct from an allocation failure (the original
    // conflated the two via a nullptr ternary + early return).
    if (angle != 0) {
        AVBufferRef* matrixBuffer = av_buffer_alloc(sizeof(int32_t) * 9);
        if (matrixBuffer) {
            av_display_rotation_set(reinterpret_cast<int32_t*>(matrixBuffer->data), angle);
            if (!av_frame_new_side_data_from_buf(frame, AV_FRAME_DATA_DISPLAYMATRIX, matrixBuffer)) {
                // On failure the frame does not take ownership of the
                // buffer — unref it here to avoid leaking the allocation.
                av_buffer_unref(&matrixBuffer);
            }
        }
    }
    return frame;
}
......
......@@ -31,7 +31,9 @@
- (void)addVideoDeviceWithName:(NSString*)deviceName withDevInfo:(NSDictionary*)deviceInfoDict;
- (void)registerSinkTargetWithSinkId:sinkId withWidth:(NSInteger)w withHeight:(NSInteger)h;
- (void)removeSinkTargetWithSinkId:(NSString*)sinkId;
- (void)writeOutgoingHardwareDecodedFrameWithBuffer:(CVImageBufferRef)image;
- (void)writeOutgoingFrameWithBuffer:(CVImageBufferRef)image
angle:(int)angle
useHardwareAcceleration:(BOOL)hardwareAccelerated;
- (void)writeOutgoingFrameWithImage:(UIImage*)image;
- (void)setDecodingAccelerated:(BOOL)state;
- (BOOL)getDecodingAccelerated;
......
......@@ -183,13 +183,24 @@ static id <VideoAdapterDelegate> _delegate;
}
}
- (void)writeOutgoingHardwareDecodedFrameWithBuffer:(CVImageBufferRef)image {
- (void)writeOutgoingFrameWithBuffer:(CVImageBufferRef)image
angle:(int)angle
useHardwareAcceleration:(BOOL)hardwareAccelerated {
auto frame = DRing::getNewFrame();
if(!frame) {
return;
}
auto avframe = frame->pointer();
[Utils configureHardwareDecodedFrame:(AVFrame*)avframe fromImageBuffer: image];
if(hardwareAccelerated) {
[Utils configureHardwareDecodedFrame:(AVFrame*)avframe
fromImageBuffer:image
angle:(int) angle];
} else {
[Utils configureFrame:(AVFrame*)avframe
fromImageBuffer:image
angle:(int) angle];
}
DRing::publishFrame();
}
......
......@@ -43,6 +43,7 @@ enum VideoError: Error {
protocol FrameExtractorDelegate: class {
func captured(imageBuffer: CVImageBuffer?, image: UIImage)
func supportAVPixelFormat(support: Bool)
}
class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
......@@ -55,6 +56,7 @@ class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
private let quality = AVCaptureSession.Preset.medium
private var orientation = AVCaptureVideoOrientation.portrait
var getOrientation: AVCaptureVideoOrientation {
return orientation
}
......@@ -161,6 +163,12 @@ class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
}
captureSession.addInput(captureDeviceInput)
let videoOutput = AVCaptureVideoDataOutput()
let types = videoOutput.availableVideoPixelFormatTypes
if types.contains(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
let settings = [kCVPixelBufferPixelFormatTypeKey as NSString: kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange]
videoOutput.videoSettings = settings as [String: Any]
self.delegate?.supportAVPixelFormat(support: true)
}
videoOutput.setSampleBufferDelegate(self, queue: sessionQueue)
guard captureSession.canAddOutput(videoOutput) else {
throw VideoError.setupOutputDeviceFailed
......@@ -197,9 +205,11 @@ class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
}
self.captureSession.removeInput(currentCameraInput)
var newCamera: AVCaptureDevice! = nil
var shouldMirrowVideoOutput = false
if let input = currentCameraInput as? AVCaptureDeviceInput {
if input.device.position == .back {
newCamera = self.selectCaptureDevice(withPosition: .front)
shouldMirrowVideoOutput = true
} else {
newCamera = self.selectCaptureDevice(withPosition: .back)
}
......@@ -230,6 +240,7 @@ class FrameExtractor: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
return Disposables.create {}
}
connection.videoOrientation = self.orientation
connection.isVideoMirrored = shouldMirrowVideoOutput
self.captureSession.commitConfiguration()
completable(.completed)
} else {
......@@ -288,15 +299,19 @@ class VideoService: FrameExtractorDelegate {
var cameraPosition = AVCaptureDevice.Position.front
let incomingVideoFrame = PublishSubject<UIImage?>()
let capturedVideoFrame = PublishSubject<UIImage?>()
var currentOrientation: AVCaptureVideoOrientation
private let log = SwiftyBeaver.self
private var blockOutgoingFrame = true
private var hardwareAccelerated = true
var angle: Int = 0
var supportAVPixelFormat = false
fileprivate let disposeBag = DisposeBag()
init(withVideoAdapter videoAdapter: VideoAdapter) {
self.videoAdapter = videoAdapter
currentOrientation = camera.getOrientation
VideoAdapter.delegate = self
camera.delegate = self
}
......@@ -346,16 +361,22 @@ class VideoService: FrameExtractorDelegate {
case .portraitUpsideDown:
newOrientation = AVCaptureVideoOrientation.portraitUpsideDown
case .landscapeLeft:
newOrientation = AVCaptureVideoOrientation.landscapeRight
case .landscapeRight:
newOrientation = AVCaptureVideoOrientation.landscapeLeft
case .landscapeRight:
newOrientation = AVCaptureVideoOrientation.landscapeRight
default:
newOrientation = AVCaptureVideoOrientation.portrait
}
if newOrientation == camera.getOrientation {
if newOrientation == self.currentOrientation {
self.log.warning("no orientation change required")
return
}
self.angle = self.mapDeviceOrientation(orientation: newOrientation)
self.currentOrientation = newOrientation
// in this case rotation will be performed when configure AVFrame
if hardwareAccelerated || supportAVPixelFormat {
return
}
self.blockOutgoingFrame = true
let deviceName: String =
(orientation == .landscapeLeft || orientation == .landscapeRight) ?
......@@ -368,6 +389,17 @@ class VideoService: FrameExtractorDelegate {
self.log.debug("camera re-orientation error: \(error)")
}).disposed(by: self.disposeBag)
}
/// Translates a capture orientation into the rotation angle (in degrees)
/// that should be applied to outgoing video frames.
/// NOTE(review): portraitUpsideDown currently falls into the 0° case,
/// same as portrait — confirm that 180° is not required there.
func mapDeviceOrientation(orientation: AVCaptureVideoOrientation) -> Int {
    if orientation == .landscapeRight {
        return 270
    }
    if orientation == .landscapeLeft {
        return 90
    }
    return 0
}
}
extension VideoService: VideoAdapterDelegate {
......@@ -424,15 +456,40 @@ extension VideoService: VideoAdapterDelegate {
self.incomingVideoFrame.onNext(image)
}
/// Maps the tracked capture orientation to the matching UIImage
/// orientation, used when re-wrapping captured frames for local preview.
/// NOTE(review): method name carries a typo ("Orienation") but is part of
/// the public interface used by callers, so it is preserved here.
func getImageOrienation() -> UIImageOrientation {
    switch self.currentOrientation {
    case .portrait:
        return .up
    case .portraitUpsideDown:
        return .down
    case .landscapeRight:
        return .right
    case .landscapeLeft:
        return .left
    }
}
func captured(imageBuffer: CVImageBuffer?, image: UIImage) {
self.capturedVideoFrame.onNext(image)
if self.blockOutgoingFrame {
return
}
if self.hardwareAccelerated {
videoAdapter.writeOutgoingHardwareDecodedFrame(with: imageBuffer)
return
if self.hardwareAccelerated || self.supportAVPixelFormat {
if let cgImage = image.cgImage {
self.capturedVideoFrame
.onNext(UIImage(cgImage: cgImage,
scale: 1.0 ,
orientation: self.getImageOrienation()))
}
videoAdapter.writeOutgoingFrame(with: imageBuffer,
angle: Int32(self.angle),
useHardwareAcceleration: self.hardwareAccelerated)
} else {
self.capturedVideoFrame.onNext(image)
videoAdapter.writeOutgoingFrame(with: image)
}
videoAdapter.writeOutgoingFrame(with: image)
}
/// FrameExtractorDelegate callback: records whether the capture output
/// offers a pixel format compatible with AVFrame (software path).
/// Explicit `self.` disambiguates the stored property from this method,
/// which shares its base name.
func supportAVPixelFormat(support: Bool) {
    self.supportAVPixelFormat = support
}
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment