Commit 5d90c3bc authored by Kateryna Kostiuk

media: link to avModel

Change-Id: I0a904813749d3ddc645878fad107aacdee9cd9bc
parent 8ba71157
@@ -21,6 +21,7 @@
#import "RingWindowController.h"
#import "PreferencesWindowController.h"
#import <string>
@interface AppDelegate : NSObject <NSApplicationDelegate, NSUserNotificationCenterDelegate>
@@ -28,5 +29,6 @@
- (void) showMainWindow;
- (void) showDialpad;
- (BOOL) checkForRingAccount;
- (std::vector<std::string>) getActiveCalls;
@end
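A minimal sketch, not part of this commit, of how the new -getActiveCalls declaration could be consumed elsewhere in the client (the helper name and call site are assumed):

    #import <Cocoa/Cocoa.h>
    #import <string>
    #import <vector>
    #import "AppDelegate.h"

    // Ask the application delegate for the identifiers of the calls LRC reports as active.
    static std::vector<std::string> currentActiveCallIds()
    {
        AppDelegate* appDelegate = (AppDelegate*)[NSApp delegate];
        return [appDelegate getActiveCalls];
    }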
@@ -350,6 +350,9 @@ static void ReachabilityCallback(SCNetworkReachabilityRef __unused target, SCNet
[self.dialpad.window makeKeyAndOrderFront:self];
}
-(std::vector<std::string>) getActiveCalls {
return lrc->activeCalls();
}
- (BOOL) checkForRingAccount
{
@@ -364,39 +367,6 @@ static void ReachabilityCallback(SCNetworkReachabilityRef __unused target, SCNet
forEventClass:kInternetEventClass andEventID:kAEGetURL];
}
/**
* Recognized patterns:
* - ring:<hash>
* - ring://<hash>
*/
- (void)handleGetURLEvent:(NSAppleEventDescriptor *)event withReplyEvent:(NSAppleEventDescriptor *)replyEvent
{
NSString* query = [[event paramDescriptorForKeyword:keyDirectObject] stringValue];
NSURL* url = [[NSURL alloc] initWithString:query];
NSString* ringID = [url host];
if (!ringID) {
//not a valid NSURL, try to parse query directly
ringID = [query substringFromIndex:@"ring:".length];
}
// check for a valid ring hash
NSCharacterSet *hexSet = [NSCharacterSet characterSetWithCharactersInString:@"0123456789abcdefABCDEF"];
BOOL valid = [[ringID stringByTrimmingCharactersInSet:hexSet] isEqualToString:@""];
if(valid && ringID.length == 40) {
Call* c = CallModel::instance().dialingCall();
c->setDialNumber(QString::fromNSString([NSString stringWithFormat:@"ring:%@",ringID]));
c << Call::Action::ACCEPT;
} else {
NSAlert *alert = [[NSAlert alloc] init];
[alert addButtonWithTitle:@"OK"];
[alert setMessageText:@"Error"];
[alert setInformativeText:@"ringID cannot be read from this URL."];
[alert setAlertStyle:NSWarningAlertStyle];
[alert runModal];
}
}
- (BOOL)applicationShouldHandleReopen:(NSApplication *)theApplication hasVisibleWindows:(BOOL)flag
{
if([self checkForRingAccount]) {
@@ -18,8 +18,9 @@
*/
#import <Cocoa/Cocoa.h>
#import "LrcModelsProtocol.h"
@interface AudioPrefsVC : NSViewController <NSMenuDelegate, NSPathControlDelegate, NSOpenSavePanelDelegate> {
@interface AudioPrefsVC : NSViewController <LrcModelsProtocol> {
}
/*
* Copyright (C) 2015-2016 Savoir-faire Linux Inc.
* Author: Alexandre Lision <alexandre.lision@savoirfairelinux.com>
* Kateryna Kostiuk <kateryna.kostiuk@savoirfairelinux.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -18,203 +19,73 @@
*/
#import "AudioPrefsVC.h"
#import <audio/settings.h>
#import <media/recordingmodel.h>
#import <QUrl>
#import <audio/inputdevicemodel.h>
#import <audio/outputdevicemodel.h>
#import <qitemselectionmodel.h>
#import "utils.h"
//LRC
#import <api/avmodel.h>
@interface AudioPrefsVC ()
@property (assign) IBOutlet NSPathControl *recordingsPathControl;
@property (assign) IBOutlet NSPopUpButton *outputDeviceList;
@property (assign) IBOutlet NSPopUpButton *inputDeviceList;
@property (assign) IBOutlet NSButton *alwaysRecordingButton;
@property (assign) IBOutlet NSButton *muteDTMFButton;
@property (assign) IBOutlet NSTextField *recordingHeaderTitle;
@property (assign) IBOutlet NSTextField *recordingpathLabel;
@property (assign) IBOutlet NSLayoutConstraint* audioMarginTopConstraint;
@property (assign) IBOutlet NSLayoutConstraint* audioMarginBottomConstraint;
@end
@implementation AudioPrefsVC
@synthesize recordingsPathControl, recordingHeaderTitle, recordingpathLabel;
@synthesize outputDeviceList;
@synthesize inputDeviceList;
@synthesize alwaysRecordingButton;
@synthesize muteDTMFButton;
@synthesize audioMarginTopConstraint, audioMarginBottomConstraint;
@synthesize avModel;
QMetaObject::Connection audioDeviceEvent;
- (void)loadView
-(id) initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil avModel:(lrc::api::AVModel*) avModel
{
[super loadView];
QModelIndex qInputIdx = Audio::Settings::instance().inputDeviceModel()->selectionModel()->currentIndex();
QModelIndex qOutputIdx = Audio::Settings::instance().outputDeviceModel()->selectionModel()->currentIndex();
[self.outputDeviceList addItemWithTitle:
Audio::Settings::instance().outputDeviceModel()->data(qOutputIdx, Qt::DisplayRole).toString().toNSString()];
[self.inputDeviceList addItemWithTitle:
Audio::Settings::instance().inputDeviceModel()->data(qInputIdx, Qt::DisplayRole).toString().toNSString()];
[self.alwaysRecordingButton setState:
media::RecordingModel::instance().isAlwaysRecording() ? NSOnState:NSOffState];
[self.muteDTMFButton setState:
Audio::Settings::instance().areDTMFMuted()?NSOnState:NSOffState];
NSArray* pathComponentArray = [self pathComponentArrayWithCurrentUrl:media::RecordingModel::instance().recordPath().toNSString()];
[recordingsPathControl setPathComponentCells:pathComponentArray];
if (appSandboxed()) {
[alwaysRecordingButton setHidden:YES];
[recordingsPathControl setEnabled:NO];
[recordingsPathControl setHidden: YES];
[recordingHeaderTitle setHidden: YES];
[recordingpathLabel setHidden: YES];
audioMarginTopConstraint.constant = 10.0f;
audioMarginBottomConstraint.constant = 67.0f;
if (self = [self initWithNibName:nibNameOrNil bundle:nibBundleOrNil])
{
self.avModel = avModel;
}
return self;
}
- (IBAction)toggleMuteDTMF:(NSButton *)sender
- (void)loadView
{
Audio::Settings::instance().setDTMFMuted([sender state] == NSOnState);
[super loadView];
[self connectDeviceEvent];
[self addDevices];
}
- (IBAction)toggleAlwaysRecording:(NSButton *)sender
{
media::RecordingModel::instance().setAlwaysRecording([sender state] == NSOnState);
-(void) addDevices {
[inputDeviceList removeAllItems];
[outputDeviceList removeAllItems];
auto inputDevices = avModel->getAudioInputDevices();
auto inputDevice = avModel->getInputDevice();
for (auto device : inputDevices) {
[inputDeviceList addItemWithTitle: @(device.c_str())];
}
[inputDeviceList selectItemWithTitle:@(inputDevice.c_str())];
auto outputDevices = avModel->getAudioOutputDevices();
auto outputDevice = avModel->getOutputDevice();
for (auto device : outputDevices) {
[outputDeviceList addItemWithTitle: @(device.c_str())];
}
[outputDeviceList selectItemWithTitle:@(outputDevice.c_str())];
}
- (IBAction)pathControlSingleClick:(id)sender {
// Select the chosen component of the path.
NSArray* pathComponentArray = [self pathComponentArrayWithCurrentUrl:[[self.recordingsPathControl clickedPathComponentCell] URL].path];
[recordingsPathControl setPathComponentCells:pathComponentArray];
media::RecordingModel::instance().setRecordPath(QString::fromNSString([self.recordingsPathControl.URL path]));
-(void)connectDeviceEvent {
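// Drop any previous connection before re-subscribing so that a repeated call
// to this method cannot leave duplicate handlers invoking -addDevices.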
QObject::disconnect(audioDeviceEvent);
audioDeviceEvent = QObject::connect(avModel,
&lrc::api::AVModel::deviceEvent,
[=]() {
[self addDevices];
});
}
- (IBAction)chooseOutput:(id)sender {
int index = [sender indexOfSelectedItem];
QModelIndex qIdx = Audio::Settings::instance().outputDeviceModel()->index(index, 0);
Audio::Settings::instance().outputDeviceModel()->selectionModel()->setCurrentIndex(
qIdx, QItemSelectionModel::ClearAndSelect);
auto output = [self.outputDeviceList itemTitleAtIndex:index];
avModel->setOutputDevice([output UTF8String]);
}
- (IBAction)chooseInput:(id)sender {
int index = [sender indexOfSelectedItem];
QModelIndex qIdx = Audio::Settings::instance().inputDeviceModel()->index(index, 0);
Audio::Settings::instance().inputDeviceModel()->selectionModel()->setCurrentIndex(
qIdx, QItemSelectionModel::ClearAndSelect);
}
#pragma mark - NSPathControl delegate methods
/*
Assemble a set of custom cells to display into an array to pass to the path control.
*/
- (NSArray *)pathComponentArrayWithCurrentUrl:(NSString *) url
{
NSMutableArray *pathComponentArray = [[NSMutableArray alloc] init];
NSFileManager *fileManager = [[NSFileManager alloc] init];
NSURL* downloadURL = [fileManager URLForDirectory:NSDownloadsDirectory inDomain:NSUserDomainMask appropriateForURL:nil create:NO error:nil];
NSPathComponentCell *componentCell;
componentCell = [self componentCellForType:kGenericFolderIcon withTitle:@"Downloads" URL:downloadURL];
[pathComponentArray addObject:componentCell];
NSString * downloads = [downloadURL path];
if([url isEqualToString:downloads]) {
return pathComponentArray;
}
if(![url isEqualToString:@""]) {
NSString * name = [url componentsSeparatedByString:@"/"].lastObject;
if(!name) {
return pathComponentArray;
}
componentCell = [self componentCellForType:kGenericFolderIcon withTitle:name URL:[NSURL URLWithString: url]];
[pathComponentArray addObject:componentCell];
}
return pathComponentArray;
}
/*
This method is used by pathComponentArray to create an NSPathComponentCell based on icon, title and URL information.
Each path component needs an icon, URL and title.
*/
- (NSPathComponentCell *)componentCellForType:(OSType)withIconType withTitle:(NSString *)title URL:(NSURL *)url
{
NSPathComponentCell *componentCell = [[NSPathComponentCell alloc] init];
NSImage *iconImage = [[NSWorkspace sharedWorkspace] iconForFileType:NSFileTypeForHFSTypeCode(withIconType)];
[componentCell setImage:iconImage];
[componentCell setURL:url];
[componentCell setTitle:title];
return componentCell;
}
/*
Delegate method of NSPathControl to determine how the NSOpenPanel will look/behave.
*/
- (void)pathControl:(NSPathControl *)pathControl willDisplayOpenPanel:(NSOpenPanel *)openPanel
{
NSLog(@"willDisplayOpenPanel");
[openPanel setAllowsMultipleSelection:NO];
[openPanel setCanChooseDirectories:YES];
[openPanel setCanChooseFiles:NO];
[openPanel setResolvesAliases:YES];
[openPanel setTitle:NSLocalizedString(@"Choose a directory", @"Open panel title")];
[openPanel setPrompt:NSLocalizedString(@"Choose directory", @"Open panel prompt for 'Choose a directory'")];
[openPanel setDelegate:self];
}
- (void)pathControl:(NSPathControl *)pathControl willPopUpMenu:(NSMenu *)menu
{
}
#pragma mark - NSOpenSavePanelDelegate delegate methods
- (BOOL)panel:(id)sender validateURL:(NSURL *)url error:(NSError **)outError
{
[recordingsPathControl setURL:url];
return YES;
}
- (BOOL) panel:(id)sender shouldEnableURL:(NSURL*)url {
return YES;
}
#pragma mark - NSMenuDelegate methods
- (BOOL)menu:(NSMenu *)menu updateItem:(NSMenuItem *)item atIndex:(NSInteger)index shouldCancel:(BOOL)shouldCancel
{
QModelIndex qIdx;
if (inputDeviceList.menu == menu) {
qIdx = Audio::Settings::instance().inputDeviceModel()->index(index);
[item setTitle:Audio::Settings::instance().inputDeviceModel()->data(qIdx, Qt::DisplayRole).toString().toNSString()];
} else {
qIdx = Audio::Settings::instance().outputDeviceModel()->index(index);
[item setTitle:Audio::Settings::instance().outputDeviceModel()->data(qIdx, Qt::DisplayRole).toString().toNSString()];
}
return YES;
}
- (NSInteger)numberOfItemsInMenu:(NSMenu *)menu
{
if (inputDeviceList.menu == menu)
return Audio::Settings::instance().inputDeviceModel()->rowCount();
else
return Audio::Settings::instance().outputDeviceModel()->rowCount();
auto input = [self.inputDeviceList itemTitleAtIndex:index];
avModel->setInputDevice([input UTF8String]);
}
@end
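The device names cross the C++/Objective-C boundary as std::string throughout this controller; the conversion pattern used above boils down to:

    std::string name = avModel->getInputDevice();        // std::string coming from LRC
    NSString* title = @(name.c_str());                   // boxed as an NSString for AppKit
    std::string back = std::string([title UTF8String]);  // and back when handing it to LRC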
@@ -23,6 +23,12 @@
#import "views/CallView.h"
#import <api/account.h>
namespace lrc {
namespace api {
class AVModel;
}
}
@protocol CallViewControllerDelegate
-(void) conversationInfoUpdatedFor:(const std::string&) conversationID;
@@ -39,5 +45,6 @@
-(void) hideWithAnimation:(BOOL)animate;
-(void) setCurrentCall:(const std::string&)callUid
conversation:(const std::string&)convUid
account:(const lrc::api::account::Info*)account;
account:(const lrc::api::account::Info*)account
avModel:(lrc::api::AVModel *)avModel;
@end
This diff is collapsed.
@@ -36,16 +36,6 @@
- (void)windowDidLoad {
[super windowDidLoad];
QObject::connect(CallModel::instance().selectionModel(),
&QItemSelectionModel::currentChanged,
[=](const QModelIndex &current, const QModelIndex &previous) {
[composerField setStringValue:@""];
[composerField setNeedsDisplay:YES];
if(!current.isValid()) {
[self.window close];
}
});
}
- (IBAction)dtmfPressed:(id)sender
@@ -66,11 +56,6 @@
- (void) sendDTMF:(NSString*) dtmf
{
if (auto current = CallModel::instance().selectedCall()) {
current->playDTMF(QString::fromUtf8([dtmf UTF8String]));
}
[composerField setStringValue:
[NSString stringWithFormat: @"%@ %@", [composerField stringValue], dtmf]];
}
///Accessibility
@@ -81,7 +81,7 @@ static auto const kVideoPrefsIdentifer = @"VideoPrefsIdentifer";
{
[[prefsContainer subviews]
makeObjectsPerformSelector:@selector(removeFromSuperview)];
currentVC = [[AudioPrefsVC alloc] initWithNibName:@"AudioPrefs" bundle:nil];
currentVC = [[AudioPrefsVC alloc] initWithNibName:@"AudioPrefs" bundle:nil avModel: self.avModel];
[self resizeWindowWithFrame:currentVC.view.frame];
[prefsContainer addSubview:currentVC.view];
}
@@ -132,4 +132,9 @@ static auto const kVideoPrefsIdentifer = @"VideoPrefsIdentifer";
return (frame.size.height - contentRect.size.height);
}
- (BOOL)windowShouldClose:(id)sender {
[self.window orderOut:self];
return NO;
}
@end
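Both this controller and RingWindowController below now intercept the close button through -windowShouldClose: (hiding the window, or the whole app, instead of closing). For AppKit to consult that method the controller must be the window's delegate; a minimal sketch of the wiring, assuming the nib does not already set it:

    - (void)windowDidLoad {
        [super windowDidLoad];
        // Assumption: the window's delegate outlet is not already connected in the nib.
        self.window.delegate = self;
    }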
@@ -29,7 +29,7 @@ namespace lrc {
}
}
@interface RingWindowController : NSWindowController <NSSharingServicePickerDelegate, ChooseAccountDelegate, LrcModelsProtocol, CallViewControllerDelegate> {
@interface RingWindowController : NSWindowController <NSSharingServicePickerDelegate, ChooseAccountDelegate, LrcModelsProtocol, CallViewControllerDelegate, NSWindowDelegate> {
IBOutlet NSView *currentView;
}
@@ -103,6 +103,8 @@ typedef NS_ENUM(NSInteger, ViewState) {
self.dataTransferModel = dataTransferModel;
self.behaviorController = behaviorController;
self.avModel = avModel;
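// The client now renders through the new AVModel: request AVFrame delivery
// and deactivate the old LRC video models.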
self.avModel->useAVFrame(YES);
avModel->deactivateOldVideoModels();
}
return self;
}
@@ -137,6 +139,7 @@ typedef NS_ENUM(NSInteger, ViewState) {
[settingsVC hide];
break;
case SHOW_CALL_SCREEN:
self.avModel->useAVFrame(YES);
[self accountSettingsShouldOpen: NO];
if (![currentCallVC.view superview]) {
[callView addSubview:[currentCallVC view] positioned:NSWindowAbove relativeTo:nil];
@@ -223,7 +226,6 @@ typedef NS_ENUM(NSInteger, ViewState) {
NSResponder * viewNextResponder = [self nextResponder];
[self setNextResponder: [conversationVC getMessagesView]];
[[conversationVC getMessagesView] setNextResponder: viewNextResponder];
self.avModel->useAVFrame(YES);
}
- (void) connect
@@ -240,7 +242,8 @@ typedef NS_ENUM(NSInteger, ViewState) {
[currentCallVC setCurrentCall:convInfo.callId
conversation:convInfo.uid
account:accInfo];
account:accInfo
avModel: avModel];
[self changeViewTo:SHOW_CALL_SCREEN];
});
@@ -257,7 +260,8 @@ typedef NS_ENUM(NSInteger, ViewState) {
[currentCallVC setCurrentCall:convInfo.callId
conversation:convInfo.uid
account:accInfo];
account:accInfo
avModel: avModel];
[smartViewVC selectConversation: convInfo model:accInfo->conversationModel.get()];
[self changeViewTo:SHOW_CALL_SCREEN];
});
@@ -589,7 +593,8 @@ typedef NS_ENUM(NSInteger, ViewState) {
}
[currentCallVC setCurrentCall:[callId UTF8String]
conversation:[conversationId UTF8String]
account:&accInfo];
account:&accInfo
avModel:avModel];
[self changeViewTo:SHOW_CALL_SCREEN];
}
@@ -608,4 +613,9 @@ typedef NS_ENUM(NSInteger, ViewState) {
}
[self changeViewTo:SHOW_CONVERSATION_SCREEN];
}
- (BOOL)windowShouldClose:(id)sender {
[NSApp hide:nil];
return NO;
}
@end
This diff is collapsed.
@@ -483,8 +483,6 @@ namespace Interfaces {
QVariant ImageManipulationDelegate::decorationRole(const Account* acc)
{
Q_UNUSED(acc)
if (auto pro = ProfileModel::instance().selectedProfile())
return contactPhoto(pro->person(), decorationSize);
return QVariant();
}
@@ -23,5 +23,6 @@
@interface CallMTKView: MTKView
-(void)renderWithPixelBuffer:(CVPixelBufferRef)buffer size:(CGSize)size rotation: (float)rotation fillFrame: (bool)fill;
-(void)fillWithBlack;
-(void)setupView;
@property bool stopRendering;
@end
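Exposing -setupView lets callers rebuild the Metal state of an existing view instead of allocating a fresh one; a hypothetical call site (names assumed, not part of this commit):

    [videoView setupView];          // recreate the device, pipeline state and texture cache
    videoView.stopRendering = NO;   // resume drawing incoming frames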
@@ -53,60 +53,64 @@ struct Uniforms {
{
self = [super initWithFrame:frame];
if (self) {
id<MTLDevice> device = MTLCreateSystemDefaultDevice();
self.device = device;
commandQueue = [device newCommandQueue];
self.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
commandQueue = [device newCommandQueue];
[self setupView];
}
return self;
}
CVReturn err = CVMetalTextureCacheCreate(kCFAllocatorDefault,
NULL,
self.device,
NULL,
&textureCache);
-(void)setupView {
id<MTLDevice> device = MTLCreateSystemDefaultDevice();
self.device = device;
commandQueue = [device newCommandQueue];
self.colorPixelFormat = MTLPixelFormatBGRA8Unorm;
commandQueue = [device newCommandQueue];
vertexBuffer = [device newBufferWithBytes:&kImagePlaneVertexData
length:sizeof(kImagePlaneVertexData)
options:MTLResourceCPUCacheModeDefaultCache];
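// Texture cache used to create Metal textures from the incoming CVPixelBuffers.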
CVReturn err = CVMetalTextureCacheCreate(kCFAllocatorDefault,
NULL,
self.device,
NULL,
&textureCache);
NSString *resourcePath = [[NSBundle mainBundle] resourcePath];
NSString *libraryPath = [resourcePath stringByAppendingPathComponent:@"Shader.metallib"];
id <MTLLibrary> library = [device newLibraryWithFile:libraryPath error:nil];
id<MTLFunction> vertexFunc = [library newFunctionWithName:@"imageVertex"];
id<MTLFunction> fragmentFunc = [library newFunctionWithName:@"imageFragment"];
vertexBuffer = [device newBufferWithBytes:&kImagePlaneVertexData
length:sizeof(kImagePlaneVertexData)
options:MTLResourceCPUCacheModeDefaultCache];
// Create a vertex descriptor for our image plane vertex buffer
MTLVertexDescriptor *imagePlaneVertexDescriptor = [[MTLVertexDescriptor alloc] init];
NSString *resourcePath = [[NSBundle mainBundle] resourcePath];
NSString *libraryPath = [resourcePath stringByAppendingPathComponent:@"Shader.metallib"];
id <MTLLibrary> library = [device newLibraryWithFile:libraryPath error:nil];
id<MTLFunction> vertexFunc = [library newFunctionWithName:@"imageVertex"];
id<MTLFunction> fragmentFunc = [library newFunctionWithName:@"imageFragment"];
// Positions.
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].format = MTLVertexFormatFloat2;
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].offset = 0;
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].bufferIndex = kBufferIndexMeshPositions;
// Create a vertex descriptor for our image plane vertex buffer
MTLVertexDescriptor *imagePlaneVertexDescriptor = [[MTLVertexDescriptor alloc] init];
// Texture coordinates.
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].format = MTLVertexFormatFloat2;
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].offset = 8;
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].bufferIndex = kBufferIndexMeshPositions;
// Positions.
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].format = MTLVertexFormatFloat2;
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].offset = 0;
imagePlaneVertexDescriptor.attributes[kVertexAttributePosition].bufferIndex = kBufferIndexMeshPositions;
// Position Buffer Layout
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stride = 16;
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepRate = 1;
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepFunction = MTLVertexStepFunctionPerVertex;
// Texture coordinates.
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].format = MTLVertexFormatFloat2;
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].offset = 8;
imagePlaneVertexDescriptor.attributes[kVertexAttributeTexcoord].bufferIndex = kBufferIndexMeshPositions;
MTLRenderPipelineDescriptor *pipelineDescriptor = [MTLRenderPipelineDescriptor new];
pipelineDescriptor.vertexFunction = vertexFunc;
pipelineDescriptor.fragmentFunction = fragmentFunc;
pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;
pipelineDescriptor.vertexDescriptor = imagePlaneVertexDescriptor;
// Position Buffer Layout
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stride = 16;
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepRate = 1;
imagePlaneVertexDescriptor.layouts[kBufferIndexMeshPositions].stepFunction = MTLVertexStepFunctionPerVertex;
pipeline = [device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:NULL];
MTLDepthStencilDescriptor *depthStateDescriptor = [[MTLDepthStencilDescriptor alloc] init];
depthStateDescriptor.depthCompareFunction = MTLCompareFunctionAlways;
depthStateDescriptor.depthWriteEnabled = NO;
depthState = [device newDepthStencilStateWithDescriptor:depthStateDescriptor];
self.preferredFramesPerSecond = 30;
}
return self;
MTLRenderPipelineDescriptor *pipelineDescriptor = [MTLRenderPipelineDescriptor new];
pipelineDescriptor.vertexFunction = vertexFunc;
pipelineDescriptor.fragmentFunction = fragmentFunc;
pipelineDescriptor.colorAttachments[0].pixelFormat = MTLPixelFormatBGRA8Unorm;
pipelineDescriptor.vertexDescriptor = imagePlaneVertexDescriptor;
pipeline = [device newRenderPipelineStateWithDescriptor:pipelineDescriptor error:NULL];
MTLDepthStencilDescriptor *depthStateDescriptor = [[MTLDepthStencilDescriptor alloc] init];
depthStateDescriptor.depthCompareFunction = MTLCompareFunctionAlways;
depthStateDescriptor.depthWriteEnabled = NO;
depthState = [device newDepthStencilStateWithDescriptor:depthStateDescriptor];
self.preferredFramesPerSecond = 30;
}
- (void)fillWithBlack {
@@ -26,6 +26,10 @@
-(void) callShouldToggleFullScreen;
-(void) mouseIsMoving:(BOOL) move;
-(void) screenShare;
-(void) switchToDevice:(int)deviceID;
-(void) switchToFile:(std::string)uri;
-(std::vector<std::string>) getDeviceList;
@end
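CallView now reaches the media layer only through these delegate methods; the controller that owns the lrc::api::AVModel (presumably CurrentCallVC, whose implementation is not shown here) provides them. A bare skeleton of a conforming implementation, with the concrete AVModel calls left as comments rather than guessed:

    -(std::vector<std::string>) getDeviceList {
        // return the capture device names obtained from the AVModel
        return {};
    }
    -(void) switchToDevice:(int)deviceID {
        // tell the AVModel to capture from the device at this index
    }
    -(void) switchToFile:(std::string)uri {
        // tell the AVModel to stream the file at `uri` as the video source
    }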
@@ -20,19 +20,8 @@
#import "CallView.h"
#import "CallLayer.h"
#import <QItemSelectionModel>
#import <QAbstractProxyModel>
#import <QUrl>
#import <video/configurationproxy.h>
#import <video/sourcemodel.h>
#import <media/video.h>
#import <callmodel.h>
#import <video/previewmanager.h>
#import <video/renderer.h>
#import <video/device.h>
#import <video/devicemodel.h>
@interface CallView ()
@property NSMenu *contextualMenu;
@@ -183,9 +172,11 @@
contextualMenu = [[NSMenu alloc] initWithTitle:@"Switch camera"];
for(int i = 0 ; i < Video::DeviceModel::instance().devices().size() ; ++i) {
Video::Device* device = Video::DeviceModel::instance().devices()[i];
[contextualMenu insertItemWithTitle:device->name().toNSString() action:@selector(switchInput:) keyEquivalent:@"" atIndex:i];
auto devices = [self.callDelegate getDeviceList];
for(int i = 0 ; i < devices.size() ; ++i) {
std::string device = devices[i];
[contextualMenu insertItemWithTitle:@(device.c_str()) action:@selector(switchInput:) keyEquivalent:@"" atIndex:i];
}
[contextualMenu insertItemWithTitle:NSLocalizedString(@"Share screen", @"Contextual menu entry")
action:@selector(captureScreen:)
@@ -234,38 +225,23 @@
- (void) switchInput:(NSMenuItem*) sender
{
int index = [contextualMenu indexOfItem:sender];
Call* call = [self getCurrentCall];
if (call == nullptr) return;
if (auto outVideo = call->firstMedia<media::Video>(media::Media::Direction::OUT)) {
outVideo->sourceModel()->switchTo(Video::DeviceModel::instance().devices()[index]);
}
[self.callDelegate switchToDevice: index];
}
- (void) captureScreen:(NSMenuItem*) sender
{
Call* call = [self getCurrentCall];
if (call == nullptr) return;
if (auto outVideo = call->firstMedia<media::Video>(media::Media::Direction::OUT)) {
NSScreen *mainScreen = [NSScreen mainScreen];
NSRect screenFrame = mainScreen.frame;
QRect captureRect = QRect(screenFrame.origin.x, screenFrame.origin.y, screenFrame.size.width, screenFrame.size.height);
outVideo->sourceModel()->setDisplay(0, captureRect);
}
[self.callDelegate screenShare];
}
- (void) chooseFile:(NSMenuItem*) sender
{
Call* call = [self getCurrentCall];
if (call == nullptr) return;
NSOpenPanel *browsePanel = [[NSOpenPanel alloc] init];
[browsePanel setDirectoryURL:[NSURL URLWithString:NSHomeDirectory()]];
[browsePanel setCanChooseFiles:YES];
[browsePanel setCanChooseDirectories:NO];
[browsePanel setCanCreateDirectories:NO];
//NSMutableArray* fileTypes = [[NSMutableArray alloc] initWithArray:[NSImage imageTypes]];
NSMutableArray* fileTypes = [NSMutableArray array];
NSMutableArray* fileTypes = [[NSMutableArray alloc] initWithArray:[NSImage imageTypes]];
[fileTypes addObject:(__bridge NSString *)kUTTypeVideo];
[fileTypes addObject:(__bridge NSString *)kUTTypeMovie];
[fileTypes addObject:(__bridge NSString *)kUTTypeImage];
@@ -273,23 +249,9 @@
[browsePanel beginSheetModalForWindow:[self window] completionHandler:^(NSInteger result) {
if (result == NSFileHandlingPanelOKButton) {
NSURL* theDoc = [[browsePanel URLs] objectAtIndex:0];
if (auto outVideo = call->firstMedia<media::Video>(media::Media::Direction::OUT)) {
outVideo->sourceModel()->setFile(QUrl::fromLocalFile(QString::fromUtf8([theDoc.path UTF8String])));
}
[self.callDelegate switchToFile: [theDoc.path UTF8String]];
}
}];
}
-(Call *) getCurrentCall {
auto calls = CallModel::instance().getActiveCalls();
Call* call = nullptr;
for (int i = 0; i< calls.size(); i++) {
if (calls.at(i)->historyId() == QString::fromStdString(self.callId)) {
return calls.at(i);
}
}
return call;
}
@end
This diff is collapsed.
This diff is collapsed.