Commit ec03a19e authored by Philippe Gorley, committed by Adrien Béraud

audio: add ability to get stream information

Renames getStream() to getInfo() and adds getInfo() to AudioInput and AudioReceiveThread.

Change-Id: I17b2eedb03dd707146b30062e755c5cd80c559fb
parent 351856e7
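
With this change both audio sources expose the same stream-description accessor the video classes already had. A minimal usage sketch follows; audioInput and audioReceiver are hypothetical handles to an AudioInput and an AudioReceiveThread, and only getInfo(), the MediaStream type and the "a:local"/"a:remote" labels come from this commit:

// Hypothetical usage (not part of this commit).
MediaStream local  = audioInput->getInfo();     // local capture, labelled "a:local"
MediaStream remote = audioReceiver->getInfo();  // decoded remote stream, labelled "a:remote"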
@@ -188,4 +188,12 @@ AudioInput::setMuted(bool isMuted)
     muteState_ = isMuted;
 }
+MediaStream
+AudioInput::getInfo() const
+{
+    std::lock_guard<std::mutex> lk(fmtMutex_);
+    auto ms = MediaStream("a:local", format_, sent_samples);
+    return ms;
+}
 } // namespace ring
@@ -33,6 +33,7 @@
 namespace ring {
+struct MediaStream;
 class Resampler;
 class AudioInput : public Observable<std::shared_ptr<AudioFrame>>
@@ -46,6 +47,7 @@ public:
     bool isCapturing() const { return loop_.isRunning(); }
     void setFormat(const AudioFormat& fmt);
     void setMuted(bool isMuted);
+    MediaStream getInfo() const;
 private:
     bool nextFromDevice(AudioFrame& frame);
@@ -55,7 +57,7 @@ private:
     AudioBuffer micData_;
     bool muteState_ = false;
     uint64_t sent_samples = 0;
-    std::mutex fmtMutex_ {};
+    mutable std::mutex fmtMutex_ {};
     AudioFormat format_;
     std::unique_ptr<Resampler> resampler_;
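
The header hunk above also makes fmtMutex_ mutable: the new getInfo() is a const member but still has to lock the format mutex before reading format_ and sent_samples. An illustrative, self-contained example of that pattern (not taken from the Ring sources):

#include <mutex>

// A const getter may lock a mutex only if the mutex is declared mutable.
class Sampler {
public:
    int rate() const {
        std::lock_guard<std::mutex> lk(mtx_);  // allowed: mtx_ is mutable
        return rate_;
    }
private:
    mutable std::mutex mtx_;  // lockable from const member functions
    int rate_ = 48000;
};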
@@ -150,6 +150,12 @@ AudioReceiveThread::addIOContext(SocketPair& socketPair)
     demuxContext_.reset(socketPair.createIOContext(mtu_));
 }
+MediaStream
+AudioReceiveThread::getInfo() const
+{
+    return audioDecoder_->getStream("a:remote");
+}
 void
 AudioReceiveThread::startLoop()
 {
@@ -33,6 +33,7 @@ namespace ring {
 class MediaDecoder;
 class MediaIOHandle;
+struct MediaStream;
 class RingBuffer;
 class AudioReceiveThread : public Observable<std::shared_ptr<AudioFrame>>
@@ -43,6 +44,9 @@ public:
                        const std::string& sdp,
                        const uint16_t mtu);
     ~AudioReceiveThread();
+    MediaStream getInfo() const;
     void addIOContext(SocketPair &socketPair);
     void startLoop();
@@ -201,9 +201,9 @@ void MediaRecorder::update(Observable<std::shared_ptr<VideoFrame>>* ob, const st
 {
     MediaStream ms;
     if (auto receiver = dynamic_cast<video::VideoReceiveThread*>(ob)) {
-        ms = receiver->getStream();
+        ms = receiver->getInfo();
     } else if (auto input = dynamic_cast<video::VideoInput*>(ob)) {
-        ms = input->getStream();
+        ms = input->getInfo();
     }
     ms.firstTimestamp = v->pointer()->pts;
     recordData(v->pointer(), ms);
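
The hunk above shows the video side of MediaRecorder::update picking up the renamed accessor. The audio observers touched by this commit would presumably be consumed the same way; the following is a sketch under that assumption, since the audio overload of update is not part of this diff and every name other than getInfo(), MediaStream and recordData is inferred:

// Sketch only: hypothetical audio analogue of the video path above.
void MediaRecorder::update(Observable<std::shared_ptr<AudioFrame>>* ob,
                           const std::shared_ptr<AudioFrame>& a)
{
    MediaStream ms;
    if (auto receiver = dynamic_cast<AudioReceiveThread*>(ob))
        ms = receiver->getInfo();   // remote stream, "a:remote"
    else if (auto input = dynamic_cast<AudioInput*>(ob))
        ms = input->getInfo();      // local capture, "a:local"
    ms.firstTimestamp = a->pointer()->pts;
    recordData(a->pointer(), ms);
}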
@@ -607,7 +607,7 @@ DeviceParams VideoInput::getParams() const
 { return decOpts_; }
 MediaStream
-VideoInput::getStream() const
+VideoInput::getInfo() const
 {
     return decoder_->getStream("v:local");
 }
@@ -78,7 +78,7 @@ public:
     int getHeight() const;
     int getPixelFormat() const;
     DeviceParams getParams() const;
-    MediaStream getStream() const;
+    MediaStream getInfo() const;
     std::shared_future<DeviceParams> switchInput(const std::string& resource);
 #if defined(__ANDROID__) || defined(RING_UWP) || (defined(TARGET_OS_IOS) && TARGET_OS_IOS)
@@ -247,7 +247,7 @@ int VideoReceiveThread::getPixelFormat() const
 { return videoDecoder_->getPixelFormat(); }
 MediaStream
-VideoReceiveThread::getStream() const
+VideoReceiveThread::getInfo() const
 {
     return videoDecoder_->getStream("v:remote");
 }
@@ -60,7 +60,7 @@ public:
     int getWidth() const;
     int getHeight() const;
    int getPixelFormat() const;
-    MediaStream getStream() const;
+    MediaStream getInfo() const;
     void triggerKeyFrameRequest();
 private: