Commit e45e5451 authored by Simon Morlat, committed by Sylvain Berfini

Looks good.

parent bdef944c
@@ -30,10 +30,15 @@ LINPHONE_BEGIN_NAMESPACE
MS2AudioMixer::MS2AudioMixer(MixerSession &session) : StreamMixer(session){
MSAudioConferenceParams ms_conf_params;
ms_conf_params.samplerate = lp_config_get_int(mSession.getCCore()->config, "sound", "conference_rate", 16000);
ms_conf_params.active_talker_callback = &MS2AudioMixer::sOnActiveTalkerChanged;
ms_conf_params.user_data = this;
mConference = ms_audio_conference_new(&ms_conf_params, mSession.getCCore()->factory);
}
MS2AudioMixer::~MS2AudioMixer(){
if (mTimer){
mSession.getCore().destroyTimer(mTimer);
}
if (mRecordEndpoint) {
stopRecording();
}
@@ -43,12 +48,42 @@ MS2AudioMixer::~MS2AudioMixer(){
ms_audio_conference_destroy(mConference);
}
void MS2AudioMixer::connectEndpoint(MSAudioEndpoint *endpoint, bool muted){
void MS2AudioMixer::addListener(AudioMixerListener *listener){
	if (mTimer == nullptr){
		// Start monitoring the active talker, since somebody has requested this information.
		mTimer = mSession.getCore().createTimer([this]() -> bool{
			ms_audio_conference_process_events(mConference);
			return true;
		}, 50, "AudioConference events timer");
	}
	mListeners.push_back(listener);
}
void MS2AudioMixer::removeListener(AudioMixerListener *listener){
mListeners.remove(listener);
}
void MS2AudioMixer::sOnActiveTalkerChanged(MSAudioConference *audioconf, MSAudioEndpoint *ep){
const MSAudioConferenceParams *params = ms_audio_conference_get_params(audioconf);
MS2AudioMixer *zis = static_cast<MS2AudioMixer*>(params->user_data);
zis->onActiveTalkerChanged(ep);
}
void MS2AudioMixer::onActiveTalkerChanged(MSAudioEndpoint *ep){
StreamsGroup *sg = (StreamsGroup*)ms_audio_endpoint_get_user_data(ep);
for (auto & l : mListeners){
l->onActiveTalkerChanged(sg);
}
}
void MS2AudioMixer::connectEndpoint(Stream *as, MSAudioEndpoint *endpoint, bool muted){
ms_audio_endpoint_set_user_data(endpoint, &as->getGroup());
ms_audio_conference_add_member(mConference, endpoint);
ms_audio_conference_mute_member(mConference, endpoint, muted);
}
void MS2AudioMixer::disconnectEndpoint(MSAudioEndpoint *endpoint){
void MS2AudioMixer::disconnectEndpoint(Stream *as, MSAudioEndpoint *endpoint){
ms_audio_endpoint_set_user_data(endpoint, nullptr);
ms_audio_conference_remove_member(mConference, endpoint);
}
......
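To make the new audio-mixer plumbing above easier to follow: MSAudioConferenceParams now carries a plain C callback plus a user_data pointer back to the MS2AudioMixer instance, the static sOnActiveTalkerChanged() trampoline recovers the C++ object from that pointer and fans the event out to the registered AudioMixerListener objects, and a 50 ms timer, created lazily on the first addListener() call, polls ms_audio_conference_process_events() so that the callback actually fires. Below is a minimal, standalone sketch of that callback-trampoline pattern; every name in it is illustrative and not part of liblinphone or mediastreamer2.

#include <iostream>
#include <list>

struct ActiveTalkerListener {
	virtual ~ActiveTalkerListener() = default;
	virtual void onActiveTalkerChanged(int memberId) = 0;
};

class ToyAudioMixer {
public:
	void addListener(ActiveTalkerListener *l) { mListeners.push_back(l); }
	// What the C layer would store as (callback, user_data) and later invoke:
	static void sOnActiveTalkerChanged(int memberId, void *userData) {
		static_cast<ToyAudioMixer *>(userData)->notifyActiveTalkerChanged(memberId);
	}
private:
	void notifyActiveTalkerChanged(int memberId) {
		for (auto *l : mListeners) l->onActiveTalkerChanged(memberId);
	}
	std::list<ActiveTalkerListener *> mListeners;
};

struct LoggingListener : ActiveTalkerListener {
	void onActiveTalkerChanged(int memberId) override {
		std::cout << "active talker is now member " << memberId << std::endl;
	}
};

int main() {
	ToyAudioMixer mixer;
	LoggingListener logger;
	mixer.addListener(&logger);
	// Simulate the C side detecting a new active talker and firing the callback.
	ToyAudioMixer::sOnActiveTalkerChanged(7, &mixer);
	return 0;
}
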
@@ -382,7 +382,7 @@ void MS2AudioStream::render(const OfferAnswerContext &params, CallSession::State
if (audioMixer){
mConferenceEndpoint = ms_audio_endpoint_get_from_stream(mStream, TRUE);
audioMixer->connectEndpoint(mConferenceEndpoint, (stream->dir == SalStreamRecvOnly));
audioMixer->connectEndpoint(this, mConferenceEndpoint, (stream->dir == SalStreamRecvOnly));
}
getMediaSessionPrivate().getCurrentParams()->getPrivate()->setInConference(audioMixer != nullptr);
getMediaSessionPrivate().getCurrentParams()->enableLowBandwidth(getMediaSessionPrivate().getParams()->lowBandwidthEnabled());
@@ -441,7 +441,7 @@ void MS2AudioStream::stop(){
if (mConferenceEndpoint){
// First disconnect from the mixer before stopping the stream.
getAudioMixer()->disconnectEndpoint(mConferenceEndpoint);
getAudioMixer()->disconnectEndpoint(this, mConferenceEndpoint);
ms_audio_endpoint_release_from_stream(mConferenceEndpoint);
mConferenceEndpoint = nullptr;
}
......
@@ -26,7 +26,9 @@ LINPHONE_BEGIN_NAMESPACE
MixerSession::MixerSession(Core &core) : mCore(core){
mMixers[SalAudio].reset(new MS2AudioMixer(*this));
auto audioMixer = new MS2AudioMixer(*this);
audioMixer->addListener(this);
mMixers[SalAudio].reset(audioMixer);
#ifdef VIDEO_ENABLED
mMixers[SalVideo].reset(new MS2VideoMixer(*this));
#endif
@@ -45,6 +47,13 @@ void MixerSession::unjoinStreamsGroup(StreamsGroup &sg){
sg.unjoinMixerSession();
}
void MixerSession::setFocus(StreamsGroup *sg){
#ifdef VIDEO_ENABLED
MS2VideoMixer *mixer = dynamic_cast<MS2VideoMixer*>(mMixers[SalVideo].get());
if (mixer) mixer->setFocus(sg);
#endif
}
StreamMixer *MixerSession::getMixerByType(SalStreamType type){
return mMixers[type].get();
}
@@ -63,6 +72,10 @@ void MixerSession::enableLocalParticipant(bool enabled){
}
}
void MixerSession::onActiveTalkerChanged(StreamsGroup *sg){
setFocus(sg);
}
StreamMixer::StreamMixer(MixerSession & session) : mSession(session){
}
......
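At the session level the wiring shown above is deliberately thin: MixerSession registers itself as the audio mixer's listener in its constructor, and onActiveTalkerChanged() simply forwards the StreamsGroup to setFocus(), which hands it to the video mixer when video is enabled; a nullptr group means the local participant is the active talker. Here is a standalone toy sketch of that forwarding chain, using made-up types rather than the liblinphone classes.

#include <iostream>
#include <string>

struct Group { std::string name; };

struct TalkerListener {
	virtual ~TalkerListener() = default;
	virtual void onActiveTalkerChanged(Group *g) = 0;
};

struct ToyVideoMixer {
	void setFocus(Group *g) {
		std::cout << "video focus -> " << (g ? g->name : "local participant") << std::endl;
	}
};

class ToySession : public TalkerListener {
public:
	explicit ToySession(ToyVideoMixer &v) : mVideo(v) {}
	// Every active-talker notification becomes a focus change on the video side.
	void onActiveTalkerChanged(Group *g) override { mVideo.setFocus(g); }
private:
	ToyVideoMixer &mVideo;
};

int main() {
	ToyVideoMixer video;
	ToySession session(video);
	Group alice{"alice"};
	session.onActiveTalkerChanged(&alice);  // a remote participant speaks
	session.onActiveTalkerChanged(nullptr); // the local participant speaks
	return 0;
}
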
@@ -31,7 +31,16 @@ LINPHONE_BEGIN_NAMESPACE
class StreamMixer;
class MixerSession{
class AudioMixerListener{
public:
virtual ~AudioMixerListener() = default;
/*
 * Notifies which participant is the current active talker. By convention, sg == nullptr means that the local participant is the active talker.
 */
virtual void onActiveTalkerChanged(StreamsGroup *sg) = 0;
};
class MixerSession : protected AudioMixerListener{
public:
MixerSession(Core &core);
~MixerSession();
@@ -41,6 +50,9 @@ public:
void enableLocalParticipant(bool enabled);
Core & getCore() const;
LinphoneCore *getCCore()const;
void setFocus(StreamsGroup *sg);
protected:
virtual void onActiveTalkerChanged(StreamsGroup *sg) override;
private:
Core & mCore;
std::map<SalStreamType, std::unique_ptr<StreamMixer>> mMixers;
@@ -72,8 +84,8 @@ class MS2AudioMixer : public StreamMixer, public AudioControlInterface{
public:
MS2AudioMixer(MixerSession & session);
~MS2AudioMixer();
void connectEndpoint(MSAudioEndpoint *endpoint, bool muted);
void disconnectEndpoint(MSAudioEndpoint *endpoint);
void connectEndpoint(Stream *as, MSAudioEndpoint *endpoint, bool muted);
void disconnectEndpoint(Stream *as, MSAudioEndpoint *endpoint);
virtual void enableLocalParticipant(bool enabled) override;
void setRecordPath(const std::string &path);
@@ -95,28 +107,35 @@ public:
virtual void sendDtmf(int dtmf) override;
virtual void enableEchoCancellation(bool value) override;
virtual bool echoCancellationEnabled()const override;
void addListener(AudioMixerListener *listener);
void removeListener(AudioMixerListener *listener);
// Used for the tone manager.
AudioStream * getAudioStream();
private:
void onActiveTalkerChanged(MSAudioEndpoint *ep);
static void sOnActiveTalkerChanged(MSAudioConference *audioconf, MSAudioEndpoint *ep);
void addLocalParticipant();
void removeLocalParticipant();
RtpProfile *sMakeDummyProfile(int samplerate);
std::list<AudioMixerListener*> mListeners;
MSAudioConference *mConference = nullptr;
AudioStream *mLocalParticipantStream = nullptr;
MSAudioEndpoint *mLocalEndpoint = nullptr;
MSAudioEndpoint *mRecordEndpoint = nullptr;
RtpProfile *mLocalDummyProfile = nullptr;
std::string mRecordPath;
belle_sip_source_t *mTimer = nullptr;
bool mLocalMicEnabled = true;
};
class MS2VideoMixer : public StreamMixer, public MS2VideoControl{
public:
MS2VideoMixer(MixerSession & session);
void connectEndpoint(MSVideoEndpoint *endpoint, bool muted);
void disconnectEndpoint(MSVideoEndpoint *endpoint);
void connectEndpoint(Stream *vs, MSVideoEndpoint *endpoint, bool muted);
void disconnectEndpoint(Stream *vs, MSVideoEndpoint *endpoint);
virtual void enableLocalParticipant(bool enabled) override;
void setFocus(StreamsGroup *sg);
~MS2VideoMixer();
protected:
virtual void onSnapshotTaken(const std::string &filepath) override;
......
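A small design note on the header changes above: MixerSession inherits AudioMixerListener with protected access and overrides onActiveTalkerChanged() as a protected member, so it can hand itself to MS2AudioMixer::addListener() from inside its own constructor while keeping the listener interface out of its public API. A minimal sketch of that idiom, with illustrative names only:

#include <iostream>

struct Listener {
	virtual ~Listener() = default;
	virtual void onEvent(int value) = 0;
};

class Session : protected Listener {
public:
	// Inside its own members the derived class may still convert itself to
	// Listener*, which is exactly what passing 'this' to an addListener() relies on.
	Listener *asListener() { return this; }
protected:
	void onEvent(int value) override { std::cout << "event " << value << std::endl; }
};

int main() {
	Session s;
	s.asListener()->onEvent(42); // delivered through the base interface
	// Listener *l = &s;         // does not compile: the base is protected
	// s.onEvent(42);            // does not compile: the override is protected
	return 0;
}
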
@@ -33,14 +33,38 @@ MS2VideoMixer::MS2VideoMixer(MixerSession & session) : StreamMixer(session), MS2
mConference = ms_video_conference_new(mSession.getCCore()->factory, &params);
}
void MS2VideoMixer::connectEndpoint(MSVideoEndpoint *endpoint, bool muted){
void MS2VideoMixer::connectEndpoint(Stream *vs, MSVideoEndpoint *endpoint, bool muted){
ms_video_endpoint_set_user_data(endpoint, &vs->getGroup());
ms_video_conference_add_member(mConference, endpoint);
}
void MS2VideoMixer::disconnectEndpoint(MSVideoEndpoint *endpoint){
void MS2VideoMixer::disconnectEndpoint(Stream *vs, MSVideoEndpoint *endpoint){
ms_video_endpoint_set_user_data(endpoint, nullptr);
ms_video_conference_remove_member(mConference, endpoint);
}
void MS2VideoMixer::setFocus(StreamsGroup *sg){
	MSVideoEndpoint *ep = nullptr;
	if (sg == nullptr){
		ep = mLocalEndpoint;
	}else{
		const bctbx_list_t *elem = ms_video_conference_get_members(mConference);
		for (; elem != nullptr; elem = elem->next){
			MSVideoEndpoint *ep_it = (MSVideoEndpoint *)elem->data;
			if (ms_video_endpoint_get_user_data(ep_it) == sg){
				ep = ep_it;
				break;
			}
		}
	}
	if (ep){
		ms_video_conference_set_focus(mConference, ep);
	}else{
		lError() << "MS2VideoMixer: cannot find endpoint requested for focus.";
	}
}
RtpProfile *MS2VideoMixer::sMakeDummyProfile(){
RtpProfile *prof = rtp_profile_new("dummy video");
LinphonePayloadType *pt = linphone_core_get_payload_type(mSession.getCCore(), "VP8", 90000, -1);
......
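The video mixer's setFocus() relies on the user-data back-pointer installed in connectEndpoint(): every member endpoint is tagged with the StreamsGroup that owns it, so the mixer can walk ms_video_conference_get_members() to find the endpoint to give focus to, falling back to an error log when that group is no longer a member. A standalone sketch of that lookup, with toy types standing in for the mediastreamer2 ones:

#include <iostream>
#include <vector>

struct Endpoint {
	int id;
	void *userData = nullptr; // back-pointer to the owning group, set on connect
};

struct Group { const char *name; };

// Walk the member list and return the endpoint tagged with 'group',
// or nullptr when that group is not (or no longer) a member.
Endpoint *findEndpointFor(std::vector<Endpoint> &members, const Group *group) {
	for (auto &ep : members) {
		if (ep.userData == group) return &ep;
	}
	return nullptr;
}

int main() {
	Group alice{"alice"}, bob{"bob"};
	std::vector<Endpoint> members = {{1, &alice}, {2, &bob}};
	if (Endpoint *focus = findEndpointFor(members, &bob))
		std::cout << "focus goes to endpoint " << focus->id << std::endl;
	else
		std::cout << "cannot find endpoint requested for focus" << std::endl;
	return 0;
}
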
@@ -328,7 +328,7 @@ void MS2VideoStream::render(const OfferAnswerContext & ctx, CallSession::State t
}
if (videoMixer){
mConferenceEndpoint = ms_video_endpoint_get_from_stream(mStream, TRUE);
videoMixer->connectEndpoint(mConferenceEndpoint, (vstream->dir == SalStreamRecvOnly));
videoMixer->connectEndpoint(this, mConferenceEndpoint, (vstream->dir == SalStreamRecvOnly));
}
}
@@ -339,7 +339,7 @@ void MS2VideoStream::stop(){
if (mConferenceEndpoint){
// First disconnect from the mixer before stopping the stream.
getVideoMixer()->disconnectEndpoint(mConferenceEndpoint);
getVideoMixer()->disconnectEndpoint(this, mConferenceEndpoint);
ms_video_endpoint_release_from_stream(mConferenceEndpoint);
mConferenceEndpoint = nullptr;
}
......