diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle
index e35332c27..c3d87b041 100644
--- a/TMessagesProj/build.gradle
+++ b/TMessagesProj/build.gradle
@@ -300,7 +300,7 @@ android {
         }
     }

-    defaultConfig.versionCode = 2587
+    defaultConfig.versionCode = 2594

     applicationVariants.all { variant ->
         variant.outputs.all { output ->
@@ -319,7 +319,7 @@ android {
     defaultConfig {
         minSdkVersion 16
         targetSdkVersion 30
-        versionName "8.6.0"
+        versionName "8.6.1"

         vectorDrawables.generatedDensities = ['mdpi', 'hdpi', 'xhdpi', 'xxhdpi']
diff --git a/TMessagesProj/jni/CMakeLists.txt b/TMessagesProj/jni/CMakeLists.txt
index dfe087cf4..38c78fd0a 100644
--- a/TMessagesProj/jni/CMakeLists.txt
+++ b/TMessagesProj/jni/CMakeLists.txt
@@ -404,7 +404,7 @@ target_compile_definitions(sqlite PUBLIC
 #voip
 include(${CMAKE_HOME_DIRECTORY}/voip/CMakeLists.txt)

-set(NATIVE_LIB "tmessages.41")
+set(NATIVE_LIB "tmessages.42")

 #tmessages
 add_library(${NATIVE_LIB} SHARED
diff --git a/TMessagesProj/jni/voip/CMakeLists.txt b/TMessagesProj/jni/voip/CMakeLists.txt
index 24a55ba22..03676b6d0 100644
--- a/TMessagesProj/jni/voip/CMakeLists.txt
+++ b/TMessagesProj/jni/voip/CMakeLists.txt
@@ -657,7 +657,6 @@ add_library(tgcalls STATIC
         voip/webrtc/api/video/video_adaptation_counters.cc
         voip/webrtc/api/video/video_frame_metadata.cc
         voip/webrtc/api/voip/voip_engine_factory.cc
-        voip/webrtc/api/video/i444_buffer.cc
         voip/webrtc/api/video/rtp_video_frame_assembler.cc
         voip/webrtc/api/numerics/samples_stats_counter.cc
         voip/webrtc/api/wrapping_async_dns_resolver.cc
@@ -1181,6 +1180,7 @@ add_library(tgcalls STATIC
         voip/webrtc/modules/audio_processing/vad/vad_audio_proc.cc
         voip/webrtc/modules/audio_processing/vad/vad_circular_buffer.cc
         voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc
+        voip/webrtc/modules/audio_processing/voice_detection.cc
         voip/webrtc/modules/audio_processing/optionally_built_submodule_creators.cc
         voip/webrtc/modules/audio_processing/capture_levels_adjuster/capture_levels_adjuster.cc
         voip/webrtc/modules/audio_processing/capture_levels_adjuster/audio_samples_scaler.cc
@@ -1327,7 +1327,7 @@ add_library(tgcalls STATIC
         voip/webrtc/modules/video_capture/video_capture_impl.cc
         voip/webrtc/modules/video_coding/codec_timer.cc
         voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_decoder_absent.cc
-        voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.cc
+        voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc
         voip/webrtc/modules/video_coding/codecs/h264/h264.cc
         voip/webrtc/modules/video_coding/codecs/h264/h264_color_space.cc
         voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc
@@ -1409,7 +1409,6 @@ add_library(tgcalls STATIC
         voip/webrtc/modules/video_coding/codecs/av1/av1_svc_config.cc
         voip/webrtc/modules/video_coding/nack_requester.cc
         voip/webrtc/modules/video_coding/frame_buffer3.cc
-        voip/webrtc/modules/video_coding/frame_helpers.cc
         voip/webrtc/modules/video_coding/h264_packet_buffer.cc
         voip/webrtc/modules/video_processing/util/denoiser_filter.cc
         voip/webrtc/modules/video_processing/util/denoiser_filter_c.cc
@@ -1635,12 +1634,7 @@ add_library(tgcalls STATIC
         voip/webrtc/video/receive_statistics_proxy2.cc
         voip/webrtc/video/call_stats2.cc
         voip/webrtc/video/alignment_adjuster.cc
-        voip/webrtc/video/frame_buffer_proxy.cc
-        voip/webrtc/video/decode_synchronizer.cc
         voip/webrtc/video/frame_cadence_adapter.cc
-        voip/webrtc/video/frame_decode_timing.cc
-        voip/webrtc/video/task_queue_frame_decode_scheduler.cc
-        voip/webrtc/video/video_receive_stream_timeout_tracker.cc
         voip/webrtc/audio/audio_level.cc
         voip/webrtc/audio/audio_receive_stream.cc
         voip/webrtc/audio/audio_send_stream.cc
@@ -1786,7 +1780,7 @@ add_library(voipandroid STATIC
         voip/webrtc/sdk/android/native_api/video/wrapper.cc
         voip/webrtc/sdk/android/native_api/network_monitor/network_monitor.cc
         voip/webrtc/sdk/android/src/jni/android_histogram.cc
-        voip/webrtc/sdk/android/src/jni/libaom_av1_codec.cc
+        voip/webrtc/sdk/android/src/jni/av1_codec.cc
         voip/webrtc/sdk/android/src/jni/egl_base_10_impl.cc
         voip/webrtc/sdk/android/src/jni/android_metrics.cc
         voip/webrtc/sdk/android/src/jni/android_network_monitor.cc
diff --git a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp
index d89b75ea1..bf59892ad 100644
--- a/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp
+++ b/TMessagesProj/jni/voip/org_telegram_messenger_voip_Instance.cpp
@@ -209,6 +209,7 @@ struct InstanceHolder {
     std::unique_ptr groupNativeInstance;
     std::shared_ptr _videoCapture;
     std::shared_ptr _screenVideoCapture;
+    std::shared_ptr> _sink;
     std::shared_ptr _platformContext;
     std::map remoteGroupSinks;
     bool useScreencast = false;
@@ -754,7 +755,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
     holder->nativeInstance = tgcalls::Meta::Create(v, std::move(descriptor));
     holder->_videoCapture = videoCapture;
     holder->_platformContext = platformContext;
-    holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink));
+    holder->_sink = webrtc::JavaToNativeVideoSink(env, remoteSink);
+    holder->nativeInstance->setIncomingVideoOutput(holder->_sink);
     holder->nativeInstance->setNetworkType(parseNetworkType(networkType));
     holder->nativeInstance->setRequestedVideoAspect(aspectRatio);
     return reinterpret_cast(holder);
diff --git a/TMessagesProj/jni/voip/tgcalls/Manager.cpp b/TMessagesProj/jni/voip/tgcalls/Manager.cpp
index 5069b58e3..a38406456 100644
--- a/TMessagesProj/jni/voip/tgcalls/Manager.cpp
+++ b/TMessagesProj/jni/voip/tgcalls/Manager.cpp
@@ -139,9 +139,9 @@ void Manager::sendSignalingAsync(int delayMs, int cause) {
         }
     };
     if (delayMs) {
-        _thread->PostDelayedTask(std::move(task), delayMs);
+        _thread->PostDelayedTask(RTC_FROM_HERE, std::move(task), delayMs);
     } else {
-        _thread->PostTask(std::move(task));
+        _thread->PostTask(RTC_FROM_HERE, std::move(task));
     }
 }
@@ -149,7 +149,7 @@ void Manager::start() {
     const auto weak = std::weak_ptr(shared_from_this());
     const auto thread = _thread;
     const auto sendSignalingMessage = [=](Message &&message) {
-        thread->PostTask([=, message = std::move(message)]() mutable {
+        thread->PostTask(RTC_FROM_HERE, [=, message = std::move(message)]() mutable {
             const auto strong = weak.lock();
             if (!strong) {
                 return;
@@ -167,7 +167,7 @@ void Manager::start() {
         rtcServers,
         std::move(proxy),
         [=](const NetworkManager::State &state) {
-            thread->PostTask([=] {
+            thread->PostTask(RTC_FROM_HERE, [=] {
                 const auto strong = weak.lock();
                 if (!strong) {
                     return;
@@ -200,7 +200,7 @@ void Manager::start() {
             });
         },
         [=](DecryptedMessage &&message) {
-            thread->PostTask([=, message = std::move(message)]() mutable {
+            thread->PostTask(RTC_FROM_HERE, [=, message = std::move(message)]() mutable {
                 if (const auto strong = weak.lock()) {
                     strong->receiveMessage(std::move(message));
                 }
@@ -216,9 +216,9 @@ void Manager::start() {
                 }
             };
             if (delayMs) {
-                thread->PostDelayedTask(task, delayMs);
+                thread->PostDelayedTask(RTC_FROM_HERE, task, delayMs);
             } else {
-                thread->PostTask(task);
+                thread->PostTask(RTC_FROM_HERE, task);
             }
         });
     }));
@@ -232,7 +232,7 @@ void Manager::start() {
         videoCapture,
         sendSignalingMessage,
         [=](Message &&message) {
-            thread->PostTask([=, message = std::move(message)]() mutable {
+            thread->PostTask(RTC_FROM_HERE, [=, message = std::move(message)]() mutable {
                 const auto strong = weak.lock();
                 if (!strong) {
                     return;
@@ -362,7 +362,7 @@ void Manager::getNetworkStats(std::function comp
     CallStats callStats;
     networkManager->fillCallStats(callStats);
-    thread->PostTask([weak, networkStats, completion = std::move(completion), callStats = std::move(callStats), statsLogPath = statsLogPath] {
+    thread->PostTask(RTC_FROM_HERE, [weak, networkStats, completion = std::move(completion), callStats = std::move(callStats), statsLogPath = statsLogPath] {
         const auto strong = weak.lock();
         if (!strong) {
             return;
diff --git a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp
index a1071f47e..2db2b783f 100644
--- a/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp
+++ b/TMessagesProj/jni/voip/tgcalls/MediaManager.cpp
@@ -314,7 +314,7 @@ _platformContext(platformContext) {
     webrtc::AudioProcessingBuilder builder;
     std::unique_ptr audioProcessor = std::make_unique([this](float level) {
-        this->_thread->PostTask([this, level](){
+        this->_thread->PostTask(RTC_FROM_HERE, [this, level](){
             auto strong = this;
             strong->_currentMyAudioLevel = level;
         });
@@ -436,7 +436,7 @@ void MediaManager::start() {
     // Here we hope that thread outlives the sink
     rtc::Thread *thread = _thread;
     std::unique_ptr incomingSink(new AudioTrackSinkInterfaceImpl([weak, thread](float level) {
-        thread->PostTask([weak, level] {
+        thread->PostTask(RTC_FROM_HERE, [weak, level] {
             if (const auto strong = weak.lock()) {
                 strong->_currentAudioLevel = level;
             }
@@ -545,7 +545,7 @@ void MediaManager::sendOutgoingMediaStateMessage() {

 void MediaManager::beginStatsTimer(int timeoutMs) {
     const auto weak = std::weak_ptr(shared_from_this());
-    _thread->PostDelayedTask([weak]() {
+    _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() {
         auto strong = weak.lock();
         if (!strong) {
             return;
@@ -556,7 +556,7 @@ void MediaManager::beginStatsTimer(int timeoutMs) {

 void MediaManager::beginLevelsTimer(int timeoutMs) {
     const auto weak = std::weak_ptr(shared_from_this());
-    _thread->PostDelayedTask([weak]() {
+    _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() {
         auto strong = weak.lock();
         if (!strong) {
             return;
@@ -653,7 +653,7 @@ void MediaManager::setSendVideo(std::shared_ptr videoCapt
     const auto object = GetVideoCaptureAssumingSameThread(_videoCapture.get());
     _isScreenCapture = object->isScreenCapture();
     object->setStateUpdated([=](VideoState state) {
-        thread->PostTask([=] {
+        thread->PostTask(RTC_FROM_HERE, [=] {
             if (const auto strong = weak.lock()) {
                 strong->setOutgoingVideoState(state);
             }
diff --git a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp
index dccee7adc..faa7f50e7 100644
--- a/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp
+++ b/TMessagesProj/jni/voip/tgcalls/NetworkManager.cpp
@@ -274,7 +274,7 @@ void NetworkManager::logCurrentNetworkState() {

 void NetworkManager::checkConnectionTimeout() {
     const auto weak = std::weak_ptr(shared_from_this());
-    _thread->PostDelayedTask([weak]() {
+    _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() {
         auto strong = weak.lock();
         if (!strong) {
             return;
diff --git a/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h
b/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h index cd96ac34d..3c6c51cf5 100644 --- a/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h +++ b/TMessagesProj/jni/voip/tgcalls/ThreadLocalObject.h @@ -19,20 +19,20 @@ public: _thread(thread), _valueHolder(std::make_unique()) { assert(_thread != nullptr); - _thread->PostTask([valueHolder = _valueHolder.get(), generator = std::forward(generator)]() mutable { + _thread->PostTask(RTC_FROM_HERE, [valueHolder = _valueHolder.get(), generator = std::forward(generator)]() mutable { valueHolder->_value.reset(generator()); }); } ~ThreadLocalObject() { - _thread->PostTask([valueHolder = std::move(_valueHolder)](){ + _thread->PostTask(RTC_FROM_HERE, [valueHolder = std::move(_valueHolder)](){ valueHolder->_value.reset(); }); } template void perform(const rtc::Location& posted_from, FunctorT &&functor) { - _thread->PostTask([valueHolder = _valueHolder.get(), f = std::forward(functor)]() mutable { + _thread->PostTask(posted_from, [valueHolder = _valueHolder.get(), f = std::forward(functor)]() mutable { assert(valueHolder->_value != nullptr); f(valueHolder->_value.get()); }); diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp index fc53be4e3..f124ded4c 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupInstanceCustomImpl.cpp @@ -929,8 +929,7 @@ public: std::string streamId = std::string("stream") + ssrc.name(); - _audioChannel = _channelManager->CreateVoiceChannel(_call, cricket::MediaConfig(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), audioOptions); - _audioChannel->SetRtpTransport(rtpTransport); + _audioChannel = _channelManager->CreateVoiceChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("audio") + uint32ToString(ssrc.networkSsrc), false, GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, audioOptions); const uint8_t opusPTimeMs = 120; @@ -967,9 +966,8 @@ public: streamParams.set_stream_ids({ streamId }); incomingAudioDescription->AddStream(streamParams); - std::string error_desc; - _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, error_desc); - _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, error_desc); + _audioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _audioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); _audioChannel->SetPayloadTypeDemuxingEnabled(false); outgoingAudioDescription.reset(); @@ -1104,11 +1102,10 @@ public: incomingVideoDescription->AddStream(videoRecvStreamParams); - _videoChannel = _channelManager->CreateVideoChannel(_call, cricket::MediaConfig(), std::string("video") + uint32ToString(mid), false, GroupNetworkManager::getDefaulCryptoOptions(), cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); - _videoChannel->SetRtpTransport(rtpTransport); - std::string error_desc; - _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, error_desc); - _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, error_desc); + _videoChannel = _channelManager->CreateVideoChannel(_call, cricket::MediaConfig(), rtpTransport, _threads->getWorkerThread(), std::string("video") + uint32ToString(mid), false, 
GroupNetworkManager::getDefaulCryptoOptions(), randomIdGenerator, cricket::VideoOptions(), _videoBitrateAllocatorFactory.get()); + + _videoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); + _videoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); _videoChannel->SetPayloadTypeDemuxingEnabled(false); _videoChannel->media_channel()->SetSink(_mainVideoSsrc, _videoSink.get()); @@ -1472,7 +1469,7 @@ public: _networkManager.reset(new ThreadLocalObject(_threads->getNetworkThread(), [weak, threads = _threads] () mutable { return new GroupNetworkManager( [=](const GroupNetworkManager::State &state) { - threads->getMediaThread()->PostTask([=] { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [=] { const auto strong = weak.lock(); if (!strong) { return; @@ -1484,28 +1481,28 @@ public: if (!isUnresolved) { return; } - threads->getMediaThread()->PostTask([weak, message, isUnresolved]() mutable { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message, isUnresolved]() mutable { if (const auto strong = weak.lock()) { strong->receivePacket(message, isUnresolved); } }); }, [=](bool isDataChannelOpen) { - threads->getMediaThread()->PostTask([weak, isDataChannelOpen]() mutable { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, isDataChannelOpen]() mutable { if (const auto strong = weak.lock()) { strong->updateIsDataChannelOpen(isDataChannelOpen); } }); }, [=](std::string const &message) { - threads->getMediaThread()->PostTask([weak, message]() { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, message]() { if (const auto strong = weak.lock()) { strong->receiveDataChannelMessage(message); } }); }, [=](uint32_t ssrc, uint8_t audioLevel, bool isSpeech) { - threads->getMediaThread()->PostTask([weak, ssrc, audioLevel, isSpeech]() { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, audioLevel, isSpeech]() { if (const auto strong = weak.lock()) { strong->updateSsrcAudioLevel(ssrc, audioLevel, isSpeech); } @@ -1521,7 +1518,7 @@ public: #if USE_RNNOISE audioProcessor = std::make_unique([weak, threads = _threads](GroupLevelValue const &level) { - threads->getMediaThread()->PostTask([weak, level](){ + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, level](){ auto strong = weak.lock(); if (!strong) { return; @@ -1654,8 +1651,8 @@ public: if (_videoContentType == VideoContentType::Screencast) { videoOptions.is_screencast = true; } - _outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), videoOptions, _videoBitrateAllocatorFactory.get()); - _outgoingVideoChannel->SetRtpTransport(_rtpTransport); + _outgoingVideoChannel = _channelManager->CreateVideoChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "1", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), videoOptions, _videoBitrateAllocatorFactory.get()); + if (!_outgoingVideoChannel) { RTC_LOG(LS_ERROR) << "Could not create outgoing video channel."; return; @@ -1715,9 +1712,8 @@ public: incomingVideoDescription->set_bandwidth(1300000); _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - std::string error_desc; - _outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, error_desc); - _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, error_desc); + 
_outgoingVideoChannel->SetRemoteContent(incomingVideoDescription.get(), webrtc::SdpType::kAnswer, nullptr); + _outgoingVideoChannel->SetLocalContent(outgoingVideoDescription.get(), webrtc::SdpType::kOffer, nullptr); _outgoingVideoChannel->SetPayloadTypeDemuxingEnabled(false); }); @@ -1851,21 +1847,21 @@ public: audioOptions.auto_gain_control = false; audioOptions.highpass_filter = false; audioOptions.typing_detection = false; -// audioOptions.experimental_agc = false; -// audioOptions.experimental_ns = false; + audioOptions.experimental_agc = false; + audioOptions.experimental_ns = false; audioOptions.residual_echo_detector = false; } else { audioOptions.echo_cancellation = true; audioOptions.noise_suppression = true; -// audioOptions.experimental_ns = true; + audioOptions.experimental_ns = true; audioOptions.residual_echo_detector = true; } std::vector streamIds; streamIds.push_back("1"); - _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), audioOptions); - _outgoingAudioChannel->SetRtpTransport(_rtpTransport); + _outgoingAudioChannel = _channelManager->CreateVoiceChannel(_call.get(), cricket::MediaConfig(), _rtpTransport, _threads->getWorkerThread(), "0", false, GroupNetworkManager::getDefaulCryptoOptions(), _uniqueRandomIdGenerator.get(), audioOptions); + const uint8_t opusMinBitrateKbps = _outgoingAudioBitrateKbit; const uint8_t opusMaxBitrateKbps = _outgoingAudioBitrateKbit; const uint8_t opusStartBitrateKbps = _outgoingAudioBitrateKbit; @@ -1901,9 +1897,8 @@ public: incomingAudioDescription->set_bandwidth(1300000); _threads->getWorkerThread()->Invoke(RTC_FROM_HERE, [&]() { - std::string error_desc; - _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, error_desc); - _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, error_desc); + _outgoingAudioChannel->SetLocalContent(outgoingAudioDescription.get(), webrtc::SdpType::kOffer, nullptr); + _outgoingAudioChannel->SetRemoteContent(incomingAudioDescription.get(), webrtc::SdpType::kAnswer, nullptr); _outgoingAudioChannel->SetPayloadTypeDemuxingEnabled(false); _outgoingAudioChannel->Enable(true); }); @@ -1949,7 +1944,7 @@ public: void beginLevelsTimer(int timeoutMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2010,7 +2005,7 @@ public: void beginAudioChannelCleanupTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2039,7 +2034,7 @@ public: void beginRemoteConstraintsUpdateTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2053,7 +2048,7 @@ public: void beginNetworkStatusTimer(int delayMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -2362,7 +2357,7 @@ public: } void 
receiveRtcpPacket(rtc::CopyOnWriteBuffer const &packet, int64_t timestamp) { - _threads->getWorkerThread()->PostTask([this, packet, timestamp]() { + _threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [this, packet, timestamp]() { _call->Receiver()->DeliverPacket(webrtc::MediaType::ANY, packet, timestamp); }); } @@ -2393,7 +2388,7 @@ public: _pendingOutgoingVideoConstraintRequestId += 1; const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak, requestId]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, requestId]() { auto strong = weak.lock(); if (!strong) { return; @@ -2493,7 +2488,7 @@ public: const auto weak = std::weak_ptr(shared_from_this()); auto task = _requestMediaChannelDescriptions(requestSsrcs, [weak, threads = _threads, requestId](std::vector &&descriptions) { - threads->getWorkerThread()->PostTask([weak, requestId, descriptions = std::move(descriptions)]() mutable { + threads->getWorkerThread()->PostTask(RTC_FROM_HERE, [weak, requestId, descriptions = std::move(descriptions)]() mutable { auto strong = weak.lock(); if (!strong) { return; @@ -3046,7 +3041,7 @@ public: if (ssrc.actualSsrc != ssrc.networkSsrc) { if (_audioLevelsUpdated) { onAudioSinkUpdate = [weak, ssrc = ssrc, threads = _threads](AudioSinkImpl::Update update) { - threads->getMediaThread()->PostTask([weak, ssrc, update]() { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, ssrc, update]() { auto strong = weak.lock(); if (!strong) { return; diff --git a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp index 131ccb879..8f133e9b6 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/GroupNetworkManager.cpp @@ -509,7 +509,7 @@ webrtc::RtpTransport *GroupNetworkManager::getRtpTransport() { void GroupNetworkManager::checkConnectionTimeout() { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostDelayedTask([weak]() { + _threads->getNetworkThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -553,7 +553,7 @@ void GroupNetworkManager::DtlsReadyToSend(bool isReadyToSend) { if (isReadyToSend) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getNetworkThread()->PostTask([weak]() { + _threads->getNetworkThread()->PostTask(RTC_FROM_HERE, [weak]() { const auto strong = weak.lock(); if (!strong) { return; diff --git a/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp b/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp index 660ce63b1..905532f9a 100644 --- a/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp +++ b/TMessagesProj/jni/voip/tgcalls/group/StreamingMediaContext.cpp @@ -258,7 +258,7 @@ public: void beginRenderTimer(int timeoutMs) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -572,7 +572,7 @@ public: if (!_pendingRequestTimeTask && _pendingRequestTimeDelayTaskId == 0) { const auto weak = std::weak_ptr(shared_from_this()); _pendingRequestTimeTask = _requestCurrentTime([weak, threads = _threads](int64_t timestamp) { - threads->getMediaThread()->PostTask([weak, timestamp]() { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, timestamp]() { auto strong = weak.lock(); if 
(!strong) { return; @@ -590,7 +590,7 @@ public: strong->_pendingRequestTimeDelayTaskId = taskId; strong->_nextPendingRequestTimeDelayTaskId++; - strong->_threads->getMediaThread()->PostDelayedTask([weak, taskId]() { + strong->_threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak, taskId]() { auto strong = weak.lock(); if (!strong) { return; @@ -770,7 +770,7 @@ public: const auto weakPart = std::weak_ptr(part); std::function handleResult = [weak, weakSegment, weakPart, threads = _threads, segmentTimestamp](BroadcastPart &&part) { - threads->getMediaThread()->PostTask([weak, weakSegment, weakPart, part = std::move(part), segmentTimestamp]() mutable { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, weakSegment, weakPart, part = std::move(part), segmentTimestamp]() mutable { auto strong = weak.lock(); if (!strong) { return; @@ -885,7 +885,7 @@ public: if (minDelayedRequestTimeout < INT32_MAX) { const auto weak = std::weak_ptr(shared_from_this()); - _threads->getMediaThread()->PostDelayedTask([weak]() { + _threads->getMediaThread()->PostDelayedTask(RTC_FROM_HERE, [weak]() { auto strong = weak.lock(); if (!strong) { return; @@ -904,7 +904,7 @@ public: const auto weakPart = std::weak_ptr(part); std::function handleResult = [weak, weakPart, threads = _threads, completion](BroadcastPart &&part) { - threads->getMediaThread()->PostTask([weak, weakPart, part = std::move(part), completion]() mutable { + threads->getMediaThread()->PostTask(RTC_FROM_HERE, [weak, weakPart, part = std::move(part), completion]() mutable { auto strong = weak.lock(); if (!strong) { return; diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h index 0f3ca80dd..20b9d994b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/audio_frame.h @@ -18,6 +18,7 @@ #include "api/audio/channel_layout.h" #include "api/rtp_packet_infos.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -59,9 +60,6 @@ class AudioFrame { AudioFrame(); - AudioFrame(const AudioFrame&) = delete; - AudioFrame& operator=(const AudioFrame&) = delete; - // Resets all members to their default state. void Reset(); // Same as Reset(), but leaves mute state unchanged. Muting a frame requires @@ -168,6 +166,8 @@ class AudioFrame { // capture timestamp of a received frame is found in `packet_infos_`. // This timestamp MUST be based on the same clock as rtc::TimeMillis(). 
absl::optional absolute_capture_timestamp_ms_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioFrame); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h index 1fd403652..c2ee79772 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio/echo_canceller3_config.h @@ -112,7 +112,7 @@ struct RTC_EXPORT EchoCanceller3Config { bool echo_can_saturate = true; bool bounded_erl = false; bool erle_onset_compensation_in_dominant_nearend = false; - bool use_conservative_tail_frequency_response = true; + bool use_conservative_tail_frequency_response = false; } ep_strength; struct EchoAudibility { diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h index 41138741b..336e38449 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_codecs/audio_decoder.h @@ -20,6 +20,7 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,9 +37,6 @@ class AudioDecoder { AudioDecoder() = default; virtual ~AudioDecoder() = default; - AudioDecoder(const AudioDecoder&) = delete; - AudioDecoder& operator=(const AudioDecoder&) = delete; - class EncodedAudioFrame { public: struct DecodeResult { @@ -189,6 +187,9 @@ class AudioDecoder { int sample_rate_hz, int16_t* decoded, SpeechType* speech_type); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoder); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_options.cc b/TMessagesProj/jni/voip/webrtc/api/audio_options.cc index fad35cb88..6832bbe29 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_options.cc +++ b/TMessagesProj/jni/voip/webrtc/api/audio_options.cc @@ -55,11 +55,16 @@ void AudioOptions::SetAll(const AudioOptions& change) { SetFrom(&audio_jitter_buffer_enable_rtx_handling, change.audio_jitter_buffer_enable_rtx_handling); SetFrom(&typing_detection, change.typing_detection); + SetFrom(&experimental_agc, change.experimental_agc); + SetFrom(&experimental_ns, change.experimental_ns); SetFrom(&residual_echo_detector, change.residual_echo_detector); + SetFrom(&tx_agc_target_dbov, change.tx_agc_target_dbov); + SetFrom(&tx_agc_digital_compression_gain, + change.tx_agc_digital_compression_gain); + SetFrom(&tx_agc_limiter, change.tx_agc_limiter); SetFrom(&combined_audio_video_bwe, change.combined_audio_video_bwe); SetFrom(&audio_network_adaptor, change.audio_network_adaptor); SetFrom(&audio_network_adaptor_config, change.audio_network_adaptor_config); - SetFrom(&init_recording_on_send, change.init_recording_on_send); } bool AudioOptions::operator==(const AudioOptions& o) const { @@ -79,11 +84,15 @@ bool AudioOptions::operator==(const AudioOptions& o) const { audio_jitter_buffer_enable_rtx_handling == o.audio_jitter_buffer_enable_rtx_handling && typing_detection == o.typing_detection && + experimental_agc == o.experimental_agc && + experimental_ns == o.experimental_ns && residual_echo_detector == o.residual_echo_detector && + tx_agc_target_dbov == o.tx_agc_target_dbov && + tx_agc_digital_compression_gain == o.tx_agc_digital_compression_gain && + tx_agc_limiter == o.tx_agc_limiter && combined_audio_video_bwe == o.combined_audio_video_bwe && audio_network_adaptor == o.audio_network_adaptor && - audio_network_adaptor_config 
== o.audio_network_adaptor_config && - init_recording_on_send == o.init_recording_on_send; + audio_network_adaptor_config == o.audio_network_adaptor_config; } std::string AudioOptions::ToString() const { @@ -108,10 +117,15 @@ std::string AudioOptions::ToString() const { ToStringIfSet(&result, "audio_jitter_buffer_enable_rtx_handling", audio_jitter_buffer_enable_rtx_handling); ToStringIfSet(&result, "typing", typing_detection); + ToStringIfSet(&result, "experimental_agc", experimental_agc); + ToStringIfSet(&result, "experimental_ns", experimental_ns); ToStringIfSet(&result, "residual_echo_detector", residual_echo_detector); + ToStringIfSet(&result, "tx_agc_target_dbov", tx_agc_target_dbov); + ToStringIfSet(&result, "tx_agc_digital_compression_gain", + tx_agc_digital_compression_gain); + ToStringIfSet(&result, "tx_agc_limiter", tx_agc_limiter); ToStringIfSet(&result, "combined_audio_video_bwe", combined_audio_video_bwe); ToStringIfSet(&result, "audio_network_adaptor", audio_network_adaptor); - ToStringIfSet(&result, "init_recording_on_send", init_recording_on_send); result << "}"; return result.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/api/audio_options.h b/TMessagesProj/jni/voip/webrtc/api/audio_options.h index 3fcc38d83..1b0d1ad0b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/audio_options.h +++ b/TMessagesProj/jni/voip/webrtc/api/audio_options.h @@ -60,14 +60,15 @@ struct RTC_EXPORT AudioOptions { absl::optional audio_jitter_buffer_min_delay_ms; // Audio receiver jitter buffer (NetEq) should handle retransmitted packets. absl::optional audio_jitter_buffer_enable_rtx_handling; - // Deprecated. - // TODO(bugs.webrtc.org/11226): Remove. // Audio processing to detect typing. absl::optional typing_detection; - // TODO(bugs.webrtc.org/11539): Deprecated, replaced by - // webrtc::CreateEchoDetector() and injection when creating the audio - // processing module. + absl::optional experimental_agc; + absl::optional experimental_ns; + // Note that tx_agc_* only applies to non-experimental AGC. absl::optional residual_echo_detector; + absl::optional tx_agc_target_dbov; + absl::optional tx_agc_digital_compression_gain; + absl::optional tx_agc_limiter; // Enable combined audio+bandwidth BWE. // TODO(pthatcher): This flag is set from the // "googCombinedAudioVideoBwe", but not used anywhere. So delete it, @@ -79,10 +80,6 @@ struct RTC_EXPORT AudioOptions { absl::optional audio_network_adaptor; // Config string for audio network adaptor. absl::optional audio_network_adaptor_config; - // Pre-initialize the ADM for recording when starting to send. Default to - // true. - // TODO(webrtc:13566): Remove this option. See issue for details. - absl::optional init_recording_on_send; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc index c41b6d6fb..008fce3e8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/api/create_peerconnection_factory.cc @@ -49,10 +49,6 @@ rtc::scoped_refptr CreatePeerConnectionFactory( dependencies.task_queue_factory.get()); dependencies.trials = std::make_unique(); - if (network_thread) { - // TODO(bugs.webrtc.org/13145): Add an rtc::SocketFactory* argument. 
- dependencies.socket_factory = network_thread->socketserver(); - } cricket::MediaEngineDependencies media_dependencies; media_dependencies.task_queue_factory = dependencies.task_queue_factory.get(); media_dependencies.adm = std::move(default_adm); diff --git a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc index faebc0972..a68ff8feb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.cc @@ -20,27 +20,11 @@ DtlsTransportInformation::DtlsTransportInformation(DtlsTransportState state) DtlsTransportInformation::DtlsTransportInformation( DtlsTransportState state, - absl::optional role, absl::optional tls_version, absl::optional ssl_cipher_suite, absl::optional srtp_cipher_suite, std::unique_ptr remote_ssl_certificates) : state_(state), - role_(role), - tls_version_(tls_version), - ssl_cipher_suite_(ssl_cipher_suite), - srtp_cipher_suite_(srtp_cipher_suite), - remote_ssl_certificates_(std::move(remote_ssl_certificates)) {} - -// Deprecated version -DtlsTransportInformation::DtlsTransportInformation( - DtlsTransportState state, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates) - : state_(state), - role_(absl::nullopt), tls_version_(tls_version), ssl_cipher_suite_(ssl_cipher_suite), srtp_cipher_suite_(srtp_cipher_suite), @@ -49,7 +33,6 @@ DtlsTransportInformation::DtlsTransportInformation( DtlsTransportInformation::DtlsTransportInformation( const DtlsTransportInformation& c) : state_(c.state()), - role_(c.role_), tls_version_(c.tls_version_), ssl_cipher_suite_(c.ssl_cipher_suite_), srtp_cipher_suite_(c.srtp_cipher_suite_), @@ -60,7 +43,6 @@ DtlsTransportInformation::DtlsTransportInformation( DtlsTransportInformation& DtlsTransportInformation::operator=( const DtlsTransportInformation& c) { state_ = c.state(); - role_ = c.role_; tls_version_ = c.tls_version_; ssl_cipher_suite_ = c.ssl_cipher_suite_; srtp_cipher_suite_ = c.srtp_cipher_suite_; diff --git a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h index 7b0151249..86715b040 100644 --- a/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/dtls_transport_interface.h @@ -36,11 +36,6 @@ enum class DtlsTransportState { kNumValues }; -enum class DtlsTransportTlsRole { - kServer, // Other end sends CLIENT_HELLO - kClient // This end sends CLIENT_HELLO -}; - // This object gives snapshot information about the changeable state of a // DTLSTransport. 
class RTC_EXPORT DtlsTransportInformation { @@ -49,19 +44,10 @@ class RTC_EXPORT DtlsTransportInformation { explicit DtlsTransportInformation(DtlsTransportState state); DtlsTransportInformation( DtlsTransportState state, - absl::optional role, absl::optional tls_version, absl::optional ssl_cipher_suite, absl::optional srtp_cipher_suite, std::unique_ptr remote_ssl_certificates); - ABSL_DEPRECATED("Use version with role parameter") - DtlsTransportInformation( - DtlsTransportState state, - absl::optional tls_version, - absl::optional ssl_cipher_suite, - absl::optional srtp_cipher_suite, - std::unique_ptr remote_ssl_certificates); - // Copy and assign DtlsTransportInformation(const DtlsTransportInformation& c); DtlsTransportInformation& operator=(const DtlsTransportInformation& c); @@ -71,7 +57,6 @@ class RTC_EXPORT DtlsTransportInformation { default; DtlsTransportState state() const { return state_; } - absl::optional role() const { return role_; } absl::optional tls_version() const { return tls_version_; } absl::optional ssl_cipher_suite() const { return ssl_cipher_suite_; } absl::optional srtp_cipher_suite() const { return srtp_cipher_suite_; } @@ -82,7 +67,6 @@ class RTC_EXPORT DtlsTransportInformation { private: DtlsTransportState state_; - absl::optional role_; absl::optional tls_version_; absl::optional ssl_cipher_suite_; absl::optional srtp_cipher_suite_; diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h index 8f47a102e..40e278345 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_ice_candidate.h @@ -22,6 +22,7 @@ #include "api/candidate.h" #include "api/jsep.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -63,10 +64,6 @@ class JsepCandidateCollection : public IceCandidateCollection { // Move constructor is defined so that a vector of JsepCandidateCollections // can be resized. JsepCandidateCollection(JsepCandidateCollection&& o); - - JsepCandidateCollection(const JsepCandidateCollection&) = delete; - JsepCandidateCollection& operator=(const JsepCandidateCollection&) = delete; - // Returns a copy of the candidate collection. JsepCandidateCollection Clone() const; size_t count() const override; @@ -83,6 +80,8 @@ class JsepCandidateCollection : public IceCandidateCollection { private: std::vector> candidates_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepCandidateCollection); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h index 0b65734ea..a4300eba9 100644 --- a/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h +++ b/TMessagesProj/jni/voip/webrtc/api/jsep_session_description.h @@ -22,6 +22,7 @@ #include "api/candidate.h" #include "api/jsep.h" #include "api/jsep_ice_candidate.h" +#include "rtc_base/constructor_magic.h" namespace cricket { class SessionDescription; @@ -42,9 +43,6 @@ class JsepSessionDescription : public SessionDescriptionInterface { absl::string_view session_version); virtual ~JsepSessionDescription(); - JsepSessionDescription(const JsepSessionDescription&) = delete; - JsepSessionDescription& operator=(const JsepSessionDescription&) = delete; - // Takes ownership of `description`. 
bool Initialize(std::unique_ptr description, const std::string& session_id, @@ -84,6 +82,8 @@ class JsepSessionDescription : public SessionDescriptionInterface { bool GetMediasectionIndex(const IceCandidateInterface* candidate, size_t* index); int GetMediasectionIndex(const cricket::Candidate& candidate); + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepSessionDescription); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h index 7e010289a..d61dd9849 100644 --- a/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/media_stream_interface.h @@ -333,8 +333,6 @@ class MediaStreamInterface : public rtc::RefCountInterface, virtual rtc::scoped_refptr FindVideoTrack( const std::string& track_id) = 0; - // Takes ownership of added tracks. - // TODO(hta): Should take scoped_refptr rather than raw pointer. virtual bool AddTrack(AudioTrackInterface* track) = 0; virtual bool AddTrack(VideoTrackInterface* track) = 0; virtual bool RemoveTrack(AudioTrackInterface* track) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h b/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h deleted file mode 100644 index fc5f350db..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/metronome/metronome.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_METRONOME_METRONOME_H_ -#define API_METRONOME_METRONOME_H_ - -#include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// The Metronome posts OnTick() on task queues provided by its listeners' task -// queue periodically. The metronome can be used as an alternative to using -// PostDelayedTask on a thread or task queue for coalescing work and reducing -// the number of idle-wakeups. -// -// Listeners can be added and removed from any sequence, but it is illegal to -// remove a listener from an OnTick invocation. -// -// The metronome concept is still under experimentation, and may not be availble -// in all platforms or applications. See https://crbug.com/1253787 for more -// details. -// -// Metronome implementations must be thread-safe. -class RTC_EXPORT Metronome { - public: - class RTC_EXPORT TickListener { - public: - virtual ~TickListener() = default; - - // OnTick is run on the task queue provided by OnTickTaskQueue each time the - // metronome ticks. - virtual void OnTick() = 0; - - // The task queue that OnTick will run on. Must not be null. - virtual TaskQueueBase* OnTickTaskQueue() = 0; - }; - - virtual ~Metronome() = default; - - // Adds a tick listener to the metronome. Once this method has returned - // OnTick will be invoked on each metronome tick. A listener may - // only be added to the metronome once. - virtual void AddListener(TickListener* listener) = 0; - - // Removes the tick listener from the metronome. Once this method has returned - // OnTick will never be called again. This method must not be called from - // within OnTick. 
- virtual void RemoveListener(TickListener* listener) = 0; - - // Returns the current tick period of the metronome. - virtual TimeDelta TickPeriod() const = 0; -}; - -} // namespace webrtc - -#endif // API_METRONOME_METRONOME_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc index 9f159ea73..230731c42 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.cc @@ -41,6 +41,12 @@ PeerConnectionInterface::RTCConfiguration::RTCConfiguration( PeerConnectionInterface::RTCConfiguration::~RTCConfiguration() = default; +RTCError PeerConnectionInterface::RemoveTrackNew( + rtc::scoped_refptr sender) { + return RTCError(RemoveTrack(sender) ? RTCErrorType::NONE + : RTCErrorType::INTERNAL_ERROR); +} + RTCError PeerConnectionInterface::SetConfiguration( const PeerConnectionInterface::RTCConfiguration& config) { return RTCError(); diff --git a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h index 4c4a638ad..6d42b848b 100644 --- a/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/peer_connection_interface.h @@ -95,7 +95,6 @@ #include "api/jsep.h" #include "api/media_stream_interface.h" #include "api/media_types.h" -#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/packet_socket_factory.h" @@ -170,10 +169,9 @@ class StatsObserver : public rtc::RefCountInterface { }; enum class SdpSemantics { - // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB. kPlanB_DEPRECATED, kPlanB [[deprecated]] = kPlanB_DEPRECATED, - kUnifiedPlan, + kUnifiedPlan }; class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { @@ -624,26 +622,27 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // cost. absl::optional network_preference; - // Configure the SDP semantics used by this PeerConnection. By default, this - // is Unified Plan which is compliant to the WebRTC 1.0 specification. It is - // possible to overrwite this to the deprecated Plan B SDP format, but note - // that kPlanB will be deleted at some future date, see - // https://crbug.com/webrtc/13528. + // Configure the SDP semantics used by this PeerConnection. Note that the + // WebRTC 1.0 specification requires kUnifiedPlan semantics. The + // RtpTransceiver API is only available with kUnifiedPlan semantics. // - // kUnifiedPlan will cause the PeerConnection to create offers and answers - // with multiple m= sections where each m= section maps to one RtpSender and - // one RtpReceiver (an RtpTransceiver), either both audio or both video. - // This will also cause the PeerConnection to ignore all but the first - // a=ssrc lines that form a Plan B streams (if the PeerConnection is given - // Plan B SDP to process). + // kUnifiedPlan will cause PeerConnection to create offers and answers with + // multiple m= sections where each m= section maps to one RtpSender and one + // RtpReceiver (an RtpTransceiver), either both audio or both video. This + // will also cause PeerConnection to ignore all but the first a=ssrc lines + // that form a Plan B stream. 
// - // kPlanB will cause the PeerConnection to create offers and answers with at + // kPlanB will cause PeerConnection to create offers and answers with at // most one audio and one video m= section with multiple RtpSenders and // RtpReceivers specified as multiple a=ssrc lines within the section. This // will also cause PeerConnection to ignore all but the first m= section of - // the same media type (if the PeerConnection is given Unified Plan SDP to - // process). - SdpSemantics sdp_semantics = SdpSemantics::kUnifiedPlan; + // the same media type. + // + // For users who have to interwork with legacy WebRTC implementations, + // it is possible to specify kPlanB until the code is finally removed. + // + // For all other users, specify kUnifiedPlan. + SdpSemantics sdp_semantics = SdpSemantics::kPlanB_DEPRECATED; // TODO(bugs.webrtc.org/9891) - Move to crypto_options or remove. // Actively reset the SRTP parameters whenever the DTLS transports @@ -806,25 +805,23 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { rtc::scoped_refptr track, const std::vector& stream_ids) = 0; - // Removes the connection between a MediaStreamTrack and the PeerConnection. - // Stops sending on the RtpSender and marks the + // Remove an RtpSender from this PeerConnection. + // Returns true on success. + // TODO(steveanton): Replace with signature that returns RTCError. + virtual bool RemoveTrack(RtpSenderInterface* sender) = 0; + + // Plan B semantics: Removes the RtpSender from this PeerConnection. + // Unified Plan semantics: Stop sending on the RtpSender and mark the // corresponding RtpTransceiver direction as no longer sending. - // https://w3c.github.io/webrtc-pc/#dom-rtcpeerconnection-removetrack // // Errors: // - INVALID_PARAMETER: `sender` is null or (Plan B only) the sender is not // associated with this PeerConnection. // - INVALID_STATE: PeerConnection is closed. - // - // Plan B semantics: Removes the RtpSender from this PeerConnection. - // // TODO(bugs.webrtc.org/9534): Rename to RemoveTrack once the other signature - // is removed; remove default implementation once upstream is updated. - virtual RTCError RemoveTrackOrError( - rtc::scoped_refptr sender) { - RTC_CHECK_NOTREACHED(); - return RTCError(); - } + // is removed. + virtual RTCError RemoveTrackNew( + rtc::scoped_refptr sender); // AddTransceiver creates a new RtpTransceiver and adds it to the set of // transceivers. Adding a transceiver will cause future calls to CreateOffer @@ -1298,6 +1295,14 @@ class PeerConnectionObserver { // A new ICE candidate has been gathered. virtual void OnIceCandidate(const IceCandidateInterface* candidate) = 0; + // Gathering of an ICE candidate failed. + // See https://w3c.github.io/webrtc-pc/#event-icecandidateerror + // `host_candidate` is a stringified socket address. + virtual void OnIceCandidateError(const std::string& host_candidate, + const std::string& url, + int error_code, + const std::string& error_text) {} + // Gathering of an ICE candidate failed. 
// See https://w3c.github.io/webrtc-pc/#event-icecandidateerror virtual void OnIceCandidateError(const std::string& address, @@ -1420,7 +1425,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { rtc::Thread* network_thread = nullptr; rtc::Thread* worker_thread = nullptr; rtc::Thread* signaling_thread = nullptr; - rtc::SocketFactory* socket_factory = nullptr; std::unique_ptr task_queue_factory; std::unique_ptr media_engine; std::unique_ptr call_factory; @@ -1438,7 +1442,6 @@ struct RTC_EXPORT PeerConnectionFactoryDependencies final { std::unique_ptr trials; std::unique_ptr transport_controller_send_factory; - std::unique_ptr metronome; }; // PeerConnectionFactoryInterface is the factory interface used for creating @@ -1612,8 +1615,7 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case SignalingState::kClosed: return "closed"; } - // This cannot happen. - // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. + RTC_CHECK_NOTREACHED(); return ""; } @@ -1628,8 +1630,7 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case IceGatheringState::kIceGatheringComplete: return "complete"; } - // This cannot happen. - // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. + RTC_CHECK_NOTREACHED(); return ""; } @@ -1650,8 +1651,7 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case PeerConnectionState::kClosed: return "closed"; } - // This cannot happen. - // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. + RTC_CHECK_NOTREACHED(); return ""; } @@ -1673,12 +1673,10 @@ inline constexpr absl::string_view PeerConnectionInterface::AsString( case kIceConnectionClosed: return "closed"; case kIceConnectionMax: - // This cannot happen. - // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. + RTC_CHECK_NOTREACHED(); return ""; } - // This cannot happen. - // Not using "RTC_CHECK_NOTREACHED()" because AsString() is constexpr. + RTC_CHECK_NOTREACHED(); return ""; } diff --git a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h index f20228b74..931cb2076 100644 --- a/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/ref_counted_base.h @@ -12,6 +12,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_counter.h" namespace rtc { @@ -20,9 +21,6 @@ class RefCountedBase { public: RefCountedBase() = default; - RefCountedBase(const RefCountedBase&) = delete; - RefCountedBase& operator=(const RefCountedBase&) = delete; - void AddRef() const { ref_count_.IncRef(); } RefCountReleaseStatus Release() const { const auto status = ref_count_.DecRef(); @@ -41,6 +39,8 @@ class RefCountedBase { private: mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedBase); }; // Template based version of `RefCountedBase` for simple implementations that do @@ -61,9 +61,6 @@ class RefCountedNonVirtual { public: RefCountedNonVirtual() = default; - RefCountedNonVirtual(const RefCountedNonVirtual&) = delete; - RefCountedNonVirtual& operator=(const RefCountedNonVirtual&) = delete; - void AddRef() const { ref_count_.IncRef(); } RefCountReleaseStatus Release() const { // If you run into this assert, T has virtual methods. 
There are two @@ -91,6 +88,8 @@ class RefCountedNonVirtual { private: mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedNonVirtual); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h index 42ceed18d..1376793a0 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_error.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_error.h @@ -244,7 +244,7 @@ class RTCErrorOr { // // REQUIRES: !error.ok(). This requirement is DCHECKed. RTCErrorOr(RTCError&& error) : error_(std::move(error)) { // NOLINT - RTC_DCHECK(!error_.ok()); + RTC_DCHECK(!error.ok()); } // Constructs a new RTCErrorOr with the given value. After calling this diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h index 8697a25a7..51db8f0b4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event.h @@ -27,7 +27,7 @@ class RtcEvent { // of Type. This leaks the information of existing subclasses into the // superclass, but the *actual* information - rtclog::StreamConfig, etc. - // is kept separate. - enum class Type : uint32_t { + enum class Type { AlrStateEvent, RouteChangeEvent, RemoteEstimateEvent, @@ -53,9 +53,7 @@ class RtcEvent { GenericPacketSent, GenericPacketReceived, GenericAckReceived, - FrameDecoded, - BeginV3Log = 0x2501580, - EndV3Log = 0x2501581 + FrameDecoded }; RtcEvent(); @@ -65,13 +63,6 @@ class RtcEvent { virtual bool IsConfigEvent() const = 0; - // Events are grouped by Type before being encoded. - // Optionally, `GetGroupKey` can be overloaded to group the - // events by a secondary key (in addition to the event type.) - // This can, in some cases, improve compression efficiency - // e.g. by grouping events by SSRC. - virtual uint32_t GetGroupKey() const { return 0; } - int64_t timestamp_ms() const { return timestamp_us_ / 1000; } int64_t timestamp_us() const { return timestamp_us_; } diff --git a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h index 7b42cdc02..86613ddd8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtc_event_log/rtc_event_log.h @@ -29,7 +29,7 @@ class RtcEventLog { // TODO(eladalon): Get rid of the legacy encoding and this enum once all // clients have migrated to the new format. - enum class EncodingType { Legacy, NewFormat, ProtoFree }; + enum class EncodingType { Legacy, NewFormat }; virtual ~RtcEventLog() = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h index 031e33332..2ca317403 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_packet_infos.h @@ -18,7 +18,6 @@ #include "api/ref_counted_base.h" #include "api/rtp_packet_info.h" #include "api/scoped_refptr.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -80,7 +79,7 @@ class RTC_EXPORT RtpPacketInfos { size_type size() const { return entries().size(); } private: - class Data final : public rtc::RefCountedNonVirtual { + class Data : public rtc::RefCountedBase { public: static rtc::scoped_refptr Create(const vector_type& entries) { // Performance optimization for the empty case. 
@@ -88,7 +87,7 @@ class RTC_EXPORT RtpPacketInfos { return nullptr; } - return rtc::make_ref_counted(entries); + return new Data(entries); } static rtc::scoped_refptr Create(vector_type&& entries) { @@ -97,16 +96,16 @@ class RTC_EXPORT RtpPacketInfos { return nullptr; } - return rtc::make_ref_counted(std::move(entries)); + return new Data(std::move(entries)); } const vector_type& entries() const { return entries_; } + private: explicit Data(const vector_type& entries) : entries_(entries) {} explicit Data(vector_type&& entries) : entries_(std::move(entries)) {} - ~Data() = default; + ~Data() override {} - private: const vector_type entries_; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc index c48b8da02..feba39348 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.cc @@ -11,7 +11,6 @@ #include #include -#include #include #include "api/array_view.h" @@ -281,14 +280,6 @@ const std::vector RtpExtension::DeduplicateHeaderExtensions( } } - // Sort the returned vector to make comparisons of header extensions reliable. - // In order of priority, we sort by uri first, then encrypt and id last. - std::sort(filtered.begin(), filtered.end(), - [](const RtpExtension& a, const RtpExtension& b) { - return std::tie(a.uri, a.encrypt, a.id) < - std::tie(b.uri, b.encrypt, b.id); - }); - return filtered; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h index 45cedfdd9..84f3a0e84 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_parameters.h @@ -286,9 +286,6 @@ struct RTC_EXPORT RtpExtension { bool encrypt); // Returns a list of extensions where any extension URI is unique. - // The returned list will be sorted by uri first, then encrypt and id last. - // Having the list sorted allows the caller fo compare filtered lists for - // equality to detect when changes have been made. static const std::vector DeduplicateHeaderExtensions( const std::vector& extensions, Filter filter); diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc index 7267b286b..454e450c8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.cc @@ -44,6 +44,33 @@ void RtpTransceiverInterface::StopInternal() { << "DEBUG: RtpTransceiverInterface::StopInternal called"; } +RTCError RtpTransceiverInterface::SetCodecPreferences( + rtc::ArrayView) { + RTC_DCHECK_NOTREACHED() << "Not implemented"; + return {}; +} + +std::vector RtpTransceiverInterface::codec_preferences() + const { + return {}; +} + +std::vector +RtpTransceiverInterface::HeaderExtensionsToOffer() const { + return {}; +} + +webrtc::RTCError RtpTransceiverInterface::SetOfferedRtpHeaderExtensions( + rtc::ArrayView + header_extensions_to_offer) { + return webrtc::RTCError(webrtc::RTCErrorType::UNSUPPORTED_OPERATION); +} + +std::vector +RtpTransceiverInterface::HeaderExtensionsNegotiated() const { + return {}; +} + // TODO(bugs.webrtc.org/11839) Remove default implementations when clients // are updated. 
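The rtp_parameters.cc hunk above drops the std::sort that the deleted comment in rtp_parameters.h relies on: DeduplicateHeaderExtensions ordered the filtered extensions by uri, then encrypt, then id, so two filtered lists could be compared for equality to detect changes. A standalone sketch of that ordering, using a pared-down extension struct instead of webrtc::RtpExtension:

#include <algorithm>
#include <string>
#include <tuple>
#include <vector>

struct ExtensionSketch {
  std::string uri;
  bool encrypt = false;
  int id = 0;
};

// Deterministic order: uri first, then encrypt, then id, mirroring the
// comparator removed above; equal contents then compare equal element-wise.
void SortForComparison(std::vector<ExtensionSketch>& extensions) {
  std::sort(extensions.begin(), extensions.end(),
            [](const ExtensionSketch& a, const ExtensionSketch& b) {
              return std::tie(a.uri, a.encrypt, a.id) <
                     std::tie(b.uri, b.encrypt, b.id);
            });
}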
void RtpTransceiverInterface::SetDirection( diff --git a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h index c9d911fac..4799c4b15 100644 --- a/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/rtp_transceiver_interface.h @@ -97,7 +97,8 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // transceiver's stop() method has been called, but the negotiation with // the other end for shutting down the transceiver is not yet done. // https://w3c.github.io/webrtc-pc/#dfn-stopping-0 - virtual bool stopping() const = 0; + // TODO(hta): Remove default implementation. + virtual bool stopping() const; // The direction attribute indicates the preferred direction of this // transceiver, which will be used in calls to CreateOffer and CreateAnswer. @@ -146,28 +147,28 @@ class RTC_EXPORT RtpTransceiverInterface : public rtc::RefCountInterface { // by WebRTC for this transceiver. // https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-setcodecpreferences virtual RTCError SetCodecPreferences( - rtc::ArrayView codecs) = 0; - virtual std::vector codec_preferences() const = 0; + rtc::ArrayView codecs); + virtual std::vector codec_preferences() const; // Readonly attribute which contains the set of header extensions that was set // with SetOfferedRtpHeaderExtensions, or a default set if it has not been // called. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual std::vector HeaderExtensionsToOffer() - const = 0; + const; // Readonly attribute which is either empty if negotation has not yet // happened, or a vector of the negotiated header extensions. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual std::vector HeaderExtensionsNegotiated() - const = 0; + const; // The SetOfferedRtpHeaderExtensions method modifies the next SDP negotiation // so that it negotiates use of header extensions which are not kStopped. // https://w3c.github.io/webrtc-extensions/#rtcrtptransceiver-interface virtual webrtc::RTCError SetOfferedRtpHeaderExtensions( rtc::ArrayView - header_extensions_to_offer) = 0; + header_extensions_to_offer); protected: ~RtpTransceiverInterface() override = default; diff --git a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h index f38d962e1..dec3094b4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats/rtcstats_objects.h @@ -225,6 +225,7 @@ class RTC_EXPORT RTCIceCandidateStats : public RTCStats { // TODO(hbos): Support enum types? "RTCStatsMember"? RTCStatsMember candidate_type; RTCStatsMember priority; + // TODO(hbos): Not collected by `RTCStatsCollector`. crbug.com/632723 RTCStatsMember url; protected: diff --git a/TMessagesProj/jni/voip/webrtc/api/stats_types.cc b/TMessagesProj/jni/voip/webrtc/api/stats_types.cc index b044e4ab1..1090643f1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats_types.cc +++ b/TMessagesProj/jni/voip/webrtc/api/stats_types.cc @@ -648,7 +648,6 @@ const char* StatsReport::Value::display_name() const { return "googTrackId"; case kStatsValueNameTimingFrameInfo: return "googTimingFrameInfo"; - // TODO(bugs.webrtc.org/11226): Remove. 
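The rtp_transceiver_interface.{h,cc} hunks above turn the codec-preference and header-extension methods from pure virtuals into defaults that return empty values or UNSUPPORTED_OPERATION. A hedged usage sketch of the SetCodecPreferences surface those defaults back: the "prefer VP8" filter and the helper name are illustrative, not code from this tree.

#include <vector>

#include "api/media_types.h"
#include "api/peer_connection_interface.h"
#include "api/rtp_transceiver_interface.h"

// Keep only VP8 (plus the rtx/red/ulpfec helpers) in the transceiver's codec
// preferences. `factory` and `transceiver` are assumed to exist already.
webrtc::RTCError PreferVp8(webrtc::PeerConnectionFactoryInterface* factory,
                           webrtc::RtpTransceiverInterface* transceiver) {
  webrtc::RtpCapabilities caps =
      factory->GetRtpReceiverCapabilities(cricket::MEDIA_TYPE_VIDEO);
  std::vector<webrtc::RtpCodecCapability> preferred;
  for (const auto& codec : caps.codecs) {
    if (codec.name == "VP8" || codec.name == "rtx" || codec.name == "red" ||
        codec.name == "ulpfec") {
      preferred.push_back(codec);
    }
  }
  // With the default implementation restored above, this is a DCHECK/no-op
  // unless a concrete transceiver implementation overrides it.
  return transceiver->SetCodecPreferences(preferred);
}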
case kStatsValueNameTypingNoiseState: return "googTypingNoiseState"; case kStatsValueNameWritable: diff --git a/TMessagesProj/jni/voip/webrtc/api/stats_types.h b/TMessagesProj/jni/voip/webrtc/api/stats_types.h index e7dd528e6..b7cb8eff7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/stats_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/stats_types.h @@ -22,6 +22,7 @@ #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/rtc_export.h" @@ -235,7 +236,6 @@ class RTC_EXPORT StatsReport { kStatsValueNameTrackId, kStatsValueNameTransmitBitrate, kStatsValueNameTransportType, - // TODO(bugs.webrtc.org/11226): Remove. kStatsValueNameTypingNoiseState, kStatsValueNameWritable, kStatsValueNameAudioDeviceUnderrunCounter, @@ -288,9 +288,6 @@ class RTC_EXPORT StatsReport { ~Value(); - Value(const Value&) = delete; - Value& operator=(const Value&) = delete; - // Support ref counting. Note that for performance reasons, we // don't use thread safe operations. Therefore, all operations // affecting the ref count (in practice, creation and copying of @@ -361,6 +358,8 @@ class RTC_EXPORT StatsReport { const char* static_string_; Id* id_; } value_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Value); }; typedef rtc::scoped_refptr ValuePtr; @@ -370,9 +369,6 @@ class RTC_EXPORT StatsReport { explicit StatsReport(const Id& id); ~StatsReport(); - StatsReport(const StatsReport&) = delete; - StatsReport& operator=(const StatsReport&) = delete; - // Factory functions for various types of stats IDs. static Id NewBandwidthEstimationId(); static Id NewTypedId(StatsType type, const std::string& id); @@ -412,6 +408,8 @@ class RTC_EXPORT StatsReport { const Id id_; double timestamp_; // Time since 1970-01-01T00:00:00Z in milliseconds. Values values_; + + RTC_DISALLOW_COPY_AND_ASSIGN(StatsReport); }; // Typedef for an array of const StatsReport pointers. diff --git a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h index b7c92f864..d8af6e67d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h +++ b/TMessagesProj/jni/voip/webrtc/api/task_queue/task_queue_base.h @@ -11,7 +11,6 @@ #define API_TASK_QUEUE_TASK_QUEUE_BASE_H_ #include -#include #include "api/task_queue/queued_task.h" #include "rtc_base/system/rtc_export.h" @@ -25,16 +24,6 @@ namespace webrtc { // known task queue, use IsCurrent(). class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { public: - enum class DelayPrecision { - // This may include up to a 17 ms leeway in addition to OS timer precision. - // See PostDelayedTask() for more information. - kLow, - // This does not have the additional delay that kLow has, but it is still - // limited by OS timer precision. See PostDelayedHighPrecisionTask() for - // more information. - kHigh, - }; - // Starts destruction of the task queue. // On return ensures no task are running and no new tasks are able to start // on the task queue. @@ -59,70 +48,14 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueueBase { // May be called on any thread or task queue, including this task queue. virtual void PostTask(std::unique_ptr task) = 0; - // Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever - // possible. - // // Schedules a task to execute a specified number of milliseconds from when - // the call is made, using "low" precision. 
All scheduling is affected by - // OS-specific leeway and current workloads which means that in terms of - // precision there are no hard guarantees, but in addition to the OS induced - // leeway, "low" precision adds up to a 17 ms additional leeway. The purpose - // of this leeway is to achieve more efficient CPU scheduling and reduce Idle - // Wake Up frequency. - // - // The task may execute with [-1, 17 + OS induced leeway) ms additional delay. - // - // Avoid making assumptions about the precision of the OS scheduler. On macOS, - // the OS induced leeway may be 10% of sleep interval. On Windows, 1 ms - // precision timers may be used but there are cases, such as when running on - // battery, when the timer precision can be as poor as 15 ms. - // - // "Low" precision is not implemented everywhere yet. Where not yet - // implemented, PostDelayedTask() has "high" precision. See - // https://crbug.com/webrtc/13583 for more information. - // + // the call is made. The precision should be considered as "best effort" + // and in some cases, such as on Windows when all high precision timers have + // been used up, can be off by as much as 15 millseconds. // May be called on any thread or task queue, including this task queue. virtual void PostDelayedTask(std::unique_ptr task, uint32_t milliseconds) = 0; - // Prefer PostDelayedTask() over PostDelayedHighPrecisionTask() whenever - // possible. - // - // Schedules a task to execute a specified number of milliseconds from when - // the call is made, using "high" precision. All scheduling is affected by - // OS-specific leeway and current workloads which means that in terms of - // precision there are no hard guarantees. - // - // The task may execute with [-1, OS induced leeway] ms additional delay. - // - // Avoid making assumptions about the precision of the OS scheduler. On macOS, - // the OS induced leeway may be 10% of sleep interval. On Windows, 1 ms - // precision timers may be used but there are cases, such as when running on - // battery, when the timer precision can be as poor as 15 ms. - // - // May be called on any thread or task queue, including this task queue. - virtual void PostDelayedHighPrecisionTask(std::unique_ptr task, - uint32_t milliseconds) { - // Remove default implementation when dependencies have implemented this - // method. - PostDelayedTask(std::move(task), milliseconds); - } - - // As specified by |precision|, calls either PostDelayedTask() or - // PostDelayedHighPrecisionTask(). - void PostDelayedTaskWithPrecision(DelayPrecision precision, - std::unique_ptr task, - uint32_t milliseconds) { - switch (precision) { - case DelayPrecision::kLow: - PostDelayedTask(std::move(task), milliseconds); - break; - case DelayPrecision::kHigh: - PostDelayedHighPrecisionTask(std::move(task), milliseconds); - break; - } - } - // Returns the task queue that is running the current thread. // Returns nullptr if this thread is not associated with any task queue. // May be called on any thread or task queue, including this task queue. 
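The task_queue_base.h hunk above removes the DelayPrecision enum and the PostDelayedTaskWithPrecision helper that dispatched between the low- and high-precision overloads, leaving only the single "best effort" PostDelayedTask. A standalone sketch of the dispatch being removed; the ~17 ms leeway figure comes from the deleted comment, not from any measurement here.

#include <cstdint>
#include <memory>
#include <utility>

#include "api/task_queue/queued_task.h"
#include "api/task_queue/task_queue_base.h"

enum class DelayPrecisionSketch { kLow, kHigh };

// "Low" precision trades up to roughly 17 ms of extra leeway for fewer idle
// wake-ups; "high" precision only carries the OS timer leeway. After this
// diff the high-precision overload no longer exists, so both cases collapse
// onto PostDelayedTask().
void PostWithPrecision(webrtc::TaskQueueBase* queue,
                       DelayPrecisionSketch precision,
                       std::unique_ptr<webrtc::QueuedTask> task,
                       uint32_t delay_ms) {
  switch (precision) {
    case DelayPrecisionSketch::kLow:
    case DelayPrecisionSketch::kHigh:  // Folded together once the API is gone.
      queue->PostDelayedTask(std::move(task), delay_ms);
      break;
  }
}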
diff --git a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h b/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h index 4a262564a..80ae20c3c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/dummy_peer_connection.h @@ -45,7 +45,9 @@ class DummyPeerConnection : public PeerConnectionInterface { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } - RTCError RemoveTrackOrError( + bool RemoveTrack(RtpSenderInterface* sender) override { return false; } + + RTCError RemoveTrackNew( rtc::scoped_refptr sender) override { return RTCError(RTCErrorType::UNSUPPORTED_OPERATION, "Not implemented"); } diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h index 7cc17a842..e863cac6e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_async_dns_resolver.h @@ -24,8 +24,8 @@ class MockAsyncDnsResolverResult : public AsyncDnsResolverResult { MOCK_METHOD(bool, GetResolvedAddress, (int, rtc::SocketAddress*), - (const, override)); - MOCK_METHOD(int, GetError, (), (const, override)); + (const override)); + MOCK_METHOD(int, GetError, (), (const override)); }; class MockAsyncDnsResolver : public AsyncDnsResolverInterface { @@ -34,7 +34,7 @@ class MockAsyncDnsResolver : public AsyncDnsResolverInterface { Start, (const rtc::SocketAddress&, std::function), (override)); - MOCK_METHOD(AsyncDnsResolverResult&, result, (), (const, override)); + MOCK_METHOD(AsyncDnsResolverResult&, result, (), (const override)); }; class MockAsyncDnsResolverFactory : public AsyncDnsResolverFactoryInterface { diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h index 40f7edb08..9346ffd63 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_data_channel.h @@ -22,8 +22,7 @@ class MockDataChannelInterface final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr( - new MockDataChannelInterface()); + return new MockDataChannelInterface(); } MOCK_METHOD(void, diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h index 17a30a877..29521e6e2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_media_stream_interface.h @@ -22,7 +22,7 @@ class MockAudioSource final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockAudioSource()); + return new MockAudioSource(); } MOCK_METHOD(void, @@ -55,7 +55,7 @@ class MockAudioSource final class MockAudioTrack final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockAudioTrack()); + return new MockAudioTrack(); } MOCK_METHOD(void, @@ -67,7 +67,7 @@ class MockAudioTrack final : public rtc::RefCountedObject { (ObserverInterface * observer), (override)); MOCK_METHOD(std::string, kind, (), (const, override)); - MOCK_METHOD(std::string, id, (), (const, override)); + MOCK_METHOD(std::string, id, (), (const override)); MOCK_METHOD(bool, enabled, (), (const, override)); MOCK_METHOD(bool, set_enabled, (bool enable), (override)); 
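The mock headers above (mock_async_dns_resolver.h, mock_media_stream_interface.h) and those that follow move from the comma-separated gMock qualifier list `(const, override)` to the space-separated `(const override)` spelling used by the older tree. For reference, the comma-separated form documented for the MOCK_METHOD macro looks like this; the interface and mock below are illustrative only:

#include <string>

#include "gmock/gmock.h"

struct TrackSketch {
  virtual ~TrackSketch() = default;
  virtual std::string id() const = 0;
  virtual bool set_enabled(bool enable) = 0;
};

class MockTrackSketch : public TrackSketch {
 public:
  // The fourth argument is a parenthesized list of qualifiers; `const` and
  // `override` are separate entries in the documented spelling.
  MOCK_METHOD(std::string, id, (), (const, override));
  MOCK_METHOD(bool, set_enabled, (bool enable), (override));
};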
MOCK_METHOD(TrackState, state, (), (const, override)); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h index 6bab595b5..c2f2435fb 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peer_connection_factory_interface.h @@ -23,8 +23,7 @@ class MockPeerConnectionFactoryInterface final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr( - new MockPeerConnectionFactoryInterface()); + return new MockPeerConnectionFactoryInterface(); } MOCK_METHOD(void, SetOptions, (const Options&), (override)); @@ -48,11 +47,11 @@ class MockPeerConnectionFactoryInterface final MOCK_METHOD(RtpCapabilities, GetRtpSenderCapabilities, (cricket::MediaType), - (const, override)); + (const override)); MOCK_METHOD(RtpCapabilities, GetRtpReceiverCapabilities, (cricket::MediaType), - (const, override)); + (const override)); MOCK_METHOD(rtc::scoped_refptr, CreateLocalMediaStream, (const std::string&), diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h index effd24e29..cd67d32a1 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_peerconnectioninterface.h @@ -29,7 +29,7 @@ class MockPeerConnectionInterface : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::make_ref_counted(); + return new MockPeerConnectionInterface(); } // PeerConnectionInterface @@ -48,8 +48,9 @@ class MockPeerConnectionInterface (rtc::scoped_refptr, const std::vector&), (override)); + MOCK_METHOD(bool, RemoveTrack, (RtpSenderInterface*), (override)); MOCK_METHOD(RTCError, - RemoveTrackOrError, + RemoveTrackNew, (rtc::scoped_refptr), (override)); MOCK_METHOD(RTCErrorOr>, @@ -76,15 +77,15 @@ class MockPeerConnectionInterface MOCK_METHOD(std::vector>, GetSenders, (), - (const, override)); + (const override)); MOCK_METHOD(std::vector>, GetReceivers, (), - (const, override)); + (const override)); MOCK_METHOD(std::vector>, GetTransceivers, (), - (const, override)); + (const override)); MOCK_METHOD(bool, GetStats, (StatsObserver*, MediaStreamTrackInterface*, StatsOutputLevel), @@ -104,7 +105,7 @@ class MockPeerConnectionInterface MOCK_METHOD(rtc::scoped_refptr, GetSctpTransport, (), - (const, override)); + (const override)); MOCK_METHOD(RTCErrorOr>, CreateDataChannelOrError, (const std::string&, const DataChannelInit*), @@ -112,27 +113,27 @@ class MockPeerConnectionInterface MOCK_METHOD(const SessionDescriptionInterface*, local_description, (), - (const, override)); + (const override)); MOCK_METHOD(const SessionDescriptionInterface*, remote_description, (), - (const, override)); + (const override)); MOCK_METHOD(const SessionDescriptionInterface*, current_local_description, (), - (const, override)); + (const override)); MOCK_METHOD(const SessionDescriptionInterface*, current_remote_description, (), - (const, override)); + (const override)); MOCK_METHOD(const SessionDescriptionInterface*, pending_local_description, (), - (const, override)); + (const override)); MOCK_METHOD(const SessionDescriptionInterface*, pending_remote_description, (), - (const, override)); + (const override)); MOCK_METHOD(void, RestartIce, (), (override)); MOCK_METHOD(void, CreateOffer, diff --git 
a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h index 5ea9028b7..a0a08c477 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtp_transceiver.h @@ -23,7 +23,7 @@ class MockRtpTransceiver final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockRtpTransceiver()); + return new MockRtpTransceiver(); } MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); @@ -70,10 +70,6 @@ class MockRtpTransceiver final HeaderExtensionsToOffer, (), (const, override)); - MOCK_METHOD(std::vector, - HeaderExtensionsNegotiated, - (), - (const, override)); MOCK_METHOD(webrtc::RTCError, SetOfferedRtpHeaderExtensions, (rtc::ArrayView diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h index 4bcf064b2..a0b79e0be 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpreceiver.h @@ -24,20 +24,20 @@ class MockRtpReceiver : public rtc::RefCountedObject { MOCK_METHOD(rtc::scoped_refptr, track, (), - (const, override)); + (const override)); MOCK_METHOD(std::vector>, streams, (), - (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); - MOCK_METHOD(std::string, id, (), (const, override)); - MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); + (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); MOCK_METHOD(void, SetObserver, (RtpReceiverObserverInterface*), (override)); MOCK_METHOD(void, SetJitterBufferMinimumDelay, (absl::optional), (override)); - MOCK_METHOD(std::vector, GetSources, (), (const, override)); + MOCK_METHOD(std::vector, GetSources, (), (const override)); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h index e4d6399ee..f12a6185a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_rtpsender.h @@ -25,21 +25,21 @@ class MockRtpSender : public rtc::RefCountedObject { MOCK_METHOD(rtc::scoped_refptr, track, (), - (const, override)); - MOCK_METHOD(uint32_t, ssrc, (), (const, override)); - MOCK_METHOD(cricket::MediaType, media_type, (), (const, override)); - MOCK_METHOD(std::string, id, (), (const, override)); - MOCK_METHOD(std::vector, stream_ids, (), (const, override)); + (const override)); + MOCK_METHOD(uint32_t, ssrc, (), (const override)); + MOCK_METHOD(cricket::MediaType, media_type, (), (const override)); + MOCK_METHOD(std::string, id, (), (const override)); + MOCK_METHOD(std::vector, stream_ids, (), (const override)); MOCK_METHOD(std::vector, init_send_encodings, (), - (const, override)); - MOCK_METHOD(RtpParameters, GetParameters, (), (const, override)); + (const override)); + MOCK_METHOD(RtpParameters, GetParameters, (), (const override)); MOCK_METHOD(RTCError, SetParameters, (const RtpParameters&), (override)); MOCK_METHOD(rtc::scoped_refptr, GetDtmfSender, (), - (const, override)); + (const override)); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h 
b/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h index 5cebcaba8..36798b5d7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_transformable_video_frame.h @@ -21,9 +21,9 @@ namespace webrtc { class MockTransformableVideoFrame : public webrtc::TransformableVideoFrameInterface { public: - MOCK_METHOD(rtc::ArrayView, GetData, (), (const, override)); + MOCK_METHOD(rtc::ArrayView, GetData, (), (const override)); MOCK_METHOD(void, SetData, (rtc::ArrayView data), (override)); - MOCK_METHOD(uint32_t, GetTimestamp, (), (const, override)); + MOCK_METHOD(uint32_t, GetTimestamp, (), (const override)); MOCK_METHOD(uint32_t, GetSsrc, (), (const, override)); MOCK_METHOD(bool, IsKeyFrame, (), (const, override)); MOCK_METHOD(std::vector, GetAdditionalData, (), (const, override)); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h index 705d13509..58a531bf4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/mock_video_track.h @@ -24,7 +24,7 @@ class MockVideoTrack final : public rtc::RefCountedObject { public: static rtc::scoped_refptr Create() { - return rtc::scoped_refptr(new MockVideoTrack()); + return new MockVideoTrack(); } // NotifierInterface diff --git a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h index 1bd2eb263..303671c12 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/peerconnection_quality_test_fixture.h @@ -371,11 +371,6 @@ class PeerConnectionE2EQualityTestFixture { std::unique_ptr tls_cert_verifier) = 0; virtual PeerConfigurer* SetIceTransportFactory( std::unique_ptr factory) = 0; - // Flags to set on `cricket::PortAllocator`. These flags will be added - // to the default ones that are presented on the port allocator. - // For possible values check p2p/base/port_allocator.h. - virtual PeerConfigurer* SetPortAllocatorExtraFlags( - uint32_t extra_flags) = 0; // Add new video stream to the call that will be sent from this peer. // Default implementation of video frames generator will be used. @@ -401,22 +396,6 @@ class PeerConnectionE2EQualityTestFixture { // Set the audio stream for the call from this peer. If this method won't // be invoked, this peer will send no audio. virtual PeerConfigurer* SetAudioConfig(AudioConfig config) = 0; - - // Set if ULP FEC should be used or not. False by default. - virtual PeerConfigurer* SetUseUlpFEC(bool value) = 0; - // Set if Flex FEC should be used or not. False by default. - // Client also must enable `enable_flex_fec_support` in the `RunParams` to - // be able to use this feature. - virtual PeerConfigurer* SetUseFlexFEC(bool value) = 0; - // Specifies how much video encoder target bitrate should be different than - // target bitrate, provided by WebRTC stack. Must be greater than 0. Can be - // used to emulate overshooting of video encoders. This multiplier will - // be applied for all video encoder on both sides for all layers. Bitrate - // estimated by WebRTC stack will be multiplied by this multiplier and then - // provided into VideoEncoder::SetRates(...). 1.0 by default. 
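The deleted comment above describes the bitrate-multiplier knob: the target bitrate estimated by the WebRTC stack is multiplied before being handed to VideoEncoder::SetRates(), which lets a test emulate an overshooting encoder (in this tree the knob survives as RunParams::video_encoder_bitrate_multiplier, added further down). A hedged sketch of applying such a multiplier to a bitrate allocation; the helper name and call site are illustrative, not code from this tree:

#include <cstdint>

#include "api/video/video_bitrate_allocation.h"

// Scale every (spatial, temporal) layer of an allocation by `multiplier`
// before passing it on to VideoEncoder::SetRates(); multiplier > 1.0
// emulates an encoder that overshoots its target bitrate.
webrtc::VideoBitrateAllocation ScaleAllocation(
    const webrtc::VideoBitrateAllocation& in, double multiplier) {
  webrtc::VideoBitrateAllocation out;
  for (int si = 0; si < webrtc::kMaxSpatialLayers; ++si) {
    for (int ti = 0; ti < webrtc::kMaxTemporalStreams; ++ti) {
      if (in.HasBitrate(si, ti)) {
        out.SetBitrate(
            si, ti, static_cast<uint32_t>(in.GetBitrate(si, ti) * multiplier));
      }
    }
  }
  return out;
}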
- virtual PeerConfigurer* SetVideoEncoderBitrateMultiplier( - double multiplier) = 0; - // If is set, an RTCEventLog will be saved in that location and it will be // available for further analysis. virtual PeerConfigurer* SetRtcEventLogPath(std::string path) = 0; @@ -448,9 +427,15 @@ class PeerConnectionE2EQualityTestFixture { // it will be shut downed. TimeDelta run_duration; - // If set to true peers will be able to use Flex FEC, otherwise they won't - // be able to negotiate it even if it's enabled on per peer level. - bool enable_flex_fec_support = false; + bool use_ulp_fec = false; + bool use_flex_fec = false; + // Specifies how much video encoder target bitrate should be different than + // target bitrate, provided by WebRTC stack. Must be greater then 0. Can be + // used to emulate overshooting of video encoders. This multiplier will + // be applied for all video encoder on both sides for all layers. Bitrate + // estimated by WebRTC stack will be multiplied on this multiplier and then + // provided into VideoEncoder::SetRates(...). + double video_encoder_bitrate_multiplier = 1.0; // If true will set conference mode in SDP media section for all video // tracks for all peers. bool use_conference_mode = false; diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc index f082b1e93..0cf00da85 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.cc @@ -86,10 +86,6 @@ std::map VideoCodecTestStats::VideoStatistics::ToMap() map["framerate_fps"] = std::to_string(framerate_fps); map["enc_speed_fps"] = std::to_string(enc_speed_fps); map["dec_speed_fps"] = std::to_string(dec_speed_fps); - map["avg_encode_latency_sec"] = std::to_string(avg_encode_latency_sec); - map["max_encode_latency_sec"] = std::to_string(max_encode_latency_sec); - map["avg_decode_latency_sec"] = std::to_string(avg_decode_latency_sec); - map["max_decode_latency_sec"] = std::to_string(max_decode_latency_sec); map["avg_delay_sec"] = std::to_string(avg_delay_sec); map["max_key_frame_delay_sec"] = std::to_string(max_key_frame_delay_sec); map["max_delta_frame_delay_sec"] = std::to_string(max_delta_frame_delay_sec); diff --git a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h index a05985a66..3f862338e 100644 --- a/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h +++ b/TMessagesProj/jni/voip/webrtc/api/test/videocodec_test_stats.h @@ -101,11 +101,6 @@ class VideoCodecTestStats { float enc_speed_fps = 0.0f; float dec_speed_fps = 0.0f; - float avg_encode_latency_sec = 0.0f; - float max_encode_latency_sec = 0.0f; - float avg_decode_latency_sec = 0.0f; - float max_decode_latency_sec = 0.0f; - float avg_delay_sec = 0.0f; float max_key_frame_delay_sec = 0.0f; float max_delta_frame_delay_sec = 0.0f; diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc index d6495ce49..745194015 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.cc @@ -103,4 +103,8 @@ bool PacedPacketInfo::operator==(const PacedPacketInfo& rhs) const { probe_cluster_min_bytes == rhs.probe_cluster_min_bytes; } +ProcessInterval::ProcessInterval() = default; +ProcessInterval::ProcessInterval(const ProcessInterval&) = default; 
+ProcessInterval::~ProcessInterval() = default; + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h index 29a7cf770..4e96b0f12 100644 --- a/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h +++ b/TMessagesProj/jni/voip/webrtc/api/transport/network_types.h @@ -241,6 +241,9 @@ struct NetworkControlUpdate { // Process control struct ProcessInterval { + ProcessInterval(); + ProcessInterval(const ProcessInterval&); + ~ProcessInterval(); Timestamp at_time = Timestamp::PlusInfinity(); absl::optional pacer_queue; }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc index 86d1a698a..42d6b06b8 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_frame.cc @@ -13,7 +13,7 @@ namespace webrtc { bool EncodedFrame::delayed_by_retransmission() const { - return false; + return 0; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h index 88df34916..987645b56 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/encoded_image.h @@ -154,16 +154,6 @@ class RTC_EXPORT EncodedImage { return encoded_data_ ? encoded_data_->data() : nullptr; } - // Returns whether the encoded image can be considered to be of target - // quality. - bool IsAtTargetQuality() const { return at_target_quality_; } - - // Sets that the encoded image can be considered to be of target quality to - // true or false. - void SetAtTargetQuality(bool at_target_quality) { - at_target_quality_ = at_target_quality; - } - uint32_t _encodedWidth = 0; uint32_t _encodedHeight = 0; // NTP time of the capture time in local timebase in milliseconds. @@ -210,8 +200,6 @@ class RTC_EXPORT EncodedImage { // https://w3c.github.io/webrtc-pc/#dom-rtcrtpreceiver-getcontributingsources RtpPacketInfos packet_infos_; bool retransmission_allowed_ = true; - // True if the encoded image can be considered to be of target quality. - bool at_target_quality_ = false; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc deleted file mode 100644 index 8bf9f7662..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.cc +++ /dev/null @@ -1,211 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#include "api/video/i444_buffer.h" - -#include - -#include -#include - -#include "api/video/i420_buffer.h" -#include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" -#include "third_party/libyuv/include/libyuv/convert.h" -#include "third_party/libyuv/include/libyuv/planar_functions.h" -#include "third_party/libyuv/include/libyuv/scale.h" - -// Aligning pointer to 64 bytes for improved performance, e.g. use SIMD. 
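The network_types.{h,cc} hunks just above declare ProcessInterval's default constructor, copy constructor and destructor in the header and define them as `= default` in the .cc file instead of leaving them implicit. A compressed sketch of that pattern with a stand-in struct; the usual motivation, keeping definitions that touch heavyweight member types out of the header, is an assumption here rather than something this diff states.

#include <cstdint>

#include "absl/types/optional.h"

// In the header: declare the special members...
struct IntervalSketch {
  IntervalSketch();
  IntervalSketch(const IntervalSketch&);
  ~IntervalSketch();

  int64_t at_time_us = 0;
  absl::optional<int64_t> pacer_queue_bytes;
};

// ...and in the .cc file: default them out of line.
IntervalSketch::IntervalSketch() = default;
IntervalSketch::IntervalSketch(const IntervalSketch&) = default;
IntervalSketch::~IntervalSketch() = default;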
-static const int kBufferAlignment = 64; - -namespace webrtc { - -namespace { - -int I444DataSize(int height, int stride_y, int stride_u, int stride_v) { - return stride_y * height + stride_u * height + stride_v * height; -} - -} // namespace - -I444Buffer::I444Buffer(int width, int height) - : I444Buffer(width, height, width, (width), (width)) {} - -I444Buffer::I444Buffer(int width, - int height, - int stride_y, - int stride_u, - int stride_v) - : width_(width), - height_(height), - stride_y_(stride_y), - stride_u_(stride_u), - stride_v_(stride_v), - data_(static_cast( - AlignedMalloc(I444DataSize(height, stride_y, stride_u, stride_v), - kBufferAlignment))) { - RTC_DCHECK_GT(width, 0); - RTC_DCHECK_GT(height, 0); - RTC_DCHECK_GE(stride_y, width); - RTC_DCHECK_GE(stride_u, (width)); - RTC_DCHECK_GE(stride_v, (width)); -} - -I444Buffer::~I444Buffer() {} - -// static -rtc::scoped_refptr I444Buffer::Create(int width, int height) { - return rtc::make_ref_counted(width, height); -} - -// static -rtc::scoped_refptr I444Buffer::Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v) { - return rtc::make_ref_counted(width, height, stride_y, stride_u, - stride_v); -} - -// static -rtc::scoped_refptr I444Buffer::Copy( - const I444BufferInterface& source) { - return Copy(source.width(), source.height(), source.DataY(), source.StrideY(), - source.DataU(), source.StrideU(), source.DataV(), - source.StrideV()); -} - -// static -rtc::scoped_refptr I444Buffer::Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v) { - // Note: May use different strides than the input data. - rtc::scoped_refptr buffer = Create(width, height); - RTC_CHECK_EQ(0, libyuv::I444Copy(data_y, stride_y, data_u, stride_u, data_v, - stride_v, buffer->MutableDataY(), - buffer->StrideY(), buffer->MutableDataU(), - buffer->StrideU(), buffer->MutableDataV(), - buffer->StrideV(), width, height)); - return buffer; -} - -// static -rtc::scoped_refptr I444Buffer::Rotate( - const I444BufferInterface& src, - VideoRotation rotation) { - RTC_CHECK(src.DataY()); - RTC_CHECK(src.DataU()); - RTC_CHECK(src.DataV()); - - int rotated_width = src.width(); - int rotated_height = src.height(); - if (rotation == webrtc::kVideoRotation_90 || - rotation == webrtc::kVideoRotation_270) { - std::swap(rotated_width, rotated_height); - } - - rtc::scoped_refptr buffer = - I444Buffer::Create(rotated_width, rotated_height); - - RTC_CHECK_EQ(0, - libyuv::I444Rotate( - src.DataY(), src.StrideY(), src.DataU(), src.StrideU(), - src.DataV(), src.StrideV(), buffer->MutableDataY(), - buffer->StrideY(), buffer->MutableDataU(), buffer->StrideU(), - buffer->MutableDataV(), buffer->StrideV(), src.width(), - src.height(), static_cast(rotation))); - - return buffer; -} - -rtc::scoped_refptr I444Buffer::ToI420() { - rtc::scoped_refptr i420_buffer = - I420Buffer::Create(width(), height()); - libyuv::I444ToI420(DataY(), StrideY(), DataU(), StrideU(), DataV(), StrideV(), - i420_buffer->MutableDataY(), i420_buffer->StrideY(), - i420_buffer->MutableDataU(), i420_buffer->StrideU(), - i420_buffer->MutableDataV(), i420_buffer->StrideV(), - width(), height()); - return i420_buffer; -} - -void I444Buffer::InitializeData() { - memset(data_.get(), 0, - I444DataSize(height_, stride_y_, stride_u_, stride_v_)); -} - -int I444Buffer::width() const { - return width_; -} - -int I444Buffer::height() const { - return height_; -} - -const uint8_t* I444Buffer::DataY() const { - 
return data_.get(); -} -const uint8_t* I444Buffer::DataU() const { - return data_.get() + stride_y_ * height_; -} -const uint8_t* I444Buffer::DataV() const { - return data_.get() + stride_y_ * height_ + stride_u_ * ((height_)); -} - -int I444Buffer::StrideY() const { - return stride_y_; -} -int I444Buffer::StrideU() const { - return stride_u_; -} -int I444Buffer::StrideV() const { - return stride_v_; -} - -uint8_t* I444Buffer::MutableDataY() { - return const_cast(DataY()); -} -uint8_t* I444Buffer::MutableDataU() { - return const_cast(DataU()); -} -uint8_t* I444Buffer::MutableDataV() { - return const_cast(DataV()); -} - -void I444Buffer::CropAndScaleFrom(const I444BufferInterface& src, - int offset_x, - int offset_y, - int crop_width, - int crop_height) { - RTC_CHECK_LE(crop_width, src.width()); - RTC_CHECK_LE(crop_height, src.height()); - RTC_CHECK_LE(crop_width + offset_x, src.width()); - RTC_CHECK_LE(crop_height + offset_y, src.height()); - RTC_CHECK_GE(offset_x, 0); - RTC_CHECK_GE(offset_y, 0); - - const uint8_t* y_plane = src.DataY() + src.StrideY() * offset_y + offset_x; - const uint8_t* u_plane = src.DataU() + src.StrideU() * offset_y + offset_x; - const uint8_t* v_plane = src.DataV() + src.StrideV() * offset_y + offset_x; - int res = - libyuv::I444Scale(y_plane, src.StrideY(), u_plane, src.StrideU(), v_plane, - src.StrideV(), crop_width, crop_height, MutableDataY(), - StrideY(), MutableDataU(), StrideU(), MutableDataV(), - StrideV(), width(), height(), libyuv::kFilterBox); - - RTC_DCHECK_EQ(res, 0); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h deleted file mode 100644 index 557bf4f3e..000000000 --- a/TMessagesProj/jni/voip/webrtc/api/video/i444_buffer.h +++ /dev/null @@ -1,104 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef API_VIDEO_I444_BUFFER_H_ -#define API_VIDEO_I444_BUFFER_H_ - -#include - -#include - -#include "api/scoped_refptr.h" -#include "api/video/video_frame_buffer.h" -#include "api/video/video_rotation.h" -#include "rtc_base/memory/aligned_malloc.h" -#include "rtc_base/system/rtc_export.h" - -namespace webrtc { - -// Plain I444 buffer in standard memory. -// I444 represents an image with in YUV format withouth any chroma subsampling. -// https://en.wikipedia.org/wiki/Chroma_subsampling#4:4:4 -class RTC_EXPORT I444Buffer : public I444BufferInterface { - public: - static rtc::scoped_refptr Create(int width, int height); - static rtc::scoped_refptr Create(int width, - int height, - int stride_y, - int stride_u, - int stride_v); - - // Create a new buffer and copy the pixel data. - static rtc::scoped_refptr Copy(const I444BufferInterface& buffer); - - static rtc::scoped_refptr Copy(int width, - int height, - const uint8_t* data_y, - int stride_y, - const uint8_t* data_u, - int stride_u, - const uint8_t* data_v, - int stride_v); - - // Returns a rotated copy of |src|. 
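The i444_buffer.{cc,h} files deleted above implement a planar buffer with no chroma subsampling (4:4:4): Y, U and V are all full resolution, so I444DataSize with minimal strides comes out to three times width*height bytes, while the I420 buffer that ToI420() converts to subsamples chroma by two in each direction. A small worked sketch of the two sizes:

#include <cstdio>

// Minimal-stride sizes for the two pixel formats handled above.
int I444Size(int w, int h) {
  return w * h * 3;  // Y, U and V planes all full resolution (4:4:4).
}

int I420Size(int w, int h) {
  const int cw = (w + 1) / 2;  // chroma subsampled by 2 horizontally...
  const int ch = (h + 1) / 2;  // ...and vertically (4:2:0).
  return w * h + 2 * cw * ch;
}

int main() {
  // 1280x720: 2764800 bytes as I444 vs 1382400 bytes as I420.
  std::printf("%d %d\n", I444Size(1280, 720), I420Size(1280, 720));
  return 0;
}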
- static rtc::scoped_refptr Rotate(const I444BufferInterface& src, - VideoRotation rotation); - - rtc::scoped_refptr ToI420() final; - const I420BufferInterface* GetI420() const final { return nullptr; } - - // Sets all three planes to all zeros. Used to work around for - // quirks in memory checkers - // (https://bugs.chromium.org/p/libyuv/issues/detail?id=377) and - // ffmpeg (http://crbug.com/390941). - // TODO(nisse): Deprecated. Should be deleted if/when those issues - // are resolved in a better way. Or in the mean time, use SetBlack. - void InitializeData(); - - int width() const override; - int height() const override; - const uint8_t* DataY() const override; - const uint8_t* DataU() const override; - const uint8_t* DataV() const override; - - int StrideY() const override; - int StrideU() const override; - int StrideV() const override; - - uint8_t* MutableDataY(); - uint8_t* MutableDataU(); - uint8_t* MutableDataV(); - - // Scale the cropped area of |src| to the size of |this| buffer, and - // write the result into |this|. - void CropAndScaleFrom(const I444BufferInterface& src, - int offset_x, - int offset_y, - int crop_width, - int crop_height); - - protected: - I444Buffer(int width, int height); - I444Buffer(int width, int height, int stride_y, int stride_u, int stride_v); - - ~I444Buffer() override; - - private: - const int width_; - const int height_; - const int stride_y_; - const int stride_u_; - const int stride_v_; - const std::unique_ptr data_; -}; - -} // namespace webrtc - -#endif // API_VIDEO_I444_BUFFER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc index 6c46f782a..2b493dcc4 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.cc @@ -11,7 +11,6 @@ #include "api/video/video_frame_buffer.h" #include "api/video/i420_buffer.h" -#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/checks.h" @@ -95,7 +94,7 @@ int I420BufferInterface::ChromaHeight() const { } rtc::scoped_refptr I420BufferInterface::ToI420() { - return rtc::scoped_refptr(this); + return this; } const I420BufferInterface* I420BufferInterface::GetI420() const { @@ -118,19 +117,6 @@ int I444BufferInterface::ChromaHeight() const { return height(); } -rtc::scoped_refptr I444BufferInterface::CropAndScale( - int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) { - rtc::scoped_refptr result = - I444Buffer::Create(scaled_width, scaled_height); - result->CropAndScaleFrom(*this, offset_x, offset_y, crop_width, crop_height); - return result; -} - VideoFrameBuffer::Type I010BufferInterface::type() const { return Type::kI010; } diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h index 6098a4811..7b0782f9c 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_frame_buffer.h @@ -184,13 +184,6 @@ class I444BufferInterface : public PlanarYuv8Buffer { int ChromaWidth() const final; int ChromaHeight() const final; - rtc::scoped_refptr CropAndScale(int offset_x, - int offset_y, - int crop_width, - int crop_height, - int scaled_width, - int scaled_height) override; - protected: ~I444BufferInterface() override {} }; diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h 
b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h index 5eb4ebfd7..d66a235da 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_source_interface.h @@ -97,10 +97,6 @@ class VideoSourceInterface { // RemoveSink must guarantee that at the time the method returns, // there is no current and no future calls to VideoSinkInterface::OnFrame. virtual void RemoveSink(VideoSinkInterface* sink) = 0; - - // Request underlying source to capture a new frame. - // TODO(crbug/1255737): make pure virtual once downstream projects adapt. - virtual void RequestRefreshFrame() {} }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h index 3aee5b705..743524b35 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_stream_encoder_settings.h @@ -23,13 +23,19 @@ class EncoderSwitchRequestCallback { public: virtual ~EncoderSwitchRequestCallback() {} - // Requests switch to next negotiated encoder. + struct Config { + std::string codec_name; + absl::optional param; + absl::optional value; + }; + + // Requests that encoder fallback is performed. virtual void RequestEncoderFallback() = 0; - // Requests switch to a specific encoder. If the encoder is not available and - // `allow_default_fallback` is `true` the default fallback is invoked. - virtual void RequestEncoderSwitch(const SdpVideoFormat& format, - bool allow_default_fallback) = 0; + // Requests that a switch to a specific encoder is performed. + virtual void RequestEncoderSwitch(const Config& conf) = 0; + + virtual void RequestEncoderSwitch(const SdpVideoFormat& format) = 0; }; struct VideoStreamEncoderSettings { diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc index 0483c20e6..df1bc4857 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.cc @@ -11,7 +11,6 @@ #include "api/video/video_timing.h" #include "api/array_view.h" -#include "api/units/time_delta.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "rtc_base/strings/string_builder.h" @@ -26,14 +25,6 @@ uint16_t VideoSendTiming::GetDeltaCappedMs(int64_t base_ms, int64_t time_ms) { return rtc::saturated_cast(time_ms - base_ms); } -uint16_t VideoSendTiming::GetDeltaCappedMs(TimeDelta delta) { - if (delta < TimeDelta::Zero()) { - RTC_DLOG(LS_ERROR) << "Delta " << delta.ms() - << "ms expected to be positive"; - } - return rtc::saturated_cast(delta.ms()); -} - TimingFrameInfo::TimingFrameInfo() : rtp_timestamp(0), capture_time_ms(-1), diff --git a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h index 698477a81..dd8febb3d 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h +++ b/TMessagesProj/jni/voip/webrtc/api/video/video_timing.h @@ -16,8 +16,6 @@ #include #include -#include "api/units/time_delta.h" - namespace webrtc { // Video timing timestamps in ms counted from capture_time_ms of a frame. @@ -36,7 +34,6 @@ struct VideoSendTiming { // https://webrtc.org/experiments/rtp-hdrext/video-timing/ extension stores // 16-bit deltas of timestamps from packet capture time. 
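The video_timing.{h,cc} hunks above remove the TimeDelta overload of GetDeltaCappedMs but keep the (base_ms, time_ms) one; as the surviving comment says, the video-timing RTP header extension carries 16-bit millisecond deltas from capture time, so the helper has to saturate anything larger. A standalone sketch of that capping in plain C++ (rtc::saturated_cast performs the equivalent clamping in the tree):

#include <cstdint>
#include <limits>

// Clamp a millisecond delta into the 16-bit field used by the video-timing
// header extension; the overload removed above also logs an error when the
// delta is negative.
uint16_t DeltaCappedMs(int64_t base_ms, int64_t time_ms) {
  const int64_t delta = time_ms - base_ms;
  if (delta < 0)
    return 0;
  if (delta > std::numeric_limits<uint16_t>::max())
    return std::numeric_limits<uint16_t>::max();  // caps at 65535 ms
  return static_cast<uint16_t>(delta);
}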
static uint16_t GetDeltaCappedMs(int64_t base_ms, int64_t time_ms); - static uint16_t GetDeltaCappedMs(TimeDelta delta); uint16_t encode_start_delta_ms; uint16_t encode_finish_delta_ms; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc index 02b43ba4f..fa4775818 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.cc @@ -68,8 +68,7 @@ constexpr ProfilePattern kProfilePatterns[] = { {0x58, BitPattern("10xx0000"), H264Profile::kProfileBaseline}, {0x4D, BitPattern("0x0x0000"), H264Profile::kProfileMain}, {0x64, BitPattern("00000000"), H264Profile::kProfileHigh}, - {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}, - {0xF4, BitPattern("00000000"), H264Profile::kProfilePredictiveHigh444}}; + {0x64, BitPattern("00001100"), H264Profile::kProfileConstrainedHigh}}; struct LevelConstraint { const int max_macroblocks_per_second; @@ -229,9 +228,6 @@ absl::optional H264ProfileLevelIdToString( case H264Profile::kProfileHigh: profile_idc_iop_string = "6400"; break; - case H264Profile::kProfilePredictiveHigh444: - profile_idc_iop_string = "f400"; - break; // Unrecognized profile. default: return absl::nullopt; diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h index 4b46ad329..51d025cd7 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/h264_profile_level_id.h @@ -25,7 +25,6 @@ enum class H264Profile { kProfileMain, kProfileConstrainedHigh, kProfileHigh, - kProfilePredictiveHigh444, }; // All values are equal to ten times the level number, except level 1b which is diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc index 8d4516e09..5fbedd99a 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.cc @@ -174,23 +174,4 @@ VideoCodecType PayloadStringToCodecType(const std::string& name) { return kVideoCodecGeneric; } -VideoCodecComplexity VideoCodec::GetVideoEncoderComplexity() const { - if (complexity_.has_value()) { - return complexity_.value(); - } - switch (codecType) { - case kVideoCodecVP8: - return VP8().complexity; - case kVideoCodecVP9: - return VP9().complexity; - default: - return VideoCodecComplexity::kComplexityNormal; - } -} - -void VideoCodec::SetVideoEncoderComplexity( - VideoCodecComplexity complexity_setting) { - complexity_ = complexity_setting; -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h index f85a0be4a..e7f8650a2 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_codec.h @@ -129,9 +129,6 @@ class RTC_EXPORT VideoCodec { scalability_mode_ = std::string(scalability_mode); } - VideoCodecComplexity GetVideoEncoderComplexity() const; - void SetVideoEncoderComplexity(VideoCodecComplexity complexity_setting); - // Public variables. TODO(hta): Make them private with accessors. 
VideoCodecType codecType; @@ -196,9 +193,6 @@ class RTC_EXPORT VideoCodec { // This will allow removing the VideoCodec* types from this file. VideoCodecUnion codec_specific_; std::string scalability_mode_; - // 'complexity_' indicates the CPU capability of the client. It's used to - // determine encoder CPU complexity (e.g., cpu_used for VP8, VP9. and AV1). - absl::optional complexity_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc index dfd348a31..182d9db66 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_decoder_software_fallback_wrapper.cc @@ -16,6 +16,7 @@ #include #include +#include "absl/base/macros.h" #include "api/video/encoded_image.h" #include "api/video_codecs/video_decoder.h" #include "modules/video_coding/include/video_error_codes.h" @@ -214,7 +215,7 @@ int32_t VideoDecoderSoftwareFallbackWrapper::Decode( } // Fallback decoder initialized, fall-through. - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; } case DecoderType::kFallback: return fallback_decoder_->Decode(input_image, missing_frames, diff --git a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc index 39c52a008..72e08a704 100644 --- a/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/api/video_codecs/video_encoder_software_fallback_wrapper.cc @@ -155,7 +155,7 @@ class VideoEncoderSoftwareFallbackWrapper final : public VideoEncoder { RTC_LOG(LS_WARNING) << "Trying to access encoder in uninitialized fallback wrapper."; // Return main encoder to preserve previous behavior. 
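The software-fallback wrapper hunks here swap the C++17 [[fallthrough]] attribute for ABSL_FALLTHROUGH_INTENDED, which the added absl/base/macros.h include provides; both annotate the intentional fall-through from the "fallback just initialized" case into the "use fallback" case so implicit-fallthrough warnings stay quiet. A minimal sketch of the annotation (the enum and decode stub are illustrative):

#include "absl/base/macros.h"

enum class DecoderStateSketch { kUninitialized, kFallback };

int DecodeSketch(DecoderStateSketch state) {
  switch (state) {
    case DecoderStateSketch::kUninitialized:
      // ... initialize the fallback decoder here ...
      ABSL_FALLTHROUGH_INTENDED;  // same intent as [[fallthrough]];
    case DecoderStateSketch::kFallback:
      return 1;  // decode with the fallback decoder
  }
  return 0;
}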
- [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case EncoderState::kMainEncoderUsed: return encoder_.get(); case EncoderState::kFallbackDueToFailure: diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h index ee518c645..444ec4586 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_receive_stream.h @@ -29,7 +29,9 @@ namespace webrtc { class PacketRouter; +class ProcessThread; class RtcEventLog; +class RtpPacketReceived; class RtpStreamReceiverControllerInterface; class RtpStreamReceiverInterface; diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc index 194f09cf6..2a80ea893 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.cc @@ -102,23 +102,6 @@ AudioTransportImpl::AudioTransportImpl( AudioTransportImpl::~AudioTransportImpl() {} -int32_t AudioTransportImpl::RecordedDataIsAvailable( - const void* audio_data, - const size_t number_of_frames, - const size_t bytes_per_sample, - const size_t number_of_channels, - const uint32_t sample_rate, - const uint32_t audio_delay_milliseconds, - const int32_t clock_drift, - const uint32_t volume, - const bool key_pressed, - uint32_t& new_mic_volume) { // NOLINT: to avoid changing APIs - return RecordedDataIsAvailable( - audio_data, number_of_frames, bytes_per_sample, number_of_channels, - sample_rate, audio_delay_milliseconds, clock_drift, volume, key_pressed, - new_mic_volume, /* estimated_capture_time_ns */ 0); -} - // Not used in Chromium. Process captured audio and distribute to all sending // streams, and try to do this at the lowest possible sample rate. int32_t AudioTransportImpl::RecordedDataIsAvailable( @@ -131,9 +114,7 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( const int32_t /*clock_drift*/, const uint32_t /*volume*/, const bool key_pressed, - uint32_t& /*new_mic_volume*/, - const int64_t - estimated_capture_time_ns) { // NOLINT: to avoid changing APIs + uint32_t& /*new_mic_volume*/) { // NOLINT: to avoid changing APIs RTC_DCHECK(audio_data); RTC_DCHECK_GE(number_of_channels, 1); RTC_DCHECK_LE(number_of_channels, 2); @@ -163,8 +144,25 @@ int32_t AudioTransportImpl::RecordedDataIsAvailable( ProcessCaptureFrame(audio_delay_milliseconds, key_pressed, swap_stereo_channels, audio_processing_, audio_frame.get()); - audio_frame->set_absolute_capture_timestamp_ms(estimated_capture_time_ns / - 1000000); + + // Typing detection (utilizes the APM/VAD decision). We let the VAD determine + // if we're using this feature or not. + // TODO(solenberg): GetConfig() takes a lock. Work around that. + bool typing_detected = false; + if (audio_processing_ && + audio_processing_->GetConfig().voice_detection.enabled) { + if (audio_frame->vad_activity_ != AudioFrame::kVadUnknown) { + bool vad_active = audio_frame->vad_activity_ == AudioFrame::kVadActive; + typing_detected = typing_detection_.Process(key_pressed, vad_active); + } + } + + // Copy frame and push to each sending stream. The copy is required since an + // encoding task will be posted internally to each stream. 
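The audio_transport_impl.cc hunk above restores typing detection: when audio processing has voice detection enabled, the VAD decision on the captured frame is combined with the key-pressed flag through TypingDetection::Process(), and the result is published under capture_lock_ for the typing_noise_detected() accessor re-added just below. A compressed sketch of that flow; the detector stub only mirrors the inputs, since the real TypingDetection is stateful:

#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/thread_annotations.h"

// Stand-in for webrtc::TypingDetection: flags typing noise when a key press
// coincides with voice activity.
class TypingDetectorSketch {
 public:
  bool Process(bool key_pressed, bool vad_active) {
    return key_pressed && vad_active;
  }
};

class CaptureSideSketch {
 public:
  void OnCapturedFrame(bool key_pressed, bool vad_known, bool vad_active) {
    bool typing = false;
    if (vad_known)  // mirrors the kVadUnknown guard in the hunk above
      typing = detector_.Process(key_pressed, vad_active);
    webrtc::MutexLock lock(&lock_);
    typing_noise_detected_ = typing;
  }

  bool typing_noise_detected() const {
    webrtc::MutexLock lock(&lock_);
    return typing_noise_detected_;
  }

 private:
  TypingDetectorSketch detector_;
  mutable webrtc::Mutex lock_;
  bool typing_noise_detected_ RTC_GUARDED_BY(lock_) = false;
};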
+ { + MutexLock lock(&capture_lock_); + typing_noise_detected_ = typing_detected; + } RTC_DCHECK_GT(audio_frame->samples_per_channel_, 0); if (async_audio_processing_) @@ -272,4 +270,8 @@ void AudioTransportImpl::SetStereoChannelSwapping(bool enable) { swap_stereo_channels_ = enable; } +bool AudioTransportImpl::typing_noise_detected() const { + MutexLock lock(&capture_lock_); + return typing_noise_detected_; +} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h index 89999560c..f3ca2fa84 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h +++ b/TMessagesProj/jni/voip/webrtc/audio/audio_transport_impl.h @@ -41,34 +41,21 @@ class AudioTransportImpl : public AudioTransport { ~AudioTransportImpl() override; - // TODO(bugs.webrtc.org/13620) Deprecate this function int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, + const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, + const uint32_t totalDelayMS, + const int32_t clockDrift, + const uint32_t currentMicLevel, + const bool keyPressed, uint32_t& newMicLevel) override; - int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel, - int64_t estimated_capture_time_ns) override; - - int32_t NeedMorePlayData(size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, + int32_t NeedMorePlayData(const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, void* audioSamples, size_t& nSamplesOut, int64_t* elapsed_time_ms, @@ -86,9 +73,7 @@ class AudioTransportImpl : public AudioTransport { int send_sample_rate_hz, size_t send_num_channels); void SetStereoChannelSwapping(bool enable); - // Deprecated. - // TODO(bugs.webrtc.org/11226): Remove. 
- bool typing_noise_detected() const { return false; } + bool typing_noise_detected() const; private: void SendProcessedData(std::unique_ptr audio_frame); @@ -105,6 +90,7 @@ class AudioTransportImpl : public AudioTransport { std::vector audio_senders_ RTC_GUARDED_BY(capture_lock_); int send_sample_rate_hz_ RTC_GUARDED_BY(capture_lock_) = 8000; size_t send_num_channels_ RTC_GUARDED_BY(capture_lock_) = 1; + bool typing_noise_detected_ RTC_GUARDED_BY(capture_lock_) = false; bool swap_stereo_channels_ RTC_GUARDED_BY(capture_lock_) = false; PushResampler capture_resampler_; TypingDetection typing_detection_; diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc index f2fd34ab1..6c16f435f 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive.cc @@ -39,6 +39,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/format_macros.h" #include "rtc_base/location.h" @@ -846,11 +847,14 @@ CallReceiveStatistics ChannelReceive::GetRTCPStatistics() const { absl::optional rtcp_sr_stats = rtp_rtcp_->GetSenderReportStats(); if (rtcp_sr_stats.has_value()) { + // Number of seconds since 1900 January 1 00:00 GMT (see + // https://tools.ietf.org/html/rfc868). + constexpr int64_t kNtpJan1970Millisecs = + 2208988800 * rtc::kNumMillisecsPerSec; stats.last_sender_report_timestamp_ms = - rtcp_sr_stats->last_arrival_timestamp.ToMs() - - rtc::kNtpJan1970Millisecs; + rtcp_sr_stats->last_arrival_timestamp.ToMs() - kNtpJan1970Millisecs; stats.last_sender_report_remote_timestamp_ms = - rtcp_sr_stats->last_remote_timestamp.ToMs() - rtc::kNtpJan1970Millisecs; + rtcp_sr_stats->last_remote_timestamp.ToMs() - kNtpJan1970Millisecs; stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc index c9865cbe1..c9e8a8b29 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_receive_frame_transformer_delegate.cc @@ -79,7 +79,7 @@ void ChannelReceiveFrameTransformerDelegate::Transform( void ChannelReceiveFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate(this); + rtc::scoped_refptr delegate = this; channel_receive_thread_->PostTask(ToQueuedTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->ReceiveFrame(std::move(frame)); diff --git a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc index d1135e5ad..2ad031e54 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send.cc @@ -31,6 +31,7 @@ #include "modules/audio_processing/rms_level.h" #include "modules/pacing/packet_router.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/event.h" #include "rtc_base/format_macros.h" diff --git 
a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc index 29bb0b81d..eee4cd0d9 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/audio/channel_send_frame_transformer_delegate.cc @@ -102,7 +102,7 @@ void ChannelSendFrameTransformerDelegate::OnTransformedFrame( MutexLock lock(&send_lock_); if (!send_frame_callback_) return; - rtc::scoped_refptr delegate(this); + rtc::scoped_refptr delegate = this; encoder_queue_->PostTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->SendFrame(std::move(frame)); diff --git a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h index 6c3aec6fa..439393585 100644 --- a/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h +++ b/TMessagesProj/jni/voip/webrtc/audio/voip/voip_core.h @@ -53,6 +53,9 @@ class VoipCore : public VoipEngine, public VoipVolumeControl { public: // Construct VoipCore with provided arguments. + // ProcessThread implementation can be injected by `process_thread` + // (mainly for testing purpose) and when set to nullptr, default + // implementation will be used. VoipCore(rtc::scoped_refptr encoder_factory, rtc::scoped_refptr decoder_factory, std::unique_ptr task_queue_factory, diff --git a/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/partition_alloc_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/partition_alloc_perftest.cc deleted file mode 100644 index e6afbd949..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/partition_alloc_perftest.cc +++ /dev/null @@ -1,295 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include -#include - -#include "base/allocator/partition_allocator/partition_alloc.h" -#include "base/strings/stringprintf.h" -#include "base/threading/platform_thread.h" -#include "base/time/time.h" -#include "base/timer/lap_timer.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { -namespace { - -// Change kTimeLimit to something higher if you need more time to capture a -// trace. -constexpr base::TimeDelta kTimeLimit = base::TimeDelta::FromSeconds(2); -constexpr int kWarmupRuns = 5; -constexpr int kTimeCheckInterval = 100000; - -// Size constants are mostly arbitrary, but try to simulate something like CSS -// parsing which consists of lots of relatively small objects. -constexpr int kMultiBucketMinimumSize = 24; -constexpr int kMultiBucketIncrement = 13; -// Final size is 24 + (13 * 22) = 310 bytes. 
-constexpr int kMultiBucketRounds = 22; - -constexpr char kMetricPrefixMemoryAllocation[] = "MemoryAllocation"; -constexpr char kMetricThroughput[] = "throughput"; -constexpr char kMetricTimePerAllocation[] = "time_per_allocation"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixMemoryAllocation, - story_name); - reporter.RegisterImportantMetric(kMetricThroughput, "runs/s"); - reporter.RegisterImportantMetric(kMetricTimePerAllocation, "ns"); - return reporter; -} - -enum class AllocatorType { kSystem, kPartitionAlloc }; - -class Allocator { - public: - Allocator() = default; - virtual ~Allocator() = default; - virtual void Init() {} - virtual void* Alloc(size_t size) = 0; - virtual void Free(void* data) = 0; -}; - -class SystemAllocator : public Allocator { - public: - SystemAllocator() = default; - ~SystemAllocator() override = default; - void* Alloc(size_t size) override { return malloc(size); } - void Free(void* data) override { free(data); } -}; - -class PartitionAllocator : public Allocator { - public: - PartitionAllocator() - : alloc_(std::make_unique()) {} - ~PartitionAllocator() override = default; - - void Init() override { alloc_->init(); } - void* Alloc(size_t size) override { return alloc_->root()->Alloc(size, ""); } - void Free(void* data) override { return alloc_->root()->Free(data); } - - private: - std::unique_ptr alloc_; -}; - -class TestLoopThread : public PlatformThread::Delegate { - public: - explicit TestLoopThread(OnceCallback test_fn) - : test_fn_(std::move(test_fn)) { - CHECK(PlatformThread::Create(0, this, &thread_handle_)); - } - - float Run() { - PlatformThread::Join(thread_handle_); - return laps_per_second_; - } - - void ThreadMain() override { laps_per_second_ = std::move(test_fn_).Run(); } - - OnceCallback test_fn_; - PlatformThreadHandle thread_handle_; - std::atomic laps_per_second_; -}; - -void DisplayResults(const std::string& story_name, - float iterations_per_second) { - auto reporter = SetUpReporter(story_name); - reporter.AddResult(kMetricThroughput, iterations_per_second); - reporter.AddResult(kMetricTimePerAllocation, - static_cast(1e9 / iterations_per_second)); -} - -class MemoryAllocationPerfNode { - public: - MemoryAllocationPerfNode* GetNext() const { return next_; } - void SetNext(MemoryAllocationPerfNode* p) { next_ = p; } - static void FreeAll(MemoryAllocationPerfNode* first, Allocator* alloc) { - MemoryAllocationPerfNode* cur = first; - while (cur != nullptr) { - MemoryAllocationPerfNode* next = cur->GetNext(); - alloc->Free(cur); - cur = next; - } - } - - private: - MemoryAllocationPerfNode* next_ = nullptr; -}; - -#if !defined(OS_ANDROID) -float SingleBucket(Allocator* allocator) { - auto* first = - reinterpret_cast(allocator->Alloc(40)); - - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - MemoryAllocationPerfNode* cur = first; - do { - auto* next = - reinterpret_cast(allocator->Alloc(40)); - CHECK_NE(next, nullptr); - cur->SetNext(next); - cur = next; - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - // next_ = nullptr only works if the class constructor is called (it's not - // called in this case because then we can allocate arbitrary-length - // payloads.) - cur->SetNext(nullptr); - - MemoryAllocationPerfNode::FreeAll(first, allocator); - return timer.LapsPerSecond(); -} -#endif // defined(OS_ANDROID) - -float SingleBucketWithFree(Allocator* allocator) { - // Allocate an initial element to make sure the bucket stays set up. 
- void* elem = allocator->Alloc(40); - - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - do { - void* cur = allocator->Alloc(40); - CHECK_NE(cur, nullptr); - allocator->Free(cur); - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - - allocator->Free(elem); - return timer.LapsPerSecond(); -} - -#if !defined(OS_ANDROID) -float MultiBucket(Allocator* allocator) { - auto* first = - reinterpret_cast(allocator->Alloc(40)); - MemoryAllocationPerfNode* cur = first; - - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - do { - for (int i = 0; i < kMultiBucketRounds; i++) { - auto* next = reinterpret_cast(allocator->Alloc( - kMultiBucketMinimumSize + (i * kMultiBucketIncrement))); - CHECK_NE(next, nullptr); - cur->SetNext(next); - cur = next; - } - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - cur->SetNext(nullptr); - - MemoryAllocationPerfNode::FreeAll(first, allocator); - - return timer.LapsPerSecond() * kMultiBucketRounds; -} -#endif // defined(OS_ANDROID) - -float MultiBucketWithFree(Allocator* allocator) { - std::vector elems; - elems.reserve(kMultiBucketRounds); - // Do an initial round of allocation to make sure that the buckets stay in - // use (and aren't accidentally released back to the OS). - for (int i = 0; i < kMultiBucketRounds; i++) { - void* cur = - allocator->Alloc(kMultiBucketMinimumSize + (i * kMultiBucketIncrement)); - CHECK_NE(cur, nullptr); - elems.push_back(cur); - } - - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - do { - for (int i = 0; i < kMultiBucketRounds; i++) { - void* cur = allocator->Alloc(kMultiBucketMinimumSize + - (i * kMultiBucketIncrement)); - CHECK_NE(cur, nullptr); - allocator->Free(cur); - } - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - - for (void* ptr : elems) { - allocator->Free(ptr); - } - - return timer.LapsPerSecond() * kMultiBucketRounds; -} - -std::unique_ptr CreateAllocator(AllocatorType type) { - if (type == AllocatorType::kSystem) - return std::make_unique(); - return std::make_unique(); -} - -void RunTest(int thread_count, - AllocatorType alloc_type, - float (*test_fn)(Allocator*), - const char* story_base_name) { - auto alloc = CreateAllocator(alloc_type); - alloc->Init(); - - std::vector> threads; - for (int i = 0; i < thread_count; ++i) { - threads.push_back(std::make_unique( - BindOnce(test_fn, Unretained(alloc.get())))); - } - - uint64_t total_laps_per_second = 0; - uint64_t min_laps_per_second = std::numeric_limits::max(); - for (int i = 0; i < thread_count; ++i) { - uint64_t laps_per_second = threads[i]->Run(); - min_laps_per_second = std::min(laps_per_second, min_laps_per_second); - total_laps_per_second += laps_per_second; - } - - std::string name = base::StringPrintf( - "%s.%s_%s_%d", kMetricPrefixMemoryAllocation, story_base_name, - alloc_type == AllocatorType::kSystem ? "System" : "PartitionAlloc", - thread_count); - - DisplayResults(name + "_total", total_laps_per_second); - DisplayResults(name + "_worst", min_laps_per_second); -} - -class MemoryAllocationPerfTest - : public testing::TestWithParam> {}; - -INSTANTIATE_TEST_SUITE_P( - , - MemoryAllocationPerfTest, - ::testing::Combine(::testing::Values(1, 2, 3, 4), - ::testing::Values(AllocatorType::kSystem, - AllocatorType::kPartitionAlloc))); - -// This test (and the other one below) allocates a large amount of memory, which -// can cause issues on Android. 
-#if !defined(OS_ANDROID) -TEST_P(MemoryAllocationPerfTest, SingleBucket) { - auto params = GetParam(); - RunTest(std::get<0>(params), std::get<1>(params), SingleBucket, - "SingleBucket"); -} -#endif - -TEST_P(MemoryAllocationPerfTest, SingleBucketWithFree) { - auto params = GetParam(); - RunTest(std::get<0>(params), std::get<1>(params), SingleBucketWithFree, - "SingleBucketWithFree"); -} - -#if !defined(OS_ANDROID) -TEST_P(MemoryAllocationPerfTest, MultiBucket) { - auto params = GetParam(); - RunTest(std::get<0>(params), std::get<1>(params), MultiBucket, "MultiBucket"); -} -#endif - -TEST_P(MemoryAllocationPerfTest, MultiBucketWithFree) { - auto params = GetParam(); - RunTest(std::get<0>(params), std::get<1>(params), MultiBucketWithFree, - "MultiBucketWithFree"); -} - -} // namespace - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/spin_lock_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/spin_lock_perftest.cc deleted file mode 100644 index c60e3631d..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/allocator/partition_allocator/spin_lock_perftest.cc +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/allocator/partition_allocator/spin_lock.h" -#include "base/threading/platform_thread.h" -#include "base/time/time.h" -#include "base/timer/lap_timer.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { -namespace { - -constexpr int kWarmupRuns = 1; -constexpr TimeDelta kTimeLimit = TimeDelta::FromSeconds(1); -constexpr int kTimeCheckInterval = 100000; - -constexpr char kMetricPrefixSpinLock[] = "SpinLock."; -constexpr char kMetricLockUnlockThroughput[] = "lock_unlock_throughput"; -constexpr char kStoryBaseline[] = "baseline_story"; -constexpr char kStoryWithCompetingThread[] = "with_competing_thread"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixSpinLock, story_name); - reporter.RegisterImportantMetric(kMetricLockUnlockThroughput, "runs/s"); - return reporter; -} - -class Spin : public PlatformThread::Delegate { - public: - Spin(subtle::SpinLock* lock, size_t* data) - : lock_(lock), data_(data), should_stop_(false) {} - ~Spin() override = default; - - void ThreadMain() override { - while (!should_stop_.load(std::memory_order_relaxed)) { - lock_->lock(); - (*data_)++; - lock_->unlock(); - } - } - - void Stop() { should_stop_ = true; } - - private: - subtle::SpinLock* lock_; - size_t* data_; - std::atomic should_stop_; -}; - -} // namespace - -TEST(SpinLockPerfTest, Simple) { - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - size_t data = 0; - - subtle::SpinLock lock; - - do { - lock.lock(); - data += 1; - lock.unlock(); - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - - auto reporter = SetUpReporter(kStoryBaseline); - reporter.AddResult(kMetricLockUnlockThroughput, timer.LapsPerSecond()); -} - -TEST(SpinLockPerfTest, WithCompetingThread) { - LapTimer timer(kWarmupRuns, kTimeLimit, kTimeCheckInterval); - size_t data = 0; - - subtle::SpinLock lock; - - // Starts a competing thread executing the same loop as this thread. 
- Spin thread_main(&lock, &data); - PlatformThreadHandle thread_handle; - ASSERT_TRUE(PlatformThread::Create(0, &thread_main, &thread_handle)); - - do { - lock.lock(); - data += 1; - lock.unlock(); - timer.NextLap(); - } while (!timer.HasTimeLimitExpired()); - - thread_main.Stop(); - PlatformThread::Join(thread_handle); - - auto reporter = SetUpReporter(kStoryWithCompetingThread); - reporter.AddResult(kMetricLockUnlockThroughput, timer.LapsPerSecond()); -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_call_graph_instrumentation_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_call_graph_instrumentation_perftest.cc deleted file mode 100644 index 5d7f274af..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_call_graph_instrumentation_perftest.cc +++ /dev/null @@ -1,155 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/android/orderfile/orderfile_instrumentation.h" - -#include - -#include "base/android/library_loader/anchor_functions.h" -#include "base/strings/stringprintf.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_test.h" - -namespace base { -namespace android { -namespace orderfile { - -namespace { -const size_t kStep = sizeof(int); - -void CallRecordAddress(int iterations, size_t addr_count) { - for (int i = 0; i < iterations; i++) { - for (size_t caller_addr = kStartOfTextForTesting + kStep; - caller_addr < addr_count; caller_addr += kStep) { - for (size_t callee_addr = caller_addr + kStep; callee_addr < addr_count; - callee_addr += kStep) { - RecordAddressForTesting(callee_addr, caller_addr); - } - } - } -} - -void RunBenchmark(size_t iterations, size_t addresses_count, int threads) { - ResetForTesting(); - auto iterate = [iterations, addresses_count]() { - CallRecordAddress(iterations, addresses_count); - }; - if (threads != 1) { - for (int i = 0; i < threads - 1; ++i) - std::thread(iterate).detach(); - } - auto tick = base::TimeTicks::Now(); - iterate(); - auto tock = base::TimeTicks::Now(); - double nanos = static_cast((tock - tick).InNanoseconds()); - size_t addresses = (addresses_count - kStartOfTextForTesting - 1) / kStep; - double calls_count = (addresses * (addresses - 1)) / 2; - auto ns_per_call = nanos / (iterations * calls_count); - auto modifier = - base::StringPrintf("_%zu_%zu_%d", iterations, addresses_count, threads); - perf_test::PrintResult("RecordAddressCostPerCall", modifier, "", ns_per_call, - "ns", true); -} - -void CheckValid(size_t iterations, size_t addr_count) { - // |reached| is expected to be ordered by callee offset - auto reached = GetOrderedOffsetsForTesting(); - size_t buckets_per_callee = 9; // kTotalBuckets * 2 + 1. - size_t callers_per_callee = 3; - size_t addresses = (addr_count - kStartOfTextForTesting - 1) / kStep; - EXPECT_EQ((addresses - 1) * buckets_per_callee, reached.size()); - size_t expected_callee = kStartOfTextForTesting + 2 * kStep; - - for (size_t i = 0; i < reached.size(); i += buckets_per_callee) { - EXPECT_EQ(reached[i] / 4, (expected_callee - kStartOfTextForTesting) / 4); - size_t callee_index = i / buckets_per_callee; - for (size_t j = 0; j < callers_per_callee; j++) { - EXPECT_EQ(reached[i + j * 2 + 1], - j > callee_index ? 0UL : (j + 1) * kStep); - EXPECT_EQ(reached[i + j * 2 + 2], j > callee_index ? 
0UL : iterations); - } - size_t misses = callee_index > 2 ? (callee_index - 2) * iterations : 0UL; - EXPECT_EQ(reached[i + 7], 0UL); - EXPECT_EQ(reached[i + 8], misses); - expected_callee += kStep; - } -} - -} // namespace - -class OrderfileInstrumentationTest : public ::testing::Test { - // Any tests need to run ResetForTesting() when they start. Because this - // perftest is built with instrumentation enabled, all code including - // ::testing::Test is instrumented. If ResetForTesting() is called earlier, - // for example in setUp(), any test harness code between setUp() and the - // actual test will change the instrumentation offset record in unpredictable - // ways and make these tests unreliable. -}; - -TEST_F(OrderfileInstrumentationTest, SequentialTest_10_5000) { - size_t iterations = 10; - size_t addr_count = 5000; - ResetForTesting(); - CallRecordAddress(iterations, addr_count); - Disable(); - CheckValid(iterations, addr_count); -} - -TEST_F(OrderfileInstrumentationTest, SequentialTest_10_10000) { - size_t iterations = 10; - size_t addr_count = 10000; - ResetForTesting(); - CallRecordAddress(iterations, addr_count); - Disable(); - CheckValid(iterations, addr_count); -} - -TEST_F(OrderfileInstrumentationTest, OutOfBoundsCaller) { - ResetForTesting(); - RecordAddressForTesting(1234, kStartOfTextForTesting); - RecordAddressForTesting(1234, kEndOfTextForTesting + 1); - Disable(); - auto reached = GetOrderedOffsetsForTesting(); - EXPECT_EQ(reached.size(), 9UL); - EXPECT_EQ(reached[0] / 4, (1234 - kStartOfTextForTesting) / 4); - for (size_t i = 1; i < 8; i++) { - EXPECT_EQ(reached[i], 0UL); - } - EXPECT_EQ(reached[8], 2UL); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_10_2000) { - RunBenchmark(10, 2000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_100_2000) { - RunBenchmark(100, 2000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_2000_2) { - RunBenchmark(100, 2000, 2); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_2000_3) { - RunBenchmark(100, 2000, 3); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_2000_4) { - RunBenchmark(100, 2000, 4); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_2000_6) { - RunBenchmark(100, 2000, 6); -} - -} // namespace orderfile -} // namespace android -} // namespace base - -// Custom runner implementation since base's one requires JNI on Android. -int main(int argc, char** argv) { - testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_instrumentation_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_instrumentation_perftest.cc deleted file mode 100644 index e1a69c9b7..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/android/orderfile/orderfile_instrumentation_perftest.cc +++ /dev/null @@ -1,135 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/android/orderfile/orderfile_instrumentation.h" - -#include - -#include "base/android/library_loader/anchor_functions.h" -#include "base/strings/stringprintf.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_test.h" - -namespace base { -namespace android { -namespace orderfile { - -namespace { - -// Records |addresses_count| distinct addresses |iterations| times, in -// |threads|. 
-void RunBenchmark(int iterations, int addresses_count, int threads) { - ResetForTesting(); - auto iterate = [iterations, addresses_count]() { - for (int i = 0; i < iterations; i++) { - for (size_t addr = kStartOfTextForTesting; - addr < static_cast(addresses_count); addr += sizeof(int)) { - RecordAddressForTesting(addr); - } - } - }; - if (threads != 1) { - for (int i = 0; i < threads - 1; ++i) - std::thread(iterate).detach(); - } - auto tick = base::TimeTicks::Now(); - iterate(); - auto tock = base::TimeTicks::Now(); - double nanos = static_cast((tock - tick).InNanoseconds()); - auto ns_per_call = - nanos / (iterations * static_cast(addresses_count)); - auto modifier = - base::StringPrintf("_%d_%d_%d", iterations, addresses_count, threads); - perf_test::PrintResult("RecordAddressCostPerCall", modifier, "", ns_per_call, - "ns", true); -} - -} // namespace - -class OrderfileInstrumentationTest : public ::testing::Test { - // Any tests need to run ResetForTesting() when they start. Because this - // perftest is built with instrumentation enabled, all code including - // ::testing::Test is instrumented. If ResetForTesting() is called earlier, - // for example in setUp(), any test harness code between setUp() and the - // actual test will change the instrumentation offset record in unpredictable - // ways and make these tests unreliable. -}; - -TEST_F(OrderfileInstrumentationTest, RecordOffset) { - ResetForTesting(); - size_t first = 1234, second = 1456; - RecordAddressForTesting(first); - RecordAddressForTesting(second); - RecordAddressForTesting(first); // No duplicates. - RecordAddressForTesting(first + 1); // 4 bytes granularity. - Disable(); - - auto reached = GetOrderedOffsetsForTesting(); - EXPECT_EQ(2UL, reached.size()); - EXPECT_EQ(first - kStartOfTextForTesting, reached[0]); - EXPECT_EQ(second - kStartOfTextForTesting, reached[1]); -} - -TEST_F(OrderfileInstrumentationTest, RecordingStops) { - ResetForTesting(); - size_t first = 1234, second = 1456, third = 1789; - RecordAddressForTesting(first); - RecordAddressForTesting(second); - Disable(); - RecordAddressForTesting(third); - - auto reached = GetOrderedOffsetsForTesting(); - ASSERT_EQ(2UL, reached.size()); - ASSERT_EQ(first - kStartOfTextForTesting, reached[0]); - ASSERT_EQ(second - kStartOfTextForTesting, reached[1]); -} - -TEST_F(OrderfileInstrumentationTest, OutOfBounds) { - ResetForTesting(); - EXPECT_DEATH(RecordAddressForTesting(kEndOfTextForTesting + 100), ""); - EXPECT_DEATH(RecordAddressForTesting(kStartOfTextForTesting - 100), ""); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_10_10000) { - RunBenchmark(10, 10000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_100_10000) { - RunBenchmark(100, 10000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_10_100000) { - RunBenchmark(10, 100000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_100_100000) { - RunBenchmark(100, 100000, 1); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_100000_2) { - RunBenchmark(1000, 100000, 2); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_100000_3) { - RunBenchmark(1000, 100000, 3); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_100000_4) { - RunBenchmark(1000, 100000, 4); -} - -TEST(OrderfileInstrumentationPerfTest, RecordAddress_1000_100000_6) { - RunBenchmark(1000, 100000, 6); -} - -} // namespace orderfile -} // namespace android -} // namespace base - -// Custom runner implementation since base's one requires JNI on Android. 
-int main(int argc, char** argv) { - testing::InitGoogleTest(&argc, argv); - return RUN_ALL_TESTS(); -} diff --git a/TMessagesProj/jni/voip/webrtc/base/hash/sha1_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/hash/sha1_perftest.cc deleted file mode 100644 index 2bb06fb05..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/hash/sha1_perftest.cc +++ /dev/null @@ -1,88 +0,0 @@ -// Copyright (c) 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/hash/sha1.h" - -#include -#include -#include -#include -#include - -#include "base/rand_util.h" -#include "base/strings/string_number_conversions.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace { - -constexpr int kBytesPerMegabyte = 1000000; - -constexpr char kMetricPrefixSHA1[] = "SHA1."; -constexpr char kMetricRuntime[] = "runtime"; -constexpr char kMetricThroughput[] = "throughput"; -// Histograms automatically calculate mean, min, max, and standard deviation, -// but not median, so have a separate metric for our manually calculated median. -constexpr char kMetricMedianThroughput[] = "median_throughput"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixSHA1, story_name); - reporter.RegisterImportantMetric(kMetricRuntime, "us"); - reporter.RegisterImportantMetric(kMetricThroughput, "bytesPerSecond"); - reporter.RegisterImportantMetric(kMetricMedianThroughput, "bytesPerSecond"); - return reporter; -} - -} // namespace - -static void Timing(const size_t len) { - std::vector buf(len); - base::RandBytes(buf.data(), len); - - const int runs = 111; - std::vector utime(runs); - unsigned char digest[base::kSHA1Length]; - memset(digest, 0, base::kSHA1Length); - - double total_test_time = 0.0; - for (int i = 0; i < runs; ++i) { - auto start = base::TimeTicks::Now(); - base::SHA1HashBytes(buf.data(), len, digest); - auto end = base::TimeTicks::Now(); - utime[i] = end - start; - total_test_time += utime[i].InMicroseconds(); - } - - std::sort(utime.begin(), utime.end()); - const int med = runs / 2; - - // Simply dividing len by utime gets us MB/s, but we need B/s. - // MB/s = (len / (bytes/megabytes)) / (usecs / usecs/sec) - // MB/s = (len / 1,000,000)/(usecs / 1,000,000) - // MB/s = (len * 1,000,000)/(usecs * 1,000,000) - // MB/s = len/utime - double median_rate = kBytesPerMegabyte * len / utime[med].InMicroseconds(); - // Convert to a comma-separated string so we can report every data point. - std::string rates; - for (const auto& t : utime) { - rates += - base::NumberToString(kBytesPerMegabyte * len / t.InMicroseconds()) + - ","; - } - // Strip off trailing comma. 
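A concrete instance of the MB/s-to-B/s conversion documented in Timing() above, with an illustrative lap time (the length is the real 1024 * 1024 >> 1 test case; the 1000 microsecond median lap is hypothetical):

// Worked example of kBytesPerMegabyte * len / usecs.
constexpr long long kExampleLen = 1024 * 1024 >> 1;  // 524288 bytes
constexpr long long kExampleLapUs = 1000;            // hypothetical median lap
static_assert(1000000LL * kExampleLen / kExampleLapUs == 524288000LL,
              "about 524 MB/s for a 1000 us lap over 512 KiB");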
- rates.pop_back(); - - auto reporter = SetUpReporter(base::NumberToString(len) + "_bytes"); - reporter.AddResult(kMetricRuntime, total_test_time); - reporter.AddResult(kMetricMedianThroughput, median_rate); - reporter.AddResultList(kMetricThroughput, rates); -} - -TEST(SHA1PerfTest, Speed) { - Timing(1024 * 1024U >> 1); - Timing(1024 * 1024U >> 5); - Timing(1024 * 1024U >> 6); - Timing(1024 * 1024U >> 7); -} diff --git a/TMessagesProj/jni/voip/webrtc/base/i18n/streaming_utf8_validator_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/i18n/streaming_utf8_validator_perftest.cc deleted file mode 100644 index 4f341388e..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/i18n/streaming_utf8_validator_perftest.cc +++ /dev/null @@ -1,232 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -// All data that is passed through a WebSocket with type "Text" needs to be -// validated as UTF8. Since this is done on the IO thread, it needs to be -// reasonably fast. - -// We are only interested in the performance on valid UTF8. Invalid UTF8 will -// result in a connection failure, so is unlikely to become a source of -// performance issues. - -#include "base/i18n/streaming_utf8_validator.h" - -#include - -#include - -#include "base/bind.h" -#include "base/callback.h" -#include "base/macros.h" -#include "base/strings/string_util.h" -#include "base/strings/stringprintf.h" -#include "base/test/perf_time_logger.h" -#include "testing/gtest/include/gtest/gtest.h" - -namespace base { -namespace { - -// We want to test ranges of valid UTF-8 sequences. These ranges are inclusive. -// They are intended to be large enough that the validator needs to do -// meaningful work while being in some sense "realistic" (eg. control characters -// are not included). -const char kOneByteSeqRangeStart[] = " "; // U+0020 -const char kOneByteSeqRangeEnd[] = "~"; // U+007E - -const char kTwoByteSeqRangeStart[] = "\xc2\xa0"; // U+00A0 non-breaking space -const char kTwoByteSeqRangeEnd[] = "\xc9\x8f"; // U+024F small y with stroke - -const char kThreeByteSeqRangeStart[] = "\xe3\x81\x82"; // U+3042 Hiragana "a" -const char kThreeByteSeqRangeEnd[] = "\xe9\xbf\x83"; // U+9FC3 "to blink" - -const char kFourByteSeqRangeStart[] = "\xf0\xa0\x80\x8b"; // U+2000B -const char kFourByteSeqRangeEnd[] = "\xf0\xaa\x9a\xb2"; // U+2A6B2 - -// The different lengths of strings to test. -const size_t kTestLengths[] = {1, 32, 256, 32768, 1 << 20}; - -// Simplest possible byte-at-a-time validator, to provide a baseline -// for comparison. This is only tried on 1-byte UTF-8 sequences, as -// the results will not be meaningful with sequences containing -// top-bit-set bytes. -bool IsString7Bit(const std::string& s) { - for (auto it : s) { - if (it & 0x80) - return false; - } - return true; -} - -// Assumes that |previous| is a valid UTF-8 sequence, and attempts to return -// the next one. Is just barely smart enough to iterate through the ranges -// defined about. -std::string NextUtf8Sequence(const std::string& previous) { - DCHECK(StreamingUtf8Validator::Validate(previous)); - std::string next = previous; - for (int i = static_cast(previous.length() - 1); i >= 0; --i) { - // All bytes in a UTF-8 sequence except the first one are - // constrained to the range 0x80 to 0xbf, inclusive. When we - // increment past 0xbf, we carry into the previous byte. 
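To make the carry rule described in the comment above concrete, here is a small standalone sketch; it is not taken from the file, the lambda simply mirrors the increment logic, and the asserted byte sequences follow directly from the UTF-8 encoding:

#include <cassert>
#include <string>

// Standalone illustration of the trailing-byte carry: bytes after the lead
// byte live in [0x80, 0xbf], so passing 0xbf wraps to 0x80 and carries left.
int main() {
  auto next = [](std::string s) {
    for (int i = static_cast<int>(s.size()) - 1; i >= 0; --i) {
      if (i > 0 && s[i] == '\xbf') {
        s[i] = '\x80';  // wrap the continuation byte, carry into the previous one
        continue;
      }
      ++s[i];  // no carry needed
      break;
    }
    return s;
  };
  assert(next("\xc2\xbf") == "\xc3\x80");  // U+00BF -> U+00C0 (carry into lead byte)
  assert(next("\xc9\x8f") == "\xc9\x90");  // U+024F -> U+0250 (no carry)
  return 0;
}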
- if (i > 0 && next[i] == '\xbf') { - next[i] = '\x80'; - continue; // carry - } - ++next[i]; - break; // no carry - } - DCHECK(StreamingUtf8Validator::Validate(next)) - << "Result \"" << next << "\" failed validation"; - return next; -} - -typedef bool (*TestTargetType)(const std::string&); - -// Run fuction |target| over |test_string| |times| times, and report the results -// using |description|. -bool RunTest(const std::string& description, - TestTargetType target, - const std::string& test_string, - int times) { - base::PerfTimeLogger timer(description.c_str()); - bool result = true; - for (int i = 0; i < times; ++i) { - result = target(test_string) && result; - } - timer.Done(); - return result; -} - -// Construct a string by repeating |input| enough times to equal or exceed -// |length|. -std::string ConstructRepeatedTestString(const std::string& input, - size_t length) { - std::string output = input; - while (output.length() * 2 < length) { - output += output; - } - if (output.length() < length) { - output += ConstructRepeatedTestString(input, length - output.length()); - } - return output; -} - -// Construct a string by expanding the range of UTF-8 sequences -// between |input_start| and |input_end|, inclusive, and then -// repeating the resulting string until it equals or exceeds |length| -// bytes. |input_start| and |input_end| must be valid UTF-8 -// sequences. -std::string ConstructRangedTestString(const std::string& input_start, - const std::string& input_end, - size_t length) { - std::string output = input_start; - std::string input = input_start; - while (output.length() < length && input != input_end) { - input = NextUtf8Sequence(input); - output += input; - } - if (output.length() < length) { - output = ConstructRepeatedTestString(output, length); - } - return output; -} - -struct TestFunctionDescription { - TestTargetType function; - const char* function_name; -}; - -bool IsStringUTF8(const std::string& str) { - return base::IsStringUTF8(base::StringPiece(str)); -} - -// IsString7Bit is intentionally placed last so it can be excluded easily. -const TestFunctionDescription kTestFunctions[] = { - {&StreamingUtf8Validator::Validate, "StreamingUtf8Validator"}, - {&IsStringUTF8, "IsStringUTF8"}, {&IsString7Bit, "IsString7Bit"}}; - -// Construct a test string from |construct_test_string| for each of the lengths -// in |kTestLengths| in turn. For each string, run each test in |test_functions| -// for a number of iterations such that the total number of bytes validated -// is around 16MB. 
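The "around 16MB" figure in the comment above comes from the iteration count computed inside RunSomeTests just below, times = (1 << 24) / real_length. Every entry in kTestLengths is a power of two, so for the repeated one-byte strings each run covers exactly 1 << 24 = 16777216 bytes; the multi-byte and ranged strings can overshoot the requested length slightly, which is why the total is only "around" 16 MB. A compile-time check of the 32768-byte case:

static_assert((1 << 24) / 32768 == 512 && 512 * 32768 == (1 << 24),
              "a 32768-byte string validated 512 times covers 16777216 bytes");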
-void RunSomeTests( - const char format[], - base::RepeatingCallback construct_test_string, - const TestFunctionDescription* test_functions, - size_t test_count) { - for (auto length : kTestLengths) { - const std::string test_string = construct_test_string.Run(length); - const int real_length = static_cast(test_string.length()); - const int times = (1 << 24) / real_length; - for (size_t test_index = 0; test_index < test_count; ++test_index) { - EXPECT_TRUE(RunTest(StringPrintf(format, - test_functions[test_index].function_name, - real_length, - times), - test_functions[test_index].function, - test_string, - times)); - } - } -} - -TEST(StreamingUtf8ValidatorPerfTest, OneByteRepeated) { - RunSomeTests( - "%s: bytes=1 repeated length=%d repeat=%d", - base::BindRepeating(ConstructRepeatedTestString, kOneByteSeqRangeStart), - kTestFunctions, 3); -} - -TEST(StreamingUtf8ValidatorPerfTest, OneByteRange) { - RunSomeTests("%s: bytes=1 ranged length=%d repeat=%d", - base::BindRepeating(ConstructRangedTestString, - kOneByteSeqRangeStart, kOneByteSeqRangeEnd), - kTestFunctions, 3); -} - -TEST(StreamingUtf8ValidatorPerfTest, TwoByteRepeated) { - RunSomeTests( - "%s: bytes=2 repeated length=%d repeat=%d", - base::BindRepeating(ConstructRepeatedTestString, kTwoByteSeqRangeStart), - kTestFunctions, 2); -} - -TEST(StreamingUtf8ValidatorPerfTest, TwoByteRange) { - RunSomeTests("%s: bytes=2 ranged length=%d repeat=%d", - base::BindRepeating(ConstructRangedTestString, - kTwoByteSeqRangeStart, kTwoByteSeqRangeEnd), - kTestFunctions, 2); -} - -TEST(StreamingUtf8ValidatorPerfTest, ThreeByteRepeated) { - RunSomeTests( - "%s: bytes=3 repeated length=%d repeat=%d", - base::BindRepeating(ConstructRepeatedTestString, kThreeByteSeqRangeStart), - kTestFunctions, 2); -} - -TEST(StreamingUtf8ValidatorPerfTest, ThreeByteRange) { - RunSomeTests( - "%s: bytes=3 ranged length=%d repeat=%d", - base::BindRepeating(ConstructRangedTestString, kThreeByteSeqRangeStart, - kThreeByteSeqRangeEnd), - kTestFunctions, 2); -} - -TEST(StreamingUtf8ValidatorPerfTest, FourByteRepeated) { - RunSomeTests( - "%s: bytes=4 repeated length=%d repeat=%d", - base::BindRepeating(ConstructRepeatedTestString, kFourByteSeqRangeStart), - kTestFunctions, 2); -} - -TEST(StreamingUtf8ValidatorPerfTest, FourByteRange) { - RunSomeTests( - "%s: bytes=4 ranged length=%d repeat=%d", - base::BindRepeating(ConstructRangedTestString, kFourByteSeqRangeStart, - kFourByteSeqRangeEnd), - kTestFunctions, 2); -} - -} // namespace -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/json/json_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/json/json_perftest.cc deleted file mode 100644 index f7d1dffd2..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/json/json_perftest.cc +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/json/json_reader.h" -#include "base/json/json_writer.h" -#include "base/memory/ptr_util.h" -#include "base/strings/string_number_conversions.h" -#include "base/time/time.h" -#include "base/values.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { - -namespace { - -constexpr char kMetricPrefixJSON[] = "JSON."; -constexpr char kMetricReadTime[] = "read_time"; -constexpr char kMetricWriteTime[] = "write_time"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixJSON, story_name); - reporter.RegisterImportantMetric(kMetricReadTime, "ms"); - reporter.RegisterImportantMetric(kMetricWriteTime, "ms"); - return reporter; -} - -// Generates a simple dictionary value with simple data types, a string and a -// list. -DictionaryValue GenerateDict() { - DictionaryValue root; - root.SetDoubleKey("Double", 3.141); - root.SetBoolKey("Bool", true); - root.SetIntKey("Int", 42); - root.SetStringKey("String", "Foo"); - - ListValue list; - list.Append(2.718); - list.Append(false); - list.Append(123); - list.Append("Bar"); - root.SetKey("List", std::move(list)); - - return root; -} - -// Generates a tree-like dictionary value with a size of O(breadth ** depth). -DictionaryValue GenerateLayeredDict(int breadth, int depth) { - if (depth == 1) - return GenerateDict(); - - DictionaryValue root = GenerateDict(); - DictionaryValue next = GenerateLayeredDict(breadth, depth - 1); - - for (int i = 0; i < breadth; ++i) { - root.SetKey("Dict" + base::NumberToString(i), next.Clone()); - } - - return root; -} - -} // namespace - -class JSONPerfTest : public testing::Test { - public: - void TestWriteAndRead(int breadth, int depth) { - std::string description = "Breadth: " + base::NumberToString(breadth) + - ", Depth: " + base::NumberToString(depth); - DictionaryValue dict = GenerateLayeredDict(breadth, depth); - std::string json; - - TimeTicks start_write = TimeTicks::Now(); - JSONWriter::Write(dict, &json); - TimeTicks end_write = TimeTicks::Now(); - auto reporter = SetUpReporter("breadth_" + base::NumberToString(breadth) + - "_depth_" + base::NumberToString(depth)); - reporter.AddResult(kMetricWriteTime, end_write - start_write); - - TimeTicks start_read = TimeTicks::Now(); - JSONReader::Read(json); - TimeTicks end_read = TimeTicks::Now(); - reporter.AddResult(kMetricReadTime, end_read - start_read); - } -}; - -// Times out on Android (crbug.com/906686). -#if defined(OS_ANDROID) -#define MAYBE_StressTest DISABLED_StressTest -#else -#define MAYBE_StressTest StressTest -#endif -TEST_F(JSONPerfTest, MAYBE_StressTest) { - for (int i = 0; i < 4; ++i) { - for (int j = 0; j < 12; ++j) { - TestWriteAndRead(i + 1, j + 1); - } - } -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/message_loop/message_pump_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/message_loop/message_pump_perftest.cc deleted file mode 100644 index 10f05aff3..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/message_loop/message_pump_perftest.cc +++ /dev/null @@ -1,262 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include -#include - -#include "base/bind.h" -#include "base/bind_helpers.h" -#include "base/format_macros.h" -#include "base/memory/ptr_util.h" -#include "base/message_loop/message_loop_current.h" -#include "base/message_loop/message_pump_type.h" -#include "base/single_thread_task_runner.h" -#include "base/strings/stringprintf.h" -#include "base/synchronization/condition_variable.h" -#include "base/synchronization/lock.h" -#include "base/synchronization/waitable_event.h" -#include "base/task/sequence_manager/sequence_manager_impl.h" -#include "base/threading/thread.h" -#include "base/time/time.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -#if defined(OS_ANDROID) -#include "base/android/java_handler_thread.h" -#endif - -namespace base { -namespace { - -constexpr char kMetricPrefixScheduleWork[] = "ScheduleWork."; -constexpr char kMetricMinBatchTime[] = "min_batch_time_per_task"; -constexpr char kMetricMaxBatchTime[] = "max_batch_time_per_task"; -constexpr char kMetricTotalTime[] = "total_time_per_task"; -constexpr char kMetricThreadTime[] = "thread_time_per_task"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixScheduleWork, story_name); - reporter.RegisterImportantMetric(kMetricMinBatchTime, "us"); - reporter.RegisterImportantMetric(kMetricMaxBatchTime, "us"); - reporter.RegisterImportantMetric(kMetricTotalTime, "us"); - reporter.RegisterImportantMetric(kMetricThreadTime, "us"); - return reporter; -} - -#if defined(OS_ANDROID) -class JavaHandlerThreadForTest : public android::JavaHandlerThread { - public: - explicit JavaHandlerThreadForTest(const char* name) - : android::JavaHandlerThread(name, base::ThreadPriority::NORMAL) {} - - using android::JavaHandlerThread::state; - using android::JavaHandlerThread::State; -}; -#endif - -} // namespace - -class ScheduleWorkTest : public testing::Test { - public: - ScheduleWorkTest() : counter_(0) {} - - void SetUp() override { - if (base::ThreadTicks::IsSupported()) - base::ThreadTicks::WaitUntilInitialized(); - } - - void Increment(uint64_t amount) { counter_ += amount; } - - void Schedule(int index) { - base::TimeTicks start = base::TimeTicks::Now(); - base::ThreadTicks thread_start; - if (ThreadTicks::IsSupported()) - thread_start = base::ThreadTicks::Now(); - base::TimeDelta minimum = base::TimeDelta::Max(); - base::TimeDelta maximum = base::TimeDelta(); - base::TimeTicks now, lastnow = start; - uint64_t schedule_calls = 0u; - do { - for (size_t i = 0; i < kBatchSize; ++i) { - target_message_loop_base()->GetMessagePump()->ScheduleWork(); - schedule_calls++; - } - now = base::TimeTicks::Now(); - base::TimeDelta laptime = now - lastnow; - lastnow = now; - minimum = std::min(minimum, laptime); - maximum = std::max(maximum, laptime); - } while (now - start < base::TimeDelta::FromSeconds(kTargetTimeSec)); - - scheduling_times_[index] = now - start; - if (ThreadTicks::IsSupported()) - scheduling_thread_times_[index] = - base::ThreadTicks::Now() - thread_start; - min_batch_times_[index] = minimum; - max_batch_times_[index] = maximum; - target_message_loop_base()->GetTaskRunner()->PostTask( - FROM_HERE, base::BindOnce(&ScheduleWorkTest::Increment, - base::Unretained(this), schedule_calls)); - } - - void ScheduleWork(MessagePumpType target_type, int num_scheduling_threads) { -#if defined(OS_ANDROID) - if (target_type == MessagePumpType::JAVA) { - java_thread_.reset(new 
JavaHandlerThreadForTest("target")); - java_thread_->Start(); - } else -#endif - { - target_.reset(new Thread("test")); - - Thread::Options options(target_type, 0u); - options.message_pump_type = target_type; - target_->StartWithOptions(options); - - // Without this, it's possible for the scheduling threads to start and run - // before the target thread. In this case, the scheduling threads will - // call target_message_loop()->ScheduleWork(), which dereferences the - // loop's message pump, which is only created after the target thread has - // finished starting. - target_->WaitUntilThreadStarted(); - } - - std::vector> scheduling_threads; - scheduling_times_.reset(new base::TimeDelta[num_scheduling_threads]); - scheduling_thread_times_.reset(new base::TimeDelta[num_scheduling_threads]); - min_batch_times_.reset(new base::TimeDelta[num_scheduling_threads]); - max_batch_times_.reset(new base::TimeDelta[num_scheduling_threads]); - - for (int i = 0; i < num_scheduling_threads; ++i) { - scheduling_threads.push_back(std::make_unique("posting thread")); - scheduling_threads[i]->Start(); - } - - for (int i = 0; i < num_scheduling_threads; ++i) { - scheduling_threads[i]->task_runner()->PostTask( - FROM_HERE, base::BindOnce(&ScheduleWorkTest::Schedule, - base::Unretained(this), i)); - } - - for (int i = 0; i < num_scheduling_threads; ++i) { - scheduling_threads[i]->Stop(); - } -#if defined(OS_ANDROID) - if (target_type == MessagePumpType::JAVA) { - java_thread_->Stop(); - java_thread_.reset(); - } else -#endif - { - target_->Stop(); - target_.reset(); - } - base::TimeDelta total_time; - base::TimeDelta total_thread_time; - base::TimeDelta min_batch_time = base::TimeDelta::Max(); - base::TimeDelta max_batch_time = base::TimeDelta(); - for (int i = 0; i < num_scheduling_threads; ++i) { - total_time += scheduling_times_[i]; - total_thread_time += scheduling_thread_times_[i]; - min_batch_time = std::min(min_batch_time, min_batch_times_[i]); - max_batch_time = std::max(max_batch_time, max_batch_times_[i]); - } - - std::string story_name = StringPrintf( - "%s_pump_from_%d_threads", - target_type == MessagePumpType::IO - ? "io" - : (target_type == MessagePumpType::UI ? 
"ui" : "default"), - num_scheduling_threads); - auto reporter = SetUpReporter(story_name); - reporter.AddResult(kMetricMinBatchTime, total_time.InMicroseconds() / - static_cast(counter_)); - reporter.AddResult( - kMetricMaxBatchTime, - max_batch_time.InMicroseconds() / static_cast(kBatchSize)); - reporter.AddResult(kMetricTotalTime, total_time.InMicroseconds() / - static_cast(counter_)); - if (ThreadTicks::IsSupported()) { - reporter.AddResult(kMetricThreadTime, total_thread_time.InMicroseconds() / - static_cast(counter_)); - } - } - - sequence_manager::internal::SequenceManagerImpl* target_message_loop_base() { -#if defined(OS_ANDROID) - if (java_thread_) { - return static_cast( - java_thread_->state()->sequence_manager.get()); - } -#endif - return MessageLoopCurrent::Get()->GetCurrentSequenceManagerImpl(); - } - - private: - std::unique_ptr target_; -#if defined(OS_ANDROID) - std::unique_ptr java_thread_; -#endif - std::unique_ptr scheduling_times_; - std::unique_ptr scheduling_thread_times_; - std::unique_ptr min_batch_times_; - std::unique_ptr max_batch_times_; - uint64_t counter_; - - static const size_t kTargetTimeSec = 5; - static const size_t kBatchSize = 1000; -}; - -TEST_F(ScheduleWorkTest, ThreadTimeToIOFromOneThread) { - ScheduleWork(MessagePumpType::IO, 1); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToIOFromTwoThreads) { - ScheduleWork(MessagePumpType::IO, 2); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToIOFromFourThreads) { - ScheduleWork(MessagePumpType::IO, 4); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToUIFromOneThread) { - ScheduleWork(MessagePumpType::UI, 1); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToUIFromTwoThreads) { - ScheduleWork(MessagePumpType::UI, 2); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToUIFromFourThreads) { - ScheduleWork(MessagePumpType::UI, 4); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToDefaultFromOneThread) { - ScheduleWork(MessagePumpType::DEFAULT, 1); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToDefaultFromTwoThreads) { - ScheduleWork(MessagePumpType::DEFAULT, 2); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToDefaultFromFourThreads) { - ScheduleWork(MessagePumpType::DEFAULT, 4); -} - -#if defined(OS_ANDROID) -TEST_F(ScheduleWorkTest, ThreadTimeToJavaFromOneThread) { - ScheduleWork(MessagePumpType::JAVA, 1); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToJavaFromTwoThreads) { - ScheduleWork(MessagePumpType::JAVA, 2); -} - -TEST_F(ScheduleWorkTest, ThreadTimeToJavaFromFourThreads) { - ScheduleWork(MessagePumpType::JAVA, 4); -} -#endif - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/observer_list_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/observer_list_perftest.cc deleted file mode 100644 index 08f0af93d..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/observer_list_perftest.cc +++ /dev/null @@ -1,131 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/observer_list.h" - -#include - -#include "base/logging.h" -#include "base/observer_list.h" -#include "base/strings/stringprintf.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -// Ask the compiler not to use a register for this counter, in case it decides -// to do magic optimizations like |counter += kLaps|. 
-volatile int g_observer_list_perf_test_counter; - -namespace base { - -constexpr char kMetricPrefixObserverList[] = "ObserverList."; -constexpr char kMetricNotifyTimePerObserver[] = "notify_time_per_observer"; - -namespace { - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixObserverList, story_name); - reporter.RegisterImportantMetric(kMetricNotifyTimePerObserver, "ns"); - return reporter; -} - -} // namespace - -class ObserverInterface { - public: - ObserverInterface() {} - virtual ~ObserverInterface() {} - virtual void Observe() const { ++g_observer_list_perf_test_counter; } - - private: - DISALLOW_COPY_AND_ASSIGN(ObserverInterface); -}; - -class UnsafeObserver : public ObserverInterface {}; - -class TestCheckedObserver : public CheckedObserver, public ObserverInterface {}; - -template -struct Pick { - // The ObserverList type to use. Checked observers need to be in a checked - // ObserverList. - using ObserverListType = ObserverList; - static const char* GetName() { return "CheckedObserver"; } -}; -template <> -struct Pick { - using ObserverListType = ObserverList::Unchecked; - static const char* GetName() { return "UnsafeObserver"; } -}; - -template -class ObserverListPerfTest : public ::testing::Test { - public: - using ObserverListType = typename Pick::ObserverListType; - - ObserverListPerfTest() {} - - private: - DISALLOW_COPY_AND_ASSIGN(ObserverListPerfTest); -}; - -typedef ::testing::Types ObserverTypes; -TYPED_TEST_SUITE(ObserverListPerfTest, ObserverTypes); - -// Performance test for base::ObserverList and Checked Observers. -TYPED_TEST(ObserverListPerfTest, NotifyPerformance) { - constexpr int kMaxObservers = 128; -#if DCHECK_IS_ON() - // The test takes about 100x longer in debug builds, mostly due to sequence - // checker overheads when WeakPtr gets involved. - constexpr int kLaps = 1000000; -#else - constexpr int kLaps = 100000000; -#endif - constexpr int kWarmupLaps = 100; - std::vector> observers; - - for (int observer_count = 0; observer_count <= kMaxObservers; - observer_count = observer_count ? observer_count * 2 : 1) { - typename TestFixture::ObserverListType list; - for (int i = 0; i < observer_count; ++i) - observers.push_back(std::make_unique()); - for (auto& o : observers) - list.AddObserver(o.get()); - - for (int i = 0; i < kWarmupLaps; ++i) { - for (auto& o : list) - o.Observe(); - } - g_observer_list_perf_test_counter = 0; - const int weighted_laps = kLaps / (observer_count + 1); - - TimeTicks start = TimeTicks::Now(); - for (int i = 0; i < weighted_laps; ++i) { - for (auto& o : list) - o.Observe(); - } - TimeDelta duration = TimeTicks::Now() - start; - - observers.clear(); - - EXPECT_EQ(observer_count * weighted_laps, - g_observer_list_perf_test_counter); - EXPECT_TRUE(observer_count == 0 || list.might_have_observers()); - - std::string story_name = - base::StringPrintf("%s_%d", Pick::GetName(), observer_count); - - // A typical value is 3-20 nanoseconds per observe in Release, 1000-2000ns - // in an optimized build with DCHECKs and 3000-6000ns in debug builds. 
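A detail of the per-observer metric computed just below that is easy to miss: after the timed loop g_observer_list_perf_test_counter equals observer_count * weighted_laps, so the divisor counter + weighted_laps is (observer_count + 1) * weighted_laps, which, up to the integer rounding in kLaps / (observer_count + 1), is simply kLaps. In other words each lap of the inner loop is charged as one extra observe, which also keeps the observer_count == 0 case well defined. A compile-time check of the observer_count == 1 case with the release value kLaps = 100000000:

static_assert(1 * (100000000 / 2) + (100000000 / 2) == 100000000,
              "counter + weighted_laps equals kLaps when observer_count == 1");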
- auto reporter = SetUpReporter(story_name); - reporter.AddResult( - kMetricNotifyTimePerObserver, - duration.InNanoseconds() / - static_cast(g_observer_list_perf_test_counter + - weighted_laps)); - } -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/strings/string_util_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/strings/string_util_perftest.cc deleted file mode 100644 index 17cdb0522..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/strings/string_util_perftest.cc +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include "base/strings/string_util.h" - -#include - -#include "base/time/time.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" - -namespace base { - -template -void MeasureIsStringASCII(size_t str_length, size_t non_ascii_pos) { - String str(str_length, 'A'); - if (non_ascii_pos < str_length) - str[non_ascii_pos] = '\xAF'; - - TimeTicks t0 = TimeTicks::Now(); - for (size_t i = 0; i < 10000000; ++i) - IsStringASCII(str); - TimeDelta time = TimeTicks::Now() - t0; - printf( - "char-size:\t%zu\tlength:\t%zu\tnon-ascii-pos:\t%zu\ttime-ms:\t%" PRIu64 - "\n", - sizeof(typename String::value_type), str_length, non_ascii_pos, - time.InMilliseconds()); -} - -TEST(StringUtilTest, DISABLED_IsStringASCIIPerf) { - for (size_t str_length = 4; str_length <= 1024; str_length *= 2) { - for (size_t non_ascii_loc = 0; non_ascii_loc < 3; ++non_ascii_loc) { - size_t non_ascii_pos = str_length * non_ascii_loc / 2 + 2; - MeasureIsStringASCII(str_length, non_ascii_pos); - MeasureIsStringASCII(str_length, non_ascii_pos); -#if defined(WCHAR_T_IS_UTF32) - MeasureIsStringASCII>(str_length, - non_ascii_pos); -#endif - } - } -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/synchronization/waitable_event_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/synchronization/waitable_event_perftest.cc deleted file mode 100644 index bcf911ee6..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/synchronization/waitable_event_perftest.cc +++ /dev/null @@ -1,189 +0,0 @@ -// Copyright 2017 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/synchronization/waitable_event.h" - -#include - -#include "base/threading/simple_thread.h" -#include "base/time/time.h" -#include "base/timer/elapsed_timer.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { - -namespace { - -constexpr char kMetricPrefixWaitableEvent[] = "WaitableEvent."; -constexpr char kMetricWaitTime[] = "wait_time_per_sample"; -constexpr char kMetricSignalTime[] = "signal_time_per_sample"; -constexpr char kMetricElapsedCycles[] = "elapsed_cycles"; -constexpr char kStorySingleThread[] = "single_thread_1000_samples"; -constexpr char kStoryMultiThreadWaiter[] = "multi_thread_1000_samples_waiter"; -constexpr char kStoryMultiThreadSignaler[] = - "multi_thread_1000_samples_signaler"; -constexpr char kStoryTimedThroughput[] = "timed_throughput"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixWaitableEvent, - story_name); - reporter.RegisterImportantMetric(kMetricWaitTime, "ns"); - reporter.RegisterImportantMetric(kMetricSignalTime, "ns"); - reporter.RegisterImportantMetric(kMetricElapsedCycles, "count"); - return reporter; -} - -class TraceWaitableEvent { - public: - TraceWaitableEvent() = default; - ~TraceWaitableEvent() = default; - - void Signal() { - ElapsedTimer timer; - event_.Signal(); - total_signal_time_ += timer.Elapsed(); - ++signal_samples_; - } - - void Wait() { - ElapsedTimer timer; - event_.Wait(); - total_wait_time_ += timer.Elapsed(); - ++wait_samples_; - } - - bool TimedWaitUntil(const TimeTicks& end_time) { - ElapsedTimer timer; - const bool signaled = event_.TimedWait(end_time - timer.Begin()); - total_wait_time_ += timer.Elapsed(); - ++wait_samples_; - return signaled; - } - - bool IsSignaled() { return event_.IsSignaled(); } - - TimeDelta total_signal_time() const { return total_signal_time_; } - TimeDelta total_wait_time() const { return total_wait_time_; } - size_t signal_samples() const { return signal_samples_; } - size_t wait_samples() const { return wait_samples_; } - - private: - WaitableEvent event_{WaitableEvent::ResetPolicy::AUTOMATIC}; - - TimeDelta total_signal_time_; - TimeDelta total_wait_time_; - - size_t signal_samples_ = 0U; - size_t wait_samples_ = 0U; - - DISALLOW_COPY_AND_ASSIGN(TraceWaitableEvent); -}; - -class SignalerThread : public SimpleThread { - public: - SignalerThread(TraceWaitableEvent* waiter, TraceWaitableEvent* signaler) - : SimpleThread("WaitableEventPerfTest signaler"), - waiter_(waiter), - signaler_(signaler) {} - - ~SignalerThread() override = default; - - void Run() override { - while (!stop_event_.IsSignaled()) { - if (waiter_) - waiter_->Wait(); - if (signaler_) - signaler_->Signal(); - } - } - - // Signals the thread to stop on the next iteration of its loop (which - // will happen immediately if no |waiter_| is present or is signaled. 
- void RequestStop() { stop_event_.Signal(); } - - private: - WaitableEvent stop_event_; - TraceWaitableEvent* waiter_; - TraceWaitableEvent* signaler_; - DISALLOW_COPY_AND_ASSIGN(SignalerThread); -}; - -void PrintPerfWaitableEvent(const TraceWaitableEvent* event, - const std::string& story_name, - size_t* elapsed_cycles = nullptr) { - auto reporter = SetUpReporter(story_name); - reporter.AddResult( - kMetricSignalTime, - static_cast(event->total_signal_time().InNanoseconds()) / - event->signal_samples()); - reporter.AddResult( - kMetricWaitTime, - static_cast(event->total_wait_time().InNanoseconds()) / - event->wait_samples()); - if (elapsed_cycles) { - reporter.AddResult(kMetricElapsedCycles, *elapsed_cycles); - } -} - -} // namespace - -TEST(WaitableEventPerfTest, SingleThread) { - const size_t kSamples = 1000; - - TraceWaitableEvent event; - - for (size_t i = 0; i < kSamples; ++i) { - event.Signal(); - event.Wait(); - } - - PrintPerfWaitableEvent(&event, kStorySingleThread); -} - -TEST(WaitableEventPerfTest, MultipleThreads) { - const size_t kSamples = 1000; - - TraceWaitableEvent waiter; - TraceWaitableEvent signaler; - - // The other thread will wait and signal on the respective opposite events. - SignalerThread thread(&signaler, &waiter); - thread.Start(); - - for (size_t i = 0; i < kSamples; ++i) { - signaler.Signal(); - waiter.Wait(); - } - - // Signal the stop event and then make sure the signaler event it is - // waiting on is also signaled. - thread.RequestStop(); - signaler.Signal(); - - thread.Join(); - - PrintPerfWaitableEvent(&waiter, kStoryMultiThreadWaiter); - PrintPerfWaitableEvent(&signaler, kStoryMultiThreadSignaler); -} - -TEST(WaitableEventPerfTest, Throughput) { - TraceWaitableEvent event; - - SignalerThread thread(nullptr, &event); - thread.Start(); - - const TimeTicks end_time = TimeTicks::Now() + TimeDelta::FromSeconds(1); - size_t count = 0; - while (event.TimedWaitUntil(end_time)) { - ++count; - } - - thread.RequestStop(); - thread.Join(); - - PrintPerfWaitableEvent(&event, kStoryTimedThroughput, &count); -} - -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/sequence_manager_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/sequence_manager_perftest.cc deleted file mode 100644 index 463f82bf2..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/sequence_manager/sequence_manager_perftest.cc +++ /dev/null @@ -1,737 +0,0 @@ -// Copyright 2015 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include "base/task/sequence_manager/sequence_manager.h" - -#include -#include - -#include "base/bind.h" -#include "base/message_loop/message_pump_default.h" -#include "base/message_loop/message_pump_type.h" -#include "base/run_loop.h" -#include "base/sequence_checker.h" -#include "base/single_thread_task_runner.h" -#include "base/strings/stringprintf.h" -#include "base/synchronization/condition_variable.h" -#include "base/task/post_task.h" -#include "base/task/sequence_manager/task_queue_impl.h" -#include "base/task/sequence_manager/test/mock_time_domain.h" -#include "base/task/sequence_manager/test/sequence_manager_for_test.h" -#include "base/task/sequence_manager/test/test_task_queue.h" -#include "base/task/sequence_manager/test/test_task_time_observer.h" -#include "base/task/sequence_manager/thread_controller_with_message_pump_impl.h" -#include "base/task/task_traits.h" -#include "base/task/thread_pool.h" -#include "base/task/thread_pool/thread_pool_impl.h" -#include "base/task/thread_pool/thread_pool_instance.h" -#include "base/threading/thread.h" -#include "base/threading/thread_task_runner_handle.h" -#include "base/time/default_tick_clock.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { -namespace sequence_manager { -namespace { -const int kNumTasks = 1000000; - -constexpr char kMetricPrefixSequenceManager[] = "SequenceManager."; -constexpr char kMetricPostTimePerTask[] = "post_time_per_task"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixSequenceManager, - story_name); - reporter.RegisterImportantMetric(kMetricPostTimePerTask, "us"); - return reporter; -} - -} // namespace - -// To reduce noise related to the OS timer, we use a mock time domain to -// fast forward the timers. -class PerfTestTimeDomain : public MockTimeDomain { - public: - PerfTestTimeDomain() : MockTimeDomain(TimeTicks::Now()) {} - ~PerfTestTimeDomain() override = default; - - Optional DelayTillNextTask(LazyNow* lazy_now) override { - Optional run_time = NextScheduledRunTime(); - if (!run_time) - return nullopt; - SetNowTicks(*run_time); - // Makes SequenceManager to continue immediately. - return TimeDelta(); - } - - void SetNextDelayedDoWork(LazyNow* lazy_now, TimeTicks run_time) override { - // De-dupe DoWorks. - if (NumberOfScheduledWakeUps() == 1u) - RequestDoWork(); - } - - private: - DISALLOW_COPY_AND_ASSIGN(PerfTestTimeDomain); -}; - -enum class PerfTestType { - // A SequenceManager with a ThreadControllerWithMessagePumpImpl driving the - // thread. - kUseSequenceManagerWithMessagePump, - kUseSequenceManagerWithUIMessagePump, - kUseSequenceManagerWithIOMessagePump, - kUseSequenceManagerWithMessagePumpAndRandomSampling, - - // A SingleThreadTaskRunner in the thread pool. - kUseSingleThreadInThreadPool, -}; - -// Customization point for SequenceManagerPerfTest which allows us to test -// various implementations. 
-class PerfTestDelegate { - public: - virtual ~PerfTestDelegate() = default; - - virtual const char* GetName() const = 0; - - virtual bool VirtualTimeIsSupported() const = 0; - - virtual bool MultipleQueuesSupported() const = 0; - - virtual scoped_refptr CreateTaskRunner() = 0; - - virtual void WaitUntilDone() = 0; - - virtual void SignalDone() = 0; -}; - -class BaseSequenceManagerPerfTestDelegate : public PerfTestDelegate { - public: - BaseSequenceManagerPerfTestDelegate() {} - - ~BaseSequenceManagerPerfTestDelegate() override = default; - - bool VirtualTimeIsSupported() const override { return true; } - - bool MultipleQueuesSupported() const override { return true; } - - scoped_refptr CreateTaskRunner() override { - scoped_refptr task_queue = - manager_->CreateTaskQueueWithType( - TaskQueue::Spec("test").SetTimeDomain(time_domain_.get())); - owned_task_queues_.push_back(task_queue); - return task_queue->task_runner(); - } - - void WaitUntilDone() override { - run_loop_.reset(new RunLoop()); - run_loop_->Run(); - } - - void SignalDone() override { run_loop_->Quit(); } - - SequenceManager* GetManager() const { return manager_.get(); } - - void SetSequenceManager(std::unique_ptr manager) { - manager_ = std::move(manager); - time_domain_ = std::make_unique(); - manager_->RegisterTimeDomain(time_domain_.get()); - } - - void ShutDown() { - owned_task_queues_.clear(); - manager_->UnregisterTimeDomain(time_domain_.get()); - manager_.reset(); - } - - private: - std::unique_ptr manager_; - std::unique_ptr time_domain_; - std::unique_ptr run_loop_; - std::vector> owned_task_queues_; -}; - -class SequenceManagerWithMessagePumpPerfTestDelegate - : public BaseSequenceManagerPerfTestDelegate { - public: - SequenceManagerWithMessagePumpPerfTestDelegate( - const char* name, - MessagePumpType type, - bool randomised_sampling_enabled = false) - : name_(name) { - auto settings = - SequenceManager::Settings::Builder() - .SetRandomisedSamplingEnabled(randomised_sampling_enabled) - .Build(); - SetSequenceManager(SequenceManagerForTest::Create( - std::make_unique( - MessagePump::Create(type), settings), - std::move(settings))); - - // ThreadControllerWithMessagePumpImpl doesn't provide a default task - // runner. 
- scoped_refptr default_task_queue = - GetManager()->template CreateTaskQueueWithType( - TaskQueue::Spec("default")); - GetManager()->SetDefaultTaskRunner(default_task_queue->task_runner()); - } - - ~SequenceManagerWithMessagePumpPerfTestDelegate() override { ShutDown(); } - - const char* GetName() const override { return name_; } - - private: - const char* const name_; -}; - -class SingleThreadInThreadPoolPerfTestDelegate : public PerfTestDelegate { - public: - SingleThreadInThreadPoolPerfTestDelegate() : done_cond_(&done_lock_) { - ThreadPoolInstance::Set( - std::make_unique<::base::internal::ThreadPoolImpl>("Test")); - ThreadPoolInstance::Get()->StartWithDefaultParams(); - } - - ~SingleThreadInThreadPoolPerfTestDelegate() override { - ThreadPoolInstance::Get()->JoinForTesting(); - ThreadPoolInstance::Set(nullptr); - } - - const char* GetName() const override { - return " single thread in ThreadPool "; - } - - bool VirtualTimeIsSupported() const override { return false; } - - bool MultipleQueuesSupported() const override { return false; } - - scoped_refptr CreateTaskRunner() override { - return ThreadPool::CreateSingleThreadTaskRunner( - {TaskPriority::USER_BLOCKING}); - } - - void WaitUntilDone() override { - AutoLock auto_lock(done_lock_); - done_cond_.Wait(); - } - - void SignalDone() override { - AutoLock auto_lock(done_lock_); - done_cond_.Signal(); - } - - private: - Lock done_lock_; - ConditionVariable done_cond_; -}; - -class TestCase { - public: - // |delegate| is assumed to outlive TestCase. - explicit TestCase(PerfTestDelegate* delegate) : delegate_(delegate) {} - - virtual ~TestCase() = default; - - virtual void Start() = 0; - - protected: - PerfTestDelegate* const delegate_; // NOT OWNED -}; - -class TaskSource { - public: - virtual ~TaskSource() = default; - - virtual void Start() = 0; -}; - -class SameThreadTaskSource : public TaskSource { - public: - SameThreadTaskSource(std::vector> task_runners, - size_t num_tasks) - : num_queues_(task_runners.size()), - num_tasks_(num_tasks), - task_closure_( - BindRepeating(&SameThreadTaskSource::TestTask, Unretained(this))), - task_runners_(std::move(task_runners)) { - DETACH_FROM_SEQUENCE(sequence_checker_); - } - - void Start() override { - num_tasks_in_flight_ = 1; - num_tasks_to_post_ = num_tasks_; - num_tasks_to_run_ = num_tasks_; - // Post the initial task instead of running it synchronously to ensure that - // all invocations happen on the same sequence. - PostTask(0); - } - - protected: - virtual void PostTask(unsigned int queue) = 0; - - virtual void SignalDone() = 0; - - void TestTask() { - DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); - - if (--num_tasks_to_run_ == 0) { - SignalDone(); - return; - } - - num_tasks_in_flight_--; - // NOTE there are only up to max_tasks_in_flight_ pending delayed tasks at - // any one time. Thanks to the lower_num_tasks_to_post going to zero if - // there are a lot of tasks in flight, the total number of task in flight at - // any one time is very variable. - unsigned int lower_num_tasks_to_post = - num_tasks_in_flight_ < (max_tasks_in_flight_ / 2) ? 1 : 0; - unsigned int max_tasks_to_post = - num_tasks_to_post_ % 2 ? lower_num_tasks_to_post : 10; - for (unsigned int i = 0; - i < max_tasks_to_post && num_tasks_in_flight_ < max_tasks_in_flight_ && - num_tasks_to_post_ > 0; - i++) { - // Choose a queue weighted towards queue 0. 
- unsigned int queue = num_tasks_to_post_ % (num_queues_ + 1); - if (queue == num_queues_) { - queue = 0; - } - PostTask(queue); - num_tasks_in_flight_++; - num_tasks_to_post_--; - } - } - - const size_t num_queues_; - const size_t num_tasks_; - const RepeatingClosure task_closure_; - const std::vector> task_runners_; - const unsigned int max_tasks_in_flight_ = 200; - unsigned int num_tasks_in_flight_; - unsigned int num_tasks_to_post_; - unsigned int num_tasks_to_run_; - SEQUENCE_CHECKER(sequence_checker_); -}; - -class CrossThreadTaskSource : public TaskSource { - public: - CrossThreadTaskSource(std::vector> task_runners, - size_t num_tasks) - : num_queues_(task_runners.size()), - num_tasks_(num_tasks), - task_closure_( - BindRepeating(&CrossThreadTaskSource::TestTask, Unretained(this))), - task_runners_(std::move(task_runners)) {} - - void Start() override { - num_tasks_in_flight_ = 0; - num_tasks_to_run_ = num_tasks_; - - for (size_t i = 0; i < num_tasks_; i++) { - while (num_tasks_in_flight_.load(std::memory_order_acquire) > - max_tasks_in_flight_) { - PlatformThread::YieldCurrentThread(); - } - // Choose a queue weighted towards queue 0. - unsigned int queue = i % (num_queues_ + 1); - if (queue == num_queues_) { - queue = 0; - } - PostTask(queue); - num_tasks_in_flight_++; - } - } - - protected: - virtual void PostTask(unsigned int queue) = 0; - - // Will be called on the main thread. - virtual void SignalDone() = 0; - - void TestTask() { - if (num_tasks_to_run_.fetch_sub(1) == 1) { - SignalDone(); - return; - } - num_tasks_in_flight_--; - } - - const size_t num_queues_; - const size_t num_tasks_; - const RepeatingClosure task_closure_; - const std::vector> task_runners_; - const unsigned int max_tasks_in_flight_ = 200; - std::atomic num_tasks_in_flight_; - std::atomic num_tasks_to_run_; -}; - -class SingleThreadImmediateTestCase : public TestCase { - public: - SingleThreadImmediateTestCase( - PerfTestDelegate* delegate, - std::vector> task_runners) - : TestCase(delegate), - task_source_(std::make_unique( - delegate, - std::move(task_runners), - kNumTasks)) {} - - void Start() override { task_source_->Start(); } - - private: - class SingleThreadImmediateTaskSource : public SameThreadTaskSource { - public: - SingleThreadImmediateTaskSource( - PerfTestDelegate* delegate, - std::vector> task_runners, - size_t num_tasks) - : SameThreadTaskSource(std::move(task_runners), num_tasks), - delegate_(delegate) {} - - ~SingleThreadImmediateTaskSource() override = default; - - void PostTask(unsigned int queue) override { - task_runners_[queue]->PostTask(FROM_HERE, task_closure_); - } - - void SignalDone() override { delegate_->SignalDone(); } - - PerfTestDelegate* delegate_; // NOT OWNED. 
- }; - - const std::unique_ptr task_source_; -}; - -class SingleThreadDelayedTestCase : public TestCase { - public: - SingleThreadDelayedTestCase( - PerfTestDelegate* delegate, - std::vector> task_runners) - : TestCase(delegate), - task_source_(std::make_unique( - delegate, - std::move(task_runners), - kNumTasks)) {} - - void Start() override { task_source_->Start(); } - - private: - class SingleThreadDelayedTaskSource : public SameThreadTaskSource { - public: - explicit SingleThreadDelayedTaskSource( - PerfTestDelegate* delegate, - std::vector> task_runners, - size_t num_tasks) - : SameThreadTaskSource(std::move(task_runners), num_tasks), - delegate_(delegate) {} - - ~SingleThreadDelayedTaskSource() override = default; - - void PostTask(unsigned int queue) override { - unsigned int delay = - num_tasks_to_post_ % 2 ? 1 : (10 + num_tasks_to_post_ % 10); - task_runners_[queue]->PostDelayedTask(FROM_HERE, task_closure_, - TimeDelta::FromMilliseconds(delay)); - } - - void SignalDone() override { delegate_->SignalDone(); } - - PerfTestDelegate* delegate_; // NOT OWNED. - }; - - const std::unique_ptr task_source_; -}; - -class TwoThreadTestCase : public TestCase { - public: - TwoThreadTestCase(PerfTestDelegate* delegate, - std::vector> task_runners) - : TestCase(delegate), - task_runners_(std::move(task_runners)), - num_tasks_(kNumTasks), - auxiliary_thread_("auxillary thread") { - auxiliary_thread_.Start(); - } - - ~TwoThreadTestCase() override { auxiliary_thread_.Stop(); } - - protected: - void Start() override { - done_count_ = 0; - same_thread_task_source_ = - std::make_unique(this, task_runners_, - num_tasks_ / 2); - cross_thread_task_scorce_ = - std::make_unique(this, task_runners_, - num_tasks_ / 2); - - auxiliary_thread_.task_runner()->PostTask( - FROM_HERE, base::BindOnce(&CrossThreadImmediateTaskSource::Start, - Unretained(cross_thread_task_scorce_.get()))); - same_thread_task_source_->Start(); - } - - class SingleThreadImmediateTaskSource : public SameThreadTaskSource { - public: - SingleThreadImmediateTaskSource( - TwoThreadTestCase* two_thread_test_case, - std::vector> task_runners, - size_t num_tasks) - : SameThreadTaskSource(std::move(task_runners), num_tasks), - two_thread_test_case_(two_thread_test_case) {} - - ~SingleThreadImmediateTaskSource() override = default; - - void PostTask(unsigned int queue) override { - task_runners_[queue]->PostTask(FROM_HERE, task_closure_); - } - - // Will be called on the main thread. - void SignalDone() override { two_thread_test_case_->SignalDone(); } - - TwoThreadTestCase* two_thread_test_case_; // NOT OWNED. - }; - - class CrossThreadImmediateTaskSource : public CrossThreadTaskSource { - public: - CrossThreadImmediateTaskSource( - TwoThreadTestCase* two_thread_test_case, - std::vector> task_runners, - size_t num_tasks) - : CrossThreadTaskSource(std::move(task_runners), num_tasks), - two_thread_test_case_(two_thread_test_case) {} - - ~CrossThreadImmediateTaskSource() override = default; - - void PostTask(unsigned int queue) override { - task_runners_[queue]->PostTask(FROM_HERE, task_closure_); - } - - // Will be called on the main thread. - void SignalDone() override { two_thread_test_case_->SignalDone(); } - - TwoThreadTestCase* two_thread_test_case_; // NOT OWNED. 
- }; - - void SignalDone() { - if (++done_count_ == 2) - delegate_->SignalDone(); - } - - private: - const std::vector> task_runners_; - const size_t num_tasks_; - Thread auxiliary_thread_; - std::unique_ptr same_thread_task_source_; - std::unique_ptr cross_thread_task_scorce_; - int done_count_ = 0; -}; - -class SequenceManagerPerfTest : public testing::TestWithParam { - public: - SequenceManagerPerfTest() = default; - - void SetUp() override { delegate_ = CreateDelegate(); } - - void TearDown() override { delegate_.reset(); } - - std::unique_ptr CreateDelegate() { - switch (GetParam()) { - case PerfTestType::kUseSequenceManagerWithMessagePump: - return std::make_unique( - " SequenceManager with MessagePumpDefault ", - MessagePumpType::DEFAULT); - - case PerfTestType::kUseSequenceManagerWithUIMessagePump: - return std::make_unique( - " SequenceManager with MessagePumpForUI ", MessagePumpType::UI); - - case PerfTestType::kUseSequenceManagerWithIOMessagePump: - return std::make_unique( - " SequenceManager with MessagePumpForIO ", MessagePumpType::IO); - - case PerfTestType::kUseSequenceManagerWithMessagePumpAndRandomSampling: - return std::make_unique( - " SequenceManager with MessagePumpDefault and random sampling ", - MessagePumpType::DEFAULT, true); - - case PerfTestType::kUseSingleThreadInThreadPool: - return std::make_unique(); - - default: - NOTREACHED(); - return nullptr; - } - } - - bool ShouldMeasureQueueScaling() const { - // To limit test run time, we only measure multiple queues specific sequence - // manager configurations. - return delegate_->MultipleQueuesSupported() && - GetParam() == PerfTestType::kUseSequenceManagerWithUIMessagePump; - } - - std::vector> CreateTaskRunners(int num) { - std::vector> task_runners; - for (int i = 0; i < num; i++) { - task_runners.push_back(delegate_->CreateTaskRunner()); - } - return task_runners; - } - - void Benchmark(const std::string& story_prefix, TestCase* TestCase) { - TimeTicks start = TimeTicks::Now(); - TimeTicks now; - TestCase->Start(); - delegate_->WaitUntilDone(); - now = TimeTicks::Now(); - - auto reporter = SetUpReporter(story_prefix + delegate_->GetName()); - reporter.AddResult( - kMetricPostTimePerTask, - (now - start).InMicroseconds() / static_cast(kNumTasks)); - } - - std::unique_ptr delegate_; -}; - -INSTANTIATE_TEST_SUITE_P( - All, - SequenceManagerPerfTest, - testing::Values( - PerfTestType::kUseSequenceManagerWithMessagePump, - PerfTestType::kUseSequenceManagerWithUIMessagePump, - PerfTestType::kUseSequenceManagerWithIOMessagePump, - PerfTestType::kUseSingleThreadInThreadPool, - PerfTestType::kUseSequenceManagerWithMessagePumpAndRandomSampling)); -TEST_P(SequenceManagerPerfTest, PostDelayedTasks_OneQueue) { - if (!delegate_->VirtualTimeIsSupported()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadDelayedTestCase task_source(delegate_.get(), - CreateTaskRunners(1)); - Benchmark("post delayed tasks with one queue", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostDelayedTasks_FourQueues) { - if (!delegate_->VirtualTimeIsSupported() || !ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadDelayedTestCase task_source(delegate_.get(), - CreateTaskRunners(4)); - Benchmark("post delayed tasks with four queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostDelayedTasks_EightQueues) { - if (!delegate_->VirtualTimeIsSupported() || !ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadDelayedTestCase 
task_source(delegate_.get(), - CreateTaskRunners(8)); - Benchmark("post delayed tasks with eight queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostDelayedTasks_ThirtyTwoQueues) { - if (!delegate_->VirtualTimeIsSupported() || !ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadDelayedTestCase task_source(delegate_.get(), - CreateTaskRunners(32)); - Benchmark("post delayed tasks with thirty two queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasks_OneQueue) { - SingleThreadImmediateTestCase task_source(delegate_.get(), - CreateTaskRunners(1)); - Benchmark("post immediate tasks with one queue", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasks_FourQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadImmediateTestCase task_source(delegate_.get(), - CreateTaskRunners(4)); - Benchmark("post immediate tasks with four queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasks_EightQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadImmediateTestCase task_source(delegate_.get(), - CreateTaskRunners(8)); - Benchmark("post immediate tasks with eight queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasks_ThirtyTwoQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - SingleThreadImmediateTestCase task_source(delegate_.get(), - CreateTaskRunners(32)); - Benchmark("post immediate tasks with thirty two queues", &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasksFromTwoThreads_OneQueue) { - TwoThreadTestCase task_source(delegate_.get(), CreateTaskRunners(1)); - Benchmark("post immediate tasks with one queue from two threads", - &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasksFromTwoThreads_FourQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - TwoThreadTestCase task_source(delegate_.get(), CreateTaskRunners(4)); - Benchmark("post immediate tasks with four queues from two threads", - &task_source); -} - -TEST_P(SequenceManagerPerfTest, PostImmediateTasksFromTwoThreads_EightQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - TwoThreadTestCase task_source(delegate_.get(), CreateTaskRunners(8)); - Benchmark("post immediate tasks with eight queues from two threads", - &task_source); -} - -TEST_P(SequenceManagerPerfTest, - PostImmediateTasksFromTwoThreads_ThirtyTwoQueues) { - if (!ShouldMeasureQueueScaling()) { - LOG(INFO) << "Unsupported"; - return; - } - - TwoThreadTestCase task_source(delegate_.get(), CreateTaskRunners(32)); - Benchmark("post immediate tasks with thirty two queues from two threads", - &task_source); -} - -// TODO(alexclarke): Add additional tests with different mixes of non-delayed vs -// delayed tasks. - -} // namespace sequence_manager -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/can_run_policy_test.h b/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/can_run_policy_test.h deleted file mode 100644 index 96294d1fb..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/can_run_policy_test.h +++ /dev/null @@ -1,191 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#ifndef BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_ -#define BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_ - -#include "base/synchronization/atomic_flag.h" -#include "base/task/thread_pool/task_tracker.h" -#include "base/task/thread_pool/test_utils.h" -#include "base/task_runner.h" -#include "base/test/bind_test_util.h" -#include "base/test/test_timeouts.h" -#include "base/test/test_waitable_event.h" -#include "base/threading/platform_thread.h" -#include "build/build_config.h" - -namespace base { -namespace internal { -namespace test { - -// Verify that tasks only run when allowed by the CanRunPolicy. |target| is the -// object on which DidUpdateCanRunPolicy() must be called after updating the -// CanRunPolicy in |task_tracker|. |create_task_runner| is a function that -// receives a TaskPriority and returns a TaskRunner. |task_tracker| is the -// TaskTracker. -template -void TestCanRunPolicyBasic(Target* target, - CreateTaskRunner create_task_runner, - TaskTracker* task_tracker) { - AtomicFlag foreground_can_run; - TestWaitableEvent foreground_did_run; - AtomicFlag best_effort_can_run; - TestWaitableEvent best_effort_did_run; - - task_tracker->SetCanRunPolicy(CanRunPolicy::kNone); - target->DidUpdateCanRunPolicy(); - - const auto user_visible_task_runner = - create_task_runner(TaskPriority::USER_VISIBLE); - user_visible_task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { - EXPECT_TRUE(foreground_can_run.IsSet()); - foreground_did_run.Signal(); - })); - const auto best_effort_task_runner = - create_task_runner(TaskPriority::BEST_EFFORT); - best_effort_task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { - EXPECT_TRUE(best_effort_can_run.IsSet()); - best_effort_did_run.Signal(); - })); - - PlatformThread::Sleep(TestTimeouts::tiny_timeout()); - - foreground_can_run.Set(); - task_tracker->SetCanRunPolicy(CanRunPolicy::kForegroundOnly); - target->DidUpdateCanRunPolicy(); - foreground_did_run.Wait(); - - PlatformThread::Sleep(TestTimeouts::tiny_timeout()); - - best_effort_can_run.Set(); - task_tracker->SetCanRunPolicy(CanRunPolicy::kAll); - target->DidUpdateCanRunPolicy(); - best_effort_did_run.Wait(); -} - -// Verify that if a task was allowed to run by the CanRunPolicy when it was -// posted, but the CanRunPolicy is updated to disallow it from running before it -// starts running, it doesn't run. |target| is the object on which -// DidUpdateCanRunPolicy() must be called after updating the CanRunPolicy in -// |task_tracker|. |create_task_runner| is a function that receives a -// TaskPriority and returns a *Sequenced*TaskRunner. |task_tracker| is the -// TaskTracker. -template -void TestCanRunPolicyChangedBeforeRun(Target* target, - CreateTaskRunner create_task_runner, - TaskTracker* task_tracker) { - constexpr struct { - // Descriptor for the test case. - const char* descriptor; - // Task priority being tested. - TaskPriority priority; - // Policy that disallows running tasks with |priority|. - CanRunPolicy disallow_policy; - // Policy that allows running tasks with |priority|. 
- CanRunPolicy allow_policy; - } kTestCases[] = { - {"BestEffort/kNone/kAll", TaskPriority::BEST_EFFORT, CanRunPolicy::kNone, - CanRunPolicy::kAll}, - {"BestEffort/kForegroundOnly/kAll", TaskPriority::BEST_EFFORT, - CanRunPolicy::kForegroundOnly, CanRunPolicy::kAll}, - {"UserVisible/kNone/kForegroundOnly", TaskPriority::USER_VISIBLE, - CanRunPolicy::kNone, CanRunPolicy::kForegroundOnly}, - {"UserVisible/kNone/kAll", TaskPriority::USER_VISIBLE, - CanRunPolicy::kNone, CanRunPolicy::kAll}}; - - for (auto& test_case : kTestCases) { - SCOPED_TRACE(test_case.descriptor); - - TestWaitableEvent first_task_started; - TestWaitableEvent first_task_blocked; - AtomicFlag second_task_can_run; - - task_tracker->SetCanRunPolicy(test_case.allow_policy); - target->DidUpdateCanRunPolicy(); - - const auto task_runner = create_task_runner(test_case.priority); - task_runner->PostTask( - FROM_HERE, BindLambdaForTesting([&]() { - first_task_started.Signal(); - first_task_blocked.Wait(); - })); - task_runner->PostTask(FROM_HERE, BindLambdaForTesting([&]() { - EXPECT_TRUE(second_task_can_run.IsSet()); - })); - - first_task_started.Wait(); - task_tracker->SetCanRunPolicy(test_case.disallow_policy); - target->DidUpdateCanRunPolicy(); - first_task_blocked.Signal(); - - PlatformThread::Sleep(TestTimeouts::tiny_timeout()); - - second_task_can_run.Set(); - task_tracker->SetCanRunPolicy(test_case.allow_policy); - target->DidUpdateCanRunPolicy(); - task_tracker->FlushForTesting(); - } -} - -// Regression test for https://crbug.com/950383 -template -void TestCanRunPolicyLoad(Target* target, - CreateTaskRunner create_task_runner, - TaskTracker* task_tracker) { - constexpr struct { - // Descriptor for the test case. - const char* descriptor; - // Task priority being tested. - TaskPriority priority; - // Policy that allows running tasks with |priority|. - CanRunPolicy allow_policy; - // Policy that disallows running tasks with |priority|. - CanRunPolicy disallow_policy; - } kTestCases[] = { - {"BestEffort/kAll/kNone", TaskPriority::BEST_EFFORT, CanRunPolicy::kAll, - CanRunPolicy::kNone}, - {"BestEffort/kAll/kForegroundOnly", TaskPriority::BEST_EFFORT, - CanRunPolicy::kAll, CanRunPolicy::kForegroundOnly}, - {"UserVisible/kForegroundOnly/kNone", TaskPriority::USER_VISIBLE, - CanRunPolicy::kForegroundOnly, CanRunPolicy::kNone}, - {"UserVisible/kAll/kNone", TaskPriority::USER_VISIBLE, CanRunPolicy::kAll, - CanRunPolicy::kNone}}; - - for (auto& test_case : kTestCases) { - SCOPED_TRACE(test_case.descriptor); - - task_tracker->SetCanRunPolicy(test_case.allow_policy); - target->DidUpdateCanRunPolicy(); - - const auto task_runner = create_task_runner(test_case.priority); - - // Post less tasks on iOS to avoid timeouts. - const size_t kLargeNumber = -#if defined(OS_IOS) - 16; -#else - 256; -#endif - for (size_t i = 0; i < kLargeNumber; ++i) - task_runner->PostTask(FROM_HERE, DoNothing()); - - // Change the CanRunPolicy concurrently with running tasks. - // This should not cause crashes. 
- for (size_t i = 0; i < kLargeNumber; ++i) { - task_tracker->SetCanRunPolicy(test_case.disallow_policy); - target->DidUpdateCanRunPolicy(); - - task_tracker->SetCanRunPolicy(test_case.allow_policy); - target->DidUpdateCanRunPolicy(); - } - - task_tracker->FlushForTesting(); - } -} - -} // namespace test -} // namespace internal -} // namespace base - -#endif // BASE_TASK_THREAD_POOL_CAN_RUN_POLICY_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/thread_pool_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/thread_pool_perftest.cc deleted file mode 100644 index 626d0e133..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/task/thread_pool/thread_pool_perftest.cc +++ /dev/null @@ -1,269 +0,0 @@ -// Copyright 2018 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. - -#include -#include -#include -#include - -#include "base/barrier_closure.h" -#include "base/bind.h" -#include "base/bind_helpers.h" -#include "base/callback.h" -#include "base/optional.h" -#include "base/synchronization/waitable_event.h" -#include "base/task/thread_pool.h" -#include "base/task/thread_pool/thread_pool_instance.h" -#include "base/threading/simple_thread.h" -#include "base/time/time.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -namespace base { -namespace internal { - -namespace { - -constexpr char kMetricPrefixThreadPool[] = "ThreadPool."; -constexpr char kMetricPostTaskThroughput[] = "post_task_throughput"; -constexpr char kMetricRunTaskThroughput[] = "run_task_throughput"; -constexpr char kMetricNumTasksPosted[] = "num_tasks_posted"; -constexpr char kStoryBindPostThenRunNoOp[] = "bind_post_then_run_noop_tasks"; -constexpr char kStoryPostThenRunNoOp[] = "post_then_run_noop_tasks"; -constexpr char kStoryPostThenRunNoOpManyThreads[] = - "post_then_run_noop_tasks_many_threads"; -constexpr char kStoryPostThenRunNoOpMoreThanRunningThreads[] = - "post_then_run_noop_tasks_more_than_running_threads"; -constexpr char kStoryPostRunNoOp[] = "post_run_noop_tasks"; -constexpr char kStoryPostRunNoOpManyThreads[] = - "post_run_noop_tasks_many_threads"; -constexpr char kStoryPostRunBusyManyThreads[] = - "post_run_busy_tasks_many_threads"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixThreadPool, story_name); - reporter.RegisterImportantMetric(kMetricPostTaskThroughput, "runs/s"); - reporter.RegisterImportantMetric(kMetricRunTaskThroughput, "runs/s"); - reporter.RegisterImportantMetric(kMetricNumTasksPosted, "count"); - return reporter; -} - -enum class ExecutionMode { - // Allows tasks to start running while tasks are being posted by posting - // threads. - kPostAndRun, - // Uses an execution fence to wait for all posting threads to be done before - // running tasks that were posted. - kPostThenRun, -}; - -// A thread that waits for the caller to signal an event before proceeding to -// call action.Run(). -class PostingThread : public SimpleThread { - public: - // Creates a PostingThread that waits on |start_event| before calling - // action.Run(). 
- PostingThread(WaitableEvent* start_event, - base::OnceClosure action, - base::OnceClosure completion) - : SimpleThread("PostingThread"), - start_event_(start_event), - action_(std::move(action)), - completion_(std::move(completion)) { - Start(); - } - - void Run() override { - start_event_->Wait(); - std::move(action_).Run(); - std::move(completion_).Run(); - } - - private: - WaitableEvent* const start_event_; - base::OnceClosure action_; - base::OnceClosure completion_; - - DISALLOW_COPY_AND_ASSIGN(PostingThread); -}; - -class ThreadPoolPerfTest : public testing::Test { - public: - // Posting actions: - - void ContinuouslyBindAndPostNoOpTasks(size_t num_tasks) { - scoped_refptr task_runner = ThreadPool::CreateTaskRunner({}); - for (size_t i = 0; i < num_tasks; ++i) { - ++num_tasks_pending_; - ++num_posted_tasks_; - task_runner->PostTask(FROM_HERE, - base::BindOnce( - [](std::atomic_size_t* num_task_pending) { - (*num_task_pending)--; - }, - &num_tasks_pending_)); - } - } - - void ContinuouslyPostNoOpTasks(size_t num_tasks) { - scoped_refptr task_runner = ThreadPool::CreateTaskRunner({}); - base::RepeatingClosure closure = base::BindRepeating( - [](std::atomic_size_t* num_task_pending) { (*num_task_pending)--; }, - &num_tasks_pending_); - for (size_t i = 0; i < num_tasks; ++i) { - ++num_tasks_pending_; - ++num_posted_tasks_; - task_runner->PostTask(FROM_HERE, closure); - } - } - - void ContinuouslyPostBusyWaitTasks(size_t num_tasks, - base::TimeDelta duration) { - scoped_refptr task_runner = ThreadPool::CreateTaskRunner({}); - base::RepeatingClosure closure = base::BindRepeating( - [](std::atomic_size_t* num_task_pending, base::TimeDelta duration) { - base::TimeTicks end_time = base::TimeTicks::Now() + duration; - while (base::TimeTicks::Now() < end_time) - ; - (*num_task_pending)--; - }, - Unretained(&num_tasks_pending_), duration); - for (size_t i = 0; i < num_tasks; ++i) { - ++num_tasks_pending_; - ++num_posted_tasks_; - task_runner->PostTask(FROM_HERE, closure); - } - } - - protected: - ThreadPoolPerfTest() { ThreadPoolInstance::Create("PerfTest"); } - - ~ThreadPoolPerfTest() override { ThreadPoolInstance::Set(nullptr); } - - void StartThreadPool(size_t num_running_threads, - size_t num_posting_threads, - base::RepeatingClosure post_action) { - ThreadPoolInstance::Get()->Start({num_running_threads}); - - base::RepeatingClosure done = BarrierClosure( - num_posting_threads, - base::BindOnce(&ThreadPoolPerfTest::OnCompletePostingTasks, - base::Unretained(this))); - - for (size_t i = 0; i < num_posting_threads; ++i) { - threads_.emplace_back(std::make_unique( - &start_posting_tasks_, post_action, done)); - } - } - - void OnCompletePostingTasks() { complete_posting_tasks_.Signal(); } - - void Benchmark(const std::string& story_name, ExecutionMode execution_mode) { - base::Optional execution_fence; - if (execution_mode == ExecutionMode::kPostThenRun) { - execution_fence.emplace(); - } - TimeTicks tasks_run_start = TimeTicks::Now(); - start_posting_tasks_.Signal(); - complete_posting_tasks_.Wait(); - post_task_duration_ = TimeTicks::Now() - tasks_run_start; - - if (execution_mode == ExecutionMode::kPostThenRun) { - tasks_run_start = TimeTicks::Now(); - execution_fence.reset(); - } - - // Wait for no pending tasks. 
- ThreadPoolInstance::Get()->FlushForTesting(); - tasks_run_duration_ = TimeTicks::Now() - tasks_run_start; - ASSERT_EQ(0U, num_tasks_pending_); - - for (auto& thread : threads_) - thread->Join(); - ThreadPoolInstance::Get()->JoinForTesting(); - - auto reporter = SetUpReporter(story_name); - reporter.AddResult( - kMetricPostTaskThroughput, - num_posted_tasks_ / - static_cast(post_task_duration_.InSecondsF())); - reporter.AddResult( - kMetricRunTaskThroughput, - num_posted_tasks_ / - static_cast(tasks_run_duration_.InSecondsF())); - reporter.AddResult(kMetricNumTasksPosted, num_posted_tasks_); - } - - private: - WaitableEvent start_posting_tasks_; - WaitableEvent complete_posting_tasks_; - - TimeDelta post_task_duration_; - TimeDelta tasks_run_duration_; - - std::atomic_size_t num_tasks_pending_{0}; - std::atomic_size_t num_posted_tasks_{0}; - - std::vector> threads_; - - DISALLOW_COPY_AND_ASSIGN(ThreadPoolPerfTest); -}; - -} // namespace - -TEST_F(ThreadPoolPerfTest, BindPostThenRunNoOpTasks) { - StartThreadPool( - 1, 1, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyBindAndPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryBindPostThenRunNoOp, ExecutionMode::kPostThenRun); -} - -TEST_F(ThreadPoolPerfTest, PostThenRunNoOpTasks) { - StartThreadPool(1, 1, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryPostThenRunNoOp, ExecutionMode::kPostThenRun); -} - -TEST_F(ThreadPoolPerfTest, PostThenRunNoOpTasksManyThreads) { - StartThreadPool(4, 4, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryPostThenRunNoOpManyThreads, ExecutionMode::kPostThenRun); -} - -TEST_F(ThreadPoolPerfTest, PostThenRunNoOpTasksMorePostingThanRunningThreads) { - StartThreadPool(1, 4, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryPostThenRunNoOpMoreThanRunningThreads, - ExecutionMode::kPostThenRun); -} - -TEST_F(ThreadPoolPerfTest, PostRunNoOpTasks) { - StartThreadPool(1, 1, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryPostRunNoOp, ExecutionMode::kPostAndRun); -} - -TEST_F(ThreadPoolPerfTest, PostRunNoOpTasksManyThreads) { - StartThreadPool(4, 4, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostNoOpTasks, - Unretained(this), 10000)); - Benchmark(kStoryPostRunNoOpManyThreads, ExecutionMode::kPostAndRun); -} - -TEST_F(ThreadPoolPerfTest, PostRunBusyTasksManyThreads) { - StartThreadPool( - 4, 4, - BindRepeating(&ThreadPoolPerfTest::ContinuouslyPostBusyWaitTasks, - Unretained(this), 10000, - base::TimeDelta::FromMicroseconds(200))); - Benchmark(kStoryPostRunBusyManyThreads, ExecutionMode::kPostAndRun); -} - -} // namespace internal -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/threading/thread_local_storage_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/threading/thread_local_storage_perftest.cc deleted file mode 100644 index 0ad614641..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/threading/thread_local_storage_perftest.cc +++ /dev/null @@ -1,250 +0,0 @@ -// Copyright 2019 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include -#include -#include - -#include "base/barrier_closure.h" -#include "base/bind.h" -#include "base/bind_helpers.h" -#include "base/callback.h" -#include "base/synchronization/waitable_event.h" -#include "base/test/bind_test_util.h" -#include "base/threading/simple_thread.h" -#include "base/threading/thread_local_storage.h" -#include "base/time/time.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -#if defined(OS_WIN) -#include -#include "base/win/windows_types.h" -#elif defined(OS_POSIX) || defined(OS_FUCHSIA) -#include -#endif - -namespace base { -namespace internal { - -namespace { - -constexpr char kMetricPrefixThreadLocalStorage[] = "ThreadLocalStorage."; -constexpr char kMetricBaseRead[] = "read"; -constexpr char kMetricBaseWrite[] = "write"; -constexpr char kMetricBaseReadWrite[] = "read_write"; -constexpr char kMetricSuffixThroughput[] = "_throughput"; -constexpr char kMetricSuffixOperationTime[] = "_operation_time"; -constexpr char kStoryBaseTLS[] = "thread_local_storage"; -#if defined(OS_WIN) -constexpr char kStoryBasePlatformFLS[] = "platform_fiber_local_storage"; -#endif // defined(OS_WIN) -constexpr char kStoryBasePlatformTLS[] = "platform_thread_local_storage"; -constexpr char kStoryBaseCPPTLS[] = "c++_platform_thread_local_storage"; -constexpr char kStorySuffixFourThreads[] = "_4_threads"; - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixThreadLocalStorage, - story_name); - reporter.RegisterImportantMetric( - std::string(kMetricBaseRead) + kMetricSuffixThroughput, "runs/s"); - reporter.RegisterImportantMetric( - std::string(kMetricBaseRead) + kMetricSuffixOperationTime, "ns"); - reporter.RegisterImportantMetric( - std::string(kMetricBaseWrite) + kMetricSuffixThroughput, "runs/s"); - reporter.RegisterImportantMetric( - std::string(kMetricBaseWrite) + kMetricSuffixOperationTime, "ns"); - reporter.RegisterImportantMetric( - std::string(kMetricBaseReadWrite) + kMetricSuffixThroughput, "runs/s"); - reporter.RegisterImportantMetric( - std::string(kMetricBaseReadWrite) + kMetricSuffixOperationTime, "ns"); - return reporter; -} - -// A thread that waits for the caller to signal an event before proceeding to -// call action.Run(). -class TLSThread : public SimpleThread { - public: - // Creates a PostingThread that waits on |start_event| before calling - // action.Run(). 
- TLSThread(WaitableEvent* start_event, - base::OnceClosure action, - base::OnceClosure completion) - : SimpleThread("TLSThread"), - start_event_(start_event), - action_(std::move(action)), - completion_(std::move(completion)) { - Start(); - } - - void Run() override { - start_event_->Wait(); - std::move(action_).Run(); - std::move(completion_).Run(); - } - - private: - WaitableEvent* const start_event_; - base::OnceClosure action_; - base::OnceClosure completion_; - - DISALLOW_COPY_AND_ASSIGN(TLSThread); -}; - -class ThreadLocalStoragePerfTest : public testing::Test { - protected: - ThreadLocalStoragePerfTest() = default; - ~ThreadLocalStoragePerfTest() override = default; - - template - void Benchmark(const std::string& story_name, - Read read, - Write write, - size_t num_operation, - size_t num_threads) { - write(2); - - BenchmarkImpl(kMetricBaseRead, story_name, - base::BindLambdaForTesting([&]() { - volatile intptr_t total = 0; - for (size_t i = 0; i < num_operation; ++i) - total += read(); - }), - num_operation, num_threads); - - BenchmarkImpl(kMetricBaseWrite, story_name, - base::BindLambdaForTesting([&]() { - for (size_t i = 0; i < num_operation; ++i) - write(i); - }), - num_operation, num_threads); - - BenchmarkImpl(kMetricBaseReadWrite, story_name, - base::BindLambdaForTesting([&]() { - for (size_t i = 0; i < num_operation; ++i) - write(read() + 1); - }), - num_operation, num_threads); - } - - void BenchmarkImpl(const std::string& metric_base, - const std::string& story_name, - base::RepeatingClosure action, - size_t num_operation, - size_t num_threads) { - WaitableEvent start_thread; - WaitableEvent complete_thread; - - base::RepeatingClosure done = BarrierClosure( - num_threads, - base::BindLambdaForTesting([&]() { complete_thread.Signal(); })); - - std::vector> threads; - for (size_t i = 0; i < num_threads; ++i) { - threads.emplace_back( - std::make_unique(&start_thread, action, done)); - } - - TimeTicks operation_start = TimeTicks::Now(); - start_thread.Signal(); - complete_thread.Wait(); - TimeDelta operation_duration = TimeTicks::Now() - operation_start; - - for (auto& thread : threads) - thread->Join(); - - auto reporter = SetUpReporter(story_name); - reporter.AddResult(metric_base + kMetricSuffixThroughput, - num_operation / operation_duration.InSecondsF()); - size_t nanos_per_operation = - operation_duration.InNanoseconds() / num_operation; - reporter.AddResult(metric_base + kMetricSuffixOperationTime, - nanos_per_operation); - } - - private: - DISALLOW_COPY_AND_ASSIGN(ThreadLocalStoragePerfTest); -}; - -} // namespace - -TEST_F(ThreadLocalStoragePerfTest, ThreadLocalStorage) { - ThreadLocalStorage::Slot tls; - auto read = [&]() { return reinterpret_cast(tls.Get()); }; - auto write = [&](intptr_t value) { tls.Set(reinterpret_cast(value)); }; - - Benchmark(kStoryBaseTLS, read, write, 10000000, 1); - Benchmark(std::string(kStoryBaseTLS) + kStorySuffixFourThreads, read, write, - 10000000, 4); -} - -#if defined(OS_WIN) - -void WINAPI destroy(void*) {} - -TEST_F(ThreadLocalStoragePerfTest, PlatformFls) { - DWORD key = FlsAlloc(destroy); - ASSERT_NE(PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES, key); - - auto read = [&]() { return reinterpret_cast(FlsGetValue(key)); }; - auto write = [&](intptr_t value) { - FlsSetValue(key, reinterpret_cast(value)); - }; - - Benchmark(kStoryBasePlatformFLS, read, write, 10000000, 1); - Benchmark(std::string(kStoryBasePlatformFLS) + kStorySuffixFourThreads, read, - write, 10000000, 4); -} - -TEST_F(ThreadLocalStoragePerfTest, PlatformTls) 
{ - DWORD key = TlsAlloc(); - ASSERT_NE(PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES, key); - - auto read = [&]() { return reinterpret_cast(TlsGetValue(key)); }; - auto write = [&](intptr_t value) { - TlsSetValue(key, reinterpret_cast(value)); - }; - - Benchmark(kStoryBasePlatformTLS, read, write, 10000000, 1); - Benchmark(std::string(kStoryBasePlatformTLS) + kStorySuffixFourThreads, read, - write, 10000000, 4); -} - -#elif defined(OS_POSIX) || defined(OS_FUCHSIA) - -TEST_F(ThreadLocalStoragePerfTest, PlatformTls) { - pthread_key_t key; - ASSERT_FALSE(pthread_key_create(&key, [](void*) {})); - ASSERT_NE(PlatformThreadLocalStorage::TLS_KEY_OUT_OF_INDEXES, key); - - auto read = [&]() { - return reinterpret_cast(pthread_getspecific(key)); - }; - auto write = [&](intptr_t value) { - pthread_setspecific(key, reinterpret_cast(value)); - }; - - Benchmark(kStoryBasePlatformTLS, read, write, 10000000, 1); - Benchmark(std::string(kStoryBasePlatformTLS) + kStorySuffixFourThreads, read, - write, 10000000, 4); -} - -#endif - -TEST_F(ThreadLocalStoragePerfTest, Cpp11Tls) { - thread_local intptr_t thread_local_variable; - - auto read = [&]() { return thread_local_variable; }; - auto write = [&](intptr_t value) { - reinterpret_cast(&thread_local_variable)[0] = value; - }; - - Benchmark(kStoryBaseCPPTLS, read, write, 10000000, 1); - Benchmark(std::string(kStoryBaseCPPTLS) + kStorySuffixFourThreads, read, - write, 10000000, 4); -} - -} // namespace internal -} // namespace base diff --git a/TMessagesProj/jni/voip/webrtc/base/threading/thread_perftest.cc b/TMessagesProj/jni/voip/webrtc/base/threading/thread_perftest.cc deleted file mode 100644 index 36d03ba6b..000000000 --- a/TMessagesProj/jni/voip/webrtc/base/threading/thread_perftest.cc +++ /dev/null @@ -1,349 +0,0 @@ -// Copyright 2014 The Chromium Authors. All rights reserved. -// Use of this source code is governed by a BSD-style license that can be -// found in the LICENSE file. 
- -#include - -#include -#include - -#include "base/base_switches.h" -#include "base/bind.h" -#include "base/command_line.h" -#include "base/location.h" -#include "base/memory/ptr_util.h" -#include "base/message_loop/message_loop_current.h" -#include "base/single_thread_task_runner.h" -#include "base/strings/stringprintf.h" -#include "base/synchronization/condition_variable.h" -#include "base/synchronization/lock.h" -#include "base/synchronization/waitable_event.h" -#include "base/task/task_observer.h" -#include "base/threading/thread.h" -#include "base/time/time.h" -#include "build/build_config.h" -#include "testing/gtest/include/gtest/gtest.h" -#include "testing/perf/perf_result_reporter.h" - -#if defined(OS_POSIX) -#include -#endif - -namespace base { - -namespace { - -const int kNumRuns = 100000; - -constexpr char kMetricPrefixThread[] = "Thread."; -constexpr char kMetricClockTimePerHop[] = "wall_time_per_hop"; -constexpr char kMetricCpuTimePerHop[] = "cpu_time_per_hop"; -constexpr char kStoryBaseTask[] = "task"; -constexpr char kStoryBaseTaskWithObserver[] = "task_with_observer"; -constexpr char kStoryBaseWaitableEvent[] = "waitable_event"; -constexpr char kStoryBaseCondVar[] = "condition_variable"; -constexpr char kStorySuffixOneThread[] = "_1_thread"; -constexpr char kStorySuffixFourThreads[] = "_4_threads"; - -#if defined(OS_POSIX) -constexpr char kStoryBasePthreadCondVar[] = "pthread_condition_variable"; -#endif // defined(OS_POSIX) - -perf_test::PerfResultReporter SetUpReporter(const std::string& story_name) { - perf_test::PerfResultReporter reporter(kMetricPrefixThread, story_name); - reporter.RegisterImportantMetric(kMetricClockTimePerHop, "us"); - reporter.RegisterImportantMetric(kMetricCpuTimePerHop, "us"); - return reporter; -} - -// Base class for a threading perf-test. This sets up some threads for the -// test and measures the clock-time in addition to time spent on each thread. -class ThreadPerfTest : public testing::Test { - public: - ThreadPerfTest() - : done_(WaitableEvent::ResetPolicy::AUTOMATIC, - WaitableEvent::InitialState::NOT_SIGNALED) {} - - // To be implemented by each test. Subclass must uses threads_ such that - // their cpu-time can be measured. Test must return from PingPong() _and_ - // call FinishMeasurement from any thread to complete the test. - virtual void Init() { - if (ThreadTicks::IsSupported()) - ThreadTicks::WaitUntilInitialized(); - } - virtual void PingPong(int hops) = 0; - virtual void Reset() {} - - void TimeOnThread(base::ThreadTicks* ticks, base::WaitableEvent* done) { - *ticks = base::ThreadTicks::Now(); - done->Signal(); - } - - base::ThreadTicks ThreadNow(const base::Thread& thread) { - base::WaitableEvent done(WaitableEvent::ResetPolicy::AUTOMATIC, - WaitableEvent::InitialState::NOT_SIGNALED); - base::ThreadTicks ticks; - thread.task_runner()->PostTask( - FROM_HERE, base::BindOnce(&ThreadPerfTest::TimeOnThread, - base::Unretained(this), &ticks, &done)); - done.Wait(); - return ticks; - } - - void RunPingPongTest(const std::string& story_name, unsigned num_threads) { - // Create threads and collect starting cpu-time for each thread. 
- std::vector thread_starts; - while (threads_.size() < num_threads) { - threads_.push_back(std::make_unique("PingPonger")); - threads_.back()->Start(); - if (base::ThreadTicks::IsSupported()) - thread_starts.push_back(ThreadNow(*threads_.back())); - } - - Init(); - - base::TimeTicks start = base::TimeTicks::Now(); - PingPong(kNumRuns); - done_.Wait(); - base::TimeTicks end = base::TimeTicks::Now(); - - // Gather the cpu-time spent on each thread. This does one extra tasks, - // but that should be in the noise given enough runs. - base::TimeDelta thread_time; - while (threads_.size()) { - if (base::ThreadTicks::IsSupported()) { - thread_time += ThreadNow(*threads_.back()) - thread_starts.back(); - thread_starts.pop_back(); - } - threads_.pop_back(); - } - - Reset(); - - double num_runs = static_cast(kNumRuns); - double us_per_task_clock = (end - start).InMicroseconds() / num_runs; - double us_per_task_cpu = thread_time.InMicroseconds() / num_runs; - - auto reporter = SetUpReporter(story_name); - // Clock time per task. - reporter.AddResult(kMetricClockTimePerHop, us_per_task_clock); - - // Total utilization across threads if available (likely higher). - if (base::ThreadTicks::IsSupported()) { - reporter.AddResult(kMetricCpuTimePerHop, us_per_task_cpu); - } - } - - protected: - void FinishMeasurement() { done_.Signal(); } - std::vector> threads_; - - private: - base::WaitableEvent done_; -}; - -// Class to test task performance by posting empty tasks back and forth. -class TaskPerfTest : public ThreadPerfTest { - base::Thread* NextThread(int count) { - return threads_[count % threads_.size()].get(); - } - - void PingPong(int hops) override { - if (!hops) { - FinishMeasurement(); - return; - } - NextThread(hops)->task_runner()->PostTask( - FROM_HERE, base::BindOnce(&ThreadPerfTest::PingPong, - base::Unretained(this), hops - 1)); - } -}; - -// This tries to test the 'best-case' as well as the 'worst-case' task posting -// performance. The best-case keeps one thread alive such that it never yeilds, -// while the worse-case forces a context switch for every task. Four threads are -// used to ensure the threads do yeild (with just two it might be possible for -// both threads to stay awake if they can signal each other fast enough). -TEST_F(TaskPerfTest, TaskPingPong) { - RunPingPongTest(std::string(kStoryBaseTask) + kStorySuffixOneThread, 1); - RunPingPongTest(std::string(kStoryBaseTask) + kStorySuffixFourThreads, 4); -} - - -// Same as above, but add observers to test their perf impact. -class MessageLoopObserver : public base::TaskObserver { - public: - void WillProcessTask(const base::PendingTask& pending_task, - bool was_blocked_or_low_priority) override {} - void DidProcessTask(const base::PendingTask& pending_task) override {} -}; -MessageLoopObserver message_loop_observer; - -class TaskObserverPerfTest : public TaskPerfTest { - public: - void Init() override { - TaskPerfTest::Init(); - for (auto& i : threads_) { - i->task_runner()->PostTask( - FROM_HERE, BindOnce( - [](MessageLoopObserver* observer) { - MessageLoopCurrent::Get()->AddTaskObserver(observer); - }, - Unretained(&message_loop_observer))); - } - } -}; - -TEST_F(TaskObserverPerfTest, TaskPingPong) { - RunPingPongTest( - std::string(kStoryBaseTaskWithObserver) + kStorySuffixOneThread, 1); - RunPingPongTest( - std::string(kStoryBaseTaskWithObserver) + kStorySuffixFourThreads, 4); -} - -// Class to test our WaitableEvent performance by signaling back and fort. 
-// WaitableEvent is templated so we can also compare with other versions. -template -class EventPerfTest : public ThreadPerfTest { - public: - void Init() override { - for (size_t i = 0; i < threads_.size(); i++) { - events_.push_back(std::make_unique( - WaitableEvent::ResetPolicy::AUTOMATIC, - WaitableEvent::InitialState::NOT_SIGNALED)); - } - } - - void Reset() override { events_.clear(); } - - void WaitAndSignalOnThread(size_t event) { - size_t next_event = (event + 1) % events_.size(); - int my_hops = 0; - do { - events_[event]->Wait(); - my_hops = --remaining_hops_; // We own 'hops' between Wait and Signal. - events_[next_event]->Signal(); - } while (my_hops > 0); - // Once we are done, all threads will signal as hops passes zero. - // We only signal completion once, on the thread that reaches zero. - if (!my_hops) - FinishMeasurement(); - } - - void PingPong(int hops) override { - remaining_hops_ = hops; - for (size_t i = 0; i < threads_.size(); i++) { - threads_[i]->task_runner()->PostTask( - FROM_HERE, base::BindOnce(&EventPerfTest::WaitAndSignalOnThread, - base::Unretained(this), i)); - } - - // Kick off the Signal ping-ponging. - events_.front()->Signal(); - } - - int remaining_hops_; - std::vector> events_; -}; - -// Similar to the task posting test, this just tests similar functionality -// using WaitableEvents. We only test four threads (worst-case), but we -// might want to craft a way to test the best-case (where the thread doesn't -// end up blocking because the event is already signalled). -typedef EventPerfTest WaitableEventThreadPerfTest; -TEST_F(WaitableEventThreadPerfTest, EventPingPong) { - RunPingPongTest( - std::string(kStoryBaseWaitableEvent) + kStorySuffixFourThreads, 4); -} - -// Build a minimal event using ConditionVariable. -class ConditionVariableEvent { - public: - ConditionVariableEvent(WaitableEvent::ResetPolicy reset_policy, - WaitableEvent::InitialState initial_state) - : cond_(&lock_), signaled_(false) { - DCHECK_EQ(WaitableEvent::ResetPolicy::AUTOMATIC, reset_policy); - DCHECK_EQ(WaitableEvent::InitialState::NOT_SIGNALED, initial_state); - } - - void Signal() { - { - base::AutoLock scoped_lock(lock_); - signaled_ = true; - } - cond_.Signal(); - } - - void Wait() { - base::AutoLock scoped_lock(lock_); - while (!signaled_) - cond_.Wait(); - signaled_ = false; - } - - private: - base::Lock lock_; - base::ConditionVariable cond_; - bool signaled_; -}; - -// This is meant to test the absolute minimal context switching time -// using our own base synchronization code. -typedef EventPerfTest ConditionVariablePerfTest; -TEST_F(ConditionVariablePerfTest, EventPingPong) { - RunPingPongTest(std::string(kStoryBaseCondVar) + kStorySuffixFourThreads, 4); -} -#if defined(OS_POSIX) - -// Absolutely 100% minimal posix waitable event. If there is a better/faster -// way to force a context switch, we should use that instead. 
-class PthreadEvent {
- public:
-  PthreadEvent(WaitableEvent::ResetPolicy reset_policy,
-               WaitableEvent::InitialState initial_state) {
-    DCHECK_EQ(WaitableEvent::ResetPolicy::AUTOMATIC, reset_policy);
-    DCHECK_EQ(WaitableEvent::InitialState::NOT_SIGNALED, initial_state);
-    pthread_mutex_init(&mutex_, nullptr);
-    pthread_cond_init(&cond_, nullptr);
-    signaled_ = false;
-  }
-
-  ~PthreadEvent() {
-    pthread_cond_destroy(&cond_);
-    pthread_mutex_destroy(&mutex_);
-  }
-
-  void Signal() {
-    pthread_mutex_lock(&mutex_);
-    signaled_ = true;
-    pthread_mutex_unlock(&mutex_);
-    pthread_cond_signal(&cond_);
-  }
-
-  void Wait() {
-    pthread_mutex_lock(&mutex_);
-    while (!signaled_)
-      pthread_cond_wait(&cond_, &mutex_);
-    signaled_ = false;
-    pthread_mutex_unlock(&mutex_);
-  }
-
- private:
-  bool signaled_;
-  pthread_mutex_t mutex_;
-  pthread_cond_t cond_;
-};
-
-// This is meant to test the absolute minimal context switching time.
-// If there is any faster way to do this we should substitute it in.
-typedef EventPerfTest<PthreadEvent> PthreadEventPerfTest;
-TEST_F(PthreadEventPerfTest, EventPingPong) {
-  RunPingPongTest(
-      std::string(kStoryBasePthreadCondVar) + kStorySuffixFourThreads, 4);
-}
-
-#endif
-
-}  // namespace
-
-}  // namespace base
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc
index 5a1250ae8..876d4c0bf 100644
--- a/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc
+++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/broadcast_resource_listener.cc
@@ -32,8 +32,7 @@ class BroadcastResourceListener::AdapterResource : public Resource {
     MutexLock lock(&lock_);
     if (!listener_)
       return;
-    listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr<Resource>(this),
-                                            usage_state);
+    listener_->OnResourceUsageStateMeasured(this, usage_state);
   }
 
   // Resource implementation.
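// A minimal, self-contained sketch of the ping-pong measurement pattern used
// by the base/threading perf test removed above: two threads hand a token
// back and forth and the wall-clock time is divided by the number of hops.
// It assumes plain std::thread/std::condition_variable instead of
// base::WaitableEvent, so it only approximates what the deleted test measured.
#include <chrono>
#include <condition_variable>
#include <cstdio>
#include <mutex>
#include <thread>

namespace {

struct PingPongState {
  std::mutex mutex;
  std::condition_variable cond;
  int remaining_hops = 0;
  bool ping_turn = true;  // Whose turn it is to decrement the counter.
};

void RunSide(PingPongState* state, bool is_ping) {
  std::unique_lock<std::mutex> lock(state->mutex);
  while (state->remaining_hops > 0) {
    state->cond.wait(lock, [&] {
      return state->ping_turn == is_ping || state->remaining_hops <= 0;
    });
    if (state->remaining_hops <= 0)
      break;
    --state->remaining_hops;
    state->ping_turn = !is_ping;  // Hand the token to the other thread.
    state->cond.notify_one();
  }
  state->cond.notify_one();
}

}  // namespace

int main() {
  constexpr int kNumHops = 100000;
  PingPongState state;
  state.remaining_hops = kNumHops;

  const auto start = std::chrono::steady_clock::now();
  std::thread ping(RunSide, &state, true);
  std::thread pong(RunSide, &state, false);
  ping.join();
  pong.join();
  const auto end = std::chrono::steady_clock::now();

  const double total_us =
      std::chrono::duration<double, std::micro>(end - start).count();
  std::printf("wall_time_per_hop: %.3f us\n", total_us / kNumHops);
  return 0;
}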
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc
index 66e6f0c36..d95cd75a9 100644
--- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc
+++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.cc
@@ -27,8 +27,14 @@ namespace webrtc {
 
 ResourceAdaptationProcessor::ResourceListenerDelegate::ResourceListenerDelegate(
     ResourceAdaptationProcessor* processor)
-    : task_queue_(TaskQueueBase::Current()), processor_(processor) {
-  RTC_DCHECK(task_queue_);
+    : task_queue_(nullptr), processor_(processor) {}
+
+void ResourceAdaptationProcessor::ResourceListenerDelegate::SetTaskQueue(
+    TaskQueueBase* task_queue) {
+  RTC_DCHECK(!task_queue_);
+  RTC_DCHECK(task_queue);
+  task_queue_ = task_queue;
+  RTC_DCHECK_RUN_ON(task_queue_);
 }
 
 void ResourceAdaptationProcessor::ResourceListenerDelegate::
@@ -64,15 +70,14 @@ ResourceAdaptationProcessor::MitigationResultAndLogMessage::
 
 ResourceAdaptationProcessor::ResourceAdaptationProcessor(
     VideoStreamAdapter* stream_adapter)
-    : task_queue_(TaskQueueBase::Current()),
+    : task_queue_(nullptr),
       resource_listener_delegate_(
           rtc::make_ref_counted<ResourceListenerDelegate>(this)),
       resources_(),
       stream_adapter_(stream_adapter),
       last_reported_source_restrictions_(),
       previous_mitigation_results_() {
-  RTC_DCHECK(task_queue_);
-  stream_adapter_->AddRestrictionsListener(this);
+  RTC_DCHECK(stream_adapter_);
 }
 
 ResourceAdaptationProcessor::~ResourceAdaptationProcessor() {
@@ -84,6 +89,16 @@ ResourceAdaptationProcessor::~ResourceAdaptationProcessor() {
   resource_listener_delegate_->OnProcessorDestroyed();
 }
 
+void ResourceAdaptationProcessor::SetTaskQueue(TaskQueueBase* task_queue) {
+  RTC_DCHECK(!task_queue_);
+  RTC_DCHECK(task_queue);
+  task_queue_ = task_queue;
+  resource_listener_delegate_->SetTaskQueue(task_queue);
+  RTC_DCHECK_RUN_ON(task_queue_);
+  // Now that we have the queue we can attach as adaptation listener.
+  stream_adapter_->AddRestrictionsListener(this);
+}
+
 void ResourceAdaptationProcessor::AddResourceLimitationsListener(
     ResourceLimitationsListener* limitations_listener) {
   RTC_DCHECK_RUN_ON(task_queue_);
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h
index ca2fec08c..3e273081f 100644
--- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h
+++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor.h
@@ -58,6 +58,8 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
       VideoStreamAdapter* video_stream_adapter);
   ~ResourceAdaptationProcessor() override;
 
+  void SetTaskQueue(TaskQueueBase* task_queue) override;
+
   // ResourceAdaptationProcessorInterface implementation.
   void AddResourceLimitationsListener(
       ResourceLimitationsListener* limitations_listener) override;
@@ -88,6 +90,7 @@ class ResourceAdaptationProcessor : public ResourceAdaptationProcessorInterface,
    public:
     explicit ResourceListenerDelegate(ResourceAdaptationProcessor* processor);
 
+    void SetTaskQueue(TaskQueueBase* task_queue);
     void OnProcessorDestroyed();
 
     // ResourceListener implementation.
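// A sketch of the two-phase initialization the hunks above introduce: the
// processor no longer grabs TaskQueueBase::Current() in its constructor, so
// the owner constructs it first and then binds the queue explicitly with
// SetTaskQueue() before registering listeners. The helper function, header
// list and ownership handling are illustrative assumptions; only the calls
// shown in the diff are taken from the source.
#include <memory>

#include "api/task_queue/task_queue_base.h"
#include "call/adaptation/resource_adaptation_processor.h"
#include "call/adaptation/video_stream_adapter.h"

std::unique_ptr<webrtc::ResourceAdaptationProcessor> CreateProcessorOnQueue(
    webrtc::VideoStreamAdapter* adapter,
    webrtc::TaskQueueBase* resource_queue,
    webrtc::ResourceLimitationsListener* listener) {
  auto processor =
      std::make_unique<webrtc::ResourceAdaptationProcessor>(adapter);
  // SetTaskQueue() DCHECKs that it runs on the queue it is handed, so this
  // helper is expected to be invoked on `resource_queue` itself.
  processor->SetTaskQueue(resource_queue);
  processor->AddResourceLimitationsListener(listener);
  return processor;
}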
diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h index 472948815..8b1f94b73 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/resource_adaptation_processor_interface.h @@ -47,6 +47,8 @@ class ResourceAdaptationProcessorInterface { public: virtual ~ResourceAdaptationProcessorInterface(); + virtual void SetTaskQueue(TaskQueueBase* task_queue) = 0; + virtual void AddResourceLimitationsListener( ResourceLimitationsListener* limitations_listener) = 0; virtual void RemoveResourceLimitationsListener( diff --git a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc index ce1c300a7..c6560b316 100644 --- a/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/call/adaptation/video_stream_adapter.cc @@ -23,6 +23,7 @@ #include "call/adaptation/video_source_restrictions.h" #include "call/adaptation/video_stream_input_state.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -374,7 +375,7 @@ VideoStreamAdapter::RestrictionsOrState VideoStreamAdapter::GetAdaptationUpStep( return increase_frame_rate; } // else, increase resolution. - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; } case DegradationPreference::MAINTAIN_FRAMERATE: { // Attempt to increase pixel count. @@ -458,7 +459,7 @@ VideoStreamAdapter::GetAdaptationDownStep( return decrease_frame_rate; } // else, decrease resolution. - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; } case DegradationPreference::MAINTAIN_FRAMERATE: { return DecreaseResolution(input_state, current_restrictions); diff --git a/TMessagesProj/jni/voip/webrtc/call/call.cc b/TMessagesProj/jni/voip/webrtc/call/call.cc index bb62365cd..b30b92f86 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call.cc @@ -51,6 +51,7 @@ #include "modules/utility/include/process_thread.h" #include "modules/video_coding/fec_controller_default.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" @@ -208,9 +209,6 @@ class Call final : public webrtc::Call, TaskQueueFactory* task_queue_factory); ~Call() override; - Call(const Call&) = delete; - Call& operator=(const Call&) = delete; - // Implements webrtc::Call. 
PacketReceiver* Receiver() override; @@ -346,21 +344,12 @@ class Call final : public webrtc::Call, DeliveryStatus DeliverRtp(MediaType media_type, rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) RTC_RUN_ON(worker_thread_); - - AudioReceiveStream* FindAudioStreamForSyncGroup(const std::string& sync_group) - RTC_RUN_ON(worker_thread_); void ConfigureSync(const std::string& sync_group) RTC_RUN_ON(worker_thread_); void NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, - MediaType media_type, - bool use_send_side_bwe) + MediaType media_type) RTC_RUN_ON(worker_thread_); - bool IdentifyReceivedPacket(RtpPacketReceived& packet, - bool* use_send_side_bwe = nullptr); - bool RegisterReceiveStream(uint32_t ssrc, ReceiveStream* stream); - bool UnregisterReceiveStream(uint32_t ssrc); - void UpdateAggregateNetworkState(); // Ensure that necessary process threads are started, and any required @@ -371,7 +360,6 @@ class Call final : public webrtc::Call, TaskQueueFactory* const task_queue_factory_; TaskQueueBase* const worker_thread_; TaskQueueBase* const network_thread_; - const std::unique_ptr decode_sync_; RTC_NO_UNIQUE_ADDRESS SequenceChecker send_transport_sequence_checker_; const int num_cpu_cores_; @@ -394,11 +382,14 @@ class Call final : public webrtc::Call, // Audio, Video, and FlexFEC receive streams are owned by the client that // creates them. // TODO(bugs.webrtc.org/11993): Move audio_receive_streams_, - // video_receive_streams_ over to the network thread. + // video_receive_streams_ and sync_stream_mapping_ over to the network thread. std::set audio_receive_streams_ RTC_GUARDED_BY(worker_thread_); std::set video_receive_streams_ RTC_GUARDED_BY(worker_thread_); + std::map sync_stream_mapping_ + RTC_GUARDED_BY(worker_thread_); + // TODO(nisse): Should eventually be injected at creation, // with a single object in the bundled case. RtpStreamReceiverController audio_receiver_controller_ @@ -409,12 +400,10 @@ class Call final : public webrtc::Call, // This extra map is used for receive processing which is // independent of media type. - RTC_NO_UNIQUE_ADDRESS SequenceChecker receive_11993_checker_; - // TODO(bugs.webrtc.org/11993): Move receive_rtp_config_ over to the // network thread. std::map receive_rtp_config_ - RTC_GUARDED_BY(&receive_11993_checker_); + RTC_GUARDED_BY(worker_thread_); // Audio and Video send streams are owned by the client that creates them. std::map audio_send_ssrcs_ @@ -477,11 +466,11 @@ class Call final : public webrtc::Call, bool is_started_ RTC_GUARDED_BY(worker_thread_) = false; - // Sequence checker for outgoing network traffic. Could be the network thread. - // Could also be a pacer owned thread or TQ such as the TaskQueuePacedSender. RTC_NO_UNIQUE_ADDRESS SequenceChecker sent_packet_sequence_checker_; absl::optional last_sent_packet_ RTC_GUARDED_BY(sent_packet_sequence_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(Call); }; } // namespace internal @@ -602,9 +591,8 @@ SharedModuleThread::~SharedModuleThread() = default; rtc::scoped_refptr SharedModuleThread::Create( std::unique_ptr process_thread, std::function on_one_ref_remaining) { - // Using `new` to access a non-public constructor. - return rtc::scoped_refptr(new SharedModuleThread( - std::move(process_thread), std::move(on_one_ref_remaining))); + return new SharedModuleThread(std::move(process_thread), + std::move(on_one_ref_remaining)); } void SharedModuleThread::EnsureStarted() { @@ -801,11 +789,6 @@ Call::Call(Clock* clock, // must be made on `worker_thread_` (i.e. they're one and the same). 
network_thread_(config.network_task_queue_ ? config.network_task_queue_ : worker_thread_), - decode_sync_(config.metronome - ? std::make_unique(clock_, - config.metronome, - worker_thread_) - : nullptr), num_cpu_cores_(CpuInfo::DetectNumberOfCores()), module_process_thread_(std::move(module_process_thread)), call_stats_(new CallStats(clock_, worker_thread_)), @@ -834,7 +817,6 @@ Call::Call(Clock* clock, RTC_DCHECK(network_thread_); RTC_DCHECK(worker_thread_->IsCurrent()); - receive_11993_checker_.Detach(); send_transport_sequence_checker_.Detach(); sent_packet_sequence_checker_.Detach(); @@ -982,7 +964,7 @@ webrtc::AudioReceiveStream* Call::CreateAudioReceiveStream( // TODO(bugs.webrtc.org/11993): Update the below on the network thread. // We could possibly set up the audio_receiver_controller_ association up // as part of the async setup. - RegisterReceiveStream(config.rtp.remote_ssrc, receive_stream); + receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream); ConfigureSync(config.sync_group); @@ -1015,12 +997,12 @@ void Call::DestroyAudioReceiveStream( audio_receive_streams_.erase(audio_receive_stream); - // After calling erase(), call ConfigureSync. This will clear associated - // video streams or associate them with a different audio stream if one exists - // for this sync_group. - ConfigureSync(audio_receive_stream->config().sync_group); - - UnregisterReceiveStream(ssrc); + const auto it = sync_stream_mapping_.find(config.sync_group); + if (it != sync_stream_mapping_.end() && it->second == audio_receive_stream) { + sync_stream_mapping_.erase(it); + ConfigureSync(config.sync_group); + } + receive_rtp_config_.erase(ssrc); UpdateAggregateNetworkState(); // TODO(bugs.webrtc.org/11993): Consider if deleting `audio_receive_stream` @@ -1097,6 +1079,10 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { VideoSendStream* send_stream_impl = static_cast(send_stream); + VideoSendStream::RtpStateMap rtp_states; + VideoSendStream::RtpPayloadStateMap rtp_payload_states; + send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states, + &rtp_payload_states); auto it = video_send_ssrcs_.begin(); while (it != video_send_ssrcs_.end()) { @@ -1116,10 +1102,6 @@ void Call::DestroyVideoSendStream(webrtc::VideoSendStream* send_stream) { if (video_send_streams_.empty()) video_send_streams_empty_.store(true, std::memory_order_relaxed); - VideoSendStream::RtpStateMap rtp_states; - VideoSendStream::RtpPayloadStateMap rtp_payload_states; - send_stream_impl->StopPermanentlyAndGetRtpStates(&rtp_states, - &rtp_payload_states); for (const auto& kv : rtp_states) { suspended_video_send_ssrcs_[kv.first] = kv.second; } @@ -1154,7 +1136,7 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( task_queue_factory_, this, num_cpu_cores_, transport_send_->packet_router(), std::move(configuration), call_stats_.get(), clock_, new VCMTiming(clock_), - &nack_periodic_processor_, decode_sync_.get()); + &nack_periodic_processor_); // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); @@ -1165,9 +1147,9 @@ webrtc::VideoReceiveStream* Call::CreateVideoReceiveStream( // stream. Since the transport_send_cc negotiation is per payload // type, we may get an incorrect value for the rtx stream, but // that is unlikely to matter in practice. 
- RegisterReceiveStream(rtp.rtx_ssrc, receive_stream); + receive_rtp_config_.emplace(rtp.rtx_ssrc, receive_stream); } - RegisterReceiveStream(rtp.remote_ssrc, receive_stream); + receive_rtp_config_.emplace(rtp.remote_ssrc, receive_stream); video_receive_streams_.insert(receive_stream); ConfigureSync(receive_stream->sync_group()); @@ -1192,9 +1174,9 @@ void Call::DestroyVideoReceiveStream( // Remove all ssrcs pointing to a receive stream. As RTX retransmits on a // separate SSRC there can be either one or two. - UnregisterReceiveStream(rtp.remote_ssrc); + receive_rtp_config_.erase(rtp.remote_ssrc); if (rtp.rtx_ssrc) { - UnregisterReceiveStream(rtp.rtx_ssrc); + receive_rtp_config_.erase(rtp.rtx_ssrc); } video_receive_streams_.erase(receive_stream_impl); ConfigureSync(receive_stream_impl->sync_group()); @@ -1227,7 +1209,10 @@ FlexfecReceiveStream* Call::CreateFlexfecReceiveStream( // TODO(bugs.webrtc.org/11993): Set this up asynchronously on the network // thread. receive_stream->RegisterWithTransport(&video_receiver_controller_); - RegisterReceiveStream(config.rtp.remote_ssrc, receive_stream); + + RTC_DCHECK(receive_rtp_config_.find(config.rtp.remote_ssrc) == + receive_rtp_config_.end()); + receive_rtp_config_.emplace(config.rtp.remote_ssrc, receive_stream); // TODO(brandtr): Store config in RtcEventLog here. @@ -1245,7 +1230,7 @@ void Call::DestroyFlexfecReceiveStream(FlexfecReceiveStream* receive_stream) { RTC_DCHECK(receive_stream != nullptr); const FlexfecReceiveStream::RtpConfig& rtp = receive_stream->rtp_config(); - UnregisterReceiveStream(rtp.remote_ssrc); + receive_rtp_config_.erase(rtp.remote_ssrc); // Remove all SSRCs pointing to the FlexfecReceiveStreamImpl to be // destroyed. @@ -1465,37 +1450,51 @@ void Call::OnAllocationLimitsChanged(BitrateAllocationLimits limits) { } // RTC_RUN_ON(worker_thread_) -AudioReceiveStream* Call::FindAudioStreamForSyncGroup( - const std::string& sync_group) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - if (!sync_group.empty()) { +void Call::ConfigureSync(const std::string& sync_group) { + // TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. + // Set sync only if there was no previous one. + if (sync_group.empty()) + return; + + AudioReceiveStream* sync_audio_stream = nullptr; + // Find existing audio stream. + const auto it = sync_stream_mapping_.find(sync_group); + if (it != sync_stream_mapping_.end()) { + sync_audio_stream = it->second; + } else { + // No configured audio stream, see if we can find one. for (AudioReceiveStream* stream : audio_receive_streams_) { - if (stream->config().sync_group == sync_group) - return stream; + if (stream->config().sync_group == sync_group) { + if (sync_audio_stream != nullptr) { + RTC_LOG(LS_WARNING) + << "Attempting to sync more than one audio stream " + "within the same sync group. This is not " + "supported in the current implementation."; + break; + } + sync_audio_stream = stream; + } } } - - return nullptr; -} - -// TODO(bugs.webrtc.org/11993): Expect to be called on the network thread. -// RTC_RUN_ON(worker_thread_) -void Call::ConfigureSync(const std::string& sync_group) { - // `audio_stream` may be nullptr when clearing the audio stream for a group. 
- AudioReceiveStream* audio_stream = FindAudioStreamForSyncGroup(sync_group); - + if (sync_audio_stream) + sync_stream_mapping_[sync_group] = sync_audio_stream; size_t num_synced_streams = 0; for (VideoReceiveStream2* video_stream : video_receive_streams_) { if (video_stream->sync_group() != sync_group) continue; ++num_synced_streams; - // TODO(bugs.webrtc.org/4762): Support synchronizing more than one A/V pair. - // Attempting to sync more than one audio/video pair within the same sync - // group is not supported in the current implementation. + if (num_synced_streams > 1) { + // TODO(pbos): Support synchronizing more than one A/V pair. + // https://code.google.com/p/webrtc/issues/detail?id=4762 + RTC_LOG(LS_WARNING) + << "Attempting to sync more than one audio/video pair " + "within the same sync group. This is not supported in " + "the current implementation."; + } // Only sync the first A/V pair within this sync group. if (num_synced_streams == 1) { // sync_audio_stream may be null and that's ok. - video_stream->SetSync(audio_stream); + video_stream->SetSync(sync_audio_stream); } else { video_stream->SetSync(nullptr); } @@ -1583,11 +1582,22 @@ PacketReceiver::DeliveryStatus Call::DeliverRtp(MediaType media_type, RTC_DCHECK(media_type == MediaType::AUDIO || media_type == MediaType::VIDEO || is_keep_alive_packet); - bool use_send_side_bwe = false; - if (!IdentifyReceivedPacket(parsed_packet, &use_send_side_bwe)) + auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); + if (it == receive_rtp_config_.end()) { + RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " + << parsed_packet.Ssrc(); + // Destruction of the receive stream, including deregistering from the + // RtpDemuxer, is not protected by the `worker_thread_`. + // But deregistering in the `receive_rtp_config_` map is. So by not passing + // the packet on to demuxing in this case, we prevent incoming packets to be + // passed on via the demuxer to a receive stream which is being torned down. return DELIVERY_UNKNOWN_SSRC; + } - NotifyBweOfReceivedPacket(parsed_packet, media_type, use_send_side_bwe); + parsed_packet.IdentifyExtensions( + RtpHeaderExtensionMap(it->second->rtp_config().extensions)); + + NotifyBweOfReceivedPacket(parsed_packet, media_type); // RateCounters expect input parameter as int, save it as int, // instead of converting each time it is passed to RateCounter::Add below. @@ -1638,8 +1648,20 @@ void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { parsed_packet.set_recovered(true); - if (!IdentifyReceivedPacket(parsed_packet)) + auto it = receive_rtp_config_.find(parsed_packet.Ssrc()); + if (it == receive_rtp_config_.end()) { + RTC_LOG(LS_ERROR) << "receive_rtp_config_ lookup failed for ssrc " + << parsed_packet.Ssrc(); + // Destruction of the receive stream, including deregistering from the + // RtpDemuxer, is not protected by the `worker_thread_`. + // But deregistering in the `receive_rtp_config_` map is. + // So by not passing the packet on to demuxing in this case, we prevent + // incoming packets to be passed on via the demuxer to a receive stream + // which is being torn down. return; + } + parsed_packet.IdentifyExtensions( + RtpHeaderExtensionMap(it->second->rtp_config().extensions)); // TODO(brandtr): Update here when we support protecting audio packets too. 
parsed_packet.set_payload_type_frequency(kVideoPayloadTypeFrequency); @@ -1648,8 +1670,11 @@ void Call::OnRecoveredPacket(const uint8_t* packet, size_t length) { // RTC_RUN_ON(worker_thread_) void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, - MediaType media_type, - bool use_send_side_bwe) { + MediaType media_type) { + auto it = receive_rtp_config_.find(packet.Ssrc()); + bool use_send_side_bwe = (it != receive_rtp_config_.end()) && + UseSendSideBwe(it->second->rtp_config()); + RTPHeader header; packet.GetHeader(&header); @@ -1680,45 +1705,6 @@ void Call::NotifyBweOfReceivedPacket(const RtpPacketReceived& packet, } } -bool Call::IdentifyReceivedPacket(RtpPacketReceived& packet, - bool* use_send_side_bwe /*= nullptr*/) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - auto it = receive_rtp_config_.find(packet.Ssrc()); - if (it == receive_rtp_config_.end()) { - RTC_DLOG(LS_WARNING) << "receive_rtp_config_ lookup failed for ssrc " - << packet.Ssrc(); - return false; - } - - packet.IdentifyExtensions( - RtpHeaderExtensionMap(it->second->rtp_config().extensions)); - - if (use_send_side_bwe) { - *use_send_side_bwe = UseSendSideBwe(it->second->rtp_config()); - } - - return true; -} - -bool Call::RegisterReceiveStream(uint32_t ssrc, ReceiveStream* stream) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - RTC_DCHECK(stream); - auto inserted = receive_rtp_config_.emplace(ssrc, stream); - if (!inserted.second) { - RTC_DLOG(LS_WARNING) << "ssrc already registered: " << ssrc; - } - return inserted.second; -} - -bool Call::UnregisterReceiveStream(uint32_t ssrc) { - RTC_DCHECK_RUN_ON(&receive_11993_checker_); - size_t erased = receive_rtp_config_.erase(ssrc); - if (!erased) { - RTC_DLOG(LS_WARNING) << "ssrc wasn't registered: " << ssrc; - } - return erased != 0u; -} - } // namespace internal } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call.h b/TMessagesProj/jni/voip/webrtc/call/call.h index 11451c5c8..f6388c3c7 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call.h +++ b/TMessagesProj/jni/voip/webrtc/call/call.h @@ -40,7 +40,13 @@ namespace webrtc { // SharedModuleThread supports a callback that is issued when only one reference // remains, which is used to indicate to the original owner that the thread may // be discarded. -class SharedModuleThread final { +class SharedModuleThread : public rtc::RefCountInterface { + protected: + SharedModuleThread(std::unique_ptr process_thread, + std::function on_one_ref_remaining); + friend class rtc::scoped_refptr; + ~SharedModuleThread() override; + public: // Allows injection of an externally created process thread. static rtc::scoped_refptr Create( @@ -52,28 +58,16 @@ class SharedModuleThread final { ProcessThread* process_thread(); private: - friend class rtc::scoped_refptr; - SharedModuleThread(std::unique_ptr process_thread, - std::function on_one_ref_remaining); - ~SharedModuleThread(); - - void AddRef() const; - rtc::RefCountReleaseStatus Release() const; + void AddRef() const override; + rtc::RefCountReleaseStatus Release() const override; class Impl; mutable std::unique_ptr impl_; }; -// A Call represents a two-way connection carrying zero or more outgoing -// and incoming media streams, transported over one or more RTP transports. - // A Call instance can contain several send and/or receive streams. All streams // are assumed to have the same remote endpoint and will share bitrate estimates // etc. 
- -// When using the PeerConnection API, there is an one to one relationship -// between the PeerConnection and the Call. - class Call { public: using Config = CallConfig; diff --git a/TMessagesProj/jni/voip/webrtc/call/call_config.h b/TMessagesProj/jni/voip/webrtc/call/call_config.h index ef505a4b0..f14979015 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/call_config.h @@ -11,7 +11,6 @@ #define CALL_CALL_CONFIG_H_ #include "api/fec_controller.h" -#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/rtc_error.h" @@ -76,8 +75,6 @@ struct CallConfig { // RtpTransportControllerSend to use for this call. RtpTransportControllerSendFactoryInterface* rtp_transport_controller_send_factory = nullptr; - - Metronome* metronome = nullptr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/call_factory.cc b/TMessagesProj/jni/voip/webrtc/call/call_factory.cc index 40357850a..aeb3cbdaa 100644 --- a/TMessagesProj/jni/voip/webrtc/call/call_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/call/call_factory.cc @@ -15,34 +15,26 @@ #include #include #include -#include #include "absl/types/optional.h" #include "api/test/simulated_network.h" -#include "api/units/time_delta.h" #include "call/call.h" #include "call/degraded_call.h" #include "call/rtp_transport_config.h" #include "rtc_base/checks.h" -#include "rtc_base/experiments/field_trial_list.h" -#include "rtc_base/experiments/field_trial_parser.h" +#include "system_wrappers/include/field_trial.h" namespace webrtc { namespace { -using TimeScopedNetworkConfig = DegradedCall::TimeScopedNetworkConfig; - -bool ParseConfigParam(const WebRtcKeyValueConfig& trials, - absl::string_view exp_name, - int* field) { - std::string group = trials.Lookup(exp_name); +bool ParseConfigParam(std::string exp_name, int* field) { + std::string group = field_trial::FindFullName(exp_name); if (group.empty()) return false; return (sscanf(group.c_str(), "%d", field) == 1); } -absl::optional ParseDegradationConfig( - const WebRtcKeyValueConfig& trials, +absl::optional ParseDegradationConfig( bool send) { std::string exp_prefix = "WebRTCFakeNetwork"; if (send) { @@ -51,92 +43,33 @@ absl::optional ParseDegradationConfig( exp_prefix += "Receive"; } - TimeScopedNetworkConfig config; + webrtc::BuiltInNetworkBehaviorConfig config; bool configured = false; configured |= - ParseConfigParam(trials, exp_prefix + "DelayMs", &config.queue_delay_ms); - configured |= ParseConfigParam(trials, exp_prefix + "DelayStdDevMs", + ParseConfigParam(exp_prefix + "DelayMs", &config.queue_delay_ms); + configured |= ParseConfigParam(exp_prefix + "DelayStdDevMs", &config.delay_standard_deviation_ms); int queue_length = 0; - if (ParseConfigParam(trials, exp_prefix + "QueueLength", &queue_length)) { + if (ParseConfigParam(exp_prefix + "QueueLength", &queue_length)) { RTC_CHECK_GE(queue_length, 0); config.queue_length_packets = queue_length; configured = true; } - configured |= ParseConfigParam(trials, exp_prefix + "CapacityKbps", - &config.link_capacity_kbps); - configured |= ParseConfigParam(trials, exp_prefix + "LossPercent", - &config.loss_percent); + configured |= + ParseConfigParam(exp_prefix + "CapacityKbps", &config.link_capacity_kbps); + configured |= + ParseConfigParam(exp_prefix + "LossPercent", &config.loss_percent); int allow_reordering = 0; - if (ParseConfigParam(trials, exp_prefix + "AllowReordering", - &allow_reordering)) { + if 
(ParseConfigParam(exp_prefix + "AllowReordering", &allow_reordering)) { config.allow_reordering = true; configured = true; } - configured |= ParseConfigParam(trials, exp_prefix + "AvgBurstLossLength", + configured |= ParseConfigParam(exp_prefix + "AvgBurstLossLength", &config.avg_burst_loss_length); - return configured ? absl::optional(config) - : absl::nullopt; + return configured + ? absl::optional(config) + : absl::nullopt; } - -std::vector GetNetworkConfigs( - const WebRtcKeyValueConfig& trials, - bool send) { - FieldTrialStructList trials_list( - {FieldTrialStructMember("queue_length_packets", - [](TimeScopedNetworkConfig* p) { - // FieldTrialParser does not natively support - // size_t type, so use this ugly cast as - // workaround. - return reinterpret_cast( - &p->queue_length_packets); - }), - FieldTrialStructMember( - "queue_delay_ms", - [](TimeScopedNetworkConfig* p) { return &p->queue_delay_ms; }), - FieldTrialStructMember("delay_standard_deviation_ms", - [](TimeScopedNetworkConfig* p) { - return &p->delay_standard_deviation_ms; - }), - FieldTrialStructMember( - "link_capacity_kbps", - [](TimeScopedNetworkConfig* p) { return &p->link_capacity_kbps; }), - FieldTrialStructMember( - "loss_percent", - [](TimeScopedNetworkConfig* p) { return &p->loss_percent; }), - FieldTrialStructMember( - "allow_reordering", - [](TimeScopedNetworkConfig* p) { return &p->allow_reordering; }), - FieldTrialStructMember("avg_burst_loss_length", - [](TimeScopedNetworkConfig* p) { - return &p->avg_burst_loss_length; - }), - FieldTrialStructMember( - "packet_overhead", - [](TimeScopedNetworkConfig* p) { return &p->packet_overhead; }), - FieldTrialStructMember("codel_active_queue_management", - [](TimeScopedNetworkConfig* p) { - return &p->codel_active_queue_management; - }), - FieldTrialStructMember( - "duration", - [](TimeScopedNetworkConfig* p) { return &p->duration; })}, - {}); - ParseFieldTrial({&trials_list}, - trials.Lookup(send ? "WebRTC-FakeNetworkSendConfig" - : "WebRTC-FakeNetworkReceiveConfig")); - std::vector configs = trials_list.Get(); - if (configs.empty()) { - // Try legacy fallback trials. 
- absl::optional fallback_config = - ParseDegradationConfig(trials, send); - if (fallback_config.has_value()) { - configs.push_back(*fallback_config); - } - } - return configs; -} - } // namespace CallFactory::CallFactory() { @@ -145,18 +78,14 @@ CallFactory::CallFactory() { Call* CallFactory::CreateCall(const Call::Config& config) { RTC_DCHECK_RUN_ON(&call_thread_); - RTC_DCHECK(config.trials); - - std::vector send_degradation_configs = - GetNetworkConfigs(*config.trials, /*send=*/true); - std::vector - receive_degradation_configs = - GetNetworkConfigs(*config.trials, /*send=*/false); + absl::optional send_degradation_config = + ParseDegradationConfig(true); + absl::optional + receive_degradation_config = ParseDegradationConfig(false); RtpTransportConfig transportConfig = config.ExtractTransportConfig(); - if (!send_degradation_configs.empty() || - !receive_degradation_configs.empty()) { + if (send_degradation_config || receive_degradation_config) { return new DegradedCall( std::unique_ptr(Call::Create( config, Clock::GetRealTimeClock(), @@ -165,7 +94,8 @@ Call* CallFactory::CreateCall(const Call::Config& config) { config.rtp_transport_controller_send_factory->Create( transportConfig, Clock::GetRealTimeClock(), ProcessThread::Create("PacerThread")))), - send_degradation_configs, receive_degradation_configs); + send_degradation_config, receive_degradation_config, + config.task_queue_factory); } if (!module_thread_) { diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc index 0d01e8696..546208549 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.cc @@ -18,13 +18,13 @@ namespace webrtc { DegradedCall::FakeNetworkPipeOnTaskQueue::FakeNetworkPipeOnTaskQueue( - TaskQueueBase* task_queue, - const ScopedTaskSafety& task_safety, + TaskQueueFactory* task_queue_factory, Clock* clock, std::unique_ptr network_behavior) : clock_(clock), - task_queue_(task_queue), - task_safety_(task_safety), + task_queue_(task_queue_factory->CreateTaskQueue( + "DegradedSendQueue", + TaskQueueFactory::Priority::NORMAL)), pipe_(clock, std::move(network_behavior)) {} void DegradedCall::FakeNetworkPipeOnTaskQueue::SendRtp( @@ -61,22 +61,21 @@ bool DegradedCall::FakeNetworkPipeOnTaskQueue::Process() { return false; } - task_queue_->PostTask(ToQueuedTask(task_safety_, [this, time_to_next] { - RTC_DCHECK_RUN_ON(task_queue_); + task_queue_.PostTask([this, time_to_next]() { + RTC_DCHECK_RUN_ON(&task_queue_); int64_t next_process_time = *time_to_next + clock_->TimeInMilliseconds(); if (!next_process_ms_ || next_process_time < *next_process_ms_) { next_process_ms_ = next_process_time; - task_queue_->PostDelayedHighPrecisionTask( - ToQueuedTask(task_safety_, - [this] { - RTC_DCHECK_RUN_ON(task_queue_); - if (!Process()) { - next_process_ms_.reset(); - } - }), + task_queue_.PostDelayedTask( + [this]() { + RTC_DCHECK_RUN_ON(&task_queue_); + if (!Process()) { + next_process_ms_.reset(); + } + }, *time_to_next); } - })); + }); return true; } @@ -128,37 +127,27 @@ bool DegradedCall::FakeNetworkPipeTransportAdapter::SendRtcp( DegradedCall::DegradedCall( std::unique_ptr call, - const std::vector& send_configs, - const std::vector& receive_configs) + absl::optional send_config, + absl::optional receive_config, + TaskQueueFactory* task_queue_factory) : clock_(Clock::GetRealTimeClock()), call_(std::move(call)), - send_config_index_(0), - send_configs_(send_configs), + 
task_queue_factory_(task_queue_factory), + send_config_(send_config), send_simulated_network_(nullptr), - receive_config_index_(0), - receive_configs_(receive_configs) { - if (!receive_configs_.empty()) { - auto network = std::make_unique(receive_configs_[0]); + receive_config_(receive_config) { + if (receive_config_) { + auto network = std::make_unique(*receive_config_); receive_simulated_network_ = network.get(); receive_pipe_ = std::make_unique(clock_, std::move(network)); receive_pipe_->SetReceiver(call_->Receiver()); - if (receive_configs_.size() > 1) { - call_->network_thread()->PostDelayedTask( - ToQueuedTask(task_safety_, [this] { UpdateReceiveNetworkConfig(); }), - receive_configs_[0].duration.ms()); - } } - if (!send_configs_.empty()) { - auto network = std::make_unique(send_configs_[0]); + if (send_config_) { + auto network = std::make_unique(*send_config_); send_simulated_network_ = network.get(); send_pipe_ = std::make_unique( - call_->network_thread(), task_safety_, clock_, std::move(network)); - if (send_configs_.size() > 1) { - call_->network_thread()->PostDelayedTask( - ToQueuedTask(task_safety_, [this] { UpdateSendNetworkConfig(); }), - send_configs_[0].duration.ms()); - } + task_queue_factory_, clock_, std::move(network)); } } @@ -166,7 +155,7 @@ DegradedCall::~DegradedCall() = default; AudioSendStream* DegradedCall::CreateAudioSendStream( const AudioSendStream::Config& config) { - if (!send_configs_.empty()) { + if (send_config_) { auto transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); AudioSendStream::Config degrade_config = config; @@ -200,7 +189,7 @@ VideoSendStream* DegradedCall::CreateVideoSendStream( VideoSendStream::Config config, VideoEncoderConfig encoder_config) { std::unique_ptr transport_adapter; - if (!send_configs_.empty()) { + if (send_config_) { transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); config.send_transport = transport_adapter.get(); @@ -218,7 +207,7 @@ VideoSendStream* DegradedCall::CreateVideoSendStream( VideoEncoderConfig encoder_config, std::unique_ptr fec_controller) { std::unique_ptr transport_adapter; - if (!send_configs_.empty()) { + if (send_config_) { transport_adapter = std::make_unique( send_pipe_.get(), call_.get(), clock_, config.send_transport); config.send_transport = transport_adapter.get(); @@ -262,7 +251,7 @@ void DegradedCall::AddAdaptationResource( } PacketReceiver* DegradedCall::Receiver() { - if (!receive_configs_.empty()) { + if (receive_config_) { return this; } return call_->Receiver(); @@ -310,7 +299,7 @@ void DegradedCall::OnUpdateSyncGroup(AudioReceiveStream& stream, } void DegradedCall::OnSentPacket(const rtc::SentPacket& sent_packet) { - if (!send_configs_.empty()) { + if (send_config_) { // If we have a degraded send-transport, we have already notified call // about the supposed network send time. Discard the actual network send // time in order to properly fool the BWE. 
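// The ParseConfigParam() path in call_factory.cc above now reads each setting
// from a WebRTC field trial, so the DegradedCall wrappers in this file are
// only created when matching trials are active. A sketch of how such a
// configuration could be supplied, assuming the usual "Name/Value/" field
// trial string format; the helper name and the values are illustrative.
#include "system_wrappers/include/field_trial.h"

void EnableFakeSendNetwork() {
  // 100 ms queueing delay and 5% loss on the send side; the trial names mirror
  // the exp_prefix + suffix strings assembled in ParseDegradationConfig().
  // Kept static because the field trial machinery keeps referring to the
  // string, and this must run before the Call/CallFactory is created.
  static const char kTrials[] =
      "WebRTCFakeNetworkSendDelayMs/100/"
      "WebRTCFakeNetworkSendLossPercent/5/";
  webrtc::field_trial::InitFieldTrialsFromString(kTrials);
}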
@@ -336,21 +325,4 @@ PacketReceiver::DeliveryStatus DegradedCall::DeliverPacket( receive_pipe_->Process(); return status; } - -void DegradedCall::UpdateSendNetworkConfig() { - send_config_index_ = (send_config_index_ + 1) % send_configs_.size(); - send_simulated_network_->SetConfig(send_configs_[send_config_index_]); - call_->network_thread()->PostDelayedTask( - ToQueuedTask(task_safety_, [this] { UpdateSendNetworkConfig(); }), - send_configs_[send_config_index_].duration.ms()); -} - -void DegradedCall::UpdateReceiveNetworkConfig() { - receive_config_index_ = (receive_config_index_ + 1) % receive_configs_.size(); - receive_simulated_network_->SetConfig( - receive_configs_[receive_config_index_]); - call_->network_thread()->PostDelayedTask( - ToQueuedTask(task_safety_, [this] { UpdateReceiveNetworkConfig(); }), - receive_configs_[receive_config_index_].duration.ms()); -} } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h index dd80a0c5d..70dc12680 100644 --- a/TMessagesProj/jni/voip/webrtc/call/degraded_call.h +++ b/TMessagesProj/jni/voip/webrtc/call/degraded_call.h @@ -17,7 +17,6 @@ #include #include #include -#include #include "absl/types/optional.h" #include "api/call/transport.h" @@ -36,23 +35,20 @@ #include "call/simulated_network.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "system_wrappers/include/clock.h" namespace webrtc { class DegradedCall : public Call, private PacketReceiver { public: - struct TimeScopedNetworkConfig : public BuiltInNetworkBehaviorConfig { - TimeDelta duration = TimeDelta::PlusInfinity(); - }; - explicit DegradedCall( std::unique_ptr call, - const std::vector& send_configs, - const std::vector& receive_configs); + absl::optional send_config, + absl::optional receive_config, + TaskQueueFactory* task_queue_factory); ~DegradedCall() override; // Implements Call. 
@@ -114,8 +110,7 @@ class DegradedCall : public Call, private PacketReceiver { class FakeNetworkPipeOnTaskQueue { public: FakeNetworkPipeOnTaskQueue( - TaskQueueBase* task_queue, - const ScopedTaskSafety& task_safety, + TaskQueueFactory* task_queue_factory, Clock* clock, std::unique_ptr network_behavior); @@ -134,8 +129,7 @@ class DegradedCall : public Call, private PacketReceiver { bool Process(); Clock* const clock_; - TaskQueueBase* const task_queue_; - const ScopedTaskSafety& task_safety_; + rtc::TaskQueue task_queue_; FakeNetworkPipe pipe_; absl::optional next_process_ms_ RTC_GUARDED_BY(&task_queue_); }; @@ -164,16 +158,14 @@ class DegradedCall : public Call, private PacketReceiver { Transport* const real_transport_; }; - void SetClientBitratePreferences( - const webrtc::BitrateSettings& preferences) override {} - void UpdateSendNetworkConfig(); - void UpdateReceiveNetworkConfig(); - Clock* const clock_; const std::unique_ptr call_; - ScopedTaskSafety task_safety_; - size_t send_config_index_; - const std::vector send_configs_; + TaskQueueFactory* const task_queue_factory_; + + void SetClientBitratePreferences( + const webrtc::BitrateSettings& preferences) override {} + + const absl::optional send_config_; SimulatedNetwork* send_simulated_network_; std::unique_ptr send_pipe_; std::map> @@ -181,8 +173,7 @@ class DegradedCall : public Call, private PacketReceiver { std::map> video_send_transport_adapters_; - size_t receive_config_index_; - const std::vector receive_configs_; + const absl::optional receive_config_; SimulatedNetwork* receive_simulated_network_; std::unique_ptr receive_pipe_; }; diff --git a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc index 8a03e0ce7..4b5579dfc 100644 --- a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc +++ b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.cc @@ -18,6 +18,7 @@ #include #include "api/media_types.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "system_wrappers/include/clock.h" diff --git a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h index be72e9163..fadae337f 100644 --- a/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h +++ b/TMessagesProj/jni/voip/webrtc/call/fake_network_pipe.h @@ -23,6 +23,7 @@ #include "api/test/simulated_network.h" #include "call/call.h" #include "call/simulated_packet_receiver.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -108,9 +109,6 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { ~FakeNetworkPipe() override; - FakeNetworkPipe(const FakeNetworkPipe&) = delete; - FakeNetworkPipe& operator=(const FakeNetworkPipe&) = delete; - void SetClockOffset(int64_t offset_ms); // Must not be called in parallel with DeliverPacket or Process. 
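// Several headers in this patch switch from explicitly deleted copy members
// back to the RTC_DISALLOW_COPY_AND_ASSIGN macro from
// rtc_base/constructor_magic.h. The two spellings make a class non-copyable
// in essentially the same way; a small sketch with illustrative class names:
#include "rtc_base/constructor_magic.h"

class ExplicitNonCopyable {
 public:
  ExplicitNonCopyable() = default;
  ExplicitNonCopyable(const ExplicitNonCopyable&) = delete;
  ExplicitNonCopyable& operator=(const ExplicitNonCopyable&) = delete;
};

class MacroNonCopyable {
 public:
  MacroNonCopyable() = default;

 private:
  // Expands to a deleted copy constructor and copy assignment operator.
  RTC_DISALLOW_COPY_AND_ASSIGN(MacroNonCopyable);
};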
@@ -230,6 +228,8 @@ class FakeNetworkPipe : public SimulatedPacketReceiverInterface { int64_t last_log_time_us_; std::map active_transports_ RTC_GUARDED_BY(config_lock_); + + RTC_DISALLOW_COPY_AND_ASSIGN(FakeNetworkPipe); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc index a78448170..eda5c7f05 100644 --- a/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/call/flexfec_receive_stream_impl.cc @@ -23,6 +23,7 @@ #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/include/receive_statistics.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc index abd0f1f34..8e589b1f8 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.cc @@ -329,11 +329,9 @@ void RampUpTester::PollStats() { } } -void RampUpTester::ReportResult( - const std::string& measurement, - size_t value, - const std::string& units, - test::ImproveDirection improve_direction) const { +void RampUpTester::ReportResult(const std::string& measurement, + size_t value, + const std::string& units) const { webrtc::test::PrintResult( measurement, "", ::testing::UnitTest::GetInstance()->current_test_info()->name(), value, @@ -393,21 +391,16 @@ void RampUpTester::TriggerTestDone() { } if (report_perf_stats_) { - ReportResult("ramp-up-media-sent", media_sent, "bytes", - test::ImproveDirection::kBiggerIsBetter); - ReportResult("ramp-up-padding-sent", padding_sent, "bytes", - test::ImproveDirection::kSmallerIsBetter); - ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes", - test::ImproveDirection::kBiggerIsBetter); - ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes", - test::ImproveDirection::kSmallerIsBetter); + ReportResult("ramp-up-media-sent", media_sent, "bytes"); + ReportResult("ramp-up-padding-sent", padding_sent, "bytes"); + ReportResult("ramp-up-rtx-media-sent", rtx_media_sent, "bytes"); + ReportResult("ramp-up-rtx-padding-sent", rtx_padding_sent, "bytes"); if (ramp_up_finished_ms_ >= 0) { ReportResult("ramp-up-time", ramp_up_finished_ms_ - test_start_ms_, - "milliseconds", test::ImproveDirection::kSmallerIsBetter); + "milliseconds"); } ReportResult("ramp-up-average-network-latency", - send_transport_->GetAverageDelayMs(), "milliseconds", - test::ImproveDirection::kSmallerIsBetter); + send_transport_->GetAverageDelayMs(), "milliseconds"); } } @@ -534,8 +527,7 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { if (report_perf_stats_) { webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), "first_rampup", now - state_start_ms_, "ms", - false, - test::ImproveDirection::kSmallerIsBetter); + false); } // Apply loss during the transition between states if FEC is enabled. 
forward_transport_config_.loss_percent = loss_rates_[test_state_]; @@ -551,8 +543,7 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { if (report_perf_stats_) { webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), "rampdown", now - state_start_ms_, "ms", - false, - test::ImproveDirection::kSmallerIsBetter); + false); } // Apply loss during the transition between states if FEC is enabled. forward_transport_config_.loss_percent = loss_rates_[test_state_]; @@ -566,11 +557,9 @@ void RampUpDownUpTester::EvolveTestState(int bitrate_bps, bool suspended) { if (report_perf_stats_) { webrtc::test::PrintResult("ramp_up_down_up", GetModifierString(), "second_rampup", now - state_start_ms_, - "ms", false, - test::ImproveDirection::kSmallerIsBetter); + "ms", false); ReportResult("ramp-up-down-up-average-network-latency", - send_transport_->GetAverageDelayMs(), "milliseconds", - test::ImproveDirection::kSmallerIsBetter); + send_transport_->GetAverageDelayMs(), "milliseconds"); } // Apply loss during the transition between states if FEC is enabled. forward_transport_config_.loss_percent = loss_rates_[test_state_]; diff --git a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h index 0432e662b..075de6d88 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h +++ b/TMessagesProj/jni/voip/webrtc/call/rampup_tests.h @@ -25,7 +25,6 @@ #include "rtc_base/event.h" #include "rtc_base/task_utils/repeating_task.h" #include "test/call_test.h" -#include "test/testsupport/perf_test.h" namespace webrtc { @@ -67,8 +66,7 @@ class RampUpTester : public test::EndToEndTest { void ReportResult(const std::string& measurement, size_t value, - const std::string& units, - test::ImproveDirection improve_direction) const; + const std::string& units) const; void TriggerTestDone(); Clock* const clock_; diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h b/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h index 5cb779a3b..7ad83f8b0 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_bitrate_configurator.h @@ -14,6 +14,7 @@ #include "absl/types/optional.h" #include "api/transport/bitrate_settings.h" #include "api/units/data_rate.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -23,10 +24,6 @@ class RtpBitrateConfigurator { public: explicit RtpBitrateConfigurator(const BitrateConstraints& bitrate_config); ~RtpBitrateConfigurator(); - - RtpBitrateConfigurator(const RtpBitrateConfigurator&) = delete; - RtpBitrateConfigurator& operator=(const RtpBitrateConfigurator&) = delete; - BitrateConstraints GetConfig() const; // The greater min and smaller max set by this and SetClientBitratePreferences @@ -71,6 +68,8 @@ class RtpBitrateConfigurator { // Bandwidth cap applied for relayed calls. 
DataRate max_bitrate_over_relay_ = DataRate::PlusInfinity(); + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpBitrateConfigurator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc index 3924f84df..28962fd2e 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.cc @@ -39,32 +39,15 @@ size_t RemoveFromMapByValue(Map* map, const Value& value) { return EraseIf(*map, [&](const auto& elem) { return elem.second == value; }); } -// Temp fix: MID in SDP is allowed to be slightly longer than what's allowed -// in the RTP demuxer. Truncate if needed; this won't match, but it only -// makes sense in places that wouldn't use this for matching anyway. -// TODO(bugs.webrtc.org/12517): remove when length 16 is policed by parser. -std::string CheckMidLength(absl::string_view mid) { - std::string new_mid(mid); - if (new_mid.length() > BaseRtpStringExtension::kMaxValueSizeBytes) { - RTC_LOG(LS_WARNING) << "`mid` attribute too long. Truncating."; - new_mid.resize(BaseRtpStringExtension::kMaxValueSizeBytes); - } - return new_mid; -} - } // namespace -RtpDemuxerCriteria::RtpDemuxerCriteria( - absl::string_view mid, - absl::string_view rsid /*= absl::string_view()*/) - : mid_(CheckMidLength(mid)), rsid_(rsid) {} - RtpDemuxerCriteria::RtpDemuxerCriteria() = default; RtpDemuxerCriteria::~RtpDemuxerCriteria() = default; bool RtpDemuxerCriteria::operator==(const RtpDemuxerCriteria& other) const { - return mid_ == other.mid_ && rsid_ == other.rsid_ && ssrcs_ == other.ssrcs_ && - payload_types_ == other.payload_types_; + return this->mid == other.mid && this->rsid == other.rsid && + this->ssrcs == other.ssrcs && + this->payload_types == other.payload_types; } bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { @@ -73,16 +56,16 @@ bool RtpDemuxerCriteria::operator!=(const RtpDemuxerCriteria& other) const { std::string RtpDemuxerCriteria::ToString() const { rtc::StringBuilder sb; - sb << "{mid: " << (mid_.empty() ? "" : mid_) - << ", rsid: " << (rsid_.empty() ? "" : rsid_) << ", ssrcs: ["; + sb << "{mid: " << (mid.empty() ? "" : mid) + << ", rsid: " << (rsid.empty() ? "" : rsid) << ", ssrcs: ["; - for (auto ssrc : ssrcs_) { + for (auto ssrc : ssrcs) { sb << ssrc << ", "; } sb << "], payload_types = ["; - for (auto pt : payload_types_) { + for (auto pt : payload_types) { sb << pt << ", "; } @@ -121,60 +104,60 @@ RtpDemuxer::~RtpDemuxer() { bool RtpDemuxer::AddSink(const RtpDemuxerCriteria& criteria, RtpPacketSinkInterface* sink) { - RTC_DCHECK(!criteria.payload_types().empty() || !criteria.ssrcs().empty() || - !criteria.mid().empty() || !criteria.rsid().empty()); - RTC_DCHECK(criteria.mid().empty() || IsLegalMidName(criteria.mid())); - RTC_DCHECK(criteria.rsid().empty() || IsLegalRsidName(criteria.rsid())); + RTC_DCHECK(!criteria.payload_types.empty() || !criteria.ssrcs.empty() || + !criteria.mid.empty() || !criteria.rsid.empty()); + RTC_DCHECK(criteria.mid.empty() || IsLegalMidName(criteria.mid)); + RTC_DCHECK(criteria.rsid.empty() || IsLegalRsidName(criteria.rsid)); RTC_DCHECK(sink); // We return false instead of DCHECKing for logical conflicts with the new // criteria because new sinks are created according to user-specified SDP and // we do not want to crash due to a data validation error. 
if (CriteriaWouldConflict(criteria)) { - RTC_LOG(LS_ERROR) << "Unable to add sink=" << sink - << " due to conflicting criteria " << criteria.ToString(); + RTC_LOG(LS_ERROR) << "Unable to add sink = " << sink + << " due conflicting criteria " << criteria.ToString(); return false; } - if (!criteria.mid().empty()) { - if (criteria.rsid().empty()) { - sink_by_mid_.emplace(criteria.mid(), sink); + if (!criteria.mid.empty()) { + if (criteria.rsid.empty()) { + sink_by_mid_.emplace(criteria.mid, sink); } else { - sink_by_mid_and_rsid_.emplace( - std::make_pair(criteria.mid(), criteria.rsid()), sink); + sink_by_mid_and_rsid_.emplace(std::make_pair(criteria.mid, criteria.rsid), + sink); } } else { - if (!criteria.rsid().empty()) { - sink_by_rsid_.emplace(criteria.rsid(), sink); + if (!criteria.rsid.empty()) { + sink_by_rsid_.emplace(criteria.rsid, sink); } } - for (uint32_t ssrc : criteria.ssrcs()) { + for (uint32_t ssrc : criteria.ssrcs) { sink_by_ssrc_.emplace(ssrc, sink); } - for (uint8_t payload_type : criteria.payload_types()) { + for (uint8_t payload_type : criteria.payload_types) { sinks_by_pt_.emplace(payload_type, sink); } RefreshKnownMids(); - RTC_DLOG(LS_INFO) << "Added sink = " << sink << " for criteria " - << criteria.ToString(); + RTC_LOG(LS_INFO) << "Added sink = " << sink << " for criteria " + << criteria.ToString(); return true; } bool RtpDemuxer::CriteriaWouldConflict( const RtpDemuxerCriteria& criteria) const { - if (!criteria.mid().empty()) { - if (criteria.rsid().empty()) { + if (!criteria.mid.empty()) { + if (criteria.rsid.empty()) { // If the MID is in the known_mids_ set, then there is already a sink // added for this MID directly, or there is a sink already added with a // MID, RSID pair for our MID and some RSID. // Adding this criteria would cause one of these rules to be shadowed, so // reject this new criteria. - if (known_mids_.find(criteria.mid()) != known_mids_.end()) { + if (known_mids_.find(criteria.mid) != known_mids_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with known mid"; return true; @@ -182,7 +165,7 @@ bool RtpDemuxer::CriteriaWouldConflict( } else { // If the exact rule already exists, then reject this duplicate. const auto sink_by_mid_and_rsid = sink_by_mid_and_rsid_.find( - std::make_pair(criteria.mid(), criteria.rsid())); + std::make_pair(criteria.mid, criteria.rsid)); if (sink_by_mid_and_rsid != sink_by_mid_and_rsid_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with existing sink = " @@ -193,7 +176,7 @@ bool RtpDemuxer::CriteriaWouldConflict( // If there is already a sink registered for the bare MID, then this // criteria will never receive any packets because they will just be // directed to that MID sink, so reject this new criteria. 
- const auto sink_by_mid = sink_by_mid_.find(criteria.mid()); + const auto sink_by_mid = sink_by_mid_.find(criteria.mid); if (sink_by_mid != sink_by_mid_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() << " would conflict with existing sink = " @@ -203,7 +186,7 @@ bool RtpDemuxer::CriteriaWouldConflict( } } - for (uint32_t ssrc : criteria.ssrcs()) { + for (uint32_t ssrc : criteria.ssrcs) { const auto sink_by_ssrc = sink_by_ssrc_.find(ssrc); if (sink_by_ssrc != sink_by_ssrc_.end()) { RTC_LOG(LS_INFO) << criteria.ToString() @@ -234,13 +217,14 @@ void RtpDemuxer::RefreshKnownMids() { bool RtpDemuxer::AddSink(uint32_t ssrc, RtpPacketSinkInterface* sink) { RtpDemuxerCriteria criteria; - criteria.ssrcs().insert(ssrc); + criteria.ssrcs.insert(ssrc); return AddSink(criteria, sink); } void RtpDemuxer::AddSink(const std::string& rsid, RtpPacketSinkInterface* sink) { - RtpDemuxerCriteria criteria(absl::string_view() /* mid */, rsid); + RtpDemuxerCriteria criteria; + criteria.rsid = rsid; AddSink(criteria, sink); } @@ -252,7 +236,11 @@ bool RtpDemuxer::RemoveSink(const RtpPacketSinkInterface* sink) { RemoveFromMapByValue(&sink_by_mid_and_rsid_, sink) + RemoveFromMapByValue(&sink_by_rsid_, sink); RefreshKnownMids(); - return num_removed > 0; + bool removed = num_removed > 0; + if (removed) { + RTC_LOG(LS_INFO) << "Removed sink = " << sink << " bindings"; + } + return removed; } bool RtpDemuxer::OnRtpPacket(const RtpPacketReceived& packet) { @@ -427,11 +415,11 @@ void RtpDemuxer::AddSsrcSinkBinding(uint32_t ssrc, auto it = result.first; bool inserted = result.second; if (inserted) { - RTC_DLOG(LS_INFO) << "Added sink = " << sink - << " binding with SSRC=" << ssrc; + RTC_LOG(LS_INFO) << "Added sink = " << sink + << " binding with SSRC=" << ssrc; } else if (it->second != sink) { - RTC_DLOG(LS_INFO) << "Updated sink = " << sink - << " binding with SSRC=" << ssrc; + RTC_LOG(LS_INFO) << "Updated sink = " << sink + << " binding with SSRC=" << ssrc; it->second = sink; } } diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h index 5fd37b461..fb65fce36 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_demuxer.h @@ -16,7 +16,6 @@ #include #include -#include "absl/strings/string_view.h" #include "rtc_base/containers/flat_map.h" #include "rtc_base/containers/flat_set.h" @@ -27,10 +26,7 @@ class RtpPacketSinkInterface; // This struct describes the criteria that will be used to match packets to a // specific sink. -class RtpDemuxerCriteria { - public: - explicit RtpDemuxerCriteria(absl::string_view mid, - absl::string_view rsid = absl::string_view()); +struct RtpDemuxerCriteria { RtpDemuxerCriteria(); ~RtpDemuxerCriteria(); @@ -38,37 +34,23 @@ class RtpDemuxerCriteria { bool operator!=(const RtpDemuxerCriteria& other) const; // If not the empty string, will match packets with this MID. - const std::string& mid() const { return mid_; } - - // Return string representation of demux criteria to facilitate logging - std::string ToString() const; + std::string mid; // If not the empty string, will match packets with this as their RTP stream // ID or repaired RTP stream ID. // Note that if both MID and RSID are specified, this will only match packets // that have both specified (either through RTP header extensions, SSRC // latching or RTCP). - const std::string& rsid() const { return rsid_; } + std::string rsid; - // The criteria will match packets with any of these SSRCs. 
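The rtp_demuxer.h hunk here (continuing just below) reverts RtpDemuxerCriteria from a class with private members and accessors back to a plain struct with public fields, which is why the rtp_demuxer.cc hunks above switch from criteria.mid() to criteria.mid. A minimal sketch of that struct shape and of how callers populate it, using std::set and std::string in place of the WebRTC container types purely to keep the example self-contained:

#include <cstdint>
#include <set>
#include <string>

// Illustrative stand-in for the struct-style RtpDemuxerCriteria restored by
// this patch; not the actual WebRTC header.
struct DemuxCriteriaSketch {
  std::string mid;                  // if non-empty, match packets with this MID
  std::string rsid;                 // if non-empty, match this (repaired) RTP stream id
  std::set<uint32_t> ssrcs;         // match any of these SSRCs
  std::set<uint8_t> payload_types;  // match any of these payload types

  bool operator==(const DemuxCriteriaSketch& other) const {
    return mid == other.mid && rsid == other.rsid && ssrcs == other.ssrcs &&
           payload_types == other.payload_types;
  }
};

// With public fields, the convenience AddSink overloads shown above reduce to
// filling in a single field directly.
DemuxCriteriaSketch CriteriaForSsrc(uint32_t ssrc) {
  DemuxCriteriaSketch criteria;
  criteria.ssrcs.insert(ssrc);
  return criteria;
}

DemuxCriteriaSketch CriteriaForRsid(const std::string& rsid) {
  DemuxCriteriaSketch criteria;
  criteria.rsid = rsid;
  return criteria;
}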
- const flat_set& ssrcs() const { return ssrcs_; } + // Will match packets with any of these SSRCs. + flat_set ssrcs; - // Writable accessor for directly modifying the list of ssrcs. - flat_set& ssrcs() { return ssrcs_; } + // Will match packets with any of these payload types. + flat_set payload_types; - // The criteria will match packets with any of these payload types. - const flat_set& payload_types() const { return payload_types_; } - - // Writable accessor for directly modifying the list of payload types. - flat_set& payload_types() { return payload_types_; } - - private: - // Intentionally private member variables to encourage specifying them via the - // constructor and consider them to be const as much as possible. - const std::string mid_; - const std::string rsid_; - flat_set ssrcs_; - flat_set payload_types_; + // Return string representation of demux criteria to facilitate logging + std::string ToString() const; }; // This class represents the RTP demuxing, for a single RTP session (i.e., one diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h index 1f03fab6d..48b0ec2c6 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_payload_params.h @@ -32,7 +32,7 @@ class RtpRtcp; // TODO(nisse): Make these properties not codec specific. class RtpPayloadParams final { public: - RtpPayloadParams(uint32_t ssrc, + RtpPayloadParams(const uint32_t ssrc, const RtpPayloadState* state, const WebRtcKeyValueConfig& trials); RtpPayloadParams(const RtpPayloadParams& other); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h index 3a2c76b3d..9aa9f14c1 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_config.h @@ -18,6 +18,7 @@ #include "api/transport/bitrate_settings.h" #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/task_queue.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc index 230b048ce..c9388e47a 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.cc @@ -559,10 +559,11 @@ void RtpTransportControllerSend::OnReceivedRtcpReceiverReport( void RtpTransportControllerSend::OnAddPacket( const RtpPacketSendInfo& packet_info) { + feedback_demuxer_.AddPacket(packet_info); + Timestamp creation_time = Timestamp::Millis(clock_->TimeInMilliseconds()); task_queue_.PostTask([this, packet_info, creation_time]() { RTC_DCHECK_RUN_ON(&task_queue_); - feedback_demuxer_.AddPacket(packet_info); transport_feedback_adapter_.AddPacket( packet_info, send_side_bwe_with_overhead_ ? 
transport_overhead_bytes_per_packet_ : 0, @@ -572,10 +573,10 @@ void RtpTransportControllerSend::OnAddPacket( void RtpTransportControllerSend::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { + feedback_demuxer_.OnTransportFeedback(feedback); auto feedback_time = Timestamp::Millis(clock_->TimeInMilliseconds()); task_queue_.PostTask([this, feedback, feedback_time]() { RTC_DCHECK_RUN_ON(&task_queue_); - feedback_demuxer_.OnTransportFeedback(feedback); absl::optional feedback_msg = transport_feedback_adapter_.ProcessTransportFeedback(feedback, feedback_time); diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h index e5ff16268..62af78ceb 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_transport_controller_send.h @@ -32,6 +32,7 @@ #include "modules/pacing/rtp_packet_pacer.h" #include "modules/pacing/task_queue_paced_sender.h" #include "modules/utility/include/process_thread.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/network_route.h" #include "rtc_base/race_checker.h" #include "rtc_base/task_queue.h" @@ -62,10 +63,6 @@ class RtpTransportControllerSend final const WebRtcKeyValueConfig* trials); ~RtpTransportControllerSend() override; - RtpTransportControllerSend(const RtpTransportControllerSend&) = delete; - RtpTransportControllerSend& operator=(const RtpTransportControllerSend&) = - delete; - // TODO(tommi): Change to std::unique_ptr<>. RtpVideoSenderInterface* CreateRtpVideoSender( const std::map& suspended_ssrcs, @@ -218,6 +215,7 @@ class RtpTransportControllerSend final // `task_queue_` is defined last to ensure all pending tasks are cancelled // and deleted before any other members. rtc::TaskQueue task_queue_; + RTC_DISALLOW_COPY_AND_ASSIGN(RtpTransportControllerSend); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc index 35e6beeb7..b95370545 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.cc @@ -25,6 +25,7 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" #include "modules/rtp_rtcp/source/rtp_sender.h" +#include "modules/utility/include/process_thread.h" #include "modules/video_coding/include/video_codec_interface.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" @@ -393,7 +394,6 @@ RtpVideoSender::RtpVideoSender( encoder_target_rate_bps_(0), frame_counts_(rtp_config.ssrcs.size()), frame_count_observer_(observers.frame_count_observer) { - transport_checker_.Detach(); RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size()); if (send_side_bwe_with_overhead_ && has_packet_feedback_) transport_->IncludeOverheadInPacedSender(); @@ -445,6 +445,9 @@ RtpVideoSender::RtpVideoSender( fec_controller_->SetProtectionMethod(fec_enabled, NackEnabled()); fec_controller_->SetProtectionCallback(this); + // Signal congestion controller this object is ready for OnPacket* callbacks. 
+ transport_->GetStreamFeedbackProvider()->RegisterStreamFeedbackObserver( + rtp_config_.ssrcs, this); // Construction happens on the worker thread (see Call::CreateVideoSendStream) // but subseqeuent calls to the RTP state will happen on one of two threads: @@ -457,44 +460,27 @@ RtpVideoSender::RtpVideoSender( } RtpVideoSender::~RtpVideoSender() { - // TODO(bugs.webrtc.org/13517): Remove once RtpVideoSender gets deleted on the - // transport task queue. - transport_checker_.Detach(); - SetActiveModulesLocked( std::vector(rtp_streams_.size(), /*active=*/false)); - - RTC_DCHECK(!registered_for_feedback_); + transport_->GetStreamFeedbackProvider()->DeRegisterStreamFeedbackObserver( + this); } void RtpVideoSender::SetActive(bool active) { - RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); if (active_ == active) return; - const std::vector active_modules(rtp_streams_.size(), active); SetActiveModulesLocked(active_modules); - - auto* feedback_provider = transport_->GetStreamFeedbackProvider(); - if (active && !registered_for_feedback_) { - feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this); - registered_for_feedback_ = true; - } else if (!active && registered_for_feedback_) { - feedback_provider->DeRegisterStreamFeedbackObserver(this); - registered_for_feedback_ = false; - } } void RtpVideoSender::SetActiveModules(const std::vector active_modules) { - RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return SetActiveModulesLocked(active_modules); } void RtpVideoSender::SetActiveModulesLocked( const std::vector active_modules) { - RTC_DCHECK_RUN_ON(&transport_checker_); RTC_DCHECK_EQ(rtp_streams_.size(), active_modules.size()); active_ = false; for (size_t i = 0; i < active_modules.size(); ++i) { @@ -528,7 +514,6 @@ void RtpVideoSender::SetActiveModulesLocked( } bool RtpVideoSender::IsActive() { - RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); return IsActiveLocked(); } @@ -637,7 +622,6 @@ EncodedImageCallback::Result RtpVideoSender::OnEncodedImage( void RtpVideoSender::OnBitrateAllocationUpdated( const VideoBitrateAllocation& bitrate) { - RTC_DCHECK_RUN_ON(&transport_checker_); MutexLock lock(&mutex_); if (IsActiveLocked()) { if (rtp_streams_.size() == 1) { diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h index 702380450..7e5de9876 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender.h @@ -35,6 +35,7 @@ #include "modules/rtp_rtcp/source/rtp_sender_video.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include "modules/rtp_rtcp/source/rtp_video_header.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/rate_limiter.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -88,15 +89,12 @@ class RtpVideoSender : public RtpVideoSenderInterface, rtc::scoped_refptr frame_transformer); ~RtpVideoSender() override; - RtpVideoSender(const RtpVideoSender&) = delete; - RtpVideoSender& operator=(const RtpVideoSender&) = delete; - // RtpVideoSender will only route packets if being active, all packets will be // dropped otherwise. void SetActive(bool active) RTC_LOCKS_EXCLUDED(mutex_) override; // Sets the sending status of the rtp modules and appropriately sets the // payload router to active if any rtp modules are active. 
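The rtp_video_sender.cc hunks above move stream-feedback registration out of SetActive and back into the object's lifetime: the constructor registers the sender with the transport's feedback provider, the destructor deregisters it, and the registered_for_feedback_ toggling disappears. A generic sketch of that register-in-constructor, deregister-in-destructor pattern; the Provider and Observer types below are placeholders, not the WebRTC interfaces:

#include <algorithm>
#include <vector>

// Placeholder observer/provider pair standing in for WebRTC's
// StreamFeedbackObserver and StreamFeedbackProvider.
class ObserverSketch {
 public:
  virtual ~ObserverSketch() = default;
  virtual void OnFeedback(int value) = 0;
};

class ProviderSketch {
 public:
  void Register(ObserverSketch* obs) { observers_.push_back(obs); }
  void Deregister(ObserverSketch* obs) {
    observers_.erase(std::remove(observers_.begin(), observers_.end(), obs),
                     observers_.end());
  }
  void Notify(int value) {
    for (ObserverSketch* obs : observers_) obs->OnFeedback(value);
  }

 private:
  std::vector<ObserverSketch*> observers_;
};

// Registration is tied to the observer's lifetime: it stays registered for as
// long as the object exists, whether or not it is currently "active".
class LifetimeBoundObserver : public ObserverSketch {
 public:
  explicit LifetimeBoundObserver(ProviderSketch* provider)
      : provider_(provider) {
    provider_->Register(this);
  }
  ~LifetimeBoundObserver() override { provider_->Deregister(this); }

  void OnFeedback(int /*value*/) override {
    // React to feedback here; ignoring it while inactive is up to the class.
  }

 private:
  ProviderSketch* const provider_;
};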
- void SetActiveModules(std::vector active_modules) + void SetActiveModules(const std::vector active_modules) RTC_LOCKS_EXCLUDED(mutex_) override; bool IsActive() RTC_LOCKS_EXCLUDED(mutex_) override; @@ -153,7 +151,7 @@ class RtpVideoSender : public RtpVideoSenderInterface, private: bool IsActiveLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - void SetActiveModulesLocked(std::vector active_modules) + void SetActiveModulesLocked(const std::vector active_modules) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void UpdateModuleSendingState() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); void ConfigureProtection(); @@ -172,15 +170,10 @@ class RtpVideoSender : public RtpVideoSenderInterface, const bool has_packet_feedback_; const bool simulate_generic_structure_; - // Semantically equivalent to checking for `transport_->GetWorkerQueue()` - // but some tests need to be updated to call from the correct context. - RTC_NO_UNIQUE_ADDRESS SequenceChecker transport_checker_; - - // TODO(bugs.webrtc.org/13517): Remove mutex_ once RtpVideoSender runs on the + // TODO(holmer): Remove mutex_ once RtpVideoSender runs on the // transport task queue. mutable Mutex mutex_; bool active_ RTC_GUARDED_BY(mutex_); - bool registered_for_feedback_ RTC_GUARDED_BY(transport_checker_) = false; const std::unique_ptr fec_controller_; bool fec_allowed_ RTC_GUARDED_BY(mutex_); @@ -212,6 +205,8 @@ class RtpVideoSender : public RtpVideoSenderInterface, // This map is set at construction time and never changed, but it's // non-trivial to make it properly const. std::map ssrc_to_rtp_module_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpVideoSender); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h index acb68e3ae..a0b4baccb 100644 --- a/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h +++ b/TMessagesProj/jni/voip/webrtc/call/rtp_video_sender_interface.h @@ -36,7 +36,7 @@ class RtpVideoSenderInterface : public EncodedImageCallback, virtual void SetActive(bool active) = 0; // Sets the sending status of the rtp modules and appropriately sets the // RtpVideoSender to active if any rtp modules are active. - virtual void SetActiveModules(std::vector active_modules) = 0; + virtual void SetActiveModules(const std::vector active_modules) = 0; virtual bool IsActive() = 0; virtual void OnNetworkAvailability(bool network_available) = 0; diff --git a/TMessagesProj/jni/voip/webrtc/call/version.cc b/TMessagesProj/jni/voip/webrtc/call/version.cc index 12fbc9bcc..f8147c9d1 100644 --- a/TMessagesProj/jni/voip/webrtc/call/version.cc +++ b/TMessagesProj/jni/voip/webrtc/call/version.cc @@ -13,7 +13,7 @@ namespace webrtc { // The timestamp is always in UTC. -const char* const kSourceTimestamp = "WebRTC source stamp 2022-02-28T04:03:34"; +const char* const kSourceTimestamp = "WebRTC source stamp 2021-12-09T04:05:04"; void LoadWebRTCVersionInRegister() { // Using volatile to instruct the compiler to not optimize `p` away even diff --git a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h index 614d5dba7..d39762834 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_receive_stream.h @@ -191,10 +191,6 @@ class VideoReceiveStream : public MediaReceiveStream { bool receiver_reference_time_report = false; } rtcp_xr; - // How to request keyframes from a remote sender. Applies only if lntf is - // disabled. 
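The signature changes above for SetActiveModules, SetActiveModulesLocked, the RtpVideoSenderInterface method and VideoSendStream::UpdateActiveSimulcastLayers add a top-level const to parameters that are passed by value. That const is not part of the function type, so callers and overriders are unaffected; it only makes the function's local copy read-only inside the definition. A small sketch (the function name here is made up for illustration):

#include <vector>

// These two declarations name the same function; top-level const on a
// by-value parameter is ignored in the function type, so the second line is a
// re-declaration rather than an overload.
void SetLayersSketch(std::vector<bool> active_layers);
void SetLayersSketch(const std::vector<bool> active_layers);

// The const only matters in the definition: the local copy cannot be modified.
void SetLayersSketch(const std::vector<bool> active_layers) {
  // active_layers[0] = false;  // would not compile: the copy is const
  (void)active_layers;
}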
- KeyFrameReqMethod keyframe_method = KeyFrameReqMethod::kPliRtcp; - // See LntfConfig for description. LntfConfig lntf; diff --git a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h index 356d8c809..f899a7371 100644 --- a/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/call/video_send_stream.h @@ -208,7 +208,8 @@ class VideoSendStream { // Note: This starts stream activity if it is inactive and one of the layers // is active. This stops stream activity if it is active and all layers are // inactive. - virtual void UpdateActiveSimulcastLayers(std::vector active_layers) = 0; + virtual void UpdateActiveSimulcastLayers( + const std::vector active_layers) = 0; // Starts stream activity. // When a stream is active, it can receive, process and deliver packets. diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h b/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h index 4afbb6d0f..e12e601b2 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/audio_converter.h @@ -15,6 +15,8 @@ #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { // Format conversion (remixing and resampling) for audio. Only simple remixing @@ -33,9 +35,6 @@ class AudioConverter { size_t dst_frames); virtual ~AudioConverter() {} - AudioConverter(const AudioConverter&) = delete; - AudioConverter& operator=(const AudioConverter&) = delete; - // Convert `src`, containing `src_size` samples, to `dst`, having a sample // capacity of `dst_capacity`. Both point to a series of buffers containing // the samples for each channel. The sizes must correspond to the format @@ -65,6 +64,8 @@ class AudioConverter { const size_t src_frames_; const size_t dst_channels_; const size_t dst_frames_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioConverter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h index 7946ef8f8..88792d427 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/push_sinc_resampler.h @@ -17,6 +17,7 @@ #include #include "common_audio/resampler/sinc_resampler.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -32,9 +33,6 @@ class PushSincResampler : public SincResamplerCallback { PushSincResampler(size_t source_frames, size_t destination_frames); ~PushSincResampler() override; - PushSincResampler(const PushSincResampler&) = delete; - PushSincResampler& operator=(const PushSincResampler&) = delete; - // Perform the resampling. `source_frames` must always equal the // `source_frames` provided at construction. `destination_capacity` must be // at least as large as `destination_frames`. Returns the number of samples @@ -74,6 +72,8 @@ class PushSincResampler : public SincResamplerCallback { // Used to assert we are only requested for as much data as is available. 
size_t source_available_; + + RTC_DISALLOW_COPY_AND_ASSIGN(PushSincResampler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h index b89bba7ab..d071e96f4 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinc_resampler.h @@ -18,6 +18,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/memory/aligned_malloc.h" #include "rtc_base/system/arch.h" @@ -63,9 +64,6 @@ class SincResampler { SincResamplerCallback* read_cb); virtual ~SincResampler(); - SincResampler(const SincResampler&) = delete; - SincResampler& operator=(const SincResampler&) = delete; - // Resample `frames` of data from `read_cb_` into `destination`. void Resample(size_t frames, float* destination); @@ -174,6 +172,8 @@ class SincResampler { float* const r2_; float* r3_; float* r4_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SincResampler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h index a57cbfef0..8534119e5 100644 --- a/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h +++ b/TMessagesProj/jni/voip/webrtc/common_audio/resampler/sinusoidal_linear_chirp_source.h @@ -15,6 +15,7 @@ #define COMMON_AUDIO_RESAMPLER_SINUSOIDAL_LINEAR_CHIRP_SOURCE_H_ #include "common_audio/resampler/sinc_resampler.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -32,10 +33,6 @@ class SinusoidalLinearChirpSource : public SincResamplerCallback { ~SinusoidalLinearChirpSource() override {} - SinusoidalLinearChirpSource(const SinusoidalLinearChirpSource&) = delete; - SinusoidalLinearChirpSource& operator=(const SinusoidalLinearChirpSource&) = - delete; - void Run(size_t frames, float* destination) override; double Frequency(size_t position); @@ -49,6 +46,8 @@ class SinusoidalLinearChirpSource : public SincResamplerCallback { double k_; size_t current_index_; double delay_samples_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SinusoidalLinearChirpSource); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h index f26a9f7be..539a6cc0f 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h +++ b/TMessagesProj/jni/voip/webrtc/common_video/include/video_frame_buffer_pool.h @@ -17,7 +17,6 @@ #include "api/scoped_refptr.h" #include "api/video/i420_buffer.h" -#include "api/video/i444_buffer.h" #include "api/video/nv12_buffer.h" #include "rtc_base/race_checker.h" #include "rtc_base/ref_counted_object.h" @@ -44,7 +43,6 @@ class VideoFrameBufferPool { // and there are less than `max_number_of_buffers` pending, a buffer is // created. Returns null otherwise. rtc::scoped_refptr CreateI420Buffer(int width, int height); - rtc::scoped_refptr CreateI444Buffer(int width, int height); rtc::scoped_refptr CreateNV12Buffer(int width, int height); // Changes the max amount of buffers in the pool to the new value. 
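A pattern that repeats throughout this patch (RtpBitrateConfigurator, RtpTransportControllerSend, RtpVideoSender, AudioConverter, the resampler classes, BitWriter, and the delta encoder/decoder below): explicitly deleted copy constructors and copy-assignment operators are replaced by the RTC_DISALLOW_COPY_AND_ASSIGN macro, with rtc_base/constructor_magic.h added to the includes. Both spellings make the class non-copyable; the macro expands to essentially the same pair of deleted declarations. A self-contained sketch using a locally defined stand-in macro rather than the actual WebRTC one:

// Stand-in for RTC_DISALLOW_COPY_AND_ASSIGN from rtc_base/constructor_magic.h;
// defined locally here so the sketch compiles on its own.
#define DISALLOW_COPY_AND_ASSIGN_SKETCH(TypeName) \
  TypeName(const TypeName&) = delete;             \
  TypeName& operator=(const TypeName&) = delete

// Spelled out by hand, as the newer WebRTC code does:
class NonCopyableExplicit {
 public:
  NonCopyableExplicit() = default;
  NonCopyableExplicit(const NonCopyableExplicit&) = delete;
  NonCopyableExplicit& operator=(const NonCopyableExplicit&) = delete;
};

// Via the macro, as the code restored by this patch does:
class NonCopyableMacro {
 public:
  NonCopyableMacro() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN_SKETCH(NonCopyableMacro);
};

int main() {
  NonCopyableExplicit a;
  NonCopyableMacro b;
  // NonCopyableExplicit a2 = a;  // error: deleted copy constructor
  // NonCopyableMacro b2 = b;     // error: deleted copy constructor
  (void)a;
  (void)b;
  return 0;
}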
diff --git a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc b/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc index 1511c9f30..15c668e78 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/incoming_video_stream.cc @@ -57,8 +57,7 @@ void IncomingVideoStream::Dequeue() { if (render_buffers_.HasPendingFrames()) { uint32_t wait_time = render_buffers_.TimeToNextFrameRelease(); - incoming_render_queue_.PostDelayedHighPrecisionTask([this]() { Dequeue(); }, - wait_time); + incoming_render_queue_.PostDelayedTask([this]() { Dequeue(); }, wait_time); } } diff --git a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc index 267cab1a7..9c88f0b0d 100644 --- a/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc +++ b/TMessagesProj/jni/voip/webrtc/common_video/video_frame_buffer_pool.cc @@ -20,17 +20,12 @@ namespace { bool HasOneRef(const rtc::scoped_refptr& buffer) { // Cast to rtc::RefCountedObject is safe because this function is only called // on locally created VideoFrameBuffers, which are either - // `rtc::RefCountedObject`, `rtc::RefCountedObject` or - // `rtc::RefCountedObject`. + // `rtc::RefCountedObject` or `rtc::RefCountedObject`. switch (buffer->type()) { case VideoFrameBuffer::Type::kI420: { return static_cast*>(buffer.get()) ->HasOneRef(); } - case VideoFrameBuffer::Type::kI444: { - return static_cast*>(buffer.get()) - ->HasOneRef(); - } case VideoFrameBuffer::Type::kNV12: { return static_cast*>(buffer.get()) ->HasOneRef(); @@ -121,37 +116,6 @@ rtc::scoped_refptr VideoFrameBufferPool::CreateI420Buffer( return buffer; } -rtc::scoped_refptr VideoFrameBufferPool::CreateI444Buffer( - int width, - int height) { - RTC_DCHECK_RUNS_SERIALIZED(&race_checker_); - - rtc::scoped_refptr existing_buffer = - GetExistingBuffer(width, height, VideoFrameBuffer::Type::kI444); - if (existing_buffer) { - // Cast is safe because the only way kI444 buffer is created is - // in the same function below, where |RefCountedObject| - // is created. - rtc::RefCountedObject* raw_buffer = - static_cast*>(existing_buffer.get()); - // Creates a new scoped_refptr, which is also pointing to the same - // RefCountedObject as buffer, increasing ref count. - return rtc::scoped_refptr(raw_buffer); - } - - if (buffers_.size() >= max_number_of_buffers_) - return nullptr; - // Allocate new buffer. 
- rtc::scoped_refptr buffer = - rtc::make_ref_counted(width, height); - - if (zero_initialize_) - buffer->InitializeData(); - - buffers_.push_back(buffer); - return buffer; -} - rtc::scoped_refptr VideoFrameBufferPool::CreateNV12Buffer( int width, int height) { diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h index 421e7c437..85340c380 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/bit_writer.h @@ -20,6 +20,7 @@ #include "absl/strings/string_view.h" #include "rtc_base/bit_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,9 +36,6 @@ class BitWriter final { RTC_DCHECK_GT(byte_count, 0); } - BitWriter(const BitWriter&) = delete; - BitWriter& operator=(const BitWriter&) = delete; - void WriteBits(uint64_t val, size_t bit_count); void WriteBits(absl::string_view input); @@ -54,6 +52,8 @@ class BitWriter final { // to go anywhere near the limit, though, so this is good enough. size_t written_bits_; bool valid_; + + RTC_DISALLOW_COPY_AND_ASSIGN(BitWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc index 3a2bee1d3..a96d3a7dc 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/delta_encoding.cc @@ -21,6 +21,7 @@ #include "rtc_base/bit_buffer.h" #include "rtc_base/bitstream_reader.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -186,9 +187,6 @@ class FixedLengthDeltaEncoder final { absl::optional base, const std::vector>& values); - FixedLengthDeltaEncoder(const FixedLengthDeltaEncoder&) = delete; - FixedLengthDeltaEncoder& operator=(const FixedLengthDeltaEncoder&) = delete; - private: // Calculate min/max values of unsigned/signed deltas, given the bit width // of all the values in the series. @@ -251,6 +249,8 @@ class FixedLengthDeltaEncoder final { // ctor has finished running when this is constructed, so that the lower // bound on the buffer size would be guaranteed correct. std::unique_ptr writer_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FixedLengthDeltaEncoder); }; // TODO(eladalon): Reduce the number of passes. @@ -566,9 +566,6 @@ class FixedLengthDeltaDecoder final { absl::optional base, size_t num_of_deltas); - FixedLengthDeltaDecoder(const FixedLengthDeltaDecoder&) = delete; - FixedLengthDeltaDecoder& operator=(const FixedLengthDeltaDecoder&) = delete; - private: // Reads the encoding header in `input` and returns a FixedLengthDeltaDecoder // with the corresponding configuration, that can be used to decode the @@ -622,6 +619,8 @@ class FixedLengthDeltaDecoder final { // The number of values to be known to be decoded. 
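The delta_encoding.cc changes continuing here apply the same macro swap to WebRTC's fixed-length delta codec which, going by the class names and the members just below (num_of_deltas_, the min/max unsigned and signed delta helpers), stores a batch of values as a base value plus per-value deltas packed at a single fixed bit width. A heavily simplified sketch of the idea, assuming a non-empty series and skipping the real codec's configurable value widths, signed deltas, optional values and actual bit packing:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy fixed-length delta encoding: keep the first value as a base, store each
// subsequent value as the delta from its predecessor (modulo 2^64), and record
// the one bit width large enough for every delta.
struct DeltaEncodedSketch {
  uint64_t base = 0;
  unsigned delta_bit_width = 0;
  std::vector<uint64_t> deltas;
};

unsigned BitWidth(uint64_t value) {
  unsigned width = 0;
  while (value != 0) {
    ++width;
    value >>= 1;
  }
  return width;
}

DeltaEncodedSketch EncodeDeltas(const std::vector<uint64_t>& values) {
  DeltaEncodedSketch encoded;
  encoded.base = values.front();
  uint64_t prev = values.front();
  for (size_t i = 1; i < values.size(); ++i) {
    const uint64_t delta = values[i] - prev;  // unsigned subtraction wraps
    encoded.deltas.push_back(delta);
    encoded.delta_bit_width =
        std::max(encoded.delta_bit_width, BitWidth(delta));
    prev = values[i];
  }
  return encoded;
}

std::vector<uint64_t> DecodeDeltas(const DeltaEncodedSketch& encoded) {
  std::vector<uint64_t> values;
  values.push_back(encoded.base);
  for (const uint64_t delta : encoded.deltas) {
    values.push_back(values.back() + delta);  // wrapping addition undoes encode
  }
  return values;
}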
const size_t num_of_deltas_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FixedLengthDeltaDecoder); }; bool FixedLengthDeltaDecoder::IsSuitableDecoderFor(const std::string& input) { diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc index add42ad15..ff72163a8 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.cc @@ -363,12 +363,6 @@ std::string RtcEventLogEncoderLegacy::Encode(const RtcEvent& event) { static_cast(event); return EncodeVideoSendStreamConfig(rtc_event); } - case RtcEvent::Type::BeginV3Log: - case RtcEvent::Type::EndV3Log: - // These special events are written as part of starting - // and stopping the log, and only as part of version 3 of the format. - RTC_DCHECK_NOTREACHED(); - break; case RtcEvent::Type::RouteChangeEvent: case RtcEvent::Type::RemoteEstimateEvent: case RtcEvent::Type::GenericPacketReceived: diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc index c51fd79b2..569f7eaa8 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.cc @@ -882,12 +882,6 @@ std::string RtcEventLogEncoderNewFormat::EncodeBatch( frames_decoded[rtc_event->ssrc()].emplace_back(rtc_event); break; } - case RtcEvent::Type::BeginV3Log: - case RtcEvent::Type::EndV3Log: - // These special events are written as part of starting - // and stopping the log, and only as part of version 3 of the format. - RTC_DCHECK_NOTREACHED(); - break; } } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc deleted file mode 100644 index 131aae1de..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.cc +++ /dev/null @@ -1,164 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h" - -#include -#include - -#include "absl/types/optional.h" -#include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" -#include "logging/rtc_event_log/encoder/var_int.h" -#include "logging/rtc_event_log/events/rtc_event_alr_state.h" -#include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" -#include "logging/rtc_event_log/events/rtc_event_audio_playout.h" -#include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_begin_log.h" -#include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" -#include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" -#include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" -#include "logging/rtc_event_log/events/rtc_event_end_log.h" -#include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" -#include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" -#include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" -#include "logging/rtc_event_log/events/rtc_event_generic_packet_sent.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h" -#include "logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h" -#include "logging/rtc_event_log/events/rtc_event_probe_cluster_created.h" -#include "logging/rtc_event_log/events/rtc_event_probe_result_failure.h" -#include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" -#include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" -#include "logging/rtc_event_log/events/rtc_event_route_change.h" -#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" -#include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -std::string RtcEventLogEncoderV3::EncodeLogStart(int64_t timestamp_us, - int64_t utc_time_us) { - std::unique_ptr begin_log = - std::make_unique(Timestamp::Micros(timestamp_us), - Timestamp::Micros(utc_time_us)); - std::vector batch; - batch.push_back(begin_log.get()); - - std::string encoded_event = RtcEventBeginLog::Encode(batch); - - return encoded_event; -} - -std::string RtcEventLogEncoderV3::EncodeLogEnd(int64_t timestamp_us) { - std::unique_ptr end_log = - std::make_unique(Timestamp::Micros(timestamp_us)); - std::vector batch; - batch.push_back(end_log.get()); - - std::string encoded_event = RtcEventEndLog::Encode(batch); - - return encoded_event; -} - -RtcEventLogEncoderV3::RtcEventLogEncoderV3() { - encoders_[RtcEvent::Type::AlrStateEvent] = RtcEventAlrState::Encode; - encoders_[RtcEvent::Type::AudioNetworkAdaptation] = - RtcEventAudioNetworkAdaptation::Encode; - encoders_[RtcEvent::Type::AudioPlayout] = RtcEventAudioPlayout::Encode; - encoders_[RtcEvent::Type::AudioReceiveStreamConfig] = - RtcEventAudioReceiveStreamConfig::Encode; - encoders_[RtcEvent::Type::AudioSendStreamConfig] = - RtcEventAudioSendStreamConfig::Encode; - 
encoders_[RtcEvent::Type::BweUpdateDelayBased] = - RtcEventBweUpdateDelayBased::Encode; - encoders_[RtcEvent::Type::BweUpdateLossBased] = - RtcEventBweUpdateLossBased::Encode; - encoders_[RtcEvent::Type::DtlsTransportState] = - RtcEventDtlsTransportState::Encode; - encoders_[RtcEvent::Type::DtlsWritableState] = - RtcEventDtlsWritableState::Encode; - encoders_[RtcEvent::Type::FrameDecoded] = RtcEventFrameDecoded::Encode; - encoders_[RtcEvent::Type::GenericAckReceived] = - RtcEventGenericAckReceived::Encode; - encoders_[RtcEvent::Type::GenericPacketReceived] = - RtcEventGenericPacketReceived::Encode; - encoders_[RtcEvent::Type::GenericPacketSent] = - RtcEventGenericPacketSent::Encode; - encoders_[RtcEvent::Type::IceCandidatePairConfig] = - RtcEventIceCandidatePairConfig::Encode; - encoders_[RtcEvent::Type::IceCandidatePairEvent] = - RtcEventIceCandidatePair::Encode; - encoders_[RtcEvent::Type::ProbeClusterCreated] = - RtcEventProbeClusterCreated::Encode; - encoders_[RtcEvent::Type::ProbeResultFailure] = - RtcEventProbeResultFailure::Encode; - encoders_[RtcEvent::Type::ProbeResultSuccess] = - RtcEventProbeResultSuccess::Encode; - encoders_[RtcEvent::Type::RemoteEstimateEvent] = - RtcEventRemoteEstimate::Encode; - encoders_[RtcEvent::Type::RouteChangeEvent] = RtcEventRouteChange::Encode; - encoders_[RtcEvent::Type::RtcpPacketIncoming] = - RtcEventRtcpPacketIncoming::Encode; - encoders_[RtcEvent::Type::RtcpPacketOutgoing] = - RtcEventRtcpPacketOutgoing::Encode; - encoders_[RtcEvent::Type::RtpPacketIncoming] = - RtcEventRtpPacketIncoming::Encode; - encoders_[RtcEvent::Type::RtpPacketOutgoing] = - RtcEventRtpPacketOutgoing::Encode; - encoders_[RtcEvent::Type::VideoReceiveStreamConfig] = - RtcEventVideoReceiveStreamConfig::Encode; - encoders_[RtcEvent::Type::VideoSendStreamConfig] = - RtcEventVideoSendStreamConfig::Encode; -} - -std::string RtcEventLogEncoderV3::EncodeBatch( - std::deque>::const_iterator begin, - std::deque>::const_iterator end) { - struct EventGroupKey { - // Events are grouped by event type. For compression efficiency, - // events can optionally have a secondary key, in most cases the - // SSRC. - RtcEvent::Type type; - uint32_t secondary_group_key; - - bool operator<(EventGroupKey other) const { - return type < other.type || - (type == other.type && - secondary_group_key < other.secondary_group_key); - } - }; - - std::map> event_groups; - - for (auto it = begin; it != end; ++it) { - event_groups[{(*it)->GetType(), (*it)->GetGroupKey()}].push_back(it->get()); - } - - std::string encoded_output; - for (auto& kv : event_groups) { - auto it = encoders_.find(kv.first.type); - RTC_DCHECK(it != encoders_.end()); - if (it != encoders_.end()) { - auto& encoder = it->second; - // TODO(terelius): Use some "string builder" or preallocate? - encoded_output += encoder(kv.second); - } - } - - return encoded_output; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h deleted file mode 100644 index cb796ec56..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/encoder/rtc_event_log_encoder_v3.h +++ /dev/null @@ -1,46 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ -#define LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ - -#include -#include -#include -#include - -#include "api/array_view.h" -#include "logging/rtc_event_log/encoder/rtc_event_log_encoder.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" - -namespace webrtc { - -class RtcEventLogEncoderV3 final : public RtcEventLogEncoder { - public: - RtcEventLogEncoderV3(); - ~RtcEventLogEncoderV3() override = default; - - std::string EncodeBatch( - std::deque>::const_iterator begin, - std::deque>::const_iterator end) override; - - std::string EncodeLogStart(int64_t timestamp_us, - int64_t utc_time_us) override; - std::string EncodeLogEnd(int64_t timestamp_us) override; - - private: - std::map)>> - encoders_; -}; - -} // namespace webrtc - -#endif // LOGGING_RTC_EVENT_LOG_ENCODER_RTC_EVENT_LOG_ENCODER_V3_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h index 666fae1c6..59d633ced 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/fixed_length_encoding_parameters_v3.h @@ -33,7 +33,7 @@ class FixedLengthEncodingParametersV3 final { static FixedLengthEncodingParametersV3 CalculateParameters( uint64_t base, - rtc::ArrayView values, + const rtc::ArrayView values, uint64_t value_bit_width, bool values_optional); static absl::optional ParseDeltaHeader( diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h deleted file mode 100644 index 053a16371..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/logged_rtp_rtcp.h +++ /dev/null @@ -1,259 +0,0 @@ -/* - * Copyright 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ -#define LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ - -#include -#include - -#include "api/rtp_headers.h" -#include "api/units/timestamp.h" -#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" -#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" -#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" -#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" -#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" -#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" -#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" -#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" -#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" - -namespace webrtc { - -struct LoggedRtpPacket { - LoggedRtpPacket(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : timestamp(timestamp), - header(header), - header_length(header_length), - total_length(total_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp; - // TODO(terelius): This allocates space for 15 CSRCs even if none are used. - RTPHeader header; - size_t header_length; - size_t total_length; -}; - -struct LoggedRtpPacketIncoming { - LoggedRtpPacketIncoming(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : rtp(timestamp, header, header_length, total_length) {} - int64_t log_time_us() const { return rtp.timestamp.us(); } - int64_t log_time_ms() const { return rtp.timestamp.ms(); } - Timestamp log_time() const { return rtp.timestamp; } - - LoggedRtpPacket rtp; -}; - -struct LoggedRtpPacketOutgoing { - LoggedRtpPacketOutgoing(Timestamp timestamp, - RTPHeader header, - size_t header_length, - size_t total_length) - : rtp(timestamp, header, header_length, total_length) {} - int64_t log_time_us() const { return rtp.timestamp.us(); } - int64_t log_time_ms() const { return rtp.timestamp.ms(); } - Timestamp log_time() const { return rtp.timestamp; } - - LoggedRtpPacket rtp; -}; - -struct LoggedRtcpPacket { - LoggedRtcpPacket(Timestamp timestamp, const std::vector& packet) - : timestamp(timestamp), raw_data(packet) {} - LoggedRtcpPacket(Timestamp timestamp, const std::string& packet) - : timestamp(timestamp), raw_data(packet.size()) { - memcpy(raw_data.data(), packet.data(), packet.size()); - } - - LoggedRtcpPacket(const LoggedRtcpPacket& rhs) = default; - - ~LoggedRtcpPacket() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp; - std::vector raw_data; -}; - -struct LoggedRtcpPacketIncoming { - LoggedRtcpPacketIncoming(Timestamp timestamp, - const std::vector& packet) - : rtcp(timestamp, packet) {} - LoggedRtcpPacketIncoming(Timestamp timestamp, const std::string& packet) - : rtcp(timestamp, packet) {} - - int64_t log_time_us() const { return rtcp.timestamp.us(); } - int64_t log_time_ms() const { return rtcp.timestamp.ms(); } - Timestamp log_time() const { return rtcp.timestamp; } - - LoggedRtcpPacket rtcp; -}; - -struct LoggedRtcpPacketOutgoing { - LoggedRtcpPacketOutgoing(Timestamp timestamp, - const std::vector& packet) - : rtcp(timestamp, packet) {} - LoggedRtcpPacketOutgoing(Timestamp timestamp, const 
std::string& packet) - : rtcp(timestamp, packet) {} - - int64_t log_time_us() const { return rtcp.timestamp.us(); } - int64_t log_time_ms() const { return rtcp.timestamp.ms(); } - Timestamp log_time() const { return rtcp.timestamp; } - - LoggedRtcpPacket rtcp; -}; - -struct LoggedRtcpPacketReceiverReport { - LoggedRtcpPacketReceiverReport() = default; - LoggedRtcpPacketReceiverReport(Timestamp timestamp, - const rtcp::ReceiverReport& rr) - : timestamp(timestamp), rr(rr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::ReceiverReport rr; -}; - -struct LoggedRtcpPacketSenderReport { - LoggedRtcpPacketSenderReport() = default; - LoggedRtcpPacketSenderReport(Timestamp timestamp, - const rtcp::SenderReport& sr) - : timestamp(timestamp), sr(sr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::SenderReport sr; -}; - -struct LoggedRtcpPacketExtendedReports { - LoggedRtcpPacketExtendedReports() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::ExtendedReports xr; -}; - -struct LoggedRtcpPacketRemb { - LoggedRtcpPacketRemb() = default; - LoggedRtcpPacketRemb(Timestamp timestamp, const rtcp::Remb& remb) - : timestamp(timestamp), remb(remb) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Remb remb; -}; - -struct LoggedRtcpPacketNack { - LoggedRtcpPacketNack() = default; - LoggedRtcpPacketNack(Timestamp timestamp, const rtcp::Nack& nack) - : timestamp(timestamp), nack(nack) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Nack nack; -}; - -struct LoggedRtcpPacketFir { - LoggedRtcpPacketFir() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Fir fir; -}; - -struct LoggedRtcpPacketPli { - LoggedRtcpPacketPli() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Pli pli; -}; - -struct LoggedRtcpPacketTransportFeedback { - LoggedRtcpPacketTransportFeedback() - : transport_feedback(/*include_timestamps=*/true, /*include_lost*/ true) { - } - LoggedRtcpPacketTransportFeedback( - Timestamp timestamp, - const rtcp::TransportFeedback& transport_feedback) - : timestamp(timestamp), transport_feedback(transport_feedback) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = 
Timestamp::MinusInfinity(); - rtcp::TransportFeedback transport_feedback; -}; - -struct LoggedRtcpPacketLossNotification { - LoggedRtcpPacketLossNotification() = default; - LoggedRtcpPacketLossNotification( - Timestamp timestamp, - const rtcp::LossNotification& loss_notification) - : timestamp(timestamp), loss_notification(loss_notification) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::LossNotification loss_notification; -}; - -struct LoggedRtcpPacketBye { - LoggedRtcpPacketBye() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtcp::Bye bye; -}; - -} // namespace webrtc - -#endif // LOGGING_RTC_EVENT_LOG_EVENTS_LOGGED_RTP_RTCP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc index 25941eb16..3c307b9ca 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.cc @@ -13,9 +13,6 @@ #include "absl/memory/memory.h" namespace webrtc { -constexpr RtcEvent::Type RtcEventAlrState::kType; -constexpr RtcEventDefinition - RtcEventAlrState::definition_; RtcEventAlrState::RtcEventAlrState(bool in_alr) : in_alr_(in_alr) {} @@ -28,11 +25,4 @@ std::unique_ptr RtcEventAlrState::Copy() const { return absl::WrapUnique(new RtcEventAlrState(*this)); } -RtcEventLogParseStatus RtcEventAlrState::Parse( - absl::string_view s, - bool batched, - std::vector& output) { - return RtcEventAlrState::definition_.ParseBatch(s, batched, output); -} - } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h index 9f595ecd9..74d66015e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_alr_state.h @@ -12,32 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ALR_STATE_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" namespace webrtc { -struct LoggedAlrStateEvent { - LoggedAlrStateEvent() = default; - LoggedAlrStateEvent(Timestamp timestamp, bool in_alr) - : timestamp(timestamp), in_alr(in_alr) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - bool in_alr; -}; - class RtcEventAlrState final : public RtcEvent { public: static constexpr Type kType = Type::AlrStateEvent; @@ -52,26 +32,22 @@ class RtcEventAlrState final : public RtcEvent { bool in_alr() const { return in_alr_; } - static std::string 
Encode(rtc::ArrayView batch) { - return RtcEventAlrState::definition_.EncodeBatch(batch); - } - - static RtcEventLogParseStatus Parse(absl::string_view s, - bool batched, - std::vector& output); - private: RtcEventAlrState(const RtcEventAlrState& other); const bool in_alr_; +}; - static constexpr RtcEventDefinition - definition_{{"AlrState", RtcEventAlrState::kType}, - {&RtcEventAlrState::in_alr_, - &LoggedAlrStateEvent::in_alr, - {"in_alr", /*id=*/1, FieldType::kFixed8, /*width=*/1}}}; +struct LoggedAlrStateEvent { + LoggedAlrStateEvent() = default; + LoggedAlrStateEvent(Timestamp timestamp, bool in_alr) + : timestamp(timestamp), in_alr(in_alr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool in_alr; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h index d4cae3abf..aeeb28e21 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h @@ -12,31 +12,13 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" namespace webrtc { -struct LoggedAudioNetworkAdaptationEvent { - LoggedAudioNetworkAdaptationEvent() = default; - LoggedAudioNetworkAdaptationEvent(Timestamp timestamp, - const AudioEncoderRuntimeConfig& config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - AudioEncoderRuntimeConfig config; -}; - struct AudioEncoderRuntimeConfig; class RtcEventAudioNetworkAdaptation final : public RtcEvent { @@ -54,25 +36,25 @@ class RtcEventAudioNetworkAdaptation final : public RtcEvent { const AudioEncoderRuntimeConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventAudioNetworkAdaptation(const RtcEventAudioNetworkAdaptation& other); const std::unique_ptr config_; }; +struct LoggedAudioNetworkAdaptationEvent { + LoggedAudioNetworkAdaptationEvent() = default; + LoggedAudioNetworkAdaptationEvent(Timestamp timestamp, + const AudioEncoderRuntimeConfig& config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + AudioEncoderRuntimeConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_NETWORK_ADAPTATION_H_ diff --git 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc index 21a3f9266..dae61c4df 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.cc @@ -14,11 +14,6 @@ namespace webrtc { -constexpr RtcEventDefinition - RtcEventAudioPlayout::definition_; - RtcEventAudioPlayout::RtcEventAudioPlayout(uint32_t ssrc) : ssrc_(ssrc) {} RtcEventAudioPlayout::RtcEventAudioPlayout(const RtcEventAudioPlayout& other) diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h index 196c3ca24..00d07a65b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_playout.h @@ -13,31 +13,13 @@ #include -#include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { -struct LoggedAudioPlayoutEvent { - LoggedAudioPlayoutEvent() = default; - LoggedAudioPlayoutEvent(Timestamp timestamp, uint32_t ssrc) - : timestamp(timestamp), ssrc(ssrc) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - uint32_t ssrc; -}; - class RtcEventAudioPlayout final : public RtcEvent { public: static constexpr Type kType = Type::AudioPlayout; @@ -52,35 +34,22 @@ class RtcEventAudioPlayout final : public RtcEvent { uint32_t ssrc() const { return ssrc_; } - static std::string Encode(rtc::ArrayView batch) { - return RtcEventAudioPlayout::definition_.EncodeBatch(batch); - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { - std::vector temp_output; - auto status = RtcEventAudioPlayout::definition_.ParseBatch( - encoded_bytes, batched, temp_output); - for (const LoggedAudioPlayoutEvent& event : temp_output) { - output[event.ssrc].push_back(event); - } - return status; - } - private: RtcEventAudioPlayout(const RtcEventAudioPlayout& other); const uint32_t ssrc_; +}; - static constexpr RtcEventDefinition - definition_{{"AudioPlayout", RtcEventAudioPlayout::kType}, - {&RtcEventAudioPlayout::ssrc_, - &LoggedAudioPlayoutEvent::ssrc, - {"ssrc", /*id=*/1, FieldType::kFixed32, /*width=*/32}}}; +struct LoggedAudioPlayoutEvent { + LoggedAudioPlayoutEvent() = default; + LoggedAudioPlayoutEvent(Timestamp timestamp, uint32_t ssrc) + : timestamp(timestamp), ssrc(ssrc) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + uint32_t ssrc; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h index 9863e235a..ccf76025e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h +++ 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h @@ -12,30 +12,13 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -struct LoggedAudioRecvConfig { - LoggedAudioRecvConfig() = default; - LoggedAudioRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - class RtcEventAudioReceiveStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::AudioReceiveStreamConfig; @@ -51,19 +34,6 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventAudioReceiveStreamConfig( const RtcEventAudioReceiveStreamConfig& other); @@ -71,6 +41,18 @@ class RtcEventAudioReceiveStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedAudioRecvConfig { + LoggedAudioRecvConfig() = default; + LoggedAudioRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h index 550723bcf..4e93871ae 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h @@ -12,29 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -struct LoggedAudioSendConfig { - LoggedAudioSendConfig() = default; - LoggedAudioSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - class RtcEventAudioSendStreamConfig final 
: public RtcEvent { public: static constexpr Type kType = Type::AudioSendStreamConfig; @@ -50,25 +33,23 @@ class RtcEventAudioSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventAudioSendStreamConfig(const RtcEventAudioSendStreamConfig& other); const std::unique_ptr config_; }; +struct LoggedAudioSendConfig { + LoggedAudioSendConfig() = default; + LoggedAudioSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_AUDIO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc deleted file mode 100644 index 49b9effa9..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.cc +++ /dev/null @@ -1,73 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "logging/rtc_event_log/events/rtc_event_begin_log.h" - -#include "absl/strings/string_view.h" - -namespace webrtc { -constexpr RtcEvent::Type RtcEventBeginLog::kType; -constexpr EventParameters RtcEventBeginLog::event_params_; -constexpr FieldParameters RtcEventBeginLog::utc_start_time_params_; - -RtcEventBeginLog::RtcEventBeginLog(Timestamp timestamp, - Timestamp utc_start_time) - : RtcEvent(timestamp.us()), utc_start_time_ms_(utc_start_time.ms()) {} - -RtcEventBeginLog::RtcEventBeginLog(const RtcEventBeginLog& other) - : RtcEvent(other.timestamp_us_) {} - -RtcEventBeginLog::~RtcEventBeginLog() = default; - -std::string RtcEventBeginLog::Encode(rtc::ArrayView batch) { - EventEncoder encoder(event_params_, batch); - - encoder.EncodeField( - utc_start_time_params_, - ExtractRtcEventMember(batch, &RtcEventBeginLog::utc_start_time_ms_)); - - return encoder.AsString(); -} - -RtcEventLogParseStatus RtcEventBeginLog::Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - EventParser parser; - auto status = parser.Initialize(encoded_bytes, batched); - if (!status.ok()) - return status; - - rtc::ArrayView output_batch = - ExtendLoggedBatch(output, parser.NumEventsInBatch()); - - constexpr FieldParameters timestamp_params{ - "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = - parser.ParseNumericField(timestamp_params); - if (!result.ok()) - return result.status(); - status = PopulateRtcEventTimestamp( - result.value(), &LoggedStartEvent::timestamp, output_batch); - if (!status.ok()) - return status; - - result = parser.ParseNumericField(utc_start_time_params_); - if (!result.ok()) - return result.status(); - status = PopulateRtcEventTimestamp( - result.value(), &LoggedStartEvent::utc_start_time, output_batch); - if (!status.ok()) - return status; - - return RtcEventLogParseStatus::Success(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h deleted file mode 100644 index f3b74c117..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_begin_log.h +++ /dev/null @@ -1,74 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ -#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ - -#include -#include - -#include "absl/strings/string_view.h" -#include "api/array_view.h" -#include "api/rtc_event_log/rtc_event.h" -#include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" - -namespace webrtc { - -struct LoggedStartEvent { - LoggedStartEvent() = default; - - explicit LoggedStartEvent(Timestamp timestamp) - : LoggedStartEvent(timestamp, timestamp) {} - - LoggedStartEvent(Timestamp timestamp, Timestamp utc_start_time) - : timestamp(timestamp), utc_start_time(utc_start_time) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp utc_time() const { return utc_start_time; } - - Timestamp timestamp = Timestamp::PlusInfinity(); - Timestamp utc_start_time = Timestamp::PlusInfinity(); -}; - -class RtcEventBeginLog final : public RtcEvent { - public: - static constexpr Type kType = Type::BeginV3Log; - - RtcEventBeginLog(Timestamp timestamp, Timestamp utc_start_time); - ~RtcEventBeginLog() override; - - Type GetType() const override { return kType; } - bool IsConfigEvent() const override { return false; } - - static std::string Encode(rtc::ArrayView batch); - - static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, - bool batched, - std::vector& output); - - private: - RtcEventBeginLog(const RtcEventBeginLog& other); - - int64_t utc_start_time_ms_; - - static constexpr EventParameters event_params_{"BeginLog", - RtcEventBeginLog::kType}; - static constexpr FieldParameters utc_start_time_params_{ - "utc_start_time_ms", /*id=*/1, FieldType::kVarInt, /*width=*/64}; -}; - -} // namespace webrtc -#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BEGIN_LOG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc index 0e98b2ff1..f3f12192c 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.cc @@ -15,12 +15,6 @@ namespace webrtc { -constexpr RtcEventDefinition - RtcEventBweUpdateDelayBased::definition_; - RtcEventBweUpdateDelayBased::RtcEventBweUpdateDelayBased( int32_t bitrate_bps, BandwidthUsage detector_state) diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h index 796f11938..522f98fd8 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h @@ -13,76 +13,14 @@ #include -#include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/network_state_predictor.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_definition.h" namespace webrtc { -// Separate the event log encoding from the enum values. 
-// As long as the enum values are the same as the encodings, -// the two conversion functions can be compiled to (roughly) -// a range check each. -template <> -class RtcEventLogEnum { - static constexpr uint64_t kBwNormal = 0; - static constexpr uint64_t kBwUnderusing = 1; - static constexpr uint64_t kBwOverusing = 2; - - public: - static uint64_t Encode(BandwidthUsage x) { - switch (x) { - case BandwidthUsage::kBwNormal: - return kBwNormal; - case BandwidthUsage::kBwUnderusing: - return kBwUnderusing; - case BandwidthUsage::kBwOverusing: - return kBwOverusing; - case BandwidthUsage::kLast: - RTC_DCHECK_NOTREACHED(); - } - RTC_DCHECK_NOTREACHED(); - return std::numeric_limits::max(); - } - static RtcEventLogParseStatusOr Decode(uint64_t x) { - switch (x) { - case kBwNormal: - return BandwidthUsage::kBwNormal; - case kBwUnderusing: - return BandwidthUsage::kBwUnderusing; - case kBwOverusing: - return BandwidthUsage::kBwOverusing; - } - return RtcEventLogParseStatus::Error("Failed to decode BandwidthUsage enum", - __FILE__, __LINE__); - } -}; - -struct LoggedBweDelayBasedUpdate { - LoggedBweDelayBasedUpdate() = default; - LoggedBweDelayBasedUpdate(Timestamp timestamp, - int32_t bitrate_bps, - BandwidthUsage detector_state) - : timestamp(timestamp), - bitrate_bps(bitrate_bps), - detector_state(detector_state) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t bitrate_bps; - BandwidthUsage detector_state; -}; - class RtcEventBweUpdateDelayBased final : public RtcEvent { public: static constexpr Type kType = Type::BweUpdateDelayBased; @@ -99,36 +37,28 @@ class RtcEventBweUpdateDelayBased final : public RtcEvent { int32_t bitrate_bps() const { return bitrate_bps_; } BandwidthUsage detector_state() const { return detector_state_; } - static std::string Encode(rtc::ArrayView batch) { - return RtcEventBweUpdateDelayBased::definition_.EncodeBatch(batch); - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - return RtcEventBweUpdateDelayBased::definition_.ParseBatch(encoded_bytes, - batched, output); - } - private: RtcEventBweUpdateDelayBased(const RtcEventBweUpdateDelayBased& other); const int32_t bitrate_bps_; const BandwidthUsage detector_state_; +}; - static constexpr RtcEventDefinition - definition_{ - {"BweDelayBased", RtcEventBweUpdateDelayBased::kType}, - {&RtcEventBweUpdateDelayBased::bitrate_bps_, - &LoggedBweDelayBasedUpdate::bitrate_bps, - {"bitrate_bps", /*id=*/1, FieldType::kVarInt, /*width=*/32}}, - {&RtcEventBweUpdateDelayBased::detector_state_, - &LoggedBweDelayBasedUpdate::detector_state, - {"detector_state", /*id=*/2, FieldType::kVarInt, /*width=*/64}}}; +struct LoggedBweDelayBasedUpdate { + LoggedBweDelayBasedUpdate() = default; + LoggedBweDelayBasedUpdate(Timestamp timestamp, + int32_t bitrate_bps, + BandwidthUsage detector_state) + : timestamp(timestamp), + bitrate_bps(bitrate_bps), + detector_state(detector_state) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t bitrate_bps; + BandwidthUsage detector_state; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h index fd41b316e..b031658ea 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h @@ -14,37 +14,12 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedBweLossBasedUpdate { - LoggedBweLossBasedUpdate() = default; - LoggedBweLossBasedUpdate(Timestamp timestamp, - int32_t bitrate_bps, - uint8_t fraction_lost, - int32_t expected_packets) - : timestamp(timestamp), - bitrate_bps(bitrate_bps), - fraction_lost(fraction_lost), - expected_packets(expected_packets) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t bitrate_bps; - uint8_t fraction_lost; - int32_t expected_packets; -}; - class RtcEventBweUpdateLossBased final : public RtcEvent { public: static constexpr Type kType = Type::BweUpdateLossBased; @@ -63,19 +38,6 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { uint8_t fraction_loss() const { return fraction_loss_; } int32_t total_packets() const { return total_packets_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventBweUpdateLossBased(const RtcEventBweUpdateLossBased& other); @@ -84,6 +46,26 @@ class RtcEventBweUpdateLossBased final : public RtcEvent { const int32_t total_packets_; }; +struct LoggedBweLossBasedUpdate { + LoggedBweLossBasedUpdate() = default; + LoggedBweLossBasedUpdate(Timestamp timestamp, + int32_t bitrate_bps, + uint8_t fraction_lost, + int32_t expected_packets) + : timestamp(timestamp), + bitrate_bps(bitrate_bps), + fraction_lost(fraction_lost), + expected_packets(expected_packets) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t bitrate_bps; + uint8_t fraction_lost; + int32_t expected_packets; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_BWE_UPDATE_LOSS_BASED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h deleted file mode 100644 index 8688c5fc7..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_definition.h +++ /dev/null @@ -1,152 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ -#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ - -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "absl/types/optional.h" -#include "api/array_view.h" -#include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -template -struct RtcEventFieldDefinition { - const T EventType::*event_member; - T LoggedType::*logged_member; - FieldParameters params; -}; - -// Base case -template -class RtcEventDefinitionImpl { - public: - void EncodeImpl(EventEncoder&, rtc::ArrayView) const {} - RtcEventLogParseStatus ParseImpl(EventParser&, - rtc::ArrayView) const { - return RtcEventLogParseStatus::Success(); - } -}; - -// Recursive case -template -class RtcEventDefinitionImpl { - public: - constexpr RtcEventDefinitionImpl( - RtcEventFieldDefinition field, - RtcEventFieldDefinition... rest) - : field_(field), rest_(rest...) {} - - void EncodeImpl(EventEncoder& encoder, - rtc::ArrayView batch) const { - auto values = ExtractRtcEventMember(batch, field_.event_member); - encoder.EncodeField(field_.params, values); - rest_.EncodeImpl(encoder, batch); - } - - RtcEventLogParseStatus ParseImpl( - EventParser& parser, - rtc::ArrayView output_batch) const { - RtcEventLogParseStatusOr> result = - parser.ParseNumericField(field_.params); - if (!result.ok()) - return result.status(); - auto status = PopulateRtcEventMember(result.value(), field_.logged_member, - output_batch); - if (!status.ok()) - return status; - - return rest_.ParseImpl(parser, output_batch); - } - - private: - RtcEventFieldDefinition field_; - RtcEventDefinitionImpl rest_; -}; - -// The RtcEventDefinition sets up a mapping between the fields -// in an RtcEvent and the corresponding fields in the parsed struct. -// For example, an RtcFoo class containing two fields; `uint32_t bar` -// and `bool baz` (a log timestamp is always implicitly added) -// might have a definition -// RtcEventDefinition( -// {"foo", RtcFoo::Type}, -// {&RtcFoo::bar_, &LoggedFoo::bar, {"bar", 1, FieldType::kVarInt, 32}}, -// {&RtcFoo::baz_, &LoggedFoo::baz, {"baz", 2, FieldType::kFixed8, 1}}, -// ); -// In addition to defining string names to aid debugging, -// this specifies that -// * RtcFoo::Type uniquely identifies an RtcFoo in the encoded stream -// * The `bar` field has ID 1, is encoded as a VarInt -// (when not delta compressed), and wraps around after 32 bits. -// * The `baz` field has ID 2, is encoded as an 8-bit field -// (when not delta compressed), and wraps around after 1 bit. -// Note that the numerical field and event IDs can't be changed since -// that would break compatibility with old logs. -// In most cases (including all cases where wrap around isn't -// expected), the wrap around should be equal to the bitwidth of -// the field. -template -class RtcEventDefinition { - public: - constexpr RtcEventDefinition( - EventParameters params, - RtcEventFieldDefinition... fields) - : params_(params), fields_(fields...) 
{} - - std::string EncodeBatch(rtc::ArrayView batch) const { - EventEncoder encoder(params_, batch); - fields_.EncodeImpl(encoder, batch); - return encoder.AsString(); - } - - RtcEventLogParseStatus ParseBatch(absl::string_view s, - bool batched, - std::vector& output) const { - EventParser parser; - auto status = parser.Initialize(s, batched); - if (!status.ok()) - return status; - - rtc::ArrayView output_batch = - ExtendLoggedBatch(output, parser.NumEventsInBatch()); - - constexpr FieldParameters timestamp_params{"timestamp_ms", - FieldParameters::kTimestampField, - FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = - parser.ParseNumericField(timestamp_params); - if (!result.ok()) - return result.status(); - status = PopulateRtcEventTimestamp(result.value(), &LoggedType::timestamp, - output_batch); - if (!status.ok()) - return status; - - return fields_.ParseImpl(parser, output_batch); - } - - private: - EventParameters params_; - RtcEventDefinitionImpl fields_; -}; - -} // namespace webrtc - -#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DEFINITION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h index b9af21325..9a3eecb3d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_transport_state.h @@ -12,26 +12,13 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/dtls_transport_interface.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedDtlsTransportState { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - DtlsTransportState dtls_transport_state; -}; - class RtcEventDtlsTransportState : public RtcEvent { public: static constexpr Type kType = Type::DtlsTransportState; @@ -48,25 +35,20 @@ class RtcEventDtlsTransportState : public RtcEvent { return dtls_transport_state_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventDtlsTransportState(const RtcEventDtlsTransportState& other); const DtlsTransportState dtls_transport_state_; }; +struct LoggedDtlsTransportState { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + DtlsTransportState dtls_transport_state; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_TRANSPORT_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h index c820f184d..c0cc5b87e 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h +++ 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_dtls_writable_state.h @@ -12,28 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedDtlsWritableState { - LoggedDtlsWritableState() = default; - explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - bool writable; -}; - class RtcEventDtlsWritableState : public RtcEvent { public: static constexpr Type kType = Type::DtlsWritableState; @@ -48,25 +32,23 @@ class RtcEventDtlsWritableState : public RtcEvent { bool writable() const { return writable_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventDtlsWritableState(const RtcEventDtlsWritableState& other); const bool writable_; }; +struct LoggedDtlsWritableState { + LoggedDtlsWritableState() = default; + explicit LoggedDtlsWritableState(bool writable) : writable(writable) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool writable; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_DTLS_WRITABLE_STATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc deleted file mode 100644 index 52abf9e84..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.cc +++ /dev/null @@ -1,58 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "logging/rtc_event_log/events/rtc_event_end_log.h" - -#include "absl/strings/string_view.h" - -namespace webrtc { -constexpr RtcEvent::Type RtcEventEndLog::kType; -constexpr EventParameters RtcEventEndLog::event_params_; - -RtcEventEndLog::RtcEventEndLog(Timestamp timestamp) - : RtcEvent(timestamp.us()) {} - -RtcEventEndLog::RtcEventEndLog(const RtcEventEndLog& other) - : RtcEvent(other.timestamp_us_) {} - -RtcEventEndLog::~RtcEventEndLog() = default; - -std::string RtcEventEndLog::Encode(rtc::ArrayView batch) { - EventEncoder encoder(event_params_, batch); - return encoder.AsString(); -} - -RtcEventLogParseStatus RtcEventEndLog::Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - EventParser parser; - auto status = parser.Initialize(encoded_bytes, batched); - if (!status.ok()) - return status; - - rtc::ArrayView output_batch = - ExtendLoggedBatch(output, parser.NumEventsInBatch()); - - constexpr FieldParameters timestamp_params{ - "timestamp_ms", FieldParameters::kTimestampField, FieldType::kVarInt, 64}; - RtcEventLogParseStatusOr> result = - parser.ParseNumericField(timestamp_params); - if (!result.ok()) - return result.status(); - status = PopulateRtcEventTimestamp(result.value(), - &LoggedStopEvent::timestamp, output_batch); - if (!status.ok()) - return status; - - return RtcEventLogParseStatus::Success(); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h deleted file mode 100644 index 79648bdb8..000000000 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_end_log.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ -#define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ - -#include -#include -#include - -#include "absl/strings/string_view.h" -#include "api/array_view.h" -#include "api/rtc_event_log/rtc_event.h" -#include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" -#include "logging/rtc_event_log/events/rtc_event_field_extraction.h" - -namespace webrtc { - -struct LoggedStopEvent { - LoggedStopEvent() = default; - - explicit LoggedStopEvent(Timestamp timestamp) : timestamp(timestamp) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::PlusInfinity(); -}; - -class RtcEventEndLog final : public RtcEvent { - public: - static constexpr Type kType = Type::EndV3Log; - - explicit RtcEventEndLog(Timestamp timestamp); - ~RtcEventEndLog() override; - - Type GetType() const override { return kType; } - bool IsConfigEvent() const override { return false; } - - static std::string Encode(rtc::ArrayView batch); - - static RtcEventLogParseStatus Parse(absl::string_view encoded_bytes, - bool batched, - std::vector& output); - - private: - RtcEventEndLog(const RtcEventEndLog& other); - - static constexpr EventParameters event_params_{"EndLog", - RtcEventEndLog::kType}; -}; - -} // namespace webrtc -#endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_END_LOG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h index 33b77b80f..8376a8b8a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding.h @@ -93,87 +93,5 @@ std::string EncodeDeltasV3(FixedLengthEncodingParametersV3 params, uint64_t base, rtc::ArrayView values); -// Given a batch of RtcEvents and a member pointer, extract that -// member from each event in the batch. Signed integer members are -// encoded as unsigned, and the bitsize increased so the result can -// represented as a std::vector. -// This is intended to be used in conjuction with -// EventEncoder::EncodeField to encode a batch of events as follows: -// auto values = ExtractRtcEventMember(batch, RtcEventFoo::timestamp_ms); -// encoder.EncodeField(timestamp_params, values) -template ::value, bool> = true> -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const T E::*member) { - std::vector values; - values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - T value = static_cast(event)->*member; - values.push_back(EncodeAsUnsigned(value)); - } - return values; -} - -// Extract an optional field from a batch of RtcEvents. -// The function returns a vector of positions in addition to the vector of -// values. The vector `positions` has the same length as the batch where -// `positions[i] == true` iff the batch[i]->member has a value. -// The values vector only contains the values that exists, so it -// may be shorter than the batch. 
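The comment ending just above belongs to the optional-field overload of `ExtractRtcEventMember` that this patch drops from `rtc_event_field_encoding.h` (a variant reappears in `rtc_event_field_extraction.h` further down). It describes a position-mask representation: one flag per event in the batch recording whether the optional member was set, plus a dense vector holding only the values that exist. Below is a minimal standalone sketch of that representation; `FooEvent`, `MaskedValues`, and `ExtractOptionalMember` are invented stand-ins, not the WebRTC types.

```cpp
#include <cstdint>
#include <iostream>
#include <optional>
#include <vector>

// Stand-in for an RtcEvent subclass with an optional field (not a WebRTC type).
struct FooEvent {
  std::optional<uint64_t> rtt_ms;
};

// Stand-in for ValuesWithPositions: the mask has one entry per event,
// while `values` only holds the entries that were actually present.
struct MaskedValues {
  std::vector<bool> position_mask;
  std::vector<uint64_t> values;
};

MaskedValues ExtractOptionalMember(const std::vector<FooEvent>& batch,
                                   std::optional<uint64_t> FooEvent::*member) {
  MaskedValues out;
  out.position_mask.reserve(batch.size());
  for (const FooEvent& e : batch) {
    const std::optional<uint64_t>& field = e.*member;
    out.position_mask.push_back(field.has_value());
    if (field.has_value())
      out.values.push_back(*field);
  }
  return out;
}

int main() {
  std::vector<FooEvent> batch = {{10}, {std::nullopt}, {30}};
  MaskedValues mv = ExtractOptionalMember(batch, &FooEvent::rtt_ms);
  // position_mask = {1, 0, 1}; values = {10, 30}, i.e. shorter than the batch.
  std::cout << mv.values.size() << " of " << mv.position_mask.size()
            << " events carry the field\n";
}
```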
-template ::value, bool> = true> -ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, - const absl::optional E::*member) { - ValuesWithPositions result; - result.position_mask.reserve(batch.size()); - result.values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - absl::optional field = static_cast(event)->*member; - result.position_mask.push_back(field.has_value()); - if (field.has_value()) { - result.values.push_back(EncodeAsUnsigned(field.value())); - } - } - return result; -} - -// Extract an enum field from a batch of RtcEvents. -// Requires specializing RtcEventLogEnum for the enum type T. -template ::value, bool> = true> -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const T E::*member) { - std::vector values; - values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - T value = static_cast(event)->*member; - values.push_back(RtcEventLogEnum::Encode(value)); - } - return values; -} - -// Extract a string field from a batch of RtcEvents. -template -std::vector ExtractRtcEventMember( - rtc::ArrayView batch, - const std::string E::*member) { - std::vector values; - values.reserve(batch.size()); - for (const RtcEvent* event : batch) { - RTC_CHECK_EQ(event->GetType(), E::kType); - absl::string_view str = static_cast(event)->*member; - values.push_back(str); - } - return values; -} - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h index c33d4bee3..f1af5db44 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_encoding_parser.h @@ -36,9 +36,6 @@ class RtcEventLogParseStatus { } bool ok() const { return error_.empty(); } - ABSL_DEPRECATED("Use ok() instead") explicit operator bool() const { - return ok(); - } std::string message() const { return error_; } @@ -53,17 +50,15 @@ class RtcEventLogParseStatus { template class RtcEventLogParseStatusOr { public: - RtcEventLogParseStatusOr(RtcEventLogParseStatus status) // NOLINT + explicit RtcEventLogParseStatusOr(RtcEventLogParseStatus status) : status_(status), value_() {} - RtcEventLogParseStatusOr(const T& value) // NOLINT + explicit RtcEventLogParseStatusOr(const T& value) : status_(), value_(value) {} bool ok() const { return status_.ok(); } std::string message() const { return status_.message(); } - RtcEventLogParseStatus status() const { return status_; } - const T& value() const { RTC_DCHECK(ok()); return value_; @@ -134,7 +129,7 @@ class EventParser { uint64_t ReadOptionalValuePositions(); void ReadDeltasAndPopulateValues(FixedLengthEncodingParametersV3 params, uint64_t num_deltas, - uint64_t base); + const uint64_t base); RtcEventLogParseStatus ParseNumericFieldInternal(uint64_t value_bit_width, FieldType field_type); RtcEventLogParseStatus ParseStringFieldInternal(); @@ -176,110 +171,5 @@ class EventParser { uint64_t last_field_id_ = FieldParameters::kTimestampField; }; -// Inverse of the ExtractRtcEventMember function used when parsing -// a log. Uses a vector of values to populate a specific field in a -// vector of structs. 
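The removed comment just above covers `PopulateRtcEventMember`, the parse-time inverse of `ExtractRtcEventMember`: a flat vector of decoded values is written back into one chosen member of each struct in an equally sized output batch. Here is a short standalone sketch of that pointer-to-member population step, with invented names (`LoggedFoo`, `PopulateMember`) and a plain `assert` standing in for the `RTC_CHECK_EQ` size check; the real code also routes each value through `DecodeFromUnsignedToType` rather than a bare cast.

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Stand-in for one of the Logged* structs in this patch (not a WebRTC type).
struct LoggedFoo {
  int64_t timestamp_ms = 0;
  uint32_t ssrc = 0;
};

// Write values[i] into output[i].*member; the output batch must already be sized.
template <typename T>
void PopulateMember(const std::vector<uint64_t>& values,
                    T LoggedFoo::*member,
                    std::vector<LoggedFoo>& output) {
  assert(values.size() == output.size());  // one decoded value per logged event
  for (size_t i = 0; i < values.size(); ++i)
    output[i].*member = static_cast<T>(values[i]);
}

int main() {
  std::vector<LoggedFoo> batch(3);  // pre-extended output batch
  PopulateMember(std::vector<uint64_t>{7, 8, 9}, &LoggedFoo::ssrc, batch);
  assert(batch[2].ssrc == 9);
}
```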
-template ::value, bool> = true> -ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - T E::*member, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(output.size(), batch_size); - for (size_t i = 0; i < batch_size; ++i) { - output[i].*member = DecodeFromUnsignedToType(values[i]); - } - return RtcEventLogParseStatus::Success(); -} - -// Same as above, but for optional fields. -template ::value, bool> = true> -ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView positions, - const rtc::ArrayView values, - absl::optional E::*member, - rtc::ArrayView output) { - size_t batch_size = positions.size(); - RTC_CHECK_EQ(output.size(), batch_size); - RTC_CHECK_LE(values.size(), batch_size); - auto value_it = values.begin(); - for (size_t i = 0; i < batch_size; ++i) { - if (positions[i]) { - RTC_CHECK(value_it != values.end()); - output[i].*member = DecodeFromUnsignedToType(value_it); - ++value_it; - } else { - output[i].*member = absl::nullopt; - } - } - RTC_CHECK(value_it == values.end()); - return RtcEventLogParseStatus::Success(); -} - -// Same as above, but for enum fields. -template ::value, bool> = true> -ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - T E::*member, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(output.size(), batch_size); - for (size_t i = 0; i < batch_size; ++i) { - auto result = RtcEventLogEnum::Decode(values[i]); - if (!result.ok()) { - return result.status(); - } - output[i].*member = result.value(); - } - return RtcEventLogParseStatus::Success(); -} - -// Same as above, but for string fields. -template -ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventMember(const rtc::ArrayView values, - std::string E::*member, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(output.size(), batch_size); - for (size_t i = 0; i < batch_size; ++i) { - output[i].*member = values[i]; - } - return RtcEventLogParseStatus::Success(); -} - -// Same as above, but for Timestamp fields. -// N.B. Assumes that the encoded value uses millisecond precision. 
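The `PopulateRtcEventTimestamp` comment removed just above calls out an encoding assumption: timestamps travel as millisecond values, so the parser widens them back to the microsecond-based `Timestamp` when filling the logged structs. A tiny standalone sketch of that conversion, assuming a simplified stand-in struct instead of `webrtc::Timestamp` (which the real code rebuilds via `Timestamp::Millis`):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Simplified stand-in for a logged event that stores microseconds internally.
struct LoggedEvent {
  int64_t timestamp_us = 0;
  int64_t log_time_ms() const { return timestamp_us / 1000; }
};

// Encoded values carry millisecond precision; widen back to microseconds.
void PopulateTimestamps(const std::vector<uint64_t>& encoded_ms,
                        std::vector<LoggedEvent>& output) {
  assert(encoded_ms.size() == output.size());
  for (size_t i = 0; i < encoded_ms.size(); ++i)
    output[i].timestamp_us = static_cast<int64_t>(encoded_ms[i]) * 1000;
}

int main() {
  std::vector<LoggedEvent> batch(2);
  PopulateTimestamps({250, 251}, batch);
  assert(batch[1].log_time_ms() == 251);  // sub-millisecond detail is gone by design
}
```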
-template -ABSL_MUST_USE_RESULT RtcEventLogParseStatus -PopulateRtcEventTimestamp(const rtc::ArrayView& values, - Timestamp E::*timestamp, - rtc::ArrayView output) { - size_t batch_size = values.size(); - RTC_CHECK_EQ(batch_size, output.size()); - for (size_t i = 0; i < batch_size; ++i) { - output[i].*timestamp = - Timestamp::Millis(DecodeFromUnsignedToType(values[i])); - } - return RtcEventLogParseStatus::Success(); -} - -template -rtc::ArrayView ExtendLoggedBatch(std::vector& output, - size_t new_elements) { - size_t old_size = output.size(); - output.insert(output.end(), old_size + new_elements, E()); - rtc::ArrayView output_batch = output; - output_batch.subview(old_size); - RTC_DCHECK_EQ(output_batch.size(), new_elements); - return output_batch; -} - } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_ENCODING_PARSER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h index eb9d67f1c..8cd020fe1 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_field_extraction.h @@ -17,7 +17,6 @@ #include "absl/types/optional.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "api/units/timestamp.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "rtc_base/logging.h" @@ -56,16 +55,29 @@ T DecodeFromUnsignedToType(uint64_t value) { return static_cast(value); } -// RtcEventLogEnum defines a mapping between an enum T -// and the event log encodings. To log a new enum type T, -// specialize RtcEventLogEnum and add static methods -// static uint64_t Encode(T x) {} -// static RtcEventLogParseStatusOr Decode(uint64_t x) {} -template -class RtcEventLogEnum { - static_assert(sizeof(T) != sizeof(T), - "Missing specialisation of RtcEventLogEnum for type"); -}; +// Given a batch of RtcEvents and a member pointer, extract that +// member from each event in the batch. Signed integer members are +// encoded as unsigned, and the bitsize increased so the result can +// represented as a std::vector. +// This is intended to be used in conjuction with +// EventEncoder::EncodeField to encode a batch of events as follows: +// auto values = ExtractRtcEventMember(batch, RtcEventFoo::timestamp_ms); +// encoder.EncodeField(timestamp_params, values) +template ::value, bool> = true> +std::vector ExtractRtcEventMember( + rtc::ArrayView batch, + const T E::*member) { + std::vector values; + values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + T value = static_cast(event)->*member; + values.push_back(EncodeAsUnsigned(value)); + } + return values; +} // Represents a vector> optional_values // as a bit-vector `position_mask` which identifies the positions @@ -79,6 +91,96 @@ struct ValuesWithPositions { std::vector values; }; +// Same as above but for optional fields. It returns a struct +// containing a vector of positions in addition to the vector of values. +// The vector `positions` has the same length as the batch where +// `positions[i] == true` iff the batch[i]->member has a value. +// The values vector only contains the values that exists, so it +// may be shorter than the batch. 
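The block added above to `rtc_event_field_extraction.h` keeps the documented usage pattern for `ExtractRtcEventMember`: walk a batch of base-class `RtcEvent` pointers that are known to share one concrete type, downcast each one, and copy a single member out through a pointer-to-member so the result can be handed to the field encoder. A compact standalone sketch of that pattern with invented types (`Event`, `FooEvent`, `ExtractMember`), using `assert` in place of `RTC_CHECK_EQ` for the homogeneity check:

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Miniature stand-ins for the RtcEvent hierarchy (not the WebRTC classes).
struct Event {
  enum class Type { Foo };
  virtual Type GetType() const = 0;
  virtual ~Event() = default;
};

struct FooEvent : Event {
  static constexpr Type kType = Type::Foo;
  uint32_t ssrc;
  explicit FooEvent(uint32_t ssrc) : ssrc(ssrc) {}
  Type GetType() const override { return kType; }
};

// Copy one member out of every event in a homogeneous batch.
template <typename E, typename T>
std::vector<uint64_t> ExtractMember(const std::vector<const Event*>& batch,
                                    T E::*member) {
  std::vector<uint64_t> values;
  values.reserve(batch.size());
  for (const Event* event : batch) {
    assert(event->GetType() == E::kType);  // every event must be an E
    values.push_back(static_cast<const E*>(event)->*member);
  }
  return values;
}

int main() {
  FooEvent a(111), b(222);
  std::vector<const Event*> batch = {&a, &b};
  std::vector<uint64_t> ssrcs = ExtractMember<FooEvent>(batch, &FooEvent::ssrc);
  assert(ssrcs[0] == 111 && ssrcs[1] == 222);  // ready to feed to an encoder
}
```

Signed members would additionally go through the `EncodeAsUnsigned` mapping mentioned in the surrounding header; the sketch skips that step.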
+template ::value, bool> = true> +ValuesWithPositions ExtractRtcEventMember(rtc::ArrayView batch, + const absl::optional E::*member) { + ValuesWithPositions result; + result.position_mask.reserve(batch.size()); + result.values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + absl::optional field = static_cast(event)->*member; + result.position_mask.push_back(field.has_value()); + if (field.has_value()) { + result.values.push_back(EncodeAsUnsigned(field.value())); + } + } + return result; +} + +template +std::vector ExtractRtcEventMember( + rtc::ArrayView batch, + const std::string E::*member) { + std::vector values; + values.reserve(batch.size()); + for (const RtcEvent* event : batch) { + RTC_CHECK_EQ(event->GetType(), E::kType); + absl::string_view str = static_cast(event)->*member; + values.push_back(str); + } + return values; +} + +// Inverse of the ExtractRtcEventMember function used when parsing +// a log. Uses a vector of values to populate a specific field in a +// vector of structs. +template ::value, bool> = true> +void PopulateRtcEventMember(const rtc::ArrayView values, + T E::*member, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(output.size(), batch_size); + for (size_t i = 0; i < batch_size; ++i) { + output[i].*member = DecodeFromUnsignedToType(values[i]); + } +} + +// Same as above, but for optional fields. +template ::value, bool> = true> +void PopulateRtcEventMember(const rtc::ArrayView positions, + const rtc::ArrayView values, + absl::optional E::*member, + rtc::ArrayView output) { + size_t batch_size = positions.size(); + RTC_CHECK_EQ(output.size(), batch_size); + RTC_CHECK_LE(values.size(), batch_size); + auto value_it = values.begin(); + for (size_t i = 0; i < batch_size; ++i) { + if (positions[i]) { + RTC_CHECK(value_it != values.end()); + output[i].*member = DecodeFromUnsignedToType(value_it); + ++value_it; + } else { + output[i].*member = absl::nullopt; + } + } + RTC_CHECK(value_it == values.end()); +} + +template +void PopulateRtcEventMember(const rtc::ArrayView values, + std::string E::*member, + rtc::ArrayView output) { + size_t batch_size = values.size(); + RTC_CHECK_EQ(output.size(), batch_size); + for (size_t i = 0; i < batch_size; ++i) { + output[i].*member = values[i]; + } +} + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FIELD_EXTRACTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h index 91190faea..4a6bb90d0 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_frame_decoded.h @@ -13,33 +13,14 @@ #include -#include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" #include "api/video/video_codec_type.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedFrameDecoded { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t render_time_ms; - uint32_t ssrc; - int width; - int height; - VideoCodecType codec; - uint8_t qp; -}; - class RtcEventFrameDecoded final : public 
RtcEvent { public: static constexpr Type kType = Type::FrameDecoded; @@ -64,19 +45,6 @@ class RtcEventFrameDecoded final : public RtcEvent { VideoCodecType codec() const { return codec_; } uint8_t qp() const { return qp_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventFrameDecoded(const RtcEventFrameDecoded& other); @@ -88,6 +56,19 @@ class RtcEventFrameDecoded final : public RtcEvent { const uint8_t qp_; }; +struct LoggedFrameDecoded { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t render_time_ms; + uint32_t ssrc; + int width; + int height; + VideoCodecType codec; + uint8_t qp; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_FRAME_DECODED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h index 57fd7cd9a..3cd8f5cce 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_ack_received.h @@ -12,38 +12,14 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ #include -#include #include -#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedGenericAckReceived { - LoggedGenericAckReceived() = default; - LoggedGenericAckReceived(Timestamp timestamp, - int64_t packet_number, - int64_t acked_packet_number, - absl::optional receive_acked_packet_time_ms) - : timestamp(timestamp), - packet_number(packet_number), - acked_packet_number(acked_packet_number), - receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - int64_t acked_packet_number; - absl::optional receive_acked_packet_time_ms; -}; - struct AckedPacket { // The packet number that was acked. 
int64_t packet_number; @@ -81,19 +57,6 @@ class RtcEventGenericAckReceived final : public RtcEvent { return receive_acked_packet_time_ms_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventGenericAckReceived(const RtcEventGenericAckReceived& packet); @@ -113,6 +76,26 @@ class RtcEventGenericAckReceived final : public RtcEvent { const absl::optional receive_acked_packet_time_ms_; }; +struct LoggedGenericAckReceived { + LoggedGenericAckReceived() = default; + LoggedGenericAckReceived(Timestamp timestamp, + int64_t packet_number, + int64_t acked_packet_number, + absl::optional receive_acked_packet_time_ms) + : timestamp(timestamp), + packet_number(packet_number), + acked_packet_number(acked_packet_number), + receive_acked_packet_time_ms(receive_acked_packet_time_ms) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + int64_t acked_packet_number; + absl::optional receive_acked_packet_time_ms; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_ACK_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h index a6006ca4d..428e7b380 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_received.h @@ -12,34 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedGenericPacketReceived { - LoggedGenericPacketReceived() = default; - LoggedGenericPacketReceived(Timestamp timestamp, - int64_t packet_number, - int packet_length) - : timestamp(timestamp), - packet_number(packet_number), - packet_length(packet_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - int packet_length; -}; - class RtcEventGenericPacketReceived final : public RtcEvent { public: static constexpr Type kType = Type::GenericPacketReceived; @@ -59,19 +37,6 @@ class RtcEventGenericPacketReceived final : public RtcEvent { // including ICE/TURN/IP overheads. 
size_t packet_length() const { return packet_length_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventGenericPacketReceived(const RtcEventGenericPacketReceived& packet); @@ -79,6 +44,23 @@ class RtcEventGenericPacketReceived final : public RtcEvent { const size_t packet_length_; }; +struct LoggedGenericPacketReceived { + LoggedGenericPacketReceived() = default; + LoggedGenericPacketReceived(Timestamp timestamp, + int64_t packet_number, + int packet_length) + : timestamp(timestamp), + packet_number(packet_number), + packet_length(packet_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + int packet_length; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_RECEIVED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h index 903950a39..6e626e63a 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_generic_packet_sent.h @@ -12,43 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedGenericPacketSent { - LoggedGenericPacketSent() = default; - LoggedGenericPacketSent(Timestamp timestamp, - int64_t packet_number, - size_t overhead_length, - size_t payload_length, - size_t padding_length) - : timestamp(timestamp), - packet_number(packet_number), - overhead_length(overhead_length), - payload_length(payload_length), - padding_length(padding_length) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - size_t packet_length() const { - return payload_length + padding_length + overhead_length; - } - Timestamp timestamp = Timestamp::MinusInfinity(); - int64_t packet_number; - size_t overhead_length; - size_t payload_length; - size_t padding_length; -}; - class RtcEventGenericPacketSent final : public RtcEvent { public: static constexpr Type kType = Type::GenericPacketSent; @@ -83,19 +52,6 @@ class RtcEventGenericPacketSent final : public RtcEvent { size_t padding_length() const { return padding_length_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventGenericPacketSent(const RtcEventGenericPacketSent& packet); @@ -105,6 +61,31 @@ class RtcEventGenericPacketSent final : public RtcEvent { const size_t padding_length_; }; +struct LoggedGenericPacketSent { + 
LoggedGenericPacketSent() = default; + LoggedGenericPacketSent(Timestamp timestamp, + int64_t packet_number, + size_t overhead_length, + size_t payload_length, + size_t padding_length) + : timestamp(timestamp), + packet_number(packet_number), + overhead_length(overhead_length), + payload_length(payload_length), + padding_length(padding_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + size_t packet_length() const { + return payload_length + padding_length + overhead_length; + } + Timestamp timestamp = Timestamp::MinusInfinity(); + int64_t packet_number; + size_t overhead_length; + size_t payload_length; + size_t padding_length; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_GENERIC_PACKET_SENT_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h index bdacf15a5..1f4d825a9 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair.h @@ -14,13 +14,9 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -32,27 +28,6 @@ enum class IceCandidatePairEventType { kNumValues, }; -struct LoggedIceCandidatePairEvent { - LoggedIceCandidatePairEvent() = default; - LoggedIceCandidatePairEvent(Timestamp timestamp, - IceCandidatePairEventType type, - uint32_t candidate_pair_id, - uint32_t transaction_id) - : timestamp(timestamp), - type(type), - candidate_pair_id(candidate_pair_id), - transaction_id(transaction_id) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - IceCandidatePairEventType type; - uint32_t candidate_pair_id; - uint32_t transaction_id; -}; - class RtcEventIceCandidatePair final : public RtcEvent { public: static constexpr Type kType = Type::IceCandidatePairEvent; @@ -72,19 +47,6 @@ class RtcEventIceCandidatePair final : public RtcEvent { uint32_t candidate_pair_id() const { return candidate_pair_id_; } uint32_t transaction_id() const { return transaction_id_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventIceCandidatePair(const RtcEventIceCandidatePair& other); @@ -93,6 +55,26 @@ class RtcEventIceCandidatePair final : public RtcEvent { const uint32_t transaction_id_; }; +struct LoggedIceCandidatePairEvent { + LoggedIceCandidatePairEvent() = default; + LoggedIceCandidatePairEvent(Timestamp timestamp, + IceCandidatePairEventType type, + uint32_t candidate_pair_id, + uint32_t transaction_id) + : timestamp(timestamp), + type(type), + candidate_pair_id(candidate_pair_id), + transaction_id(transaction_id) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = 
Timestamp::MinusInfinity(); + IceCandidatePairEventType type; + uint32_t candidate_pair_id; + uint32_t transaction_id; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h index e72d999cf..465a79978 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_ice_candidate_pair_config.h @@ -14,13 +14,9 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -69,23 +65,6 @@ enum class IceCandidateNetworkType { kNumValues, }; -struct LoggedIceCandidatePairConfig { - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - IceCandidatePairConfigType type; - uint32_t candidate_pair_id; - IceCandidateType local_candidate_type; - IceCandidatePairProtocol local_relay_protocol; - IceCandidateNetworkType local_network_type; - IceCandidatePairAddressFamily local_address_family; - IceCandidateType remote_candidate_type; - IceCandidatePairAddressFamily remote_address_family; - IceCandidatePairProtocol candidate_pair_protocol; -}; - class IceCandidatePairDescription { public: IceCandidatePairDescription(); @@ -126,19 +105,6 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { return candidate_pair_desc_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventIceCandidatePairConfig(const RtcEventIceCandidatePairConfig& other); @@ -147,6 +113,22 @@ class RtcEventIceCandidatePairConfig final : public RtcEvent { const IceCandidatePairDescription candidate_pair_desc_; }; +struct LoggedIceCandidatePairConfig { + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + IceCandidatePairConfigType type; + uint32_t candidate_pair_id; + IceCandidateType local_candidate_type; + IceCandidatePairProtocol local_relay_protocol; + IceCandidateNetworkType local_network_type; + IceCandidatePairAddressFamily local_address_family; + IceCandidateType remote_candidate_type; + IceCandidatePairAddressFamily remote_address_family; + IceCandidatePairProtocol candidate_pair_protocol; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ICE_CANDIDATE_PAIR_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h index ae6810c39..974a0c9a5 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h +++ 
b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_cluster_created.h @@ -14,40 +14,12 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedBweProbeClusterCreatedEvent { - LoggedBweProbeClusterCreatedEvent() = default; - LoggedBweProbeClusterCreatedEvent(Timestamp timestamp, - int32_t id, - int32_t bitrate_bps, - uint32_t min_packets, - uint32_t min_bytes) - : timestamp(timestamp), - id(id), - bitrate_bps(bitrate_bps), - min_packets(min_packets), - min_bytes(min_bytes) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - int32_t bitrate_bps; - uint32_t min_packets; - uint32_t min_bytes; -}; - class RtcEventProbeClusterCreated final : public RtcEvent { public: static constexpr Type kType = Type::ProbeClusterCreated; @@ -68,19 +40,6 @@ class RtcEventProbeClusterCreated final : public RtcEvent { uint32_t min_probes() const { return min_probes_; } uint32_t min_bytes() const { return min_bytes_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventProbeClusterCreated(const RtcEventProbeClusterCreated& other); @@ -90,6 +49,29 @@ class RtcEventProbeClusterCreated final : public RtcEvent { const uint32_t min_bytes_; }; +struct LoggedBweProbeClusterCreatedEvent { + LoggedBweProbeClusterCreatedEvent() = default; + LoggedBweProbeClusterCreatedEvent(Timestamp timestamp, + int32_t id, + int32_t bitrate_bps, + uint32_t min_packets, + uint32_t min_bytes) + : timestamp(timestamp), + id(id), + bitrate_bps(bitrate_bps), + min_packets(min_packets), + min_bytes(min_bytes) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + int32_t bitrate_bps; + uint32_t min_packets; + uint32_t min_bytes; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_CLUSTER_CREATED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h index 1aa6e75cb..fa61b314b 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_failure.h @@ -14,13 +14,9 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { @@ -31,22 +27,6 @@ enum class ProbeFailureReason { kLast }; -struct LoggedBweProbeFailureEvent { - LoggedBweProbeFailureEvent() = default; - LoggedBweProbeFailureEvent(Timestamp timestamp, - int32_t id, - ProbeFailureReason failure_reason) - : timestamp(timestamp), id(id), failure_reason(failure_reason) 
{} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - ProbeFailureReason failure_reason; -}; - class RtcEventProbeResultFailure final : public RtcEvent { public: static constexpr Type kType = Type::ProbeResultFailure; @@ -62,19 +42,6 @@ class RtcEventProbeResultFailure final : public RtcEvent { int32_t id() const { return id_; } ProbeFailureReason failure_reason() const { return failure_reason_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventProbeResultFailure(const RtcEventProbeResultFailure& other); @@ -82,6 +49,21 @@ class RtcEventProbeResultFailure final : public RtcEvent { const ProbeFailureReason failure_reason_; }; +struct LoggedBweProbeFailureEvent { + LoggedBweProbeFailureEvent() = default; + LoggedBweProbeFailureEvent(Timestamp timestamp, + int32_t id, + ProbeFailureReason failure_reason) + : timestamp(timestamp), id(id), failure_reason(failure_reason) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + ProbeFailureReason failure_reason; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_FAILURE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h index 49d1abec5..d00cfa81d 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_probe_result_success.h @@ -14,32 +14,12 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedBweProbeSuccessEvent { - LoggedBweProbeSuccessEvent() = default; - LoggedBweProbeSuccessEvent(Timestamp timestamp, - int32_t id, - int32_t bitrate_bps) - : timestamp(timestamp), id(id), bitrate_bps(bitrate_bps) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - int32_t id; - int32_t bitrate_bps; -}; - class RtcEventProbeResultSuccess final : public RtcEvent { public: static constexpr Type kType = Type::ProbeResultSuccess; @@ -55,19 +35,6 @@ class RtcEventProbeResultSuccess final : public RtcEvent { int32_t id() const { return id_; } int32_t bitrate_bps() const { return bitrate_bps_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventProbeResultSuccess(const 
RtcEventProbeResultSuccess& other); @@ -75,6 +42,21 @@ class RtcEventProbeResultSuccess final : public RtcEvent { const int32_t bitrate_bps_; }; +struct LoggedBweProbeSuccessEvent { + LoggedBweProbeSuccessEvent() = default; + LoggedBweProbeSuccessEvent(Timestamp timestamp, + int32_t id, + int32_t bitrate_bps) + : timestamp(timestamp), id(id), bitrate_bps(bitrate_bps) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + int32_t id; + int32_t bitrate_bps; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_PROBE_RESULT_SUCCESS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h index 4a39ecc59..956e05f68 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_remote_estimate.h @@ -11,30 +11,14 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/data_rate.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedRemoteEstimateEvent { - LoggedRemoteEstimateEvent() = default; - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - absl::optional link_capacity_lower; - absl::optional link_capacity_upper; -}; - class RtcEventRemoteEstimate final : public RtcEvent { public: static constexpr Type kType = Type::RemoteEstimateEvent; @@ -47,22 +31,19 @@ class RtcEventRemoteEstimate final : public RtcEvent { Type GetType() const override { return kType; } bool IsConfigEvent() const override { return false; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - const DataRate link_capacity_lower_; const DataRate link_capacity_upper_; }; +struct LoggedRemoteEstimateEvent { + LoggedRemoteEstimateEvent() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + absl::optional link_capacity_lower; + absl::optional link_capacity_upper; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_REMOTE_ESTIMATE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h index bc1461d7b..4a4e9aef8 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_route_change.h @@ -12,30 +12,12 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include 
"api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" namespace webrtc { -struct LoggedRouteChangeEvent { - LoggedRouteChangeEvent() = default; - LoggedRouteChangeEvent(Timestamp timestamp, bool connected, uint32_t overhead) - : timestamp(timestamp), connected(connected), overhead(overhead) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - bool connected; - uint32_t overhead; -}; - class RtcEventRouteChange final : public RtcEvent { public: static constexpr Type kType = Type::RouteChangeEvent; @@ -51,19 +33,6 @@ class RtcEventRouteChange final : public RtcEvent { bool connected() const { return connected_; } uint32_t overhead() const { return overhead_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventRouteChange(const RtcEventRouteChange& other); @@ -71,5 +40,18 @@ class RtcEventRouteChange final : public RtcEvent { const uint32_t overhead_; }; +struct LoggedRouteChangeEvent { + LoggedRouteChangeEvent() = default; + LoggedRouteChangeEvent(Timestamp timestamp, bool connected, uint32_t overhead) + : timestamp(timestamp), connected(connected), overhead(overhead) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + bool connected; + uint32_t overhead; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_ROUTE_CHANGE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h index 84fe398e0..1cbac7712 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h @@ -14,14 +14,9 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -40,19 +35,6 @@ class RtcEventRtcpPacketIncoming final : public RtcEvent { const rtc::Buffer& packet() const { return packet_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventRtcpPacketIncoming(const RtcEventRtcpPacketIncoming& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h index 687bd319b..0ecccbeaa 100644 --- 
a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h @@ -14,14 +14,9 @@ #include #include -#include -#include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "rtc_base/buffer.h" namespace webrtc { @@ -40,19 +35,6 @@ class RtcEventRtcpPacketOutgoing final : public RtcEvent { const rtc::Buffer& packet() const { return packet_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventRtcpPacketOutgoing(const RtcEventRtcpPacketOutgoing& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h index 926ddddff..ee48fa360 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h @@ -13,17 +13,11 @@ #include #include -#include #include -#include #include -#include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -65,19 +59,6 @@ class RtcEventRtpPacketIncoming final : public RtcEvent { size_t header_length() const { return packet_.headers_size(); } size_t padding_length() const { return packet_.padding_size(); } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventRtpPacketIncoming(const RtcEventRtpPacketIncoming& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h index c7b7a0971..626c094ca 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h @@ -13,17 +13,11 @@ #include #include -#include #include -#include #include -#include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/rtc_event_log/rtc_event.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "modules/rtp_rtcp/source/rtp_packet.h" namespace webrtc { @@ -67,19 +61,6 @@ class RtcEventRtpPacketOutgoing final : public RtcEvent { size_t padding_length() const { return packet_.padding_size(); } int probe_cluster_id() const { return probe_cluster_id_; } - static 
std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::map>& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventRtpPacketOutgoing(const RtcEventRtpPacketOutgoing& other); diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h index 0be56c206..e7b906187 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h @@ -12,30 +12,13 @@ #define LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -struct LoggedVideoRecvConfig { - LoggedVideoRecvConfig() = default; - LoggedVideoRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - class RtcEventVideoReceiveStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::VideoReceiveStreamConfig; @@ -51,19 +34,6 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventVideoReceiveStreamConfig( const RtcEventVideoReceiveStreamConfig& other); @@ -71,6 +41,18 @@ class RtcEventVideoReceiveStreamConfig final : public RtcEvent { const std::unique_ptr config_; }; +struct LoggedVideoRecvConfig { + LoggedVideoRecvConfig() = default; + LoggedVideoRecvConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; + } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_RECEIVE_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h index f1717b19e..e72e75e49 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/events/rtc_event_video_send_stream_config.h @@ -12,30 +12,13 @@ #define 
LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ #include -#include -#include -#include "absl/strings/string_view.h" #include "api/rtc_event_log/rtc_event.h" #include "api/units/timestamp.h" -#include "logging/rtc_event_log/events/rtc_event_field_encoding_parser.h" #include "logging/rtc_event_log/rtc_stream_config.h" namespace webrtc { -struct LoggedVideoSendConfig { - LoggedVideoSendConfig() = default; - LoggedVideoSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) - : timestamp(timestamp), config(config) {} - - int64_t log_time_us() const { return timestamp.us(); } - int64_t log_time_ms() const { return timestamp.ms(); } - Timestamp log_time() const { return timestamp; } - - Timestamp timestamp = Timestamp::MinusInfinity(); - rtclog::StreamConfig config; -}; - class RtcEventVideoSendStreamConfig final : public RtcEvent { public: static constexpr Type kType = Type::VideoSendStreamConfig; @@ -51,25 +34,23 @@ class RtcEventVideoSendStreamConfig final : public RtcEvent { const rtclog::StreamConfig& config() const { return *config_; } - static std::string Encode(rtc::ArrayView batch) { - // TODO(terelius): Implement - return ""; - } - - static RtcEventLogParseStatus Parse( - absl::string_view encoded_bytes, - bool batched, - std::vector& output) { - // TODO(terelius): Implement - return RtcEventLogParseStatus::Error("Not Implemented", __FILE__, __LINE__); - } - private: RtcEventVideoSendStreamConfig(const RtcEventVideoSendStreamConfig& other); const std::unique_ptr config_; }; +struct LoggedVideoSendConfig { + LoggedVideoSendConfig() = default; + LoggedVideoSendConfig(Timestamp timestamp, const rtclog::StreamConfig config) + : timestamp(timestamp), config(config) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtclog::StreamConfig config; +}; } // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_EVENTS_RTC_EVENT_VIDEO_SEND_STREAM_CONFIG_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc new file mode 100644 index 000000000..5ef3de11c --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.cc @@ -0,0 +1,57 @@ +/* + * Copyright 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "logging/rtc_event_log/logged_events.h" + +namespace webrtc { + +LoggedPacketInfo::LoggedPacketInfo(const LoggedRtpPacket& rtp, + LoggedMediaType media_type, + bool rtx, + Timestamp capture_time) + : ssrc(rtp.header.ssrc), + stream_seq_no(rtp.header.sequenceNumber), + size(static_cast(rtp.total_length)), + payload_size(static_cast(rtp.total_length - + rtp.header.paddingLength - + rtp.header.headerLength)), + padding_size(static_cast(rtp.header.paddingLength)), + payload_type(rtp.header.payloadType), + media_type(media_type), + rtx(rtx), + marker_bit(rtp.header.markerBit), + has_transport_seq_no(rtp.header.extension.hasTransportSequenceNumber), + transport_seq_no(static_cast( + has_transport_seq_no ? 
rtp.header.extension.transportSequenceNumber + : 0)), + capture_time(capture_time), + log_packet_time(Timestamp::Micros(rtp.log_time_us())), + reported_send_time(rtp.header.extension.hasAbsoluteSendTime + ? rtp.header.extension.GetAbsoluteSendTimestamp() + : Timestamp::MinusInfinity()) {} + +LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default; + +LoggedPacketInfo::~LoggedPacketInfo() {} + +LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp, + const std::vector& packet) + : timestamp(timestamp), raw_data(packet) {} + +LoggedRtcpPacket::LoggedRtcpPacket(Timestamp timestamp, + const std::string& packet) + : timestamp(timestamp), raw_data(packet.size()) { + memcpy(raw_data.data(), packet.data(), packet.size()); +} + +LoggedRtcpPacket::LoggedRtcpPacket(const LoggedRtcpPacket& rhs) = default; + +LoggedRtcpPacket::~LoggedRtcpPacket() = default; + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h index d6b3cc607..5bce658c3 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/logged_events.h @@ -7,12 +7,337 @@ * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ - #ifndef LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ #define LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ -// TODO(terelius): Delete this forwarding header when downstream -// projects have been updated. -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" +#include +#include +#include "absl/types/optional.h" +#include "api/rtp_headers.h" +#include "api/units/time_delta.h" +#include "api/units/timestamp.h" +#include "modules/rtp_rtcp/source/rtcp_packet/bye.h" +#include "modules/rtp_rtcp/source/rtcp_packet/extended_reports.h" +#include "modules/rtp_rtcp/source/rtcp_packet/fir.h" +#include "modules/rtp_rtcp/source/rtcp_packet/loss_notification.h" +#include "modules/rtp_rtcp/source/rtcp_packet/nack.h" +#include "modules/rtp_rtcp/source/rtcp_packet/pli.h" +#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/remb.h" +#include "modules/rtp_rtcp/source/rtcp_packet/sender_report.h" +#include "modules/rtp_rtcp/source/rtcp_packet/transport_feedback.h" + +namespace webrtc { + +// The different event types are deliberately POD. Analysis of large logs is +// already resource intensive. The code simplifications that would be possible +// possible by having a base class (containing e.g. the log time) are not +// considered to outweigh the added memory and runtime overhead incurred by +// adding a vptr. + +struct LoggedRtpPacket { + LoggedRtpPacket(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : timestamp(timestamp), + header(header), + header_length(header_length), + total_length(total_length) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp; + // TODO(terelius): This allocates space for 15 CSRCs even if none are used. 
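+ // `header` is the parsed RTP header of the logged packet, `header_length` the + // size of that header in bytes, and `total_length` the size of the whole + // packet, so payload plus padding is total_length - header_length.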
+ RTPHeader header; + size_t header_length; + size_t total_length; +}; + +struct LoggedRtpPacketIncoming { + LoggedRtpPacketIncoming(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : rtp(timestamp, header, header_length, total_length) {} + int64_t log_time_us() const { return rtp.timestamp.us(); } + int64_t log_time_ms() const { return rtp.timestamp.ms(); } + + LoggedRtpPacket rtp; +}; + +struct LoggedRtpPacketOutgoing { + LoggedRtpPacketOutgoing(Timestamp timestamp, + RTPHeader header, + size_t header_length, + size_t total_length) + : rtp(timestamp, header, header_length, total_length) {} + int64_t log_time_us() const { return rtp.timestamp.us(); } + int64_t log_time_ms() const { return rtp.timestamp.ms(); } + + LoggedRtpPacket rtp; +}; + +struct LoggedRtcpPacket { + LoggedRtcpPacket(Timestamp timestamp, const std::vector& packet); + LoggedRtcpPacket(Timestamp timestamp, const std::string& packet); + LoggedRtcpPacket(const LoggedRtcpPacket&); + ~LoggedRtcpPacket(); + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp; + std::vector raw_data; +}; + +struct LoggedRtcpPacketIncoming { + LoggedRtcpPacketIncoming(Timestamp timestamp, + const std::vector& packet) + : rtcp(timestamp, packet) {} + LoggedRtcpPacketIncoming(Timestamp timestamp, const std::string& packet) + : rtcp(timestamp, packet) {} + + int64_t log_time_us() const { return rtcp.timestamp.us(); } + int64_t log_time_ms() const { return rtcp.timestamp.ms(); } + + LoggedRtcpPacket rtcp; +}; + +struct LoggedRtcpPacketOutgoing { + LoggedRtcpPacketOutgoing(Timestamp timestamp, + const std::vector& packet) + : rtcp(timestamp, packet) {} + LoggedRtcpPacketOutgoing(Timestamp timestamp, const std::string& packet) + : rtcp(timestamp, packet) {} + + int64_t log_time_us() const { return rtcp.timestamp.us(); } + int64_t log_time_ms() const { return rtcp.timestamp.ms(); } + + LoggedRtcpPacket rtcp; +}; + +struct LoggedRtcpPacketReceiverReport { + LoggedRtcpPacketReceiverReport() = default; + LoggedRtcpPacketReceiverReport(Timestamp timestamp, + const rtcp::ReceiverReport& rr) + : timestamp(timestamp), rr(rr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::ReceiverReport rr; +}; + +struct LoggedRtcpPacketSenderReport { + LoggedRtcpPacketSenderReport() = default; + LoggedRtcpPacketSenderReport(Timestamp timestamp, + const rtcp::SenderReport& sr) + : timestamp(timestamp), sr(sr) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::SenderReport sr; +}; + +struct LoggedRtcpPacketExtendedReports { + LoggedRtcpPacketExtendedReports() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::ExtendedReports xr; +}; + +struct LoggedRtcpPacketRemb { + LoggedRtcpPacketRemb() = default; + LoggedRtcpPacketRemb(Timestamp timestamp, const rtcp::Remb& remb) + : timestamp(timestamp), remb(remb) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Remb remb; +}; + +struct LoggedRtcpPacketNack { + LoggedRtcpPacketNack() = 
default; + LoggedRtcpPacketNack(Timestamp timestamp, const rtcp::Nack& nack) + : timestamp(timestamp), nack(nack) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Nack nack; +}; + +struct LoggedRtcpPacketFir { + LoggedRtcpPacketFir() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Fir fir; +}; + +struct LoggedRtcpPacketPli { + LoggedRtcpPacketPli() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Pli pli; +}; + +struct LoggedRtcpPacketTransportFeedback { + LoggedRtcpPacketTransportFeedback() + : transport_feedback(/*include_timestamps=*/true, /*include_lost*/ true) { + } + LoggedRtcpPacketTransportFeedback( + Timestamp timestamp, + const rtcp::TransportFeedback& transport_feedback) + : timestamp(timestamp), transport_feedback(transport_feedback) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::TransportFeedback transport_feedback; +}; + +struct LoggedRtcpPacketLossNotification { + LoggedRtcpPacketLossNotification() = default; + LoggedRtcpPacketLossNotification( + Timestamp timestamp, + const rtcp::LossNotification& loss_notification) + : timestamp(timestamp), loss_notification(loss_notification) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::LossNotification loss_notification; +}; + +struct LoggedRtcpPacketBye { + LoggedRtcpPacketBye() = default; + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp = Timestamp::MinusInfinity(); + rtcp::Bye bye; +}; + +struct LoggedStartEvent { + explicit LoggedStartEvent(Timestamp timestamp) + : LoggedStartEvent(timestamp, timestamp) {} + + LoggedStartEvent(Timestamp timestamp, Timestamp utc_start_time) + : timestamp(timestamp), utc_start_time(utc_start_time) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp utc_time() const { return utc_start_time; } + + Timestamp timestamp; + Timestamp utc_start_time; +}; + +struct LoggedStopEvent { + explicit LoggedStopEvent(Timestamp timestamp) : timestamp(timestamp) {} + + int64_t log_time_us() const { return timestamp.us(); } + int64_t log_time_ms() const { return timestamp.ms(); } + + Timestamp timestamp; +}; + +struct InferredRouteChangeEvent { + int64_t log_time_ms() const { return log_time.ms(); } + int64_t log_time_us() const { return log_time.us(); } + uint32_t route_id; + Timestamp log_time = Timestamp::MinusInfinity(); + uint16_t send_overhead; + uint16_t return_overhead; +}; + +enum class LoggedMediaType : uint8_t { kUnknown, kAudio, kVideo }; + +struct LoggedPacketInfo { + LoggedPacketInfo(const LoggedRtpPacket& rtp, + LoggedMediaType media_type, + bool rtx, + Timestamp capture_time); + LoggedPacketInfo(const LoggedPacketInfo&); + ~LoggedPacketInfo(); + int64_t log_time_ms() const { return log_packet_time.ms(); } + int64_t log_time_us() const { return log_packet_time.us(); } 
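+ // `ssrc` and `stream_seq_no` identify the packet within its RTP stream, and + // `size` is the total RTP packet length in bytes, of which `payload_size` and + // `padding_size` are the payload and padding portions.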
+ uint32_t ssrc; + uint16_t stream_seq_no; + uint16_t size; + uint16_t payload_size; + uint16_t padding_size; + uint16_t overhead = 0; + uint8_t payload_type; + LoggedMediaType media_type = LoggedMediaType::kUnknown; + bool rtx = false; + bool marker_bit = false; + bool has_transport_seq_no = false; + bool last_in_feedback = false; + uint16_t transport_seq_no = 0; + // The RTP header timestamp unwrapped and converted from tick count to seconds + // based timestamp. + Timestamp capture_time; + // The time the packet was logged. This is the receive time for incoming + // packets and send time for outgoing. + Timestamp log_packet_time; + // Send time as reported by abs-send-time extension, For outgoing packets this + // corresponds to log_packet_time, but might be measured using another clock. + Timestamp reported_send_time; + // The receive time that was reported in feedback. For incoming packets this + // corresponds to log_packet_time, but might be measured using another clock. + // PlusInfinity indicates that the packet was lost. + Timestamp reported_recv_time = Timestamp::MinusInfinity(); + // The time feedback message was logged. This is the feedback send time for + // incoming packets and feedback receive time for outgoing. + // PlusInfinity indicates that feedback was expected but not received. + Timestamp log_feedback_time = Timestamp::MinusInfinity(); + // The delay betweeen receiving an RTP packet and sending feedback for + // incoming packets. For outgoing packets we don't know the feedback send + // time, and this is instead calculated as the difference in reported receive + // time between this packet and the last packet in the same feedback message. + TimeDelta feedback_hold_duration = TimeDelta::MinusInfinity(); +}; + +enum class LoggedIceEventType { + kAdded, + kUpdated, + kDestroyed, + kSelected, + kCheckSent, + kCheckReceived, + kCheckResponseSent, + kCheckResponseReceived, +}; + +struct LoggedIceEvent { + uint32_t candidate_pair_id; + Timestamp log_time; + LoggedIceEventType event_type; +}; + + +} // namespace webrtc #endif // LOGGING_RTC_EVENT_LOG_LOGGED_EVENTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc index 0766b4a88..4237b2a71 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_impl.cc @@ -22,6 +22,7 @@ #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_legacy.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_new_format.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -39,10 +40,10 @@ std::unique_ptr CreateEncoder( RtcEventLog::EncodingType type) { switch (type) { case RtcEventLog::EncodingType::Legacy: - RTC_DLOG(LS_INFO) << "Creating legacy encoder for RTC event log."; + RTC_LOG(LS_INFO) << "Creating legacy encoder for RTC event log."; return std::make_unique(); case RtcEventLog::EncodingType::NewFormat: - RTC_DLOG(LS_INFO) << "Creating new format encoder for RTC event log."; + RTC_LOG(LS_INFO) << "Creating new format encoder for RTC event log."; return std::make_unique(); default: RTC_LOG(LS_ERROR) << "Unknown RtcEventLog encoder type (" << int(type) @@ -91,7 +92,8 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr output, const int64_t timestamp_us = rtc::TimeMillis() * 1000; 
const int64_t utc_time_us = rtc::TimeUTCMillis() * 1000; - RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = (" + RTC_LOG(LS_INFO) << "Starting WebRTC event log. (Timestamp, UTC) = " + "(" << timestamp_us << ", " << utc_time_us << ")."; RTC_DCHECK_RUN_ON(&logging_state_checker_); @@ -112,7 +114,7 @@ bool RtcEventLogImpl::StartLogging(std::unique_ptr output, } void RtcEventLogImpl::StopLogging() { - RTC_DLOG(LS_INFO) << "Stopping WebRTC event log."; + RTC_LOG(LS_INFO) << "Stopping WebRTC event log."; // TODO(danilchap): Do not block current thread waiting on the task queue. // It might work for now, for current callers, but disallows caller to share // threads with the `task_queue_`. @@ -120,7 +122,7 @@ void RtcEventLogImpl::StopLogging() { StopLogging([&output_stopped]() { output_stopped.Set(); }); output_stopped.Wait(rtc::Event::kForever); - RTC_DLOG(LS_INFO) << "WebRTC event log successfully stopped."; + RTC_LOG(LS_INFO) << "WebRTC event log successfully stopped."; } void RtcEventLogImpl::StopLogging(std::function callback) { diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc index da3212cd7..b4e117044 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.cc @@ -28,7 +28,6 @@ #include "logging/rtc_event_log/encoder/delta_encoding.h" #include "logging/rtc_event_log/encoder/rtc_event_log_encoder_common.h" #include "logging/rtc_event_log/encoder/var_int.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/rtc_event_processor.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/include/module_common_types_public.h" @@ -54,12 +53,6 @@ return ParsedRtcEventLog::ParseStatus::Error(#X, __FILE__, __LINE__); \ } while (0) -#define RTC_PARSE_CHECK_OR_RETURN_MESSAGE(X, M) \ - do { \ - if (!(X)) \ - return ParsedRtcEventLog::ParseStatus::Error((M), __FILE__, __LINE__); \ - } while (0) - #define RTC_PARSE_CHECK_OR_RETURN_OP(OP, X, Y) \ do { \ if (!((X)OP(Y))) \ @@ -954,35 +947,6 @@ std::vector GetRuntimeRtpHeaderExtensionConfig( } // End of conversion functions. -LoggedPacketInfo::LoggedPacketInfo(const LoggedRtpPacket& rtp, - LoggedMediaType media_type, - bool rtx, - Timestamp capture_time) - : ssrc(rtp.header.ssrc), - stream_seq_no(rtp.header.sequenceNumber), - size(static_cast(rtp.total_length)), - payload_size(static_cast(rtp.total_length - - rtp.header.paddingLength - - rtp.header.headerLength)), - padding_size(static_cast(rtp.header.paddingLength)), - payload_type(rtp.header.payloadType), - media_type(media_type), - rtx(rtx), - marker_bit(rtp.header.markerBit), - has_transport_seq_no(rtp.header.extension.hasTransportSequenceNumber), - transport_seq_no(static_cast( - has_transport_seq_no ? rtp.header.extension.transportSequenceNumber - : 0)), - capture_time(capture_time), - log_packet_time(Timestamp::Micros(rtp.log_time_us())), - reported_send_time(rtp.header.extension.hasAbsoluteSendTime - ? 
rtp.header.extension.GetAbsoluteSendTimestamp() - : Timestamp::MinusInfinity()) {} - -LoggedPacketInfo::LoggedPacketInfo(const LoggedPacketInfo&) = default; - -LoggedPacketInfo::~LoggedPacketInfo() {} - ParsedRtcEventLog::~ParsedRtcEventLog() = default; ParsedRtcEventLog::LoggedRtpStreamIncoming::LoggedRtpStreamIncoming() = default; @@ -1112,8 +1076,8 @@ void ParsedRtcEventLog::Clear() { last_incoming_rtcp_packet_.clear(); - first_timestamp_ = Timestamp::PlusInfinity(); - last_timestamp_ = Timestamp::MinusInfinity(); + first_timestamp_ = std::numeric_limits::max(); + last_timestamp_ = std::numeric_limits::min(); first_log_segment_ = LogSegment(0, std::numeric_limits::max()); incoming_rtp_extensions_maps_.clear(); @@ -1234,8 +1198,8 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( // stream configurations and starting/stopping the log. // TODO(terelius): Figure out if we actually need to find the first and last // timestamp in the parser. It seems like this could be done by the caller. - first_timestamp_ = Timestamp::PlusInfinity(); - last_timestamp_ = Timestamp::MinusInfinity(); + first_timestamp_ = std::numeric_limits::max(); + last_timestamp_ = std::numeric_limits::min(); StoreFirstAndLastTimestamp(alr_state_events()); StoreFirstAndLastTimestamp(route_change_events()); for (const auto& audio_stream : audio_playout_events()) { @@ -1274,8 +1238,7 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( // event, we could use the timestamp of the the last previous regular event. auto start_iter = start_log_events().begin(); auto stop_iter = stop_log_events().begin(); - int64_t start_us = - first_timestamp().us_or(std::numeric_limits::max()); + int64_t start_us = first_timestamp(); int64_t next_start_us = std::numeric_limits::max(); int64_t stop_us = std::numeric_limits::max(); if (start_iter != start_log_events().end()) { @@ -1289,14 +1252,15 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( } stop_us = std::min(stop_us, next_start_us); if (stop_us == std::numeric_limits::max() && - !last_timestamp().IsMinusInfinity()) { - stop_us = last_timestamp().us(); + last_timestamp() != std::numeric_limits::min()) { + stop_us = last_timestamp(); } RTC_PARSE_CHECK_OR_RETURN_LE(start_us, stop_us); first_log_segment_ = LogSegment(start_us, stop_us); - if (first_timestamp_ > last_timestamp_) { - first_timestamp_ = last_timestamp_ = Timestamp::Zero(); + if (first_timestamp_ == std::numeric_limits::max() && + last_timestamp_ == std::numeric_limits::min()) { + first_timestamp_ = last_timestamp_ = 0; } return status; @@ -1305,34 +1269,18 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStream( ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( absl::string_view s) { constexpr uint64_t kMaxEventSize = 10000000; // Sanity check. - // Protobuf defines the message tag as - // (field_number << 3) | wire_type. In the legacy encoding, the field number - // is supposed to be 1 and the wire type for a length-delimited field is 2. - // In the new encoding we still expect the wire type to be 2, but the field - // number will be greater than 1. - constexpr uint64_t kExpectedV1Tag = (1 << 3) | 2; - bool success = false; - - // "Peek" at the first varint. 
- absl::string_view event_start = s; - uint64_t tag = 0; - std::tie(success, std::ignore) = DecodeVarInt(s, &tag); - if (!success) { - RTC_LOG(LS_WARNING) << "Failed to read varint from beginning of event log."; - RTC_PARSE_WARN_AND_RETURN_SUCCESS_IF(allow_incomplete_logs_, - kIncompleteLogError); - return ParseStatus::Error("Failed to read field tag varint", __FILE__, - __LINE__); - } - s = event_start; - - if (tag >> 1 == static_cast(RtcEvent::Type::BeginV3Log)) { - return ParseStreamInternalV3(s); - } while (!s.empty()) { - // If not, "reset" event_start and read the field tag for the next event. - event_start = s; + absl::string_view event_start = s; + bool success = false; + + // Read the next message tag. Protobuf defines the message tag as + // (field_number << 3) | wire_type. In the legacy encoding, the field number + // is supposed to be 1 and the wire type for a length-delimited field is 2. + // In the new encoding we still expect the wire type to be 2, but the field + // number will be greater than 1. + constexpr uint64_t kExpectedV1Tag = (1 << 3) | 2; + uint64_t tag = 0; std::tie(success, s) = DecodeVarInt(s, &tag); if (!success) { RTC_LOG(LS_WARNING) @@ -1342,7 +1290,6 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( return ParseStatus::Error("Failed to read field tag varint", __FILE__, __LINE__); } - constexpr uint64_t kWireTypeMask = 0x07; const uint64_t wire_type = tag & kWireTypeMask; if (wire_type != 2) { @@ -1412,161 +1359,12 @@ ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternal( return ParseStatus::Success(); } -ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::ParseStreamInternalV3( - absl::string_view s) { - constexpr uint64_t kMaxEventSize = 10000000; // Sanity check. - bool expect_begin_log_event = true; - bool success = false; - - while (!s.empty()) { - // Read event type. - uint64_t event_tag = 0; - std::tie(success, s) = DecodeVarInt(s, &event_tag); - RTC_PARSE_CHECK_OR_RETURN_MESSAGE(success, "Failed to read event type."); - bool batched = event_tag & 1; - uint64_t event_type = event_tag >> 1; - - // Read event size - uint64_t event_size_bytes = 0; - std::tie(success, s) = DecodeVarInt(s, &event_size_bytes); - RTC_PARSE_CHECK_OR_RETURN_MESSAGE(success, "Failed to read event size."); - if (event_size_bytes > kMaxEventSize || event_size_bytes > s.size()) { - RTC_LOG(LS_WARNING) << "Event size is too large."; - RTC_PARSE_CHECK_OR_RETURN_LE(event_size_bytes, kMaxEventSize); - RTC_PARSE_CHECK_OR_RETURN_LE(event_size_bytes, s.size()); - } - - // Read remaining event fields into a buffer. 
- absl::string_view event_fields = s.substr(0, event_size_bytes); - s = s.substr(event_size_bytes); - - if (expect_begin_log_event) { - RTC_PARSE_CHECK_OR_RETURN_EQ( - event_type, static_cast(RtcEvent::Type::BeginV3Log)); - expect_begin_log_event = false; - } - - switch (event_type) { - case static_cast(RtcEvent::Type::BeginV3Log): - RtcEventBeginLog::Parse(event_fields, batched, start_log_events_); - break; - case static_cast(RtcEvent::Type::EndV3Log): - RtcEventEndLog::Parse(event_fields, batched, stop_log_events_); - expect_begin_log_event = true; - break; - case static_cast(RtcEvent::Type::AlrStateEvent): - RtcEventAlrState::Parse(event_fields, batched, alr_state_events_); - break; - case static_cast(RtcEvent::Type::AudioPlayout): - RtcEventAudioPlayout::Parse(event_fields, batched, - audio_playout_events_); - break; - case static_cast(RtcEvent::Type::BweUpdateDelayBased): - RtcEventBweUpdateDelayBased::Parse(event_fields, batched, - bwe_delay_updates_); - break; - case static_cast(RtcEvent::Type::AudioNetworkAdaptation): - RtcEventAudioNetworkAdaptation::Parse(event_fields, batched, - audio_network_adaptation_events_); - break; - case static_cast(RtcEvent::Type::AudioReceiveStreamConfig): - RtcEventAudioReceiveStreamConfig::Parse(event_fields, batched, - audio_recv_configs_); - break; - case static_cast(RtcEvent::Type::AudioSendStreamConfig): - RtcEventAudioSendStreamConfig::Parse(event_fields, batched, - audio_send_configs_); - break; - case static_cast(RtcEvent::Type::BweUpdateLossBased): - RtcEventBweUpdateLossBased::Parse(event_fields, batched, - bwe_loss_updates_); - break; - case static_cast(RtcEvent::Type::DtlsTransportState): - RtcEventDtlsTransportState::Parse(event_fields, batched, - dtls_transport_states_); - break; - case static_cast(RtcEvent::Type::DtlsWritableState): - RtcEventDtlsWritableState::Parse(event_fields, batched, - dtls_writable_states_); - break; - case static_cast(RtcEvent::Type::FrameDecoded): - RtcEventFrameDecoded::Parse(event_fields, batched, decoded_frames_); - break; - case static_cast(RtcEvent::Type::GenericAckReceived): - RtcEventGenericAckReceived::Parse(event_fields, batched, - generic_acks_received_); - break; - case static_cast(RtcEvent::Type::GenericPacketReceived): - RtcEventGenericPacketReceived::Parse(event_fields, batched, - generic_packets_received_); - break; - case static_cast(RtcEvent::Type::GenericPacketSent): - RtcEventGenericPacketSent::Parse(event_fields, batched, - generic_packets_sent_); - break; - case static_cast(RtcEvent::Type::IceCandidatePairConfig): - RtcEventIceCandidatePairConfig::Parse(event_fields, batched, - ice_candidate_pair_configs_); - break; - case static_cast(RtcEvent::Type::IceCandidatePairEvent): - RtcEventIceCandidatePair::Parse(event_fields, batched, - ice_candidate_pair_events_); - break; - case static_cast(RtcEvent::Type::ProbeClusterCreated): - RtcEventProbeClusterCreated::Parse(event_fields, batched, - bwe_probe_cluster_created_events_); - break; - case static_cast(RtcEvent::Type::ProbeResultFailure): - RtcEventProbeResultFailure::Parse(event_fields, batched, - bwe_probe_failure_events_); - break; - case static_cast(RtcEvent::Type::ProbeResultSuccess): - RtcEventProbeResultSuccess::Parse(event_fields, batched, - bwe_probe_success_events_); - break; - case static_cast(RtcEvent::Type::RemoteEstimateEvent): - RtcEventRemoteEstimate::Parse(event_fields, batched, - remote_estimate_events_); - break; - case static_cast(RtcEvent::Type::RouteChangeEvent): - RtcEventRouteChange::Parse(event_fields, batched, 
route_change_events_); - break; - case static_cast(RtcEvent::Type::RtcpPacketIncoming): - RtcEventRtcpPacketIncoming::Parse(event_fields, batched, - incoming_rtcp_packets_); - break; - case static_cast(RtcEvent::Type::RtcpPacketOutgoing): - RtcEventRtcpPacketOutgoing::Parse(event_fields, batched, - outgoing_rtcp_packets_); - break; - case static_cast(RtcEvent::Type::RtpPacketIncoming): - RtcEventRtpPacketIncoming::Parse(event_fields, batched, - incoming_rtp_packets_map_); - break; - case static_cast(RtcEvent::Type::RtpPacketOutgoing): - RtcEventRtpPacketOutgoing::Parse(event_fields, batched, - outgoing_rtp_packets_map_); - break; - case static_cast(RtcEvent::Type::VideoReceiveStreamConfig): - RtcEventVideoReceiveStreamConfig::Parse(event_fields, batched, - video_recv_configs_); - break; - case static_cast(RtcEvent::Type::VideoSendStreamConfig): - RtcEventVideoSendStreamConfig::Parse(event_fields, batched, - video_send_configs_); - break; - } - } - - return ParseStatus::Success(); -} - template void ParsedRtcEventLog::StoreFirstAndLastTimestamp(const std::vector& v) { if (v.empty()) return; - first_timestamp_ = std::min(first_timestamp_, v.front().log_time()); - last_timestamp_ = std::max(last_timestamp_, v.back().log_time()); + first_timestamp_ = std::min(first_timestamp_, v.front().log_time_us()); + last_timestamp_ = std::max(last_timestamp_, v.back().log_time_us()); } ParsedRtcEventLog::ParseStatus ParsedRtcEventLog::StoreParsedLegacyEvent( @@ -1793,13 +1591,13 @@ const RtpHeaderExtensionMap* ParsedRtcEventLog::GetRtpHeaderExtensionMap( } if (parse_unconfigured_header_extensions_ == UnconfiguredHeaderExtensions::kAttemptWebrtcDefaultConfig) { - RTC_DLOG(LS_WARNING) << "Using default header extension map for SSRC " - << ssrc; + RTC_LOG(LS_WARNING) << "Using default header extension map for SSRC " + << ssrc; extensions_maps.insert(std::make_pair(ssrc, default_extension_map_)); return &default_extension_map_; } - RTC_DLOG(LS_WARNING) << "Not parsing header extensions for SSRC " << ssrc - << ". No header extension map found."; + RTC_LOG(LS_WARNING) << "Not parsing header extensions for SSRC " << ssrc + << ". 
No header extension map found."; return nullptr; } diff --git a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h index 9ef4e347d..d4c8409b6 100644 --- a/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h +++ b/TMessagesProj/jni/voip/webrtc/logging/rtc_event_log/rtc_event_log_parser.h @@ -15,24 +15,22 @@ #include #include #include +#include // pair #include #include "absl/base/attributes.h" #include "api/rtc_event_log/rtc_event_log.h" #include "call/video_receive_stream.h" #include "call/video_send_stream.h" -#include "logging/rtc_event_log/events/logged_rtp_rtcp.h" #include "logging/rtc_event_log/events/rtc_event_alr_state.h" #include "logging/rtc_event_log/events/rtc_event_audio_network_adaptation.h" #include "logging/rtc_event_log/events/rtc_event_audio_playout.h" #include "logging/rtc_event_log/events/rtc_event_audio_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_audio_send_stream_config.h" -#include "logging/rtc_event_log/events/rtc_event_begin_log.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_delay_based.h" #include "logging/rtc_event_log/events/rtc_event_bwe_update_loss_based.h" #include "logging/rtc_event_log/events/rtc_event_dtls_transport_state.h" #include "logging/rtc_event_log/events/rtc_event_dtls_writable_state.h" -#include "logging/rtc_event_log/events/rtc_event_end_log.h" #include "logging/rtc_event_log/events/rtc_event_frame_decoded.h" #include "logging/rtc_event_log/events/rtc_event_generic_ack_received.h" #include "logging/rtc_event_log/events/rtc_event_generic_packet_received.h" @@ -44,12 +42,9 @@ #include "logging/rtc_event_log/events/rtc_event_probe_result_success.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "logging/rtc_event_log/events/rtc_event_route_change.h" -#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtcp_packet_outgoing.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_incoming.h" -#include "logging/rtc_event_log/events/rtc_event_rtp_packet_outgoing.h" #include "logging/rtc_event_log/events/rtc_event_video_receive_stream_config.h" #include "logging/rtc_event_log/events/rtc_event_video_send_stream_config.h" +#include "logging/rtc_event_log/logged_events.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "modules/rtp_rtcp/source/rtcp_packet/common_header.h" #include "rtc_base/ignore_wundef.h" @@ -69,80 +64,6 @@ namespace webrtc { enum PacketDirection { kIncomingPacket = 0, kOutgoingPacket }; -enum class LoggedMediaType : uint8_t { kUnknown, kAudio, kVideo }; - -struct LoggedPacketInfo { - LoggedPacketInfo(const LoggedRtpPacket& rtp, - LoggedMediaType media_type, - bool rtx, - Timestamp capture_time); - LoggedPacketInfo(const LoggedPacketInfo&); - ~LoggedPacketInfo(); - int64_t log_time_ms() const { return log_packet_time.ms(); } - int64_t log_time_us() const { return log_packet_time.us(); } - uint32_t ssrc; - uint16_t stream_seq_no; - uint16_t size; - uint16_t payload_size; - uint16_t padding_size; - uint16_t overhead = 0; - uint8_t payload_type; - LoggedMediaType media_type = LoggedMediaType::kUnknown; - bool rtx = false; - bool marker_bit = false; - bool has_transport_seq_no = false; - bool last_in_feedback = false; - uint16_t transport_seq_no = 0; - // The RTP header timestamp unwrapped and converted from tick count to seconds - // based 
timestamp. - Timestamp capture_time; - // The time the packet was logged. This is the receive time for incoming - // packets and send time for outgoing. - Timestamp log_packet_time; - // Send time as reported by abs-send-time extension, For outgoing packets this - // corresponds to log_packet_time, but might be measured using another clock. - Timestamp reported_send_time; - // The receive time that was reported in feedback. For incoming packets this - // corresponds to log_packet_time, but might be measured using another clock. - // PlusInfinity indicates that the packet was lost. - Timestamp reported_recv_time = Timestamp::MinusInfinity(); - // The time feedback message was logged. This is the feedback send time for - // incoming packets and feedback receive time for outgoing. - // PlusInfinity indicates that feedback was expected but not received. - Timestamp log_feedback_time = Timestamp::MinusInfinity(); - // The delay betweeen receiving an RTP packet and sending feedback for - // incoming packets. For outgoing packets we don't know the feedback send - // time, and this is instead calculated as the difference in reported receive - // time between this packet and the last packet in the same feedback message. - TimeDelta feedback_hold_duration = TimeDelta::MinusInfinity(); -}; - -struct InferredRouteChangeEvent { - int64_t log_time_ms() const { return log_time.ms(); } - int64_t log_time_us() const { return log_time.us(); } - uint32_t route_id; - Timestamp log_time = Timestamp::MinusInfinity(); - uint16_t send_overhead; - uint16_t return_overhead; -}; - -enum class LoggedIceEventType { - kAdded, - kUpdated, - kDestroyed, - kSelected, - kCheckSent, - kCheckReceived, - kCheckResponseSent, - kCheckResponseReceived, -}; - -struct LoggedIceEvent { - uint32_t candidate_pair_id; - Timestamp log_time; - LoggedIceEventType event_type; -}; - // This class is used to process lists of LoggedRtpPacketIncoming // and LoggedRtpPacketOutgoing without duplicating the code. // TODO(terelius): Remove this class. Instead use e.g. 
a vector of pointers @@ -319,11 +240,48 @@ class ParsedRtcEventLog { kDontParse, kAttemptWebrtcDefaultConfig }; + class ParseStatus { + public: + static ParseStatus Success() { return ParseStatus(); } + static ParseStatus Error(std::string error, std::string file, int line) { + return ParseStatus(error, file, line); + } - using ParseStatus = RtcEventLogParseStatus; + bool ok() const { return error_.empty() && file_.empty() && line_ == 0; } + std::string message() const { + return error_ + " failed at " + file_ + " line " + std::to_string(line_); + } + + ABSL_DEPRECATED("Use ok() instead") operator bool() const { return ok(); } + + private: + ParseStatus() : error_(), file_(), line_(0) {} + ParseStatus(std::string error, std::string file, int line) + : error_(error), file_(file), line_(line) {} + std::string error_; + std::string file_; + int line_; + }; template - using ParseStatusOr = RtcEventLogParseStatusOr; + class ParseStatusOr { + public: + ParseStatusOr(const ParseStatus& error) // NOLINT + : status_(error), value_() {} + ParseStatusOr(const T& value) // NOLINT + : status_(ParseStatus::Success()), value_(value) {} + bool ok() const { return status_.ok(); } + const T& value() const& { + RTC_DCHECK(status_.ok()); + return value_; + } + std::string message() const { return status_.message(); } + const ParseStatus& status() const { return status_; } + + private: + ParseStatus status_; + T value_; + }; struct LoggedRtpStreamIncoming { LoggedRtpStreamIncoming(); @@ -643,8 +601,8 @@ class ParsedRtcEventLog { return decoded_frames_; } - Timestamp first_timestamp() const { return first_timestamp_; } - Timestamp last_timestamp() const { return last_timestamp_; } + int64_t first_timestamp() const { return first_timestamp_; } + int64_t last_timestamp() const { return last_timestamp_; } const LogSegment& first_log_segment() const { return first_log_segment_; } @@ -662,7 +620,6 @@ class ParsedRtcEventLog { private: ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternal(absl::string_view s); - ABSL_MUST_USE_RESULT ParseStatus ParseStreamInternalV3(absl::string_view s); ABSL_MUST_USE_RESULT ParseStatus StoreParsedLegacyEvent(const rtclog::Event& event); @@ -889,8 +846,8 @@ class ParsedRtcEventLog { std::vector last_incoming_rtcp_packet_; - Timestamp first_timestamp_ = Timestamp::PlusInfinity(); - Timestamp last_timestamp_ = Timestamp::MinusInfinity(); + int64_t first_timestamp_; + int64_t last_timestamp_; LogSegment first_log_segment_ = LogSegment(0, std::numeric_limits::max()); diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h index bf8a0592a..e4f7b6659 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_media_engine.h @@ -275,7 +275,7 @@ class RtpHelper : public Base { } void OnPacketSent(const rtc::SentPacket& sent_packet) override {} void OnReadyToSend(bool ready) override { ready_to_send_ = ready; } - void OnNetworkRouteChanged(absl::string_view transport_name, + void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override { last_network_route_ = network_route; ++num_network_route_changes_; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc index 21322419e..4f4282176 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.cc @@ -21,7 +21,7 @@ void 
CompareHeaderExtensions(const char* packet1, size_t packet1_size, const char* packet2, size_t packet2_size, - const std::vector& encrypted_headers, + const std::vector encrypted_headers, bool expect_equal) { // Sanity check: packets must be large enough to contain the RTP header and // extensions header. diff --git a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h index 8a176038c..f2578151e 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/fake_rtp.h @@ -295,7 +295,7 @@ void CompareHeaderExtensions(const char* packet1, size_t packet1_size, const char* packet2, size_t packet2_size, - const std::vector& encrypted_headers, + const std::vector encrypted_headers, bool expect_equal); #endif // MEDIA_BASE_FAKE_RTP_H_ diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc index 1b11fcc4e..11953c2c5 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.cc @@ -26,8 +26,14 @@ VideoOptions::VideoOptions() : content_hint(VideoTrackInterface::ContentHint::kNone) {} VideoOptions::~VideoOptions() = default; -MediaChannel::MediaChannel(TaskQueueBase* network_thread, bool enable_dscp) - : enable_dscp_(enable_dscp), +MediaChannel::MediaChannel(const MediaConfig& config, + TaskQueueBase* network_thread) + : enable_dscp_(config.enable_dscp), + network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), + network_thread_(network_thread) {} + +MediaChannel::MediaChannel(TaskQueueBase* network_thread) + : enable_dscp_(false), network_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()), network_thread_(network_thread) {} @@ -89,11 +95,6 @@ bool MediaChannel::ExtmapAllowMixed() const { return extmap_allow_mixed_; } -bool MediaChannel::HasNetworkInterface() const { - RTC_DCHECK_RUN_ON(network_thread_); - return network_interface_ != nullptr; -} - void MediaChannel::SetEncoderToPacketizerFrameTransformer( uint32_t ssrc, rtc::scoped_refptr frame_transformer) {} diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h index 2b0ef8127..b3ceb3930 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_channel.h @@ -38,6 +38,7 @@ #include "common_video/include/quality_limitation_reason.h" #include "media/base/codec.h" #include "media/base/delayable.h" +#include "media/base/media_config.h" #include "media/base/media_constants.h" #include "media/base/stream_params.h" #include "modules/audio_processing/include/audio_processing_statistics.h" @@ -169,8 +170,9 @@ class MediaChannel { virtual ~NetworkInterface() {} }; - explicit MediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false); + MediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread); + explicit MediaChannel(webrtc::TaskQueueBase* network_thread); virtual ~MediaChannel(); virtual cricket::MediaType media_type() const = 0; @@ -187,7 +189,7 @@ class MediaChannel { virtual void OnReadyToSend(bool ready) = 0; // Called when the network route used for sending packets changed. virtual void OnNetworkRouteChanged( - absl::string_view transport_name, + const std::string& transport_name, const rtc::NetworkRoute& network_route) = 0; // Creates a new outgoing media stream with SSRCs and CNAME as described // by sp. 
@@ -208,14 +210,11 @@ class MediaChannel { // Resets any cached StreamParams for an unsignaled RecvStream, and removes // any existing unsignaled streams. virtual void ResetUnsignaledRecvStream() = 0; - // This is currently a workaround because of the demuxer state being managed - // across two separate threads. Once the state is consistently managed on - // the same thread (network), this workaround can be removed. - // These two notifications inform the media channel when the transport's - // demuxer criteria is being updated. + // Informs the media channel when the transport's demuxer criteria is updated. // * OnDemuxerCriteriaUpdatePending() happens on the same thread that the // channel's streams are added and removed (worker thread). - // * OnDemuxerCriteriaUpdateComplete() happens on the same thread. + // * OnDemuxerCriteriaUpdateComplete() happens on the thread where the demuxer + // lives (network thread). // Because the demuxer is updated asynchronously, there is a window of time // where packets are arriving to the channel for streams that have already // been removed on the worker thread. It is important NOT to treat these as @@ -260,10 +259,6 @@ class MediaChannel { void SetExtmapAllowMixed(bool extmap_allow_mixed); bool ExtmapAllowMixed() const; - // Returns `true` if a non-null NetworkInterface pointer is held. - // Must be called on the network thread. - bool HasNetworkInterface() const; - virtual webrtc::RtpParameters GetRtpSendParameters(uint32_t ssrc) const = 0; virtual webrtc::RTCError SetRtpSendParameters( uint32_t ssrc, @@ -779,9 +774,11 @@ struct AudioRecvParameters : RtpParameters {}; class VoiceMediaChannel : public MediaChannel, public Delayable { public: - VoiceMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} + explicit VoiceMediaChannel(webrtc::TaskQueueBase* network_thread) + : MediaChannel(network_thread) {} + VoiceMediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread) + : MediaChannel(config, network_thread) {} ~VoiceMediaChannel() override {} cricket::MediaType media_type() const override; @@ -849,9 +846,11 @@ struct VideoRecvParameters : RtpParameters {}; class VideoMediaChannel : public MediaChannel, public Delayable { public: - explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread, - bool enable_dscp = false) - : MediaChannel(network_thread, enable_dscp) {} + explicit VideoMediaChannel(webrtc::TaskQueueBase* network_thread) + : MediaChannel(network_thread) {} + VideoMediaChannel(const MediaConfig& config, + webrtc::TaskQueueBase* network_thread) + : MediaChannel(config, network_thread) {} ~VideoMediaChannel() override {} cricket::MediaType media_type() const override; diff --git a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h index 1778104a3..6f47127f3 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/media_engine.h @@ -24,7 +24,6 @@ #include "call/audio_state.h" #include "media/base/codec.h" #include "media/base/media_channel.h" -#include "media/base/media_config.h" #include "media/base/video_common.h" #include "rtc_base/system/file_wrapper.h" @@ -64,9 +63,7 @@ class VoiceEngineInterface : public RtpHeaderExtensionQueryInterface { public: VoiceEngineInterface() = default; virtual ~VoiceEngineInterface() = default; - - VoiceEngineInterface(const VoiceEngineInterface&) = delete; - VoiceEngineInterface& 
operator=(const VoiceEngineInterface&) = delete; + RTC_DISALLOW_COPY_AND_ASSIGN(VoiceEngineInterface); // Initialization // Starts the engine. @@ -100,9 +97,7 @@ class VideoEngineInterface : public RtpHeaderExtensionQueryInterface { public: VideoEngineInterface() = default; virtual ~VideoEngineInterface() = default; - - VideoEngineInterface(const VideoEngineInterface&) = delete; - VideoEngineInterface& operator=(const VideoEngineInterface&) = delete; + RTC_DISALLOW_COPY_AND_ASSIGN(VideoEngineInterface); // Creates a video media channel, paired with the specified voice channel. // Returns NULL on failure. diff --git a/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h b/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h index c9c8a0959..1f46469cb 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/stream_params.h @@ -54,6 +54,7 @@ #include "absl/algorithm/container.h" #include "media/base/rid_description.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/unique_id_generator.h" namespace cricket { diff --git a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h index 071330f33..1bae10d41 100644 --- a/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/media/base/video_adapter.h @@ -19,6 +19,7 @@ #include "api/video/video_source_interface.h" #include "common_video/framerate_controller.h" #include "media/base/video_common.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/thread_annotations.h" @@ -37,9 +38,6 @@ class RTC_EXPORT VideoAdapter { explicit VideoAdapter(int source_resolution_alignment); virtual ~VideoAdapter(); - VideoAdapter(const VideoAdapter&) = delete; - VideoAdapter& operator=(const VideoAdapter&) = delete; - // Return the adapted resolution and cropping parameters given the // input resolution. The input frame should first be cropped, then // scaled to the final output resolution. Returns true if the frame @@ -148,6 +146,8 @@ class RTC_EXPORT VideoAdapter { // The critical section to protect the above variables. mutable webrtc::Mutex mutex_; + + RTC_DISALLOW_COPY_AND_ASSIGN(VideoAdapter); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h index b84c385b9..e732379cb 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/fake_webrtc_call.h @@ -193,7 +193,8 @@ class FakeVideoSendStream final void OnFrame(const webrtc::VideoFrame& frame) override; // webrtc::VideoSendStream implementation. 
- void UpdateActiveSimulcastLayers(std::vector active_layers) override; + void UpdateActiveSimulcastLayers( + const std::vector active_layers) override; void Start() override; void Stop() override; bool started() override { return IsSending(); } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc index c24c48854..faac91e70 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_decoder_factory.cc @@ -24,7 +24,7 @@ #include "system_wrappers/include/field_trial.h" #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) -#include "modules/video_coding/codecs/av1/dav1d_decoder.h" // nogncheck +#include "modules/video_coding/codecs/av1/dav1d_decoder.h" #endif namespace webrtc { @@ -47,7 +47,7 @@ std::vector InternalDecoderFactory::GetSupportedFormats() formats.push_back(SdpVideoFormat(cricket::kVp8CodecName)); for (const SdpVideoFormat& format : SupportedVP9DecoderCodecs()) formats.push_back(format); - for (const SdpVideoFormat& h264_format : SupportedH264DecoderCodecs()) + for (const SdpVideoFormat& h264_format : SupportedH264Codecs()) formats.push_back(h264_format); if (kIsLibaomAv1DecoderSupported || diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc index 5ccb93d0c..bb550662f 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/internal_encoder_factory.cc @@ -16,7 +16,7 @@ #include "api/video_codecs/sdp_video_format.h" #include "media/base/codec.h" #include "media/base/media_constants.h" -#include "modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h" +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" #include "modules/video_coding/codecs/h264/include/h264.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" @@ -51,7 +51,7 @@ std::unique_ptr InternalEncoderFactory::CreateVideoEncoder( return H264Encoder::Create(cricket::VideoCodec(format)); if (kIsLibaomAv1EncoderSupported && absl::EqualsIgnoreCase(format.name, cricket::kAv1CodecName)) - return CreateLibaomAv1EncoderIfSupported(); + return CreateLibaomAv1Encoder(); RTC_LOG(LS_ERROR) << "Trying to created encoder of unsupported format " << format.name; return nullptr; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc index 704c2d6a0..9143361e8 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/simulcast_encoder_adapter.cc @@ -791,8 +791,8 @@ webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec( // kComplexityHigher, which maps to cpu_used = -4. int pixels_per_frame = codec_params.width * codec_params.height; if (pixels_per_frame < 352 * 288) { - codec_params.SetVideoEncoderComplexity( - webrtc::VideoCodecComplexity::kComplexityHigher); + codec_params.VP8()->complexity = + webrtc::VideoCodecComplexity::kComplexityHigher; } // Turn off denoising for all streams but the highest resolution. 
codec_params.VP8()->denoisingOn = false; diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc index 58ae4995d..f9de3ffb4 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.cc @@ -135,15 +135,8 @@ bool IsCodecValidForLowerRange(const VideoCodec& codec) { return true; } else if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { std::string profileLevelId; - std::string packetizationMode; - + // H264 with YUV444. if (codec.GetParam(kH264FmtpProfileLevelId, &profileLevelId)) { - if (absl::StartsWithIgnoreCase(profileLevelId, "4d00")) { - if (codec.GetParam(kH264FmtpPacketizationMode, &packetizationMode)) { - return packetizationMode == "0"; - } - } - // H264 with YUV444. return absl::StartsWithIgnoreCase(profileLevelId, "f400"); } } @@ -486,6 +479,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( (parameters_.config.rtp.ssrcs.size() == 1 || NumActiveStreams(rtp_parameters_) == 1); + bool frame_dropping = !is_screencast; bool denoising; bool codec_default_denoising = false; if (is_screencast) { @@ -499,6 +493,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( if (absl::EqualsIgnoreCase(codec.name, kH264CodecName)) { webrtc::VideoCodecH264 h264_settings = webrtc::VideoEncoder::GetDefaultH264Settings(); + h264_settings.frameDroppingOn = frame_dropping; return rtc::make_ref_counted< webrtc::VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); } @@ -508,6 +503,7 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( vp8_settings.automaticResizeOn = automatic_resize; // VP8 denoising is enabled by default. vp8_settings.denoisingOn = codec_default_denoising ? true : denoising; + vp8_settings.frameDroppingOn = frame_dropping; return rtc::make_ref_counted< webrtc::VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); } @@ -529,16 +525,15 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( // Ensure frame dropping is always enabled. RTC_DCHECK(vp9_settings.frameDroppingOn); if (!is_screencast) { - webrtc::FieldTrialFlag interlayer_pred_experiment_enabled("Enabled"); + webrtc::FieldTrialFlag interlayer_pred_experiment_enabled = + webrtc::FieldTrialFlag("Enabled"); webrtc::FieldTrialEnum inter_layer_pred_mode( "inter_layer_pred_mode", webrtc::InterLayerPredMode::kOnKeyPic, {{"off", webrtc::InterLayerPredMode::kOff}, {"on", webrtc::InterLayerPredMode::kOn}, {"onkeypic", webrtc::InterLayerPredMode::kOnKeyPic}}); - webrtc::FieldTrialFlag force_flexible_mode("FlexibleMode"); webrtc::ParseFieldTrial( - {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode, - &force_flexible_mode}, + {&interlayer_pred_experiment_enabled, &inter_layer_pred_mode}, call_->trials().Lookup("WebRTC-Vp9InterLayerPred")); if (interlayer_pred_experiment_enabled) { vp9_settings.interLayerPred = inter_layer_pred_mode; @@ -546,7 +541,6 @@ WebRtcVideoChannel::WebRtcVideoSendStream::ConfigureVideoEncoderSettings( // Limit inter-layer prediction to key pictures by default. vp9_settings.interLayerPred = webrtc::InterLayerPredMode::kOnKeyPic; } - vp9_settings.flexibleMode = force_flexible_mode.Get(); } else { // Multiple spatial layers vp9 screenshare needs flexible mode. 
vp9_settings.flexibleMode = vp9_settings.numberOfSpatialLayers > 1; @@ -693,7 +687,7 @@ WebRtcVideoChannel::WebRtcVideoChannel( webrtc::VideoEncoderFactory* encoder_factory, webrtc::VideoDecoderFactory* decoder_factory, webrtc::VideoBitrateAllocatorFactory* bitrate_allocator_factory) - : VideoMediaChannel(call->network_thread(), config.enable_dscp), + : VideoMediaChannel(config, call->network_thread()), worker_thread_(call->worker_thread()), call_(call), unsignalled_ssrc_handler_(&default_unsignalled_ssrc_handler_), @@ -906,13 +900,56 @@ void WebRtcVideoChannel::RequestEncoderFallback() { } void WebRtcVideoChannel::RequestEncoderSwitch( - const webrtc::SdpVideoFormat& format, - bool allow_default_fallback) { + const EncoderSwitchRequestCallback::Config& conf) { if (!worker_thread_->IsCurrent()) { - worker_thread_->PostTask( - ToQueuedTask(task_safety_, [this, format, allow_default_fallback] { - RequestEncoderSwitch(format, allow_default_fallback); - })); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, conf] { RequestEncoderSwitch(conf); })); + return; + } + + RTC_DCHECK_RUN_ON(&thread_checker_); + + if (!allow_codec_switching_) { + RTC_LOG(LS_INFO) << "Encoder switch requested but codec switching has" + " not been enabled yet."; + requested_encoder_switch_ = conf; + return; + } + + for (const VideoCodecSettings& codec_setting : negotiated_codecs_) { + if (codec_setting.codec.name == conf.codec_name) { + if (conf.param) { + auto it = codec_setting.codec.params.find(*conf.param); + if (it == codec_setting.codec.params.end()) + continue; + + if (conf.value && it->second != *conf.value) + continue; + } + + if (send_codec_ == codec_setting) { + // Already using this codec, no switch required. + return; + } + + ChangedSendParameters params; + params.send_codec = codec_setting; + ApplyChangedParams(params); + return; + } + } + + RTC_LOG(LS_WARNING) << "Requested encoder with codec_name:" << conf.codec_name + << ", param:" << conf.param.value_or("none") + << " and value:" << conf.value.value_or("none") + << "not found. No switch performed."; +} + +void WebRtcVideoChannel::RequestEncoderSwitch( + const webrtc::SdpVideoFormat& format) { + if (!worker_thread_->IsCurrent()) { + worker_thread_->PostTask(ToQueuedTask( + task_safety_, [this, format] { RequestEncoderSwitch(format); })); return; } @@ -938,13 +975,8 @@ void WebRtcVideoChannel::RequestEncoderSwitch( } } - RTC_LOG(LS_WARNING) << "Failed to switch encoder to: " << format.ToString() - << ". Is default fallback allowed: " - << allow_default_fallback; - - if (allow_default_fallback) { - RequestEncoderFallback(); - } + RTC_LOG(LS_WARNING) << "Encoder switch failed: SdpVideoFormat " + << format.ToString() << " not negotiated."; } bool WebRtcVideoChannel::ApplyChangedParams( @@ -1038,16 +1070,8 @@ webrtc::RtpParameters WebRtcVideoChannel::GetRtpSendParameters( // Need to add the common list of codecs to the send stream-specific // RTP parameters. for (const VideoCodec& codec : send_params_.codecs) { - if (send_codec_ && send_codec_->codec.id == codec.id) { - // Put the current send codec to the front of the codecs list. 
- RTC_DCHECK_EQ(codec.name, send_codec_->codec.name); - rtp_params.codecs.insert(rtp_params.codecs.begin(), - codec.ToCodecParameters()); - } else { - rtp_params.codecs.push_back(codec.ToCodecParameters()); - } + rtp_params.codecs.push_back(codec.ToCodecParameters()); } - return rtp_params; } @@ -1572,8 +1596,11 @@ void WebRtcVideoChannel::OnDemuxerCriteriaUpdatePending() { } void WebRtcVideoChannel::OnDemuxerCriteriaUpdateComplete() { - RTC_DCHECK_RUN_ON(&thread_checker_); - ++demuxer_criteria_completed_id_; + RTC_DCHECK_RUN_ON(&network_thread_checker_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [this] { + RTC_DCHECK_RUN_ON(&thread_checker_); + ++demuxer_criteria_completed_id_; + })); } bool WebRtcVideoChannel::SetSink( @@ -1843,12 +1870,11 @@ void WebRtcVideoChannel::OnReadyToSend(bool ready) { } void WebRtcVideoChannel::OnNetworkRouteChanged( - absl::string_view transport_name, + const std::string& transport_name, const rtc::NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); worker_thread_->PostTask(ToQueuedTask( - task_safety_, - [this, name = std::string(transport_name), route = network_route] { + task_safety_, [this, name = transport_name, route = network_route] { RTC_DCHECK_RUN_ON(&thread_checker_); webrtc::RtpTransportControllerSendInterface* transport = call_->GetTransportControllerSend(); @@ -1925,6 +1951,11 @@ void WebRtcVideoChannel::SetVideoCodecSwitchingEnabled(bool enabled) { allow_codec_switching_ = enabled; if (allow_codec_switching_) { RTC_LOG(LS_INFO) << "Encoder switching enabled."; + if (requested_encoder_switch_) { + RTC_LOG(LS_INFO) << "Executing cached video encoder switch request."; + RequestEncoderSwitch(*requested_encoder_switch_); + requested_encoder_switch_.reset(); + } } } diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h index 940985d9f..90d824a55 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_video_engine.h @@ -167,7 +167,7 @@ class WebRtcVideoChannel : public VideoMediaChannel, int64_t packet_time_us) override; void OnPacketSent(const rtc::SentPacket& sent_packet) override; void OnReadyToSend(bool ready) override; - void OnNetworkRouteChanged(absl::string_view transport_name, + void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override; void SetInterface(NetworkInterface* iface) override; @@ -224,8 +224,11 @@ class WebRtcVideoChannel : public VideoMediaChannel, // Implements webrtc::EncoderSwitchRequestCallback. void RequestEncoderFallback() override; - void RequestEncoderSwitch(const webrtc::SdpVideoFormat& format, - bool allow_default_fallback) override; + + // TODO(bugs.webrtc.org/11341) : Remove this version of RequestEncoderSwitch. + void RequestEncoderSwitch( + const EncoderSwitchRequestCallback::Config& conf) override; + void RequestEncoderSwitch(const webrtc::SdpVideoFormat& format) override; void SetRecordableEncodedFrameCallback( uint32_t ssrc, @@ -635,10 +638,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, std::unique_ptr unknown_ssrc_packet_buffer_ RTC_GUARDED_BY(thread_checker_); - // TODO(bugs.webrtc.org/11341): Remove this and relevant PC API. Presence - // of multiple negotiated codecs allows generic encoder fallback on failures. - // Presence of EncoderSelector allows switching to specific encoders. 
bool allow_codec_switching_ = false; + absl::optional + requested_encoder_switch_; }; class EncoderStreamFactory diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc index 829cb82af..6934a0ca5 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.cc @@ -343,7 +343,6 @@ void WebRtcVoiceEngine::Init() { RTC_LOG(LS_INFO) << "WebRtcVoiceEngine::Init"; // TaskQueue expects to be created/destroyed on the same thread. - RTC_DCHECK(!low_priority_worker_queue_); low_priority_worker_queue_.reset( new rtc::TaskQueue(task_queue_factory_->CreateTaskQueue( "rtc-low-prio", webrtc::TaskQueueFactory::Priority::LOW))); @@ -404,12 +403,15 @@ void WebRtcVoiceEngine::Init() { options.noise_suppression = true; options.typing_detection = true; #endif + options.experimental_ns = false; options.highpass_filter = true; options.stereo_swapping = false; options.audio_jitter_buffer_max_packets = 200; options.audio_jitter_buffer_fast_accelerate = false; options.audio_jitter_buffer_min_delay_ms = 0; options.audio_jitter_buffer_enable_rtx_handling = false; + options.experimental_agc = false; + options.residual_echo_detector = true; bool error = ApplyOptions(options); RTC_DCHECK(error); } @@ -462,14 +464,17 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { // Override noise suppression options for Android. #if defined(WEBRTC_ANDROID) options.typing_detection = false; + options.experimental_ns = false; #endif // Set and adjust gain control options. #if defined(WEBRTC_IOS) // On iOS, VPIO provides built-in AGC. options.auto_gain_control = false; + options.experimental_agc = false; RTC_LOG(LS_INFO) << "Always disable AGC on iOS. 
Use built-in instead."; #elif defined(WEBRTC_ANDROID) + options.experimental_agc = false; #endif #if defined(WEBRTC_IOS) || defined(WEBRTC_ANDROID) @@ -577,8 +582,18 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { return true; } + if (options.experimental_ns) { + experimental_ns_ = options.experimental_ns; + } + webrtc::AudioProcessing::Config apm_config = ap->GetConfig(); +#if !(defined(WEBRTC_ANDROID) || defined(WEBRTC_IOS)) + if (experimental_ns_.has_value()) { + apm_config.transient_suppression.enabled = experimental_ns_.value(); + } +#endif + if (options.echo_cancellation) { apm_config.echo_canceller.enabled = *options.echo_cancellation; apm_config.echo_canceller.mobile_mode = use_mobile_software_aec; @@ -595,11 +610,25 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { apm_config.gain_controller1.kAdaptiveAnalog; #endif } + if (options.tx_agc_target_dbov) { + apm_config.gain_controller1.target_level_dbfs = *options.tx_agc_target_dbov; + } + if (options.tx_agc_digital_compression_gain) { + apm_config.gain_controller1.compression_gain_db = + *options.tx_agc_digital_compression_gain; + } + if (options.tx_agc_limiter) { + apm_config.gain_controller1.enable_limiter = *options.tx_agc_limiter; + } if (options.highpass_filter) { apm_config.high_pass_filter.enabled = *options.highpass_filter; } + if (options.residual_echo_detector) { + apm_config.residual_echo_detector.enabled = *options.residual_echo_detector; + } + if (options.noise_suppression) { const bool enabled = *options.noise_suppression; apm_config.noise_suppression.enabled = enabled; @@ -609,7 +638,9 @@ bool WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { } if (options.typing_detection) { - RTC_LOG(LS_WARNING) << "Typing detection is requested, but unsupported."; + RTC_LOG(LS_INFO) << "Typing detection is enabled? " + << *options.typing_detection; + apm_config.voice_detection.enabled = *options.typing_detection; } ap->ApplyConfig(apm_config); @@ -1306,7 +1337,7 @@ WebRtcVoiceMediaChannel::WebRtcVoiceMediaChannel( const AudioOptions& options, const webrtc::CryptoOptions& crypto_options, webrtc::Call* call) - : VoiceMediaChannel(call->network_thread(), config.enable_dscp), + : VoiceMediaChannel(config, call->network_thread()), worker_thread_(call->worker_thread()), engine_(engine), call_(call), @@ -1641,8 +1672,8 @@ bool CheckRedParameters( RTC_LOG(LS_WARNING) << "audio/RED missing fmtp parameters."; return false; } - std::vector redundant_payloads = - rtc::split(red_parameters->second, '/'); + std::vector redundant_payloads; + rtc::split(red_parameters->second, '/', &redundant_payloads); // 32 is chosen as a maximum upper bound for consistency with the // red payload splitter. if (redundant_payloads.size() < 2 || redundant_payloads.size() > 32) { @@ -1836,15 +1867,13 @@ void WebRtcVoiceMediaChannel::SetSend(bool send) { return; } - // Apply channel specific options. + // Apply channel specific options, and initialize the ADM for recording (this + // may take time on some platforms, e.g. Android). if (send) { engine()->ApplyOptions(options_); - // Initialize the ADM for recording (this may take time on some platforms, - // e.g. Android). - if (options_.init_recording_on_send.value_or(true) && - // InitRecording() may return an error if the ADM is already recording. - !engine()->adm()->RecordingIsInitialized() && + // InitRecording() may return an error if the ADM is already recording. 
+ if (!engine()->adm()->RecordingIsInitialized() && !engine()->adm()->Recording()) { if (engine()->adm()->InitRecording() != 0) { RTC_LOG(LS_WARNING) << "Failed to initialize recording"; @@ -2260,15 +2289,14 @@ void WebRtcVoiceMediaChannel::OnPacketSent(const rtc::SentPacket& sent_packet) { } void WebRtcVoiceMediaChannel::OnNetworkRouteChanged( - absl::string_view transport_name, + const std::string& transport_name, const rtc::NetworkRoute& network_route) { RTC_DCHECK_RUN_ON(&network_thread_checker_); call_->OnAudioTransportOverheadChanged(network_route.packet_overhead); worker_thread_->PostTask(ToQueuedTask( - task_safety_, - [this, name = std::string(transport_name), route = network_route] { + task_safety_, [this, name = transport_name, route = network_route] { RTC_DCHECK_RUN_ON(worker_thread_); call_->GetTransportControllerSend()->OnNetworkRouteChanged(name, route); })); diff --git a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h index 1061d7a12..a8eb61d31 100644 --- a/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h +++ b/TMessagesProj/jni/voip/webrtc/media/engine/webrtc_voice_engine.h @@ -124,6 +124,9 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { bool is_dumping_aec_ = false; bool initialized_ = false; + // Cache experimental_ns and apply in case they are missing in the audio + // options. + absl::optional experimental_ns_; // Jitter buffer settings for new streams. size_t audio_jitter_buffer_max_packets_ = 200; bool audio_jitter_buffer_fast_accelerate_ = false; @@ -206,7 +209,7 @@ class WebRtcVoiceMediaChannel final : public VoiceMediaChannel, void OnPacketReceived(rtc::CopyOnWriteBuffer packet, int64_t packet_time_us) override; void OnPacketSent(const rtc::SentPacket& sent_packet) override; - void OnNetworkRouteChanged(absl::string_view transport_name, + void OnNetworkRouteChanged(const std::string& transport_name, const rtc::NetworkRoute& network_route) override; void OnReadyToSend(bool ready) override; bool GetStats(VoiceMediaInfo* info, bool get_and_clear_legacy_stats) override; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc index 0a671ced8..4a5166863 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.cc @@ -10,7 +10,6 @@ #include "media/sctp/dcsctp_transport.h" -#include #include #include #include @@ -127,7 +126,7 @@ DcSctpTransport::DcSctpTransport(rtc::Thread* network_thread, socket_->HandleTimeout(timeout_id); }) { RTC_DCHECK_RUN_ON(network_thread_); - static std::atomic instance_count = 0; + static int instance_count = 0; rtc::StringBuilder sb; sb << debug_name_ << instance_count++; debug_name_ = sb.Release(); @@ -360,9 +359,8 @@ SendPacketStatus DcSctpTransport::SendPacketWithStatus( return SendPacketStatus::kSuccess; } -std::unique_ptr DcSctpTransport::CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { - return task_queue_timeout_factory_.CreateTimeout(precision); +std::unique_ptr DcSctpTransport::CreateTimeout() { + return task_queue_timeout_factory_.CreateTimeout(); } dcsctp::TimeMs DcSctpTransport::TimeMillis() { @@ -527,7 +525,6 @@ void DcSctpTransport::OnTransportReadPacket( size_t length, const int64_t& /* packet_time_us */, int flags) { - RTC_DCHECK_RUN_ON(network_thread_); if (flags) { // We are only interested in SCTP packets. 
return; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h index 11c2f829c..c8c519939 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/dcsctp_transport.h @@ -17,7 +17,6 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "media/sctp/sctp_transport_internal.h" #include "net/dcsctp/public/dcsctp_options.h" #include "net/dcsctp/public/dcsctp_socket.h" @@ -62,8 +61,7 @@ class DcSctpTransport : public cricket::SctpTransportInternal, // dcsctp::DcSctpSocketCallbacks dcsctp::SendPacketStatus SendPacketWithStatus( rtc::ArrayView data) override; - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override; + std::unique_ptr CreateTimeout() override; dcsctp::TimeMs TimeMillis() override; uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnTotalBufferedAmountLow() override; diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc index 071d7fdb2..5097d423d 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.cc @@ -25,11 +25,11 @@ namespace cricket { SctpTransportFactory::SctpTransportFactory(rtc::Thread* network_thread) - : network_thread_(network_thread), use_usrsctp_("Disabled", false) { + : network_thread_(network_thread), use_dcsctp_("Enabled", false) { RTC_UNUSED(network_thread_); #ifdef WEBRTC_HAVE_DCSCTP - webrtc::ParseFieldTrial({&use_usrsctp_}, webrtc::field_trial::FindFullName( - "WebRTC-DataChannel-Dcsctp")); + webrtc::ParseFieldTrial({&use_dcsctp_}, webrtc::field_trial::FindFullName( + "WebRTC-DataChannel-Dcsctp")); #endif } @@ -38,7 +38,7 @@ SctpTransportFactory::CreateSctpTransport( rtc::PacketTransportInternal* transport) { std::unique_ptr result; #ifdef WEBRTC_HAVE_DCSCTP - if (!use_usrsctp_.Get()) { + if (use_dcsctp_.Get()) { result = std::unique_ptr(new webrtc::DcSctpTransport( network_thread_, transport, webrtc::Clock::GetRealTimeClock())); } diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h index 9ae246a6a..ed7c2163d 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/sctp_transport_factory.h @@ -29,7 +29,7 @@ class SctpTransportFactory : public webrtc::SctpTransportFactoryInterface { private: rtc::Thread* network_thread_; - webrtc::FieldTrialFlag use_usrsctp_; + webrtc::FieldTrialFlag use_dcsctp_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h index 2dd6abf9c..06988fd15 100644 --- a/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/media/sctp/usrsctp_transport.h @@ -22,6 +22,7 @@ #include "absl/types/optional.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -75,9 +76,6 @@ class UsrsctpTransport : public SctpTransportInternal, rtc::PacketTransportInternal* transport); ~UsrsctpTransport() 
override; - UsrsctpTransport(const UsrsctpTransport&) = delete; - UsrsctpTransport& operator=(const UsrsctpTransport&) = delete; - // SctpTransportInternal overrides (see sctptransportinternal.h for comments). void SetDtlsTransport(rtc::PacketTransportInternal* transport) override; bool Start(int local_port, int remote_port, int max_message_size) override; @@ -287,6 +285,8 @@ class UsrsctpTransport : public SctpTransportInternal, uintptr_t id_ = 0; friend class UsrsctpTransportMap; + + RTC_DISALLOW_COPY_AND_ASSIGN(UsrsctpTransport); }; class UsrsctpTransportMap; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h index 664e76bda..1c91fa19a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/audio_network_adaptor_impl.h @@ -21,6 +21,7 @@ #include "modules/audio_coding/audio_network_adaptor/debug_dump_writer.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -43,9 +44,6 @@ class AudioNetworkAdaptorImpl final : public AudioNetworkAdaptor { ~AudioNetworkAdaptorImpl() override; - AudioNetworkAdaptorImpl(const AudioNetworkAdaptorImpl&) = delete; - AudioNetworkAdaptorImpl& operator=(const AudioNetworkAdaptorImpl&) = delete; - void SetUplinkBandwidth(int uplink_bandwidth_bps) override; void SetUplinkPacketLossFraction(float uplink_packet_loss_fraction) override; @@ -82,6 +80,8 @@ class AudioNetworkAdaptorImpl final : public AudioNetworkAdaptor { absl::optional prev_config_; ANAStats stats_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioNetworkAdaptorImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h index c1032146c..41bfbd1c3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/bitrate_controller.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { namespace audio_network_adaptor { @@ -38,9 +39,6 @@ class BitrateController final : public Controller { ~BitrateController() override; - BitrateController(const BitrateController&) = delete; - BitrateController& operator=(const BitrateController&) = delete; - void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -51,6 +49,7 @@ class BitrateController final : public Controller { int frame_length_ms_; absl::optional target_audio_bitrate_bps_; absl::optional overhead_bytes_per_packet_; + RTC_DISALLOW_COPY_AND_ASSIGN(BitrateController); }; } // namespace audio_network_adaptor diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h index 3cd4bb7de..f211f40f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/channel_controller.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -40,9 +41,6 @@ class ChannelController final : public Controller { ~ChannelController() override; - ChannelController(const ChannelController&) = delete; - ChannelController& operator=(const ChannelController&) = delete; - void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -51,6 +49,7 @@ class ChannelController final : public Controller { const Config config_; size_t channels_to_encode_; absl::optional uplink_bandwidth_bps_; + RTC_DISALLOW_COPY_AND_ASSIGN(ChannelController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/config.proto b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/config.proto deleted file mode 100644 index a81545199..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/config.proto +++ /dev/null @@ -1,196 +0,0 @@ -syntax = "proto2"; - -package webrtc.audio_network_adaptor.config; - -option optimize_for = LITE_RUNTIME; -option java_package = "org.webrtc.AudioNetworkAdaptor"; -option java_outer_classname = "Config"; -option objc_class_prefix = "WANA"; - -message FecController { - message Threshold { - // Threshold defines a curve in the bandwidth/packet-loss domain. The - // curve is characterized by the two conjunction points: A and B. - // - // packet ^ | - // loss | A| - // | \ A: (low_bandwidth_bps, low_bandwidth_packet_loss) - // | \ B: (high_bandwidth_bps, high_bandwidth_packet_loss) - // | B\________ - // |---------------> bandwidth - optional int32 low_bandwidth_bps = 1; - optional float low_bandwidth_packet_loss = 2; - optional int32 high_bandwidth_bps = 3; - optional float high_bandwidth_packet_loss = 4; - } - - // `fec_enabling_threshold` defines a curve, above which FEC should be - // enabled. `fec_disabling_threshold` defines a curve, under which FEC - // should be disabled. See below - // - // packet-loss ^ | | - // | | | FEC - // | \ \ ON - // | FEC \ \_______ fec_enabling_threshold - // | OFF \_________ fec_disabling_threshold - // |-----------------> bandwidth - optional Threshold fec_enabling_threshold = 1; - optional Threshold fec_disabling_threshold = 2; - - // `time_constant_ms` is the time constant for an exponential filter, which - // is used for smoothing the packet loss fraction. - optional int32 time_constant_ms = 3; -} - -message FecControllerRplrBased { - message Threshold { - // Threshold defines a curve in the bandwidth/recoverable-packet-loss - // domain. - // The curve is characterized by the two conjunction points: A and B. 
- // - // recoverable ^ - // packet | | - // loss | A| - // | \ A: (low_bandwidth_bps, - // | \ low_bandwidth_recoverable_packet_loss) - // | \ B: (high_bandwidth_bps, - // | \ high_bandwidth_recoverable_packet_loss) - // | B\________ - // |---------------> bandwidth - optional int32 low_bandwidth_bps = 1; - optional float low_bandwidth_recoverable_packet_loss = 2; - optional int32 high_bandwidth_bps = 3; - optional float high_bandwidth_recoverable_packet_loss = 4; - } - - // `fec_enabling_threshold` defines a curve, above which FEC should be - // enabled. `fec_disabling_threshold` defines a curve, under which FEC - // should be disabled. See below - // - // packet-loss ^ | | - // | | | FEC - // | \ \ ON - // | FEC \ \_______ fec_enabling_threshold - // | OFF \_________ fec_disabling_threshold - // |-----------------> bandwidth - optional Threshold fec_enabling_threshold = 1; - optional Threshold fec_disabling_threshold = 2; -} - -message FrameLengthController { - // Uplink packet loss fraction below which frame length can increase. - optional float fl_increasing_packet_loss_fraction = 1; - - // Uplink packet loss fraction above which frame length should decrease. - optional float fl_decreasing_packet_loss_fraction = 2; - - // Uplink bandwidth below which frame length can switch from 20ms to 60ms. - optional int32 fl_20ms_to_60ms_bandwidth_bps = 3; - - // Uplink bandwidth above which frame length should switch from 60ms to 20ms. - optional int32 fl_60ms_to_20ms_bandwidth_bps = 4; - - // Uplink bandwidth below which frame length can switch from 60ms to 120ms. - optional int32 fl_60ms_to_120ms_bandwidth_bps = 5; - - // Uplink bandwidth above which frame length should switch from 120ms to 60ms. - optional int32 fl_120ms_to_60ms_bandwidth_bps = 6; - - // Offset to apply to the per-packet overhead when increasing frame length. - optional int32 fl_increase_overhead_offset = 7; - - // Offset to apply to the per-packet overhead when decreasing frame length. - optional int32 fl_decrease_overhead_offset = 8; - - // Uplink bandwidth below which frame length can switch from 20ms to 40ms. In - // current implementation, defining this will invalidate - // fl_20ms_to_60ms_bandwidth_bps. - optional int32 fl_20ms_to_40ms_bandwidth_bps = 9; - - // Uplink bandwidth above which frame length should switch from 40ms to 20ms. - optional int32 fl_40ms_to_20ms_bandwidth_bps = 10; - - // Uplink bandwidth below which frame length can switch from 40ms to 60ms. - optional int32 fl_40ms_to_60ms_bandwidth_bps = 11; - - // Uplink bandwidth above which frame length should switch from 60ms to 40ms. - // In current implementation, defining this will invalidate - // fl_60ms_to_20ms_bandwidth_bps. - optional int32 fl_60ms_to_40ms_bandwidth_bps = 12; -} - -message FrameLengthControllerV2 { - // FrameLengthControllerV2 chooses the frame length by taking the target - // bitrate and subtracting the overhead bitrate to obtain the remaining - // bitrate for the payload. The chosen frame length is the shortest possible - // where the payload bitrate is more than `min_payload_bitrate_bps`. - optional int32 min_payload_bitrate_bps = 1; - - // If true, uses the stable target bitrate to decide the frame length. This - // will result in less frame length toggling but spending more time at longer - // frame lengths compared to using the normal target bitrate. - optional bool use_slow_adaptation = 2; -} - -message ChannelController { - // Uplink bandwidth above which the number of encoded channels should switch - // from 1 to 2. 
- optional int32 channel_1_to_2_bandwidth_bps = 1; - - // Uplink bandwidth below which the number of encoded channels should switch - // from 2 to 1. - optional int32 channel_2_to_1_bandwidth_bps = 2; -} - -message DtxController { - // Uplink bandwidth below which DTX should be switched on. - optional int32 dtx_enabling_bandwidth_bps = 1; - - // Uplink bandwidth above which DTX should be switched off. - optional int32 dtx_disabling_bandwidth_bps = 2; -} - -message BitrateController { - // Offset to apply to per-packet overhead when the frame length is increased. - optional int32 fl_increase_overhead_offset = 1; - // Offset to apply to per-packet overhead when the frame length is decreased. - optional int32 fl_decrease_overhead_offset = 2; -} - -message Controller { - message ScoringPoint { - // `ScoringPoint` is a subspace of network condition. It is used for - // comparing the significance of controllers. - optional int32 uplink_bandwidth_bps = 1; - optional float uplink_packet_loss_fraction = 2; - } - - // The distance from `scoring_point` to a given network condition defines - // the significance of this controller with respect that network condition. - // Shorter distance means higher significance. The significances of - // controllers determine their order in the processing pipeline. Controllers - // without `scoring_point` follow their default order in - // `ControllerManager::controllers`. - optional ScoringPoint scoring_point = 1; - - oneof controller { - FecController fec_controller = 21; - FrameLengthController frame_length_controller = 22; - ChannelController channel_controller = 23; - DtxController dtx_controller = 24; - BitrateController bitrate_controller = 25; - FecControllerRplrBased fec_controller_rplr_based = 26; - FrameLengthControllerV2 frame_length_controller_v2 = 27; - } -} - -message ControllerManager { - repeated Controller controllers = 1; - - // Least time since last reordering for a new reordering to be made. - optional int32 min_reordering_time_ms = 2; - - // Least squared distance from last scoring point for a new reordering to be - // made. - optional float min_reordering_squared_distance = 3; -} diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h index f7d7b34fb..c168ebc6c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/controller_manager.h @@ -17,6 +17,7 @@ #include #include "modules/audio_coding/audio_network_adaptor/controller.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -79,9 +80,6 @@ class ControllerManagerImpl final : public ControllerManager { ~ControllerManagerImpl() override; - ControllerManagerImpl(const ControllerManagerImpl&) = delete; - ControllerManagerImpl& operator=(const ControllerManagerImpl&) = delete; - // Sort controllers based on their significance. std::vector GetSortedControllers( const Controller::NetworkMetrics& metrics) override; @@ -116,6 +114,8 @@ class ControllerManagerImpl final : public ControllerManager { // `scoring_points_` saves the scoring points of various // controllers. 
std::map controller_scoring_points_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ControllerManagerImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h index 8fdf2f772..367f65954 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/debug_dump_writer.h @@ -15,6 +15,7 @@ #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/system/file_wrapper.h" #if WEBRTC_ENABLE_PROTOBUF diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h index b8a8e476e..83fdf3ddd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/dtx_controller.h @@ -14,6 +14,7 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -34,9 +35,6 @@ class DtxController final : public Controller { ~DtxController() override; - DtxController(const DtxController&) = delete; - DtxController& operator=(const DtxController&) = delete; - void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -45,6 +43,7 @@ class DtxController final : public Controller { const Config config_; bool dtx_enabled_; absl::optional uplink_bandwidth_bps_; + RTC_DISALLOW_COPY_AND_ASSIGN(DtxController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h index a147311fc..c5e57e63e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/event_log_writer.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_AUDIO_NETWORK_ADAPTOR_EVENT_LOG_WRITER_H_ #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { class RtcEventLog; @@ -23,10 +24,6 @@ class EventLogWriter final { float min_bitrate_change_fraction, float min_packet_loss_change_fraction); ~EventLogWriter(); - - EventLogWriter(const EventLogWriter&) = delete; - EventLogWriter& operator=(const EventLogWriter&) = delete; - void MaybeLogEncoderConfig(const AudioEncoderRuntimeConfig& config); private: @@ -37,6 +34,7 @@ class EventLogWriter final { const float min_bitrate_change_fraction_; const float min_packet_loss_change_fraction_; AudioEncoderRuntimeConfig last_logged_config_; + RTC_DISALLOW_COPY_AND_ASSIGN(EventLogWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h 
b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h index 0c57ad1d1..85d235ed2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/fec_controller_plr_based.h @@ -18,6 +18,7 @@ #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" #include "modules/audio_coding/audio_network_adaptor/util/threshold_curve.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -52,9 +53,6 @@ class FecControllerPlrBased final : public Controller { ~FecControllerPlrBased() override; - FecControllerPlrBased(const FecControllerPlrBased&) = delete; - FecControllerPlrBased& operator=(const FecControllerPlrBased&) = delete; - void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -67,6 +65,8 @@ class FecControllerPlrBased final : public Controller { bool fec_enabled_; absl::optional uplink_bandwidth_bps_; const std::unique_ptr packet_loss_smoother_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerPlrBased); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h index 04693f8db..74a787e1c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/audio_network_adaptor/frame_length_controller.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "modules/audio_coding/audio_network_adaptor/controller.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -61,9 +62,6 @@ class FrameLengthController final : public Controller { ~FrameLengthController() override; - FrameLengthController(const FrameLengthController&) = delete; - FrameLengthController& operator=(const FrameLengthController&) = delete; - void UpdateNetworkMetrics(const NetworkMetrics& network_metrics) override; void MakeDecision(AudioEncoderRuntimeConfig* config) override; @@ -86,6 +84,8 @@ class FrameLengthController final : public Controller { // True if the previous frame length decision was an increase, otherwise // false. bool prev_decision_increase_ = false; + + RTC_DISALLOW_COPY_AND_ASSIGN(FrameLengthController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc index 46ac671b3..d580a0509 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.cc @@ -40,14 +40,8 @@ int AudioDecoderPcmU::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz); - // Adjust the encoded length down to ensure the same number of samples in each - // channel. - const size_t encoded_len_adjusted = - PacketDuration(encoded, encoded_len) * - Channels(); // 1 byte per sample per channel int16_t temp_type = 1; // Default is speech. 
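// Context for the DecodeInternal() hunks in this file (and the matching pcm16b
// change later in this patch): G.711 carries one byte per sample per channel.
// Assuming PacketDuration() returns encoded_len / Channels(), as the removed
// comment implies, the deleted adjustment only rounded the length down to a
// whole number of frames, so passing encoded_len straight through behaves the
// same for well-formed payloads. A sketch of what the removed lines computed
// (names are illustrative, this is an inference from the code shown here):
#include <cstddef>

size_t AdjustedLengthOneBytePerSample(size_t encoded_len, size_t channels) {
  // Equals encoded_len whenever the payload already holds a whole number of
  // frames; it only trims trailing odd bytes otherwise.
  return (encoded_len / channels) * channels;
}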
- size_t ret = - WebRtcG711_DecodeU(encoded, encoded_len_adjusted, decoded, &temp_type); + size_t ret = WebRtcG711_DecodeU(encoded, encoded_len, decoded, &temp_type); *speech_type = ConvertSpeechType(temp_type); return static_cast(ret); } @@ -81,14 +75,8 @@ int AudioDecoderPcmA::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(SampleRateHz(), sample_rate_hz); - // Adjust the encoded length down to ensure the same number of samples in each - // channel. - const size_t encoded_len_adjusted = - PacketDuration(encoded, encoded_len) * - Channels(); // 1 byte per sample per channel int16_t temp_type = 1; // Default is speech. - size_t ret = - WebRtcG711_DecodeA(encoded, encoded_len_adjusted, decoded, &temp_type); + size_t ret = WebRtcG711_DecodeA(encoded, encoded_len, decoded, &temp_type); *speech_type = ConvertSpeechType(temp_type); return static_cast(ret); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h index 3fa42cba3..618591876 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_decoder_pcm.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -27,10 +28,6 @@ class AudioDecoderPcmU final : public AudioDecoder { explicit AudioDecoderPcmU(size_t num_channels) : num_channels_(num_channels) { RTC_DCHECK_GE(num_channels, 1); } - - AudioDecoderPcmU(const AudioDecoderPcmU&) = delete; - AudioDecoderPcmU& operator=(const AudioDecoderPcmU&) = delete; - void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -47,6 +44,7 @@ class AudioDecoderPcmU final : public AudioDecoder { private: const size_t num_channels_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmU); }; class AudioDecoderPcmA final : public AudioDecoder { @@ -54,10 +52,6 @@ class AudioDecoderPcmA final : public AudioDecoder { explicit AudioDecoderPcmA(size_t num_channels) : num_channels_(num_channels) { RTC_DCHECK_GE(num_channels, 1); } - - AudioDecoderPcmA(const AudioDecoderPcmA&) = delete; - AudioDecoderPcmA& operator=(const AudioDecoderPcmA&) = delete; - void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -74,6 +68,7 @@ class AudioDecoderPcmA final : public AudioDecoder { private: const size_t num_channels_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcmA); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h index d50be4b45..c4413f50a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g711/audio_encoder_pcm.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/units/time_delta.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -82,9 +83,6 @@ class AudioEncoderPcmA final : public AudioEncoderPcm { explicit AudioEncoderPcmA(const Config& config) : AudioEncoderPcm(config, kSampleRateHz) {} - AudioEncoderPcmA(const AudioEncoderPcmA&) = delete; - AudioEncoderPcmA& operator=(const 
AudioEncoderPcmA&) = delete; - protected: size_t EncodeCall(const int16_t* audio, size_t input_len, @@ -96,6 +94,7 @@ class AudioEncoderPcmA final : public AudioEncoderPcm { private: static const int kSampleRateHz = 8000; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmA); }; class AudioEncoderPcmU final : public AudioEncoderPcm { @@ -107,9 +106,6 @@ class AudioEncoderPcmU final : public AudioEncoderPcm { explicit AudioEncoderPcmU(const Config& config) : AudioEncoderPcm(config, kSampleRateHz) {} - AudioEncoderPcmU(const AudioEncoderPcmU&) = delete; - AudioEncoderPcmU& operator=(const AudioEncoderPcmU&) = delete; - protected: size_t EncodeCall(const int16_t* audio, size_t input_len, @@ -121,6 +117,7 @@ class AudioEncoderPcmU final : public AudioEncoderPcm { private: static const int kSampleRateHz = 8000; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcmU); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc index c21ab9341..f02ca7f89 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.cc @@ -114,12 +114,6 @@ int AudioDecoderG722StereoImpl::DecodeInternal(const uint8_t* encoded, return static_cast(ret); } -int AudioDecoderG722StereoImpl::PacketDuration(const uint8_t* encoded, - size_t encoded_len) const { - // 1/2 encoded byte per sample per channel. - return static_cast(2 * encoded_len / Channels()); -} - int AudioDecoderG722StereoImpl::SampleRateHz() const { return 16000; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h index 5872fad5d..eeca13975 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_decoder_g722.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_CODECS_G722_AUDIO_DECODER_G722_H_ #include "api/audio_codecs/audio_decoder.h" +#include "rtc_base/constructor_magic.h" typedef struct WebRtcG722DecInst G722DecInst; @@ -21,10 +22,6 @@ class AudioDecoderG722Impl final : public AudioDecoder { public: AudioDecoderG722Impl(); ~AudioDecoderG722Impl() override; - - AudioDecoderG722Impl(const AudioDecoderG722Impl&) = delete; - AudioDecoderG722Impl& operator=(const AudioDecoderG722Impl&) = delete; - bool HasDecodePlc() const override; void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, @@ -42,22 +39,17 @@ class AudioDecoderG722Impl final : public AudioDecoder { private: G722DecInst* dec_state_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722Impl); }; class AudioDecoderG722StereoImpl final : public AudioDecoder { public: AudioDecoderG722StereoImpl(); ~AudioDecoderG722StereoImpl() override; - - AudioDecoderG722StereoImpl(const AudioDecoderG722StereoImpl&) = delete; - AudioDecoderG722StereoImpl& operator=(const AudioDecoderG722StereoImpl&) = - delete; - void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; int SampleRateHz() const override; - int PacketDuration(const uint8_t* encoded, size_t encoded_len) const override; size_t Channels() const override; protected: @@ -79,6 +71,7 @@ class AudioDecoderG722StereoImpl final : public AudioDecoder { G722DecInst* dec_state_left_; G722DecInst* 
dec_state_right_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderG722StereoImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h index a932aa8b7..c836503f2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/g722/audio_encoder_g722.h @@ -20,6 +20,7 @@ #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/g722/g722_interface.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,9 +29,6 @@ class AudioEncoderG722Impl final : public AudioEncoder { AudioEncoderG722Impl(const AudioEncoderG722Config& config, int payload_type); ~AudioEncoderG722Impl() override; - AudioEncoderG722Impl(const AudioEncoderG722Impl&) = delete; - AudioEncoderG722Impl& operator=(const AudioEncoderG722Impl&) = delete; - int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -65,6 +63,7 @@ class AudioEncoderG722Impl final : public AudioEncoder { uint32_t first_timestamp_in_buffer_; const std::unique_ptr encoders_; rtc::Buffer interleave_buffer_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderG722Impl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h index 46ba75514..c2d62ed2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_decoder_ilbc.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" typedef struct iLBC_decinst_t_ IlbcDecoderInstance; @@ -27,10 +28,6 @@ class AudioDecoderIlbcImpl final : public AudioDecoder { public: AudioDecoderIlbcImpl(); ~AudioDecoderIlbcImpl() override; - - AudioDecoderIlbcImpl(const AudioDecoderIlbcImpl&) = delete; - AudioDecoderIlbcImpl& operator=(const AudioDecoderIlbcImpl&) = delete; - bool HasDecodePlc() const override; size_t DecodePlc(size_t num_frames, int16_t* decoded) override; void Reset() override; @@ -48,6 +45,7 @@ class AudioDecoderIlbcImpl final : public AudioDecoder { private: IlbcDecoderInstance* dec_state_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIlbcImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h index c8dfa2ca6..05a900e3c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/ilbc/audio_encoder_ilbc.h @@ -21,6 +21,7 @@ #include "api/audio_codecs/ilbc/audio_encoder_ilbc_config.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/ilbc/ilbc.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,9 +30,6 @@ class AudioEncoderIlbcImpl final : public AudioEncoder { AudioEncoderIlbcImpl(const AudioEncoderIlbcConfig& config, int payload_type); ~AudioEncoderIlbcImpl() override; - AudioEncoderIlbcImpl(const AudioEncoderIlbcImpl&) = delete; - AudioEncoderIlbcImpl& operator=(const AudioEncoderIlbcImpl&) = delete; - int SampleRateHz() const 
override; size_t NumChannels() const override; size_t Num10MsFramesInNextPacket() const override; @@ -55,6 +53,7 @@ class AudioEncoderIlbcImpl final : public AudioEncoder { uint32_t first_timestamp_in_buffer_; int16_t input_buffer_[kMaxSamplesPerPacket]; IlbcEncoderInstance* encoder_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIlbcImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h index aae708f29..23a302018 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_decoder_isac_t.h @@ -16,6 +16,7 @@ #include "absl/types/optional.h" #include "api/audio_codecs/audio_decoder.h" #include "api/scoped_refptr.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,9 +30,6 @@ class AudioDecoderIsacT final : public AudioDecoder { explicit AudioDecoderIsacT(const Config& config); virtual ~AudioDecoderIsacT() override; - AudioDecoderIsacT(const AudioDecoderIsacT&) = delete; - AudioDecoderIsacT& operator=(const AudioDecoderIsacT&) = delete; - bool HasDecodePlc() const override; size_t DecodePlc(size_t num_frames, int16_t* decoded) override; void Reset() override; @@ -47,6 +45,8 @@ class AudioDecoderIsacT final : public AudioDecoder { private: typename T::instance_type* isac_state_; int sample_rate_hz_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderIsacT); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h index c382ea076..8bde0e34a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/audio_encoder_isac_t.h @@ -18,6 +18,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/scoped_refptr.h" #include "api/units/time_delta.h" +#include "rtc_base/constructor_magic.h" #include "system_wrappers/include/field_trial.h" namespace webrtc { @@ -43,9 +44,6 @@ class AudioEncoderIsacT final : public AudioEncoder { explicit AudioEncoderIsacT(const Config& config); ~AudioEncoderIsacT() override; - AudioEncoderIsacT(const AudioEncoderIsacT&) = delete; - AudioEncoderIsacT& operator=(const AudioEncoderIsacT&) = delete; - int SampleRateHz() const override; size_t NumChannels() const override; size_t Num10MsFramesInNextPacket() const override; @@ -101,6 +99,8 @@ class AudioEncoderIsacT final : public AudioEncoder { // Start out with a reasonable default that we can use until we receive a real // value. 
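// Most hunks in this patch swap explicitly deleted copy members for the
// RTC_DISALLOW_COPY_AND_ASSIGN macro from rtc_base/constructor_magic.h. A
// representative expansion is sketched below for reference; the authoritative
// definition is the one in constructor_magic.h.
#define RTC_DISALLOW_COPY_AND_ASSIGN_SKETCH(TypeName) \
  TypeName(const TypeName&) = delete;                 \
  TypeName& operator=(const TypeName&) = delete
// With that expansion, RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIlbcImpl) in a
// private section declares the same two deleted members that the public
// "= delete" declarations removed above did, so copyability is unchanged; only
// the spelling and placement of the declarations move.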
DataSize overhead_per_packet_ = DataSize::Bytes(28); + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderIsacT); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h index d112bfe7f..cc4ed555c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/arith_routins.h @@ -38,7 +38,7 @@ int WebRtcIsacfix_EncLogisticMulti2(Bitstr_enc* streamData, int16_t* dataQ7, const uint16_t* env, - int16_t lenData); + const int16_t lenData); /**************************************************************************** * WebRtcIsacfix_EncTerminate(...) @@ -73,7 +73,7 @@ int16_t WebRtcIsacfix_EncTerminate(Bitstr_enc* streamData); int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, Bitstr_dec* streamData, const int32_t* env, - int16_t lenData); + const int16_t lenData); /**************************************************************************** * WebRtcIsacfix_EncHistMulti(...) @@ -92,7 +92,7 @@ int WebRtcIsacfix_DecLogisticMulti2(int16_t* data, int WebRtcIsacfix_EncHistMulti(Bitstr_enc* streamData, const int16_t* data, const uint16_t* const* cdf, - int16_t lenData); + const int16_t lenData); /**************************************************************************** * WebRtcIsacfix_DecHistBisectMulti(...) @@ -118,7 +118,7 @@ int16_t WebRtcIsacfix_DecHistBisectMulti(int16_t* data, Bitstr_dec* streamData, const uint16_t* const* cdf, const uint16_t* cdfSize, - int16_t lenData); + const int16_t lenData); /**************************************************************************** * WebRtcIsacfix_DecHistOneStepMulti(...) @@ -144,6 +144,6 @@ int16_t WebRtcIsacfix_DecHistOneStepMulti(int16_t* data, Bitstr_dec* streamData, const uint16_t* const* cdf, const uint16_t* initIndex, - int16_t lenData); + const int16_t lenData); #endif /* MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ARITH_ROUTINS_H_ */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h index f106746f1..ebb74d6c4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/bandwidth_estimator.h @@ -56,17 +56,17 @@ int32_t WebRtcIsacfix_InitBandwidthEstimator(BwEstimatorstr* bwest_str); */ int32_t WebRtcIsacfix_UpdateUplinkBwImpl(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int16_t frameSize, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize, - uint16_t Index); + const uint16_t rtp_number, + const int16_t frameSize, + const uint32_t send_ts, + const uint32_t arr_ts, + const size_t pksize, + const uint16_t Index); /* Update receiving estimates. Used when we only receive BWE index, no iSAC data * packet. */ int16_t WebRtcIsacfix_UpdateUplinkBwRec(BwEstimatorstr* bwest_str, - int16_t Index); + const int16_t Index); /**************************************************************************** * WebRtcIsacfix_GetDownlinkBwIndexImpl(...) 
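// The const-qualification hunks in these iSAC headers (here and in the
// entropy_coding, filterbank and arith_routines files that follow) do not
// change any caller-visible signature: in C and C++ a top-level const on a
// by-value parameter is not part of the function type. Illustration with a
// hypothetical declaration (not a real WebRTC function):
#include <cstdint>

int16_t ExampleDecode(int16_t lenData);
int16_t ExampleDecode(const int16_t lenData);  // redeclares the very same function
// The const only matters inside a definition, where it keeps the local copy of
// the parameter read-only.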
@@ -100,19 +100,19 @@ int16_t WebRtcIsacfix_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); */ uint16_t WebRtcIsacfix_GetMinBytes( RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameLen, /* ms per frame */ - int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ - int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ + int16_t StreamSize, /* bytes in bitstream */ + const int16_t FrameLen, /* ms per frame */ + const int16_t BottleNeck, /* bottle neck rate; excl headers (bps) */ + const int16_t DelayBuildUp); /* max delay from bottle neck buffering (ms) */ /* * update long-term average bitrate and amount of data in buffer */ void WebRtcIsacfix_UpdateRateModel( RateModel* State, - int16_t StreamSize, /* bytes in bitstream */ - int16_t FrameSamples, /* samples per frame */ - int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ + int16_t StreamSize, /* bytes in bitstream */ + const int16_t FrameSamples, /* samples per frame */ + const int16_t BottleNeck); /* bottle neck rate; excl headers (bps) */ void WebRtcIsacfix_InitRateModel(RateModel* State); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h index ae11394f7..b4251cee1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/entropy_coding.h @@ -101,19 +101,19 @@ void WebRtcIsacfix_TranscodeLpcCoef(int32_t* tmpcoeffs_gQ6, int16_t* index_gQQ); typedef void (*MatrixProduct1)(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); + const int matrix1_index_factor1, + const int matrix0_index_factor1, + const int matrix1_index_init_case, + const int matrix1_index_step, + const int matrix0_index_step, + const int inner_loop_count, + const int mid_loop_count, + const int shift); typedef void (*MatrixProduct2)(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); + const int matrix0_index_factor, + const int matrix0_index_step); extern MatrixProduct1 WebRtcIsacfix_MatrixProduct1; extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; @@ -121,57 +121,57 @@ extern MatrixProduct2 WebRtcIsacfix_MatrixProduct2; void WebRtcIsacfix_MatrixProduct1C(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); + const int matrix1_index_factor1, + const int matrix0_index_factor1, + const int matrix1_index_init_case, + const int matrix1_index_step, + const int matrix0_index_step, + const int inner_loop_count, + const int mid_loop_count, + const int shift); void WebRtcIsacfix_MatrixProduct2C(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); + const int matrix0_index_factor, + const int matrix0_index_step); #if defined(WEBRTC_HAS_NEON) void WebRtcIsacfix_MatrixProduct1Neon(const int16_t matrix0[], const int32_t matrix1[], int32_t 
matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); + const int matrix1_index_factor1, + const int matrix0_index_factor1, + const int matrix1_index_init_case, + const int matrix1_index_step, + const int matrix0_index_step, + const int inner_loop_count, + const int mid_loop_count, + const int shift); void WebRtcIsacfix_MatrixProduct2Neon(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); + const int matrix0_index_factor, + const int matrix0_index_step); #endif #if defined(MIPS32_LE) void WebRtcIsacfix_MatrixProduct1MIPS(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix1_index_factor1, - int matrix0_index_factor1, - int matrix1_index_init_case, - int matrix1_index_step, - int matrix0_index_step, - int inner_loop_count, - int mid_loop_count, - int shift); + const int matrix1_index_factor1, + const int matrix0_index_factor1, + const int matrix1_index_init_case, + const int matrix1_index_step, + const int matrix0_index_step, + const int inner_loop_count, + const int mid_loop_count, + const int shift); void WebRtcIsacfix_MatrixProduct2MIPS(const int16_t matrix0[], const int32_t matrix1[], int32_t matrix_product[], - int matrix0_index_factor, - int matrix0_index_step); + const int matrix0_index_factor, + const int matrix0_index_step); #endif #endif // MODULES_AUDIO_CODING_CODECS_ISAC_FIX_SOURCE_ENTROPY_CODING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h index f741e6f67..6b99914b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/fix/source/filterbank_internal.h @@ -46,7 +46,7 @@ typedef void (*AllpassFilter2FixDec16)( int16_t* data_ch2, // Input and output in channel 2, in Q0 const int16_t* factor_ch1, // Scaling factor for channel 1, in Q15 const int16_t* factor_ch2, // Scaling factor for channel 2, in Q15 - int length, // Length of the data buffers + const int length, // Length of the data buffers int32_t* filter_state_ch1, // Filter state for channel 1, in Q16 int32_t* filter_state_ch2); // Filter state for channel 2, in Q16 extern AllpassFilter2FixDec16 WebRtcIsacfix_AllpassFilter2FixDec16; @@ -55,7 +55,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16C(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - int length, + const int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); @@ -64,7 +64,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16Neon(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - int length, + const int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); #endif @@ -74,7 +74,7 @@ void WebRtcIsacfix_AllpassFilter2FixDec16MIPS(int16_t* data_ch1, int16_t* data_ch2, const int16_t* factor_ch1, const int16_t* factor_ch2, - int length, + const int length, int32_t* filter_state_ch1, int32_t* filter_state_ch2); #endif diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h index 
3f9f6de7b..6e7ea1da5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/arith_routines.h @@ -24,9 +24,9 @@ int WebRtcIsac_EncLogisticMulti2( Bitstr* streamdata, /* in-/output struct containing bitstream */ int16_t* dataQ7, /* input: data vector */ const uint16_t* - env, /* input: side info vector defining the width of the pdf */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ + env, /* input: side info vector defining the width of the pdf */ + const int N, /* input: data vector length */ + const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ /* returns the number of bytes in the stream */ int WebRtcIsac_EncTerminate( @@ -38,15 +38,15 @@ int WebRtcIsac_DecLogisticMulti2( Bitstr* streamdata, /* in-/output struct containing bitstream */ const uint16_t* env, /* input: side info vector defining the width of the pdf */ - const int16_t* dither, /* input: dither vector */ - int N, /* input: data vector length */ - int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ + const int16_t* dither, /* input: dither vector */ + const int N, /* input: data vector length */ + const int16_t isSWB12kHz); /* if the codec is working in 12kHz bandwidth */ void WebRtcIsac_EncHistMulti( Bitstr* streamdata, /* in-/output struct containing bitstream */ const int* data, /* input: data vector */ const uint16_t* const* cdf, /* input: array of cdf arrays */ - int N); /* input: data vector length */ + const int N); /* input: data vector length */ int WebRtcIsac_DecHistBisectMulti( int* data, /* output: data vector */ @@ -54,7 +54,7 @@ int WebRtcIsac_DecHistBisectMulti( const uint16_t* const* cdf, /* input: array of cdf arrays */ const uint16_t* cdf_size, /* input: array of cdf table sizes+1 (power of two: 2^k) */ - int N); /* input: data vector length */ + const int N); /* input: data vector length */ int WebRtcIsac_DecHistOneStepMulti( int* data, /* output: data vector */ @@ -62,6 +62,6 @@ int WebRtcIsac_DecHistOneStepMulti( const uint16_t* const* cdf, /* input: array of cdf arrays */ const uint16_t* init_index, /* input: vector of initial cdf table search entries */ - int N); /* input: data vector length */ + const int N); /* input: data vector length */ #endif /* MODULES_AUDIO_CODING_CODECS_ISAC_MAIN_SOURCE_ARITH_ROUTINES_H_ */ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h index 5f4550a3a..221e65ff3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/isac/main/source/bandwidth_estimator.h @@ -92,11 +92,11 @@ int32_t WebRtcIsac_InitBandwidthEstimator( * estimated by other side */ /* returns 0 if everything went fine, -1 otherwise */ int16_t WebRtcIsac_UpdateBandwidthEstimator(BwEstimatorstr* bwest_str, - uint16_t rtp_number, - int32_t frame_length, - uint32_t send_ts, - uint32_t arr_ts, - size_t pksize); + const uint16_t rtp_number, + const int32_t frame_length, + const uint32_t send_ts, + const uint32_t arr_ts, + const size_t pksize); /* Update receiving estimates. Used when we only receive BWE index, no iSAC data * packet. 
*/ @@ -131,10 +131,10 @@ int32_t WebRtcIsac_GetUplinkMaxDelay(const BwEstimatorstr* bwest_str); */ int WebRtcIsac_GetMinBytes( RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameLen, /* ms per frame */ - double BottleNeck, /* bottle neck rate; excl headers (bps) */ - double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ + int StreamSize, /* bytes in bitstream */ + const int FrameLen, /* ms per frame */ + const double BottleNeck, /* bottle neck rate; excl headers (bps) */ + const double DelayBuildUp, /* max delay from bottleneck buffering (ms) */ enum ISACBandwidth bandwidth /*,int16_t frequentLargePackets*/); @@ -143,9 +143,9 @@ int WebRtcIsac_GetMinBytes( */ void WebRtcIsac_UpdateRateModel( RateModel* State, - int StreamSize, /* bytes in bitstream */ - int FrameSamples, /* samples per frame */ - double BottleNeck); /* bottle neck rate; excl headers (bps) */ + int StreamSize, /* bytes in bitstream */ + const int FrameSamples, /* samples per frame */ + const double BottleNeck); /* bottle neck rate; excl headers (bps) */ void WebRtcIsac_InitRateModel(RateModel* State); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h index 2ff47a8a5..efc3f0dda 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_multi_channel_opus_impl.h @@ -21,6 +21,7 @@ #include "api/audio_codecs/opus/audio_decoder_multi_channel_opus_config.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -31,11 +32,6 @@ class AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { ~AudioDecoderMultiChannelOpusImpl() override; - AudioDecoderMultiChannelOpusImpl(const AudioDecoderMultiChannelOpusImpl&) = - delete; - AudioDecoderMultiChannelOpusImpl& operator=( - const AudioDecoderMultiChannelOpusImpl&) = delete; - std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; void Reset() override; @@ -67,6 +63,7 @@ class AudioDecoderMultiChannelOpusImpl final : public AudioDecoder { OpusDecInst* dec_state_; const AudioDecoderMultiChannelOpusConfig config_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderMultiChannelOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h index e8fd0440b..c79272284 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_decoder_opus.h @@ -19,6 +19,7 @@ #include "api/audio_codecs/audio_decoder.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,9 +29,6 @@ class AudioDecoderOpusImpl final : public AudioDecoder { int sample_rate_hz = 48000); ~AudioDecoderOpusImpl() override; - AudioDecoderOpusImpl(const AudioDecoderOpusImpl&) = delete; - AudioDecoderOpusImpl& operator=(const AudioDecoderOpusImpl&) = delete; - std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; void Reset() override; @@ -57,6 +55,7 @@ class AudioDecoderOpusImpl final : 
public AudioDecoder { OpusDecInst* dec_state_; const size_t channels_; const int sample_rate_hz_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h index 8a7210515..eadb4a6eb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_multi_channel_opus_impl.h @@ -21,6 +21,7 @@ #include "api/audio_codecs/opus/audio_encoder_multi_channel_opus_config.h" #include "api/units/time_delta.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,11 +34,6 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { int payload_type); ~AudioEncoderMultiChannelOpusImpl() override; - AudioEncoderMultiChannelOpusImpl(const AudioEncoderMultiChannelOpusImpl&) = - delete; - AudioEncoderMultiChannelOpusImpl& operator=( - const AudioEncoderMultiChannelOpusImpl&) = delete; - // Static interface for use by BuiltinAudioEncoderFactory. static constexpr const char* GetPayloadName() { return "multiopus"; } static absl::optional QueryAudioEncoder( @@ -85,6 +81,7 @@ class AudioEncoderMultiChannelOpusImpl final : public AudioEncoder { int next_frame_length_ms_; friend struct AudioEncoderMultiChannelOpus; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderMultiChannelOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h index 14477cc31..c7ee4f452 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/audio_encoder_opus.h @@ -23,6 +23,7 @@ #include "common_audio/smoothing_filter.h" #include "modules/audio_coding/audio_network_adaptor/include/audio_network_adaptor.h" #include "modules/audio_coding/codecs/opus/opus_interface.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -60,9 +61,6 @@ class AudioEncoderOpusImpl final : public AudioEncoder { AudioEncoderOpusImpl(int payload_type, const SdpAudioFormat& format); ~AudioEncoderOpusImpl() override; - AudioEncoderOpusImpl(const AudioEncoderOpusImpl&) = delete; - AudioEncoderOpusImpl& operator=(const AudioEncoderOpusImpl&) = delete; - int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -177,6 +175,7 @@ class AudioEncoderOpusImpl final : public AudioEncoder { int consecutive_dtx_frames_; friend struct AudioEncoderOpus; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderOpusImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc deleted file mode 100644 index 0636935b6..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_fec_test.cc +++ /dev/null @@ -1,250 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. 
An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include "modules/audio_coding/codecs/opus/opus_interface.h" -#include "rtc_base/format_macros.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -using std::get; -using std::string; -using std::tuple; -using ::testing::TestWithParam; - -namespace webrtc { - -// Define coding parameter as . -typedef tuple coding_param; -typedef struct mode mode; - -struct mode { - bool fec; - uint8_t target_packet_loss_rate; -}; - -const int kOpusBlockDurationMs = 20; -const int kOpusSamplingKhz = 48; - -class OpusFecTest : public TestWithParam { - protected: - OpusFecTest(); - - void SetUp() override; - void TearDown() override; - - virtual void EncodeABlock(); - - virtual void DecodeABlock(bool lost_previous, bool lost_current); - - int block_duration_ms_; - int sampling_khz_; - size_t block_length_sample_; - - size_t channels_; - int bit_rate_; - - size_t data_pointer_; - size_t loop_length_samples_; - size_t max_bytes_; - size_t encoded_bytes_; - - WebRtcOpusEncInst* opus_encoder_; - WebRtcOpusDecInst* opus_decoder_; - - string in_filename_; - - std::unique_ptr in_data_; - std::unique_ptr out_data_; - std::unique_ptr bit_stream_; -}; - -void OpusFecTest::SetUp() { - channels_ = get<0>(GetParam()); - bit_rate_ = get<1>(GetParam()); - printf("Coding %" RTC_PRIuS " channel signal at %d bps.\n", channels_, - bit_rate_); - - in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam())); - - FILE* fp = fopen(in_filename_.c_str(), "rb"); - ASSERT_FALSE(fp == NULL); - - // Obtain file size. - fseek(fp, 0, SEEK_END); - loop_length_samples_ = ftell(fp) / sizeof(int16_t); - rewind(fp); - - // Allocate memory to contain the whole file. - in_data_.reset( - new int16_t[loop_length_samples_ + block_length_sample_ * channels_]); - - // Copy the file into the buffer. - ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp), - loop_length_samples_); - fclose(fp); - - // The audio will be used in a looped manner. To ease the acquisition of an - // audio frame that crosses the end of the excerpt, we add an extra block - // length of samples to the end of the array, starting over again from the - // beginning of the array. Audio frames cross the end of the excerpt always - // appear as a continuum of memory. - memcpy(&in_data_[loop_length_samples_], &in_data_[0], - block_length_sample_ * channels_ * sizeof(int16_t)); - - // Maximum number of bytes in output bitstream. - max_bytes_ = block_length_sample_ * channels_ * sizeof(int16_t); - - out_data_.reset(new int16_t[2 * block_length_sample_ * channels_]); - bit_stream_.reset(new uint8_t[max_bytes_]); - - // If channels_ == 1, use Opus VOIP mode, otherwise, audio mode. - int app = channels_ == 1 ? 0 : 1; - - // Create encoder memory. - EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_, app, 48000)); - EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_, 48000)); - // Set bitrate. - EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_)); -} - -void OpusFecTest::TearDown() { - // Free memory. 
- EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_)); - EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_)); -} - -OpusFecTest::OpusFecTest() - : block_duration_ms_(kOpusBlockDurationMs), - sampling_khz_(kOpusSamplingKhz), - block_length_sample_( - static_cast(block_duration_ms_ * sampling_khz_)), - data_pointer_(0), - max_bytes_(0), - encoded_bytes_(0), - opus_encoder_(NULL), - opus_decoder_(NULL) {} - -void OpusFecTest::EncodeABlock() { - int value = - WebRtcOpus_Encode(opus_encoder_, &in_data_[data_pointer_], - block_length_sample_, max_bytes_, &bit_stream_[0]); - EXPECT_GT(value, 0); - - encoded_bytes_ = static_cast(value); -} - -void OpusFecTest::DecodeABlock(bool lost_previous, bool lost_current) { - int16_t audio_type; - int value_1 = 0, value_2 = 0; - - if (lost_previous) { - // Decode previous frame. - if (!lost_current && - WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_) == 1) { - value_1 = - WebRtcOpus_DecodeFec(opus_decoder_, &bit_stream_[0], encoded_bytes_, - &out_data_[0], &audio_type); - } else { - // Call decoder PLC. - while (value_1 < static_cast(block_length_sample_)) { - int ret = WebRtcOpus_Decode(opus_decoder_, NULL, 0, &out_data_[value_1], - &audio_type); - EXPECT_EQ(ret, sampling_khz_ * 10); // Should return 10 ms of samples. - value_1 += ret; - } - } - EXPECT_EQ(static_cast(block_length_sample_), value_1); - } - - if (!lost_current) { - // Decode current frame. - value_2 = WebRtcOpus_Decode(opus_decoder_, &bit_stream_[0], encoded_bytes_, - &out_data_[value_1 * channels_], &audio_type); - EXPECT_EQ(static_cast(block_length_sample_), value_2); - } -} - -TEST_P(OpusFecTest, RandomPacketLossTest) { - const int kDurationMs = 200000; - int time_now_ms, fec_frames; - int actual_packet_loss_rate; - bool lost_current, lost_previous; - mode mode_set[3] = {{true, 0}, {false, 0}, {true, 50}}; - - lost_current = false; - for (int i = 0; i < 3; i++) { - if (mode_set[i].fec) { - EXPECT_EQ(0, WebRtcOpus_EnableFec(opus_encoder_)); - EXPECT_EQ(0, WebRtcOpus_SetPacketLossRate( - opus_encoder_, mode_set[i].target_packet_loss_rate)); - printf("FEC is ON, target at packet loss rate %d percent.\n", - mode_set[i].target_packet_loss_rate); - } else { - EXPECT_EQ(0, WebRtcOpus_DisableFec(opus_encoder_)); - printf("FEC is OFF.\n"); - } - // In this test, we let the target packet loss rate match the actual rate. - actual_packet_loss_rate = mode_set[i].target_packet_loss_rate; - // Run every mode a certain time. - time_now_ms = 0; - fec_frames = 0; - while (time_now_ms < kDurationMs) { - // Encode & decode. - EncodeABlock(); - - // Check if payload has FEC. - int fec = WebRtcOpus_PacketHasFec(&bit_stream_[0], encoded_bytes_); - - // If FEC is disabled or the target packet loss rate is set to 0, there - // should be no FEC in the bit stream. - if (!mode_set[i].fec || mode_set[i].target_packet_loss_rate == 0) { - EXPECT_EQ(fec, 0); - } else if (fec == 1) { - fec_frames++; - } - - lost_previous = lost_current; - lost_current = rand() < actual_packet_loss_rate * (RAND_MAX / 100); - DecodeABlock(lost_previous, lost_current); - - time_now_ms += block_duration_ms_; - - // `data_pointer_` is incremented and wrapped across - // `loop_length_samples_`. 
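// Distilled from DecodeABlock() in the deleted opus_fec_test.cc above (a sketch
// using only the calls that test exercises, mono case for brevity): when the
// previous packet was lost, the in-band FEC of the current packet reconstructs
// it if present, otherwise the decoder's PLC is driven in 10 ms steps.
#include <cstddef>
#include <cstdint>
#include "modules/audio_coding/codecs/opus/opus_interface.h"

void RecoverPreviousFrame(WebRtcOpusDecInst* decoder,
                          const uint8_t* current_payload,  // NULL if also lost
                          size_t payload_bytes,
                          int16_t* out,
                          int samples_per_frame) {
  int16_t audio_type;
  if (current_payload != NULL &&
      WebRtcOpus_PacketHasFec(current_payload, payload_bytes) == 1) {
    // The FEC data embedded in the current packet describes the previous frame.
    WebRtcOpus_DecodeFec(decoder, current_payload, payload_bytes, out,
                         &audio_type);
  } else {
    // Packet-loss concealment: a NULL payload yields 10 ms of audio per call,
    // as asserted in the deleted test; bail out on a decoder error.
    int produced = 0;
    while (produced < samples_per_frame) {
      int ret = WebRtcOpus_Decode(decoder, NULL, 0, &out[produced], &audio_type);
      if (ret <= 0) {
        break;
      }
      produced += ret;
    }
  }
}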
- data_pointer_ = (data_pointer_ + block_length_sample_ * channels_) % - loop_length_samples_; - } - if (mode_set[i].fec) { - printf("%.2f percent frames has FEC.\n", - static_cast(fec_frames) * block_duration_ms_ / 2000); - } - } -} - -const coding_param param_set[] = { - std::make_tuple(1, - 64000, - string("audio_coding/testfile32kHz"), - string("pcm")), - std::make_tuple(1, - 32000, - string("audio_coding/testfile32kHz"), - string("pcm")), - std::make_tuple(2, - 64000, - string("audio_coding/teststereo32kHz"), - string("pcm"))}; - -// 64 kbps, stereo -INSTANTIATE_TEST_SUITE_P(AllTest, OpusFecTest, ::testing::ValuesIn(param_set)); - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc deleted file mode 100644 index 4477e8a5f..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/opus/opus_speed_test.cc +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/opus/opus_interface.h" -#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" - -using ::std::string; - -namespace webrtc { - -static const int kOpusBlockDurationMs = 20; -static const int kOpusSamplingKhz = 48; - -class OpusSpeedTest : public AudioCodecSpeedTest { - protected: - OpusSpeedTest(); - void SetUp() override; - void TearDown() override; - float EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) override; - float DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) override; - WebRtcOpusEncInst* opus_encoder_; - WebRtcOpusDecInst* opus_decoder_; -}; - -OpusSpeedTest::OpusSpeedTest() - : AudioCodecSpeedTest(kOpusBlockDurationMs, - kOpusSamplingKhz, - kOpusSamplingKhz), - opus_encoder_(NULL), - opus_decoder_(NULL) {} - -void OpusSpeedTest::SetUp() { - AudioCodecSpeedTest::SetUp(); - // If channels_ == 1, use Opus VOIP mode, otherwise, audio mode. - int app = channels_ == 1 ? 0 : 1; - /* Create encoder memory. */ - EXPECT_EQ(0, WebRtcOpus_EncoderCreate(&opus_encoder_, channels_, app, 48000)); - EXPECT_EQ(0, WebRtcOpus_DecoderCreate(&opus_decoder_, channels_, 48000)); - /* Set bitrate. */ - EXPECT_EQ(0, WebRtcOpus_SetBitRate(opus_encoder_, bit_rate_)); -} - -void OpusSpeedTest::TearDown() { - AudioCodecSpeedTest::TearDown(); - /* Free memory. 
*/ - EXPECT_EQ(0, WebRtcOpus_EncoderFree(opus_encoder_)); - EXPECT_EQ(0, WebRtcOpus_DecoderFree(opus_decoder_)); -} - -float OpusSpeedTest::EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) { - clock_t clocks = clock(); - int value = WebRtcOpus_Encode(opus_encoder_, in_data, input_length_sample_, - max_bytes, bit_stream); - clocks = clock() - clocks; - EXPECT_GT(value, 0); - *encoded_bytes = static_cast(value); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -float OpusSpeedTest::DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) { - int value; - int16_t audio_type; - clock_t clocks = clock(); - value = WebRtcOpus_Decode(opus_decoder_, bit_stream, encoded_bytes, out_data, - &audio_type); - clocks = clock() - clocks; - EXPECT_EQ(output_length_sample_, static_cast(value)); - return 1000.0 * clocks / CLOCKS_PER_SEC; -} - -/* Test audio length in second. */ -constexpr size_t kDurationSec = 400; - -#define ADD_TEST(complexity) \ - TEST_P(OpusSpeedTest, OpusSetComplexityTest##complexity) { \ - /* Set complexity. */ \ - printf("Setting complexity to %d ...\n", complexity); \ - EXPECT_EQ(0, WebRtcOpus_SetComplexity(opus_encoder_, complexity)); \ - EncodeDecode(kDurationSec); \ - } - -ADD_TEST(10) -ADD_TEST(9) -ADD_TEST(8) -ADD_TEST(7) -ADD_TEST(6) -ADD_TEST(5) -ADD_TEST(4) -ADD_TEST(3) -ADD_TEST(2) -ADD_TEST(1) -ADD_TEST(0) - -#define ADD_BANDWIDTH_TEST(bandwidth) \ - TEST_P(OpusSpeedTest, OpusSetBandwidthTest##bandwidth) { \ - /* Set bandwidth. */ \ - printf("Setting bandwidth to %d ...\n", bandwidth); \ - EXPECT_EQ(0, WebRtcOpus_SetBandwidth(opus_encoder_, bandwidth)); \ - EncodeDecode(kDurationSec); \ - } - -ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_NARROWBAND) -ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_MEDIUMBAND) -ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_WIDEBAND) -ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_SUPERWIDEBAND) -ADD_BANDWIDTH_TEST(OPUS_BANDWIDTH_FULLBAND) - -// List all test cases: (channel, bit rat, filename, extension). -const coding_param param_set[] = { - std::make_tuple(1, - 64000, - string("audio_coding/speech_mono_32_48kHz"), - string("pcm"), - true), - std::make_tuple(1, - 32000, - string("audio_coding/speech_mono_32_48kHz"), - string("pcm"), - true), - std::make_tuple(2, - 64000, - string("audio_coding/music_stereo_48kHz"), - string("pcm"), - true)}; - -INSTANTIATE_TEST_SUITE_P(AllTest, - OpusSpeedTest, - ::testing::ValuesIn(param_set)); - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc index 7761efe8b..1dd2ff289 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.cc @@ -42,12 +42,7 @@ int AudioDecoderPcm16B::DecodeInternal(const uint8_t* encoded, int16_t* decoded, SpeechType* speech_type) { RTC_DCHECK_EQ(sample_rate_hz_, sample_rate_hz); - // Adjust the encoded length down to ensure the same number of samples in each - // channel. 
- const size_t encoded_len_adjusted = - PacketDuration(encoded, encoded_len) * 2 * - Channels(); // 2 bytes per sample per channel - size_t ret = WebRtcPcm16b_Decode(encoded, encoded_len_adjusted, decoded); + size_t ret = WebRtcPcm16b_Decode(encoded, encoded_len, decoded); *speech_type = ConvertSpeechType(1); return static_cast(ret); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h index 6f50161d3..f08c4a629 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_decoder_pcm16b.h @@ -18,16 +18,13 @@ #include "api/audio_codecs/audio_decoder.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { class AudioDecoderPcm16B final : public AudioDecoder { public: AudioDecoderPcm16B(int sample_rate_hz, size_t num_channels); - - AudioDecoderPcm16B(const AudioDecoderPcm16B&) = delete; - AudioDecoderPcm16B& operator=(const AudioDecoderPcm16B&) = delete; - void Reset() override; std::vector ParsePayload(rtc::Buffer&& payload, uint32_t timestamp) override; @@ -45,6 +42,7 @@ class AudioDecoderPcm16B final : public AudioDecoder { private: const int sample_rate_hz_; const size_t num_channels_; + RTC_DISALLOW_COPY_AND_ASSIGN(AudioDecoderPcm16B); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h index c363b40b3..71c757250 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/pcm16b/audio_encoder_pcm16b.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_CODECS_PCM16B_AUDIO_ENCODER_PCM16B_H_ #include "modules/audio_coding/codecs/g711/audio_encoder_pcm.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,9 +29,6 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm { explicit AudioEncoderPcm16B(const Config& config) : AudioEncoderPcm(config, config.sample_rate_hz) {} - AudioEncoderPcm16B(const AudioEncoderPcm16B&) = delete; - AudioEncoderPcm16B& operator=(const AudioEncoderPcm16B&) = delete; - protected: size_t EncodeCall(const int16_t* audio, size_t input_len, @@ -39,6 +37,9 @@ class AudioEncoderPcm16B final : public AudioEncoderPcm { size_t BytesPerSample() const override; AudioEncoder::CodecType GetCodecType() const override; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderPcm16B); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h index d16319325..d5b1bf686 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/red/audio_encoder_copy_red.h @@ -23,6 +23,7 @@ #include "api/audio_codecs/audio_encoder.h" #include "api/units/time_delta.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -46,9 +47,6 @@ class AudioEncoderCopyRed final : public AudioEncoder { ~AudioEncoderCopyRed() override; - AudioEncoderCopyRed(const AudioEncoderCopyRed&) = delete; - AudioEncoderCopyRed& operator=(const AudioEncoderCopyRed&) 
= delete; - int SampleRateHz() const override; size_t NumChannels() const override; int RtpTimestampRateHz() const override; @@ -94,6 +92,8 @@ class AudioEncoderCopyRed final : public AudioEncoder { size_t max_packet_length_; int red_payload_type_; std::list> redundant_encodings_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioEncoderCopyRed); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc deleted file mode 100644 index f61aacc47..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.cc +++ /dev/null @@ -1,127 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "modules/audio_coding/codecs/tools/audio_codec_speed_test.h" - -#include "rtc_base/checks.h" -#include "rtc_base/format_macros.h" -#include "test/gtest.h" -#include "test/testsupport/file_utils.h" - -using ::std::get; - -namespace webrtc { - -AudioCodecSpeedTest::AudioCodecSpeedTest(int block_duration_ms, - int input_sampling_khz, - int output_sampling_khz) - : block_duration_ms_(block_duration_ms), - input_sampling_khz_(input_sampling_khz), - output_sampling_khz_(output_sampling_khz), - input_length_sample_( - static_cast(block_duration_ms_ * input_sampling_khz_)), - output_length_sample_( - static_cast(block_duration_ms_ * output_sampling_khz_)), - data_pointer_(0), - loop_length_samples_(0), - max_bytes_(0), - encoded_bytes_(0), - encoding_time_ms_(0.0), - decoding_time_ms_(0.0), - out_file_(NULL) {} - -void AudioCodecSpeedTest::SetUp() { - channels_ = get<0>(GetParam()); - bit_rate_ = get<1>(GetParam()); - in_filename_ = test::ResourcePath(get<2>(GetParam()), get<3>(GetParam())); - save_out_data_ = get<4>(GetParam()); - - FILE* fp = fopen(in_filename_.c_str(), "rb"); - RTC_DCHECK(fp); - - // Obtain file size. - fseek(fp, 0, SEEK_END); - loop_length_samples_ = ftell(fp) / sizeof(int16_t); - rewind(fp); - - // Allocate memory to contain the whole file. - in_data_.reset( - new int16_t[loop_length_samples_ + input_length_sample_ * channels_]); - - data_pointer_ = 0; - - // Copy the file into the buffer. - ASSERT_EQ(fread(&in_data_[0], sizeof(int16_t), loop_length_samples_, fp), - loop_length_samples_); - fclose(fp); - - // Add an extra block length of samples to the end of the array, starting - // over again from the beginning of the array. This is done to simplify - // the reading process when reading over the end of the loop. 
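// The memcpy that follows (and the equivalent one in the deleted opus_fec_test
// SetUp() earlier) appends one extra block to the tail of the looped input so a
// frame that straddles the wrap point is still contiguous in memory. A minimal
// standalone sketch of the same trick, with illustrative names and the
// assumption frame_len <= loop_len:
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Returns loop_len + frame_len samples whose tail repeats the head, so
// buf[pos .. pos + frame_len) stays valid for every pos in [0, loop_len).
std::vector<int16_t> MakeLoopedBuffer(const int16_t* samples, size_t loop_len,
                                      size_t frame_len) {
  std::vector<int16_t> buf(loop_len + frame_len);
  std::memcpy(buf.data(), samples, loop_len * sizeof(int16_t));
  std::memcpy(buf.data() + loop_len, samples, frame_len * sizeof(int16_t));
  return buf;
}
// The read position then advances with a single modulo, exactly as in the
// deleted EncodeDecode() loop:  pos = (pos + frame_len) % loop_len;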
- memcpy(&in_data_[loop_length_samples_], &in_data_[0], - input_length_sample_ * channels_ * sizeof(int16_t)); - - max_bytes_ = input_length_sample_ * channels_ * sizeof(int16_t); - out_data_.reset(new int16_t[output_length_sample_ * channels_]); - bit_stream_.reset(new uint8_t[max_bytes_]); - - if (save_out_data_) { - std::string out_filename = - ::testing::UnitTest::GetInstance()->current_test_info()->name(); - - // Erase '/' - size_t found; - while ((found = out_filename.find('/')) != std::string::npos) - out_filename.replace(found, 1, "_"); - - out_filename = test::OutputPath() + out_filename + ".pcm"; - - out_file_ = fopen(out_filename.c_str(), "wb"); - RTC_DCHECK(out_file_); - - printf("Output to be saved in %s.\n", out_filename.c_str()); - } -} - -void AudioCodecSpeedTest::TearDown() { - if (save_out_data_) { - fclose(out_file_); - } -} - -void AudioCodecSpeedTest::EncodeDecode(size_t audio_duration_sec) { - size_t time_now_ms = 0; - float time_ms; - - printf("Coding %d kHz-sampled %" RTC_PRIuS "-channel audio at %d bps ...\n", - input_sampling_khz_, channels_, bit_rate_); - - while (time_now_ms < audio_duration_sec * 1000) { - // Encode & decode. - time_ms = EncodeABlock(&in_data_[data_pointer_], &bit_stream_[0], - max_bytes_, &encoded_bytes_); - encoding_time_ms_ += time_ms; - time_ms = DecodeABlock(&bit_stream_[0], encoded_bytes_, &out_data_[0]); - decoding_time_ms_ += time_ms; - if (save_out_data_) { - fwrite(&out_data_[0], sizeof(int16_t), output_length_sample_ * channels_, - out_file_); - } - data_pointer_ = (data_pointer_ + input_length_sample_ * channels_) % - loop_length_samples_; - time_now_ms += block_duration_ms_; - } - - printf("Encoding: %.2f%% real time,\nDecoding: %.2f%% real time.\n", - (encoding_time_ms_ / audio_duration_sec) / 10.0, - (decoding_time_ms_ / audio_duration_sec) / 10.0); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h deleted file mode 100644 index c5f1d7c25..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/codecs/tools/audio_codec_speed_test.h +++ /dev/null @@ -1,93 +0,0 @@ -/* - * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ -#define MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ - -#include -#include - -#include "test/gtest.h" - -namespace webrtc { - -// Define coding parameter as -// . -typedef std::tuple coding_param; - -class AudioCodecSpeedTest : public ::testing::TestWithParam { - protected: - AudioCodecSpeedTest(int block_duration_ms, - int input_sampling_khz, - int output_sampling_khz); - virtual void SetUp(); - virtual void TearDown(); - - // EncodeABlock(...) does the following: - // 1. encodes a block of audio, saved in `in_data`, - // 2. save the bit stream to `bit_stream` of `max_bytes` bytes in size, - // 3. assign `encoded_bytes` with the length of the bit stream (in bytes), - // 4. return the cost of time (in millisecond) spent on actual encoding. 
- virtual float EncodeABlock(int16_t* in_data, - uint8_t* bit_stream, - size_t max_bytes, - size_t* encoded_bytes) = 0; - - // DecodeABlock(...) does the following: - // 1. decodes the bit stream in `bit_stream` with a length of `encoded_bytes` - // (in bytes), - // 2. save the decoded audio in `out_data`, - // 3. return the cost of time (in millisecond) spent on actual decoding. - virtual float DecodeABlock(const uint8_t* bit_stream, - size_t encoded_bytes, - int16_t* out_data) = 0; - - // Encoding and decode an audio of `audio_duration` (in seconds) and - // record the runtime for encoding and decoding separately. - void EncodeDecode(size_t audio_duration); - - int block_duration_ms_; - int input_sampling_khz_; - int output_sampling_khz_; - - // Number of samples-per-channel in a frame. - size_t input_length_sample_; - - // Expected output number of samples-per-channel in a frame. - size_t output_length_sample_; - - std::unique_ptr in_data_; - std::unique_ptr out_data_; - size_t data_pointer_; - size_t loop_length_samples_; - std::unique_ptr bit_stream_; - - // Maximum number of bytes in output bitstream for a frame of audio. - size_t max_bytes_; - - size_t encoded_bytes_; - float encoding_time_ms_; - float decoding_time_ms_; - FILE* out_file_; - - size_t channels_; - - // Bit rate is in bit-per-second. - int bit_rate_; - - std::string in_filename_; - - // Determines whether to save the output to file. - bool save_out_data_; -}; - -} // namespace webrtc - -#endif // MODULES_AUDIO_CODING_CODECS_TOOLS_AUDIO_CODEC_SPEED_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h index 8b518fb97..003d966fb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/include/audio_coding_module.h @@ -190,7 +190,7 @@ class AudioCodingModule { // 0 if payload is successfully pushed in. // virtual int32_t IncomingPacket(const uint8_t* incoming_payload, - size_t payload_len_bytes, + const size_t payload_len_bytes, const RTPHeader& rtp_header) = 0; /////////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h index 01fe874d5..e03f609ff 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/accelerate.h @@ -15,6 +15,7 @@ #include #include "modules/audio_coding/neteq/time_stretch.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -32,9 +33,6 @@ class Accelerate : public TimeStretch { const BackgroundNoise& background_noise) : TimeStretch(sample_rate_hz, num_channels, background_noise) {} - Accelerate(const Accelerate&) = delete; - Accelerate& operator=(const Accelerate&) = delete; - // This method performs the actual Accelerate operation. The samples are // read from `input`, of length `input_length` elements, and are written to // `output`. 
The number of samples removed through time-stretching is @@ -64,6 +62,9 @@ class Accelerate : public TimeStretch { bool active_speech, bool fast_mode, AudioMultiVector* output) const override; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(Accelerate); }; struct AccelerateFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h index 715ec6dfc..10179d7f0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_multi_vector.h @@ -18,6 +18,7 @@ #include "api/array_view.h" #include "modules/audio_coding/neteq/audio_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,9 +34,6 @@ class AudioMultiVector { virtual ~AudioMultiVector(); - AudioMultiVector(const AudioMultiVector&) = delete; - AudioMultiVector& operator=(const AudioMultiVector&) = delete; - // Deletes all values and make the vector empty. virtual void Clear(); @@ -132,6 +130,9 @@ class AudioMultiVector { protected: std::vector channels_; size_t num_channels_; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AudioMultiVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h index d68f3ec6b..c722b5696 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/audio_vector.h @@ -17,6 +17,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,9 +31,6 @@ class AudioVector { virtual ~AudioVector(); - AudioVector(const AudioVector&) = delete; - AudioVector& operator=(const AudioVector&) = delete; - // Deletes all values and make the vector empty. virtual void Clear(); @@ -166,6 +164,8 @@ class AudioVector { // The index of the sample after the last sample in `array_`. 
size_t end_index_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h index 8e6d5890a..005b3766f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/background_noise.h @@ -16,6 +16,7 @@ #include #include "api/array_view.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,9 +34,6 @@ class BackgroundNoise { explicit BackgroundNoise(size_t num_channels); virtual ~BackgroundNoise(); - BackgroundNoise(const BackgroundNoise&) = delete; - BackgroundNoise& operator=(const BackgroundNoise&) = delete; - void Reset(); // Updates the parameter estimates based on the signal currently in the @@ -132,6 +130,8 @@ class BackgroundNoise { size_t num_channels_; std::unique_ptr channel_parameters_; bool initialized_; + + RTC_DISALLOW_COPY_AND_ASSIGN(BackgroundNoise); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h index ced36da9c..94a37150e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/buffer_level_filter.h @@ -14,16 +14,14 @@ #include #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { class BufferLevelFilter { public: BufferLevelFilter(); virtual ~BufferLevelFilter() {} - - BufferLevelFilter(const BufferLevelFilter&) = delete; - BufferLevelFilter& operator=(const BufferLevelFilter&) = delete; - virtual void Reset(); // Updates the filter. Current buffer size is `buffer_size_samples`. @@ -48,6 +46,8 @@ class BufferLevelFilter { private: int level_factor_; // Filter factor for the buffer level filter in Q8. int filtered_current_level_; // Filtered current buffer level in Q8. + + RTC_DISALLOW_COPY_AND_ASSIGN(BufferLevelFilter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h index 31fcee31d..6419d397d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/comfort_noise.h @@ -13,6 +13,8 @@ #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { // Forward declarations. @@ -40,9 +42,6 @@ class ComfortNoise { decoder_database_(decoder_database), sync_buffer_(sync_buffer) {} - ComfortNoise(const ComfortNoise&) = delete; - ComfortNoise& operator=(const ComfortNoise&) = delete; - // Resets the state. Should be called before each new comfort noise period. 
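The BufferLevelFilter hunk above keeps its state in Q8 fixed point (`level_factor_` and `filtered_current_level_` are both documented as Q8). As a hedged sketch, assuming the filter is the usual first-order smoother NetEq applies to the buffer level (the exact update in buffer_level_filter.cc may differ), the Q8 arithmetic looks like this:

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical first-order smoother in Q8 fixed point, illustrating the kind
// of update BufferLevelFilter performs. `factor_q8` plays the role of
// level_factor_ and `filtered_q8` the role of filtered_current_level_.
int64_t SmoothQ8(int64_t filtered_q8, int64_t new_level_samples, int factor_q8) {
  // filtered = a * filtered + (1 - a) * new_level, everything scaled by 256.
  return (factor_q8 * filtered_q8 +
          (256 - factor_q8) * (new_level_samples << 8)) >> 8;
}

int main() {
  int64_t filtered_q8 = 0;
  for (int i = 0; i < 5; ++i) {
    filtered_q8 = SmoothQ8(filtered_q8, /*new_level_samples=*/960,
                           /*factor_q8=*/253);  // ~0.988 in Q8
    std::printf("level ~= %lld samples\n",
                static_cast<long long>(filtered_q8 >> 8));
  }
}
```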
void Reset(); @@ -66,6 +65,7 @@ class ComfortNoise { DecoderDatabase* decoder_database_; SyncBuffer* sync_buffer_; int internal_error_code_; + RTC_DISALLOW_COPY_AND_ASSIGN(ComfortNoise); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h index a8571ade9..693f6169e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decision_logic.h @@ -18,6 +18,7 @@ #include "api/neteq/tick_timer.h" #include "modules/audio_coding/neteq/buffer_level_filter.h" #include "modules/audio_coding/neteq/delay_manager.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -36,9 +37,6 @@ class DecisionLogic : public NetEqController { ~DecisionLogic() override; - DecisionLogic(const DecisionLogic&) = delete; - DecisionLogic& operator=(const DecisionLogic&) = delete; - // Resets object to a clean state. void Reset() override; @@ -194,6 +192,8 @@ class DecisionLogic : public NetEqController { FieldTrialParameter estimate_dtx_delay_; FieldTrialParameter time_stretch_cn_; FieldTrialConstrained target_level_window_ms_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DecisionLogic); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h index 6c2ce5403..a63a9cff1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/decoder_database.h @@ -20,6 +20,7 @@ #include "api/scoped_refptr.h" #include "modules/audio_coding/codecs/cng/webrtc_cng.h" #include "modules/audio_coding/neteq/packet.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -121,9 +122,6 @@ class DecoderDatabase { virtual ~DecoderDatabase(); - DecoderDatabase(const DecoderDatabase&) = delete; - DecoderDatabase& operator=(const DecoderDatabase&) = delete; - // Returns true if the database is empty. virtual bool Empty() const; @@ -210,6 +208,8 @@ class DecoderDatabase { mutable std::unique_ptr active_cng_decoder_; rtc::scoped_refptr decoder_factory_; const absl::optional codec_pair_id_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DecoderDatabase); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h index 56d108ad1..410aa94b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/delay_manager.h @@ -22,6 +22,7 @@ #include "modules/audio_coding/neteq/relative_arrival_delay_tracker.h" #include "modules/audio_coding/neteq/reorder_optimizer.h" #include "modules/audio_coding/neteq/underrun_optimizer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -51,9 +52,6 @@ class DelayManager { virtual ~DelayManager(); - DelayManager(const DelayManager&) = delete; - DelayManager& operator=(const DelayManager&) = delete; - // Updates the delay manager with a new incoming packet, with `timestamp` from // the RTP header. This updates the statistics and a new target buffer level // is calculated. Returns the relative delay if it can be calculated. 
If @@ -113,7 +111,9 @@ class DelayManager { int maximum_delay_ms_; // Externally set maximum allowed delay. int packet_len_ms_ = 0; - int target_level_ms_; // Currently preferred buffer level. + int target_level_ms_; // Currently preferred buffer level. + + RTC_DISALLOW_COPY_AND_ASSIGN(DelayManager); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h index 4aead7df1..7bdeba6ec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dsp_helper.h @@ -16,6 +16,7 @@ #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/audio_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -149,12 +150,11 @@ class DspHelper { bool compensate_delay, int16_t* output); - DspHelper(const DspHelper&) = delete; - DspHelper& operator=(const DspHelper&) = delete; - private: // Table of constants used in method DspHelper::ParabolicFit(). static const int16_t kParabolaCoefficients[17][3]; + + RTC_DISALLOW_COPY_AND_ASSIGN(DspHelper); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h index 62b751525..9209cae86 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_buffer.h @@ -16,6 +16,8 @@ #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { struct DtmfEvent { @@ -48,9 +50,6 @@ class DtmfBuffer { virtual ~DtmfBuffer(); - DtmfBuffer(const DtmfBuffer&) = delete; - DtmfBuffer& operator=(const DtmfBuffer&) = delete; - // Flushes the buffer. virtual void Flush(); @@ -98,6 +97,8 @@ class DtmfBuffer { static bool CompareEvents(const DtmfEvent& a, const DtmfEvent& b); DtmfList buffer_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DtmfBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h index 35114f4f4..968bc7f8c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/dtmf_tone_generator.h @@ -15,6 +15,7 @@ #include #include "modules/audio_coding/neteq/audio_multi_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,10 +29,6 @@ class DtmfToneGenerator { DtmfToneGenerator(); virtual ~DtmfToneGenerator() {} - - DtmfToneGenerator(const DtmfToneGenerator&) = delete; - DtmfToneGenerator& operator=(const DtmfToneGenerator&) = delete; - virtual int Init(int fs, int event, int attenuation); virtual void Reset(); virtual int Generate(size_t num_samples, AudioMultiVector* output); @@ -51,6 +48,8 @@ class DtmfToneGenerator { int amplitude_; // Amplitude for this event. int16_t sample_history1_[2]; // Last 2 samples for the 1st oscillator. int16_t sample_history2_[2]; // Last 2 samples for the 2nd oscillator. 
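The DtmfToneGenerator members noted just above (`sample_history1_[2]`, `sample_history2_[2]`) are the state of two second-order digital resonators, one per DTMF frequency. A minimal sketch of that classic recursion, independent of the WebRTC implementation details:

```cpp
#include <cmath>
#include <cstdio>
#include <vector>

// Sketch of a two-oscillator DTMF generator: each sinusoid comes from the
// recursion y[n] = 2*cos(w)*y[n-1] - y[n-2], which needs only the previous
// two samples as state (compare sample_history1_/sample_history2_ above).
std::vector<float> GenerateDtmf(double f_low, double f_high, int fs, int n) {
  const double kPi = 3.14159265358979323846;
  const double w1 = 2.0 * kPi * f_low / fs;
  const double w2 = 2.0 * kPi * f_high / fs;
  // Seeding with y[-1] = sin(-w), y[0] = 0 makes the recursion emit
  // sin(w), sin(2w), ... on successive steps.
  double y1_prev2 = std::sin(-w1), y1_prev1 = 0.0;
  double y2_prev2 = std::sin(-w2), y2_prev1 = 0.0;
  std::vector<float> out(n);
  for (int i = 0; i < n; ++i) {
    const double y1 = 2.0 * std::cos(w1) * y1_prev1 - y1_prev2;
    const double y2 = 2.0 * std::cos(w2) * y2_prev1 - y2_prev2;
    y1_prev2 = y1_prev1;
    y1_prev1 = y1;
    y2_prev2 = y2_prev1;
    y2_prev1 = y2;
    out[i] = static_cast<float>(0.5 * (y1 + y2));
  }
  return out;
}

int main() {
  // DTMF digit '1' is the 697 Hz + 1209 Hz pair, generated at 8 kHz here.
  std::vector<float> tone = GenerateDtmf(697.0, 1209.0, 8000, 80);
  std::printf("first sample: %f\n", tone[0]);
}
```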
+ + RTC_DISALLOW_COPY_AND_ASSIGN(DtmfToneGenerator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h index 2e64583ec..2d22b1128 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand.h @@ -15,6 +15,7 @@ #include #include "modules/audio_coding/neteq/audio_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -40,9 +41,6 @@ class Expand { virtual ~Expand(); - Expand(const Expand&) = delete; - Expand& operator=(const Expand&) = delete; - // Resets the object. virtual void Reset(); @@ -136,6 +134,8 @@ class Expand { bool stop_muting_; size_t expand_duration_samples_; std::unique_ptr channel_parameters_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Expand); }; struct ExpandFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h index a29d3532f..246aaffd4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/expand_uma_logger.h @@ -17,6 +17,7 @@ #include "absl/types/optional.h" #include "api/neteq/tick_timer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,9 +36,6 @@ class ExpandUmaLogger { ~ExpandUmaLogger(); - ExpandUmaLogger(const ExpandUmaLogger&) = delete; - ExpandUmaLogger& operator=(const ExpandUmaLogger&) = delete; - // In this call, value should be an incremental sample counter. The sample // rate must be strictly positive. void UpdateSampleCounter(uint64_t value, int sample_rate_hz); @@ -50,6 +48,8 @@ class ExpandUmaLogger { absl::optional last_logged_value_; uint64_t last_value_ = 0; int sample_rate_hz_ = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(ExpandUmaLogger); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h index 2f27106bf..13aa31df8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/merge.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_NETEQ_MERGE_H_ #include "modules/audio_coding/neteq/audio_multi_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,9 +36,6 @@ class Merge { SyncBuffer* sync_buffer); virtual ~Merge(); - Merge(const Merge&) = delete; - Merge& operator=(const Merge&) = delete; - // The main method to produce the audio data. The decoded data is supplied in // `input`, having `input_length` samples in total for all channels // (interleaved). The result is written to `output`. 
The number of channels @@ -95,6 +93,8 @@ class Merge { int16_t input_downsampled_[kInputDownsampLength]; AudioMultiVector expanded_; std::vector temp_data_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Merge); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h index e2cd6c605..2522e31a3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/neteq_impl.h @@ -29,6 +29,7 @@ #include "modules/audio_coding/neteq/packet.h" #include "modules/audio_coding/neteq/random_vector.h" #include "modules/audio_coding/neteq/statistics_calculator.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -123,9 +124,6 @@ class NetEqImpl : public webrtc::NetEq { ~NetEqImpl() override; - NetEqImpl(const NetEqImpl&) = delete; - NetEqImpl& operator=(const NetEqImpl&) = delete; - // Inserts a new packet into NetEq. Returns 0 on success, -1 on failure. int InsertPacket(const RTPHeader& rtp_header, rtc::ArrayView payload) override; @@ -401,6 +399,9 @@ class NetEqImpl : public webrtc::NetEq { ExpandUmaLogger speech_expand_uma_logger_ RTC_GUARDED_BY(mutex_); bool no_time_stretching_ RTC_GUARDED_BY(mutex_); // Only used for test. rtc::BufferT concealment_audio_ RTC_GUARDED_BY(mutex_); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(NetEqImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc index 461ee7fa4..6ffae0975 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.cc @@ -159,7 +159,7 @@ int Normal::Process(const int16_t* input, if (cng_decoder) { // Generate long enough for 48kHz. - if (!cng_decoder->Generate(cng_output, false)) { + if (!cng_decoder->Generate(cng_output, 0)) { // Error returned; set return vector to all zeros. memset(cng_output, 0, sizeof(cng_output)); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h index 772293b60..3607208f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/normal.h @@ -17,6 +17,7 @@ #include "api/neteq/neteq.h" #include "modules/audio_coding/neteq/statistics_calculator.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/safe_conversions.h" namespace webrtc { @@ -48,9 +49,6 @@ class Normal { virtual ~Normal() {} - Normal(const Normal&) = delete; - Normal& operator=(const Normal&) = delete; - // Performs the "Normal" operation. The decoder data is supplied in `input`, // having `length` samples in total for all channels (interleaved). The // result is written to `output`. 
The number of channels allocated in @@ -70,6 +68,8 @@ class Normal { const size_t samples_per_ms_; const int16_t default_win_slope_Q14_; StatisticsCalculator* const statistics_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Normal); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h index c6fb47ffb..20a053323 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/packet_buffer.h @@ -15,6 +15,7 @@ #include "modules/audio_coding/neteq/decoder_database.h" #include "modules/audio_coding/neteq/packet.h" #include "modules/include/module_common_types_public.h" // IsNewerTimestamp +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -50,9 +51,6 @@ class PacketBuffer { // Deletes all packets in the buffer before destroying the buffer. virtual ~PacketBuffer(); - PacketBuffer(const PacketBuffer&) = delete; - PacketBuffer& operator=(const PacketBuffer&) = delete; - // Flushes the buffer and deletes all packets in it. virtual void Flush(StatisticsCalculator* stats); @@ -175,6 +173,7 @@ class PacketBuffer { size_t max_number_of_packets_; PacketList buffer_; const TickTimer* tick_timer_; + RTC_DISALLOW_COPY_AND_ASSIGN(PacketBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h index 3bd91b9ed..3134d5f3a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/post_decode_vad.h @@ -16,6 +16,7 @@ #include "api/audio_codecs/audio_decoder.h" #include "common_audio/vad/include/webrtc_vad.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -30,9 +31,6 @@ class PostDecodeVad { virtual ~PostDecodeVad(); - PostDecodeVad(const PostDecodeVad&) = delete; - PostDecodeVad& operator=(const PostDecodeVad&) = delete; - // Enables post-decode VAD. void Enable(); @@ -65,6 +63,8 @@ class PostDecodeVad { bool active_speech_; int sid_interval_counter_; ::VadInst* vad_instance_; + + RTC_DISALLOW_COPY_AND_ASSIGN(PostDecodeVad); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h index 6338b993f..708ebfd1b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/preemptive_expand.h @@ -15,6 +15,7 @@ #include #include "modules/audio_coding/neteq/time_stretch.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,9 +36,6 @@ class PreemptiveExpand : public TimeStretch { old_data_length_per_channel_(0), overlap_samples_(overlap_samples) {} - PreemptiveExpand(const PreemptiveExpand&) = delete; - PreemptiveExpand& operator=(const PreemptiveExpand&) = delete; - // This method performs the actual PreemptiveExpand operation. The samples are // read from `input`, of length `input_length` elements, and are written to // `output`. 
The number of samples added through time-stretching is @@ -69,6 +67,8 @@ class PreemptiveExpand : public TimeStretch { private: size_t old_data_length_per_channel_; size_t overlap_samples_; + + RTC_DISALLOW_COPY_AND_ASSIGN(PreemptiveExpand); }; struct PreemptiveExpandFactory { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h index 4a782f111..1d3760055 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/random_vector.h @@ -14,6 +14,8 @@ #include #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { // This class generates pseudo-random samples. @@ -24,9 +26,6 @@ class RandomVector { RandomVector() : seed_(777), seed_increment_(1) {} - RandomVector(const RandomVector&) = delete; - RandomVector& operator=(const RandomVector&) = delete; - void Reset(); void Generate(size_t length, int16_t* output); @@ -40,6 +39,8 @@ class RandomVector { private: uint32_t seed_; int16_t seed_increment_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RandomVector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h index 2f48e4b7d..55660913d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/red_payload_splitter.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_NETEQ_RED_PAYLOAD_SPLITTER_H_ #include "modules/audio_coding/neteq/packet.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,9 +30,6 @@ class RedPayloadSplitter { virtual ~RedPayloadSplitter() {} - RedPayloadSplitter(const RedPayloadSplitter&) = delete; - RedPayloadSplitter& operator=(const RedPayloadSplitter&) = delete; - // Splits each packet in `packet_list` into its separate RED payloads. Each // RED payload is packetized into a Packet. The original elements in // `packet_list` are properly deleted, and replaced by the new packets. @@ -45,6 +43,9 @@ class RedPayloadSplitter { // is accepted. Any packet with another payload type is discarded. virtual void CheckRedPayloads(PacketList* packet_list, const DecoderDatabase& decoder_database); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(RedPayloadSplitter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h index 269e6a09b..5c3fb75d1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/statistics_calculator.h @@ -15,6 +15,7 @@ #include #include "api/neteq/neteq.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -27,9 +28,6 @@ class StatisticsCalculator { virtual ~StatisticsCalculator(); - StatisticsCalculator(const StatisticsCalculator&) = delete; - StatisticsCalculator& operator=(const StatisticsCalculator&) = delete; - // Resets most of the counters. 
void Reset(); @@ -199,6 +197,8 @@ class StatisticsCalculator { PeriodicUmaAverage excess_buffer_delay_; PeriodicUmaCount buffer_full_counter_; bool decoded_output_played_ = false; + + RTC_DISALLOW_COPY_AND_ASSIGN(StatisticsCalculator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h index cf56c432e..7d24730cb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/sync_buffer.h @@ -20,6 +20,7 @@ #include "modules/audio_coding/neteq/audio_multi_vector.h" #include "modules/audio_coding/neteq/audio_vector.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -31,9 +32,6 @@ class SyncBuffer : public AudioMultiVector { end_timestamp_(0), dtmf_index_(0) {} - SyncBuffer(const SyncBuffer&) = delete; - SyncBuffer& operator=(const SyncBuffer&) = delete; - // Returns the number of samples yet to play out from the buffer. size_t FutureLength() const; @@ -104,6 +102,8 @@ class SyncBuffer : public AudioMultiVector { size_t next_index_; uint32_t end_timestamp_; // The timestamp of the last sample in the buffer. size_t dtmf_index_; // Index to the first non-DTMF sample in the buffer. + + RTC_DISALLOW_COPY_AND_ASSIGN(SyncBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h index f0ddaebec..998d08071 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/time_stretch.h @@ -14,6 +14,7 @@ #include // memset, size_t #include "modules/audio_coding/neteq/audio_multi_vector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -48,9 +49,6 @@ class TimeStretch { virtual ~TimeStretch() {} - TimeStretch(const TimeStretch&) = delete; - TimeStretch& operator=(const TimeStretch&) = delete; - // This method performs the processing common to both Accelerate and // PreemptiveExpand. ReturnCodes Process(const int16_t* input, @@ -107,6 +105,8 @@ class TimeStretch { int32_t vec2_energy, size_t peak_index, int scaling) const; + + RTC_DISALLOW_COPY_AND_ASSIGN(TimeStretch); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h index f42ce7207..4d578fc43 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_coding/neteq/timestamp_scaler.h @@ -12,6 +12,7 @@ #define MODULES_AUDIO_CODING_NETEQ_TIMESTAMP_SCALER_H_ #include "modules/audio_coding/neteq/packet.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,9 +34,6 @@ class TimestampScaler { virtual ~TimestampScaler() {} - TimestampScaler(const TimestampScaler&) = delete; - TimestampScaler& operator=(const TimestampScaler&) = delete; - // Start over. 
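TimestampScaler, whose hunk starts above and continues just below, exists because a few payload types have an RTP clock rate that differs from the codec's sample rate (G.722 is the classic case: 16 kHz audio on an 8 kHz RTP clock). The conversion is an affine mapping anchored at the reference pair `external_ref_` / `internal_ref_`. A hedged sketch of that mapping, not the exact member function:

```cpp
#include <cstdint>
#include <cstdio>

// Sketch: map an external RTP timestamp into the internal (sample-rate)
// domain given a reference pair and a rate ratio. For G.722 the internal
// clock runs twice as fast as the RTP clock, so numerator/denominator = 2/1.
uint32_t ToInternal(uint32_t external, uint32_t external_ref,
                    uint32_t internal_ref, int numerator, int denominator) {
  // Unsigned arithmetic handles RTP timestamp wrap-around in the difference.
  const uint32_t diff = external - external_ref;
  return internal_ref + static_cast<uint32_t>(
      static_cast<uint64_t>(diff) * numerator / denominator);
}

int main() {
  // 20 ms of G.722: 160 ticks on the 8 kHz RTP clock, 320 samples at 16 kHz.
  std::printf("%u\n", ToInternal(/*external=*/1160, /*external_ref=*/1000,
                                 /*internal_ref=*/5000, /*numerator=*/2,
                                 /*denominator=*/1));  // prints 5320
}
```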
virtual void Reset(); @@ -61,6 +59,8 @@ class TimestampScaler { uint32_t external_ref_; uint32_t internal_ref_; const DecoderDatabase& decoder_database_; + + RTC_DISALLOW_COPY_AND_ASSIGN(TimestampScaler); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc index 873e5d608..d393a8877 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.cc @@ -54,7 +54,6 @@ AudioDeviceBuffer::AudioDeviceBuffer(TaskQueueFactory* task_queue_factory) typing_status_(false), play_delay_ms_(0), rec_delay_ms_(0), - capture_timestamp_ns_(0), num_stat_reports_(0), last_timer_task_time_(0), rec_stat_count_(0), @@ -230,12 +229,6 @@ void AudioDeviceBuffer::SetVQEData(int play_delay_ms, int rec_delay_ms) { int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, size_t samples_per_channel) { - return SetRecordedBuffer(audio_buffer, samples_per_channel, 0); -} - -int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, - size_t samples_per_channel, - int64_t capture_timestamp_ns) { // Copy the complete input buffer to the local buffer. const size_t old_size = rec_buffer_.size(); rec_buffer_.SetData(static_cast(audio_buffer), @@ -246,17 +239,6 @@ int32_t AudioDeviceBuffer::SetRecordedBuffer(const void* audio_buffer, RTC_LOG(LS_INFO) << "Size of recording buffer: " << rec_buffer_.size(); } - // If the timestamp is less then or equal to zero, it's not valid and are - // ignored. If we do antimestamp alignment on them they might accidentally - // become greater then zero, and will be handled as if they were a correct - // timestamp. - capture_timestamp_ns_ = - (capture_timestamp_ns > 0) - ? rtc::kNumNanosecsPerMicrosec * - timestamp_aligner_.TranslateTimestamp( - capture_timestamp_ns_ / rtc::kNumNanosecsPerMicrosec, - rtc::TimeMicros()) - : capture_timestamp_ns; // Derive a new level value twice per second and check if it is non-zero. 
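The block removed from audio_device_buffer.cc above translated device capture timestamps onto the rtc::TimeMicros() clock with rtc::TimestampAligner before delivery. A hedged sketch of that usage (the aligner calls are the ones the removed code makes; the wrapper function itself is illustrative):

```cpp
// Sketch of how the removed code used rtc::TimestampAligner: timestamps
// arrive in nanoseconds on the capture device's clock and are mapped onto
// the monotonic rtc::TimeMicros() clock before being handed to the transport.
#include "rtc_base/time_utils.h"         // rtc::TimeMicros, kNumNanosecsPerMicrosec
#include "rtc_base/timestamp_aligner.h"  // rtc::TimestampAligner

int64_t AlignCaptureTimestampNs(rtc::TimestampAligner& aligner,
                                int64_t capture_timestamp_ns) {
  if (capture_timestamp_ns <= 0) {
    // Non-positive timestamps mean "not provided" and are passed through.
    return capture_timestamp_ns;
  }
  const int64_t capture_us =
      capture_timestamp_ns / rtc::kNumNanosecsPerMicrosec;
  return rtc::kNumNanosecsPerMicrosec *
         aligner.TranslateTimestamp(capture_us, rtc::TimeMicros());
}
```

Note that the removed code reads the member `capture_timestamp_ns_` inside the translation; the sketch uses the incoming value, which appears to be the intent.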
int16_t max_abs = 0; RTC_DCHECK_LT(rec_stat_count_, 50); @@ -289,7 +271,7 @@ int32_t AudioDeviceBuffer::DeliverRecordedData() { int32_t res = audio_transport_cb_->RecordedDataIsAvailable( rec_buffer_.data(), frames, bytes_per_frame, rec_channels_, rec_sample_rate_, total_delay_ms, 0, 0, typing_status_, - new_mic_level_dummy, capture_timestamp_ns_); + new_mic_level_dummy); if (res == -1) { RTC_LOG(LS_ERROR) << "RecordedDataIsAvailable() failed"; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h index ea6ab9a93..a0b795319 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_buffer.h @@ -23,7 +23,6 @@ #include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_queue.h" #include "rtc_base/thread_annotations.h" -#include "rtc_base/timestamp_aligner.h" namespace webrtc { @@ -98,13 +97,8 @@ class AudioDeviceBuffer { size_t RecordingChannels() const; size_t PlayoutChannels() const; - // TODO(bugs.webrtc.org/13621) Deprecate this function virtual int32_t SetRecordedBuffer(const void* audio_buffer, size_t samples_per_channel); - - virtual int32_t SetRecordedBuffer(const void* audio_buffer, - size_t samples_per_channel, - int64_t capture_timestamp_ns); virtual void SetVQEData(int play_delay_ms, int rec_delay_ms); virtual int32_t DeliverRecordedData(); uint32_t NewMicLevel() const; @@ -193,9 +187,6 @@ class AudioDeviceBuffer { int play_delay_ms_; int rec_delay_ms_; - // Capture timestamp. - int64_t capture_timestamp_ns_; - // Counts number of times LogStats() has been called. size_t num_stat_reports_ RTC_GUARDED_BY(task_queue_); @@ -228,10 +219,6 @@ class AudioDeviceBuffer { // being printed in the LogStats() task. bool log_stats_ RTC_GUARDED_BY(task_queue_); - // Used for converting capture timestaps (recieved from AudioRecordThread - // via AudioRecordJni::DataIsRecorded) to RTC clock. - rtc::TimestampAligner timestamp_aligner_; - // Should *never* be defined in production builds. Only used for testing. // When defined, the output signal will be replaced by a sinus tone at 440Hz. #ifdef AUDIO_DEVICE_PLAYS_SINUS_TONE diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc index e54494c28..f655c5a78 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/audio_device_data_observer.cc @@ -45,34 +45,17 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { // Make sure we have a valid ADM before returning it to user. bool IsValid() { return is_valid_; } - int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samples_per_sec, - uint32_t total_delay_ms, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel) override { - return RecordedDataIsAvailable(audioSamples, nSamples, nBytesPerSample, - nChannels, samples_per_sec, total_delay_ms, - clockDrift, currentMicLevel, keyPressed, - newMicLevel, /*capture_timestamp_ns*/ 0); - } - // AudioTransport methods overrides. 
int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samples_per_sec, - uint32_t total_delay_ms, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel, - int64_t capture_timestamp_ns) override { + const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samples_per_sec, + const uint32_t total_delay_ms, + const int32_t clockDrift, + const uint32_t currentMicLevel, + const bool keyPressed, + uint32_t& newMicLevel) override { int32_t res = 0; // Capture PCM data of locally captured audio. if (observer_) { @@ -84,8 +67,7 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { if (audio_transport_) { res = audio_transport_->RecordedDataIsAvailable( audioSamples, nSamples, nBytesPerSample, nChannels, samples_per_sec, - total_delay_ms, clockDrift, currentMicLevel, keyPressed, newMicLevel, - capture_timestamp_ns); + total_delay_ms, clockDrift, currentMicLevel, keyPressed, newMicLevel); } return res; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h index 36dc45f19..b59cafcb5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_data_observer.h @@ -26,16 +26,16 @@ namespace webrtc { class AudioDeviceDataObserver { public: virtual void OnCaptureData(const void* audio_samples, - size_t num_samples, - size_t bytes_per_sample, - size_t num_channels, - uint32_t samples_per_sec) = 0; + const size_t num_samples, + const size_t bytes_per_sample, + const size_t num_channels, + const uint32_t samples_per_sec) = 0; virtual void OnRenderData(const void* audio_samples, - size_t num_samples, - size_t bytes_per_sample, - size_t num_channels, - uint32_t samples_per_sec) = 0; + const size_t num_samples, + const size_t bytes_per_sample, + const size_t num_channels, + const uint32_t samples_per_sec) = 0; AudioDeviceDataObserver() = default; virtual ~AudioDeviceDataObserver() = default; @@ -56,14 +56,14 @@ rtc::scoped_refptr CreateAudioDeviceWithDataObserver( // Creates an ADM instance with AudioDeviceDataObserver registered. rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, + const AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, std::unique_ptr observer); // Creates an ADM instance with AudioDeviceDataObserver registered. 
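The AudioDeviceDataObserver interface shown in the hunk above is the hook for tapping raw capture and render PCM from the wrapped ADM. A minimal sketch of an observer written against the interface as declared after this patch; the peak-meter logic is illustrative only and assumes interleaved 16-bit PCM:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstdlib>

#include "modules/audio_device/include/audio_device_data_observer.h"

// Sketch: an observer that tracks the peak level of captured audio.
class PeakMeterObserver : public webrtc::AudioDeviceDataObserver {
 public:
  void OnCaptureData(const void* audio_samples,
                     const size_t num_samples,
                     const size_t /*bytes_per_sample*/,
                     const size_t num_channels,
                     const uint32_t /*samples_per_sec*/) override {
    // num_samples is per channel; the buffer is assumed to be interleaved
    // 16-bit PCM, which is what the ADM delivers here.
    const int16_t* pcm = static_cast<const int16_t*>(audio_samples);
    for (size_t i = 0; i < num_samples * num_channels; ++i) {
      peak_ = std::max<int>(peak_, std::abs(pcm[i]));
    }
  }

  void OnRenderData(const void* /*audio_samples*/,
                    const size_t /*num_samples*/,
                    const size_t /*bytes_per_sample*/,
                    const size_t /*num_channels*/,
                    const uint32_t /*samples_per_sec*/) override {}

  int peak() const { return peak_; }

 private:
  int peak_ = 0;
};
```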
ABSL_DEPRECATED("") rtc::scoped_refptr CreateAudioDeviceWithDataObserver( - AudioDeviceModule::AudioLayer audio_layer, + const AudioDeviceModule::AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, AudioDeviceDataObserver* observer); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h index 89d33f853..01129a47a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/audio_device_defines.h @@ -33,43 +33,22 @@ static const int kAdmMaxPlayoutBufferSizeMs = 250; class AudioTransport { public: - // TODO(bugs.webrtc.org/13620) Deprecate this function virtual int32_t RecordedDataIsAvailable(const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, + const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, + const uint32_t totalDelayMS, + const int32_t clockDrift, + const uint32_t currentMicLevel, + const bool keyPressed, uint32_t& newMicLevel) = 0; // NOLINT - virtual int32_t RecordedDataIsAvailable( - const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel, - int64_t estimatedCaptureTimeNS) { // NOLINT - // TODO(webrtc:13620) Make the default behaver of the new API to behave as - // the old API. This can be pure virtual if all uses of the old API is - // removed. - return RecordedDataIsAvailable( - audioSamples, nSamples, nBytesPerSample, nChannels, samplesPerSec, - totalDelayMS, clockDrift, currentMicLevel, keyPressed, newMicLevel); - } - // Implementation has to setup safe values for all specified out parameters. 
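Several hunks only add `const` to parameters passed by value (RecordedDataIsAvailable above; NeedMorePlayData, GetDevicesInfo, and FastApproxLog2f further down). Top-level const on a by-value parameter is not part of the function type, so callers, overriders, and mocks are unaffected; it only stops the definition from modifying its local copy. A tiny sketch:

```cpp
#include <cstddef>

// Top-level const on a by-value parameter does not change the signature:
// these two declarations name the same function, not an overload pair.
int Consume(std::size_t n);
int Consume(const std::size_t n);  // redeclaration of the line above

int Consume(const std::size_t n) {
  // n += 1;  // would not compile: the local copy is const in this definition
  return static_cast<int>(n * 2);
}
```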
- virtual int32_t NeedMorePlayData(size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, + virtual int32_t NeedMorePlayData(const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, void* audioSamples, size_t& nSamplesOut, // NOLINT int64_t* elapsed_time_ms, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h index e1be5f422..8f71a2d71 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/include/mock_audio_transport.h @@ -25,38 +25,23 @@ class MockAudioTransport : public AudioTransport { MOCK_METHOD(int32_t, RecordedDataIsAvailable, (const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, + const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, + const uint32_t totalDelayMS, + const int32_t clockDrift, + const uint32_t currentMicLevel, + const bool keyPressed, uint32_t& newMicLevel), (override)); - MOCK_METHOD(int32_t, - RecordedDataIsAvailable, - (const void* audioSamples, - size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, - uint32_t totalDelayMS, - int32_t clockDrift, - uint32_t currentMicLevel, - bool keyPressed, - uint32_t& newMicLevel, - int64_t estimated_capture_time_ns), - (override)); - MOCK_METHOD(int32_t, NeedMorePlayData, - (size_t nSamples, - size_t nBytesPerSample, - size_t nChannels, - uint32_t samplesPerSec, + (const size_t nSamples, + const size_t nBytesPerSample, + const size_t nChannels, + const uint32_t samplesPerSec, void* audioSamples, size_t& nSamplesOut, int64_t* elapsed_time_ms, diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h index 23e21d3ce..1f4a23164 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h @@ -131,11 +131,11 @@ class AudioDeviceLinuxALSA : public AudioDeviceGeneric { int32_t InitPlayoutLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); - int32_t GetDevicesInfo(int32_t function, - bool playback, - int32_t enumDeviceNo = 0, + int32_t GetDevicesInfo(const int32_t function, + const bool playback, + const int32_t enumDeviceNo = 0, char* enumDeviceName = NULL, - int32_t ednLen = 0) const; + const int32_t ednLen = 0) const; int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle); bool KeyPressed() const; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h index 9484b075e..6cfb65974 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h @@ -15,6 +15,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" 
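In the mock_audio_transport.h hunk above, the MOCK_METHOD block for the eleven-argument RecordedDataIsAvailable goes away along with the virtual it mocked, because `(override)` requires a matching base-class method. A reduced sketch of the pattern with a hypothetical two-method interface (names and signatures here are illustrative, not part of the patch):

```cpp
#include <cstddef>
#include <cstdint>

#include "test/gmock.h"

// Hypothetical minimal interface and mock showing the MOCK_METHOD pattern
// used by MockAudioTransport. If the three-argument virtual were removed
// from the interface, its MOCK_METHOD block would have to be removed too.
class FakeTransport {
 public:
  virtual ~FakeTransport() = default;
  virtual int32_t Deliver(const void* samples, size_t n) = 0;
  virtual int32_t Deliver(const void* samples, size_t n, int64_t capture_ns) = 0;
};

class MockFakeTransport : public FakeTransport {
 public:
  MOCK_METHOD(int32_t, Deliver, (const void* samples, size_t n), (override));
  MOCK_METHOD(int32_t,
              Deliver,
              (const void* samples, size_t n, int64_t capture_ns),
              (override));
};
```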
// This file provides macros for creating "symbol table" classes to simplify the // dynamic loading of symbols from DLLs. Currently the implementation only @@ -54,9 +55,6 @@ class LateBindingSymbolTable { ~LateBindingSymbolTable() { Unload(); } - LateBindingSymbolTable(const LateBindingSymbolTable&) = delete; - LateBindingSymbolTable& operator=(LateBindingSymbolTable&) = delete; - static int NumSymbols() { return SYMBOL_TABLE_SIZE; } // We do not use this, but we offer it for theoretical convenience. @@ -111,6 +109,8 @@ class LateBindingSymbolTable { DllHandle handle_; bool undefined_symbols_; void* symbols_[SYMBOL_TABLE_SIZE]; + + RTC_DISALLOW_COPY_AND_ASSIGN(LateBindingSymbolTable); }; // This macro must be invoked in a header to declare a symbol table class. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h index 76b113177..737fcbdc4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_mixer/audio_mixer_impl.h @@ -22,6 +22,7 @@ #include "api/scoped_refptr.h" #include "modules/audio_mixer/frame_combiner.h" #include "modules/audio_mixer/output_rate_calculator.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/race_checker.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -47,9 +48,6 @@ class AudioMixerImpl : public AudioMixer { ~AudioMixerImpl() override; - AudioMixerImpl(const AudioMixerImpl&) = delete; - AudioMixerImpl& operator=(const AudioMixerImpl&) = delete; - // AudioMixer functions bool AddSource(Source* audio_source) override; void RemoveSource(Source* audio_source) override; @@ -94,6 +92,8 @@ class AudioMixerImpl : public AudioMixer { // Component that handles actual adding of audio frames. FrameCombiner frame_combiner_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AudioMixerImpl); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h index 32b564f14..3bfff967a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_common.h @@ -85,10 +85,10 @@ constexpr size_t GetRenderDelayBufferSize(size_t down_sampling_factor, Aec3Optimization DetectOptimization(); // Computes the log2 of the input in a fast an approximate manner. -float FastApproxLog2f(float in); +float FastApproxLog2f(const float in); // Returns dB from a power quantity expressed in log2. -float Log2TodB(float in_log2); +float Log2TodB(const float in_log2); static_assert(1 << kBlockSizeLog2 == kBlockSize, "Proper number of shifts for blocksize"); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h index c68de5396..6f7fbe4d0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/aec3_fft.h @@ -18,6 +18,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/fft_data.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,9 +30,6 @@ class Aec3Fft { Aec3Fft(); - Aec3Fft(const Aec3Fft&) = delete; - Aec3Fft& operator=(const Aec3Fft&) = delete; - // Computes the FFT. 
Note that both the input and output are modified. void Fft(std::array* x, FftData* X) const { RTC_DCHECK(x); @@ -68,6 +66,8 @@ class Aec3Fft { private: const OouraFft ooura_fft_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Aec3Fft); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h index aa1830fc0..682aec912 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/alignment_mixer.h @@ -49,7 +49,7 @@ class AlignmentMixer { int selected_channel_ = 0; size_t block_counter_ = 0; - void Downmix(rtc::ArrayView> x, + void Downmix(const rtc::ArrayView> x, rtc::ArrayView y) const; int SelectChannel(rtc::ArrayView> x); }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h index a70d0dac5..4ba053683 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/block_processor_metrics.h @@ -11,6 +11,8 @@ #ifndef MODULES_AUDIO_PROCESSING_AEC3_BLOCK_PROCESSOR_METRICS_H_ #define MODULES_AUDIO_PROCESSING_AEC3_BLOCK_PROCESSOR_METRICS_H_ +#include "rtc_base/constructor_magic.h" + namespace webrtc { // Handles the reporting of metrics for the block_processor. @@ -18,9 +20,6 @@ class BlockProcessorMetrics { public: BlockProcessorMetrics() = default; - BlockProcessorMetrics(const BlockProcessorMetrics&) = delete; - BlockProcessorMetrics& operator=(const BlockProcessorMetrics&) = delete; - // Updates the metric with new capture data. void UpdateCapture(bool underrun); @@ -39,6 +38,8 @@ class BlockProcessorMetrics { int render_buffer_underruns_ = 0; int render_buffer_overruns_ = 0; int buffer_render_calls_ = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(BlockProcessorMetrics); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h index 2785b765c..16eaf3550 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/comfort_noise_generator.h @@ -19,6 +19,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec_state.h" #include "modules/audio_processing/aec3/fft_data.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/arch.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h index dbff3d9ff..3ccd292f0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/decimator.h @@ -17,6 +17,7 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/utility/cascaded_biquad_filter.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -25,9 +26,6 @@ class Decimator { public: explicit Decimator(size_t down_sampling_factor); - Decimator(const Decimator&) = delete; - Decimator& operator=(const Decimator&) = delete; - // Downsamples the signal. 
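The Decimator hunk that begins above and continues below pairs an anti-aliasing low-pass (CascadedBiQuadFilter) with the actual down-sampling by `down_sampling_factor_`. Conceptually, decimation is "filter, then keep every Nth sample". A hedged sketch of that structure with a trivial placeholder filter (the real AEC3 code uses cascaded biquads and small power-of-two factors):

```cpp
#include <cstddef>
#include <vector>

// Sketch: decimate by `factor` after a placeholder smoothing filter. The
// one-pole filter here stands in for the anti-aliasing biquads and is
// illustrative only; `factor` is assumed to be >= 1.
std::vector<float> Decimate(const std::vector<float>& in, size_t factor) {
  std::vector<float> filtered(in.size());
  float state = 0.f;
  for (size_t i = 0; i < in.size(); ++i) {
    state = 0.75f * state + 0.25f * in[i];  // crude low-pass placeholder
    filtered[i] = state;
  }
  std::vector<float> out;
  out.reserve(in.size() / factor + 1);
  for (size_t i = 0; i < filtered.size(); i += factor) {
    out.push_back(filtered[i]);  // keep every factor-th filtered sample
  }
  return out;
}
```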
void Decimate(rtc::ArrayView in, rtc::ArrayView out); @@ -35,6 +33,8 @@ class Decimator { const size_t down_sampling_factor_; CascadedBiQuadFilter anti_aliasing_filter_; CascadedBiQuadFilter noise_reduction_filter_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Decimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h index b9d6f87d2..1ffc017b7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_audibility.h @@ -19,6 +19,7 @@ #include "modules/audio_processing/aec3/render_buffer.h" #include "modules/audio_processing/aec3/spectrum_buffer.h" #include "modules/audio_processing/aec3/stationarity_estimator.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc index 419a056d2..58fb6a49a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_canceller3.cc @@ -290,11 +290,6 @@ EchoCanceller3Config AdjustConfig(const EchoCanceller3Config& config) { adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = true; } - if (field_trial::IsDisabled("WebRTC-Aec3ConservativeTailFreqResponse")) { - adjusted_cfg.ep_strength.use_conservative_tail_frequency_response = false; - } - - if (field_trial::IsEnabled("WebRTC-Aec3ShortHeadroomKillSwitch")) { // Two blocks headroom. adjusted_cfg.delay.delay_headroom_samples = kBlockSize * 2; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h index d8f97757b..6c8c21282 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_path_delay_estimator.h @@ -21,6 +21,7 @@ #include "modules/audio_processing/aec3/delay_estimate.h" #include "modules/audio_processing/aec3/matched_filter.h" #include "modules/audio_processing/aec3/matched_filter_lag_aggregator.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,9 +37,6 @@ class EchoPathDelayEstimator { size_t num_capture_channels); ~EchoPathDelayEstimator(); - EchoPathDelayEstimator(const EchoPathDelayEstimator&) = delete; - EchoPathDelayEstimator& operator=(const EchoPathDelayEstimator&) = delete; - // Resets the estimation. If the delay confidence is reset, the reset behavior // is as if the call is restarted. void Reset(bool reset_delay_confidence); @@ -73,6 +71,8 @@ class EchoPathDelayEstimator { // Internal reset method with more granularity. 
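The block deleted from echo_canceller3.cc above was one half of a field-trial kill switch: enabling "WebRTC-Aec3ConservativeTailFreqResponse" turns the conservative tail response on (that branch stays in the file), while the removed IsDisabled check allowed it to be forced off again. A hedged sketch of the pattern as it reads before the revert, with the trial name and config field taken from the hunk:

```cpp
#include "api/audio/echo_canceller3_config.h"
#include "system_wrappers/include/field_trial.h"

// Sketch of the enable/disable kill-switch pattern around a single config
// flag. IsEnabled() matches trials whose value starts with "Enabled",
// IsDisabled() those that start with "Disabled".
void ApplyTailResponseTrials(webrtc::EchoCanceller3Config& cfg) {
  using webrtc::field_trial::IsDisabled;
  using webrtc::field_trial::IsEnabled;
  if (IsEnabled("WebRTC-Aec3ConservativeTailFreqResponse")) {
    cfg.ep_strength.use_conservative_tail_frequency_response = true;
  }
  if (IsDisabled("WebRTC-Aec3ConservativeTailFreqResponse")) {
    cfg.ep_strength.use_conservative_tail_frequency_response = false;
  }
}
```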
void Reset(bool reset_lag_aggregator, bool reset_delay_confidence); + + RTC_DISALLOW_COPY_AND_ASSIGN(EchoPathDelayEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h index aec8084d7..c3d8e20da 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/echo_remover_metrics.h @@ -15,6 +15,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec_state.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,9 +34,6 @@ class EchoRemoverMetrics { EchoRemoverMetrics(); - EchoRemoverMetrics(const EchoRemoverMetrics&) = delete; - EchoRemoverMetrics& operator=(const EchoRemoverMetrics&) = delete; - // Updates the metric with new data. void Update( const AecState& aec_state, @@ -54,6 +52,8 @@ class EchoRemoverMetrics { DbMetric erle_time_domain_; bool saturated_capture_ = false; bool metrics_reported_ = false; + + RTC_DISALLOW_COPY_AND_ASSIGN(EchoRemoverMetrics); }; namespace aec3 { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h index 639a52c56..89bf6ace3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/erl_estimator.h @@ -18,6 +18,7 @@ #include "api/array_view.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -27,9 +28,6 @@ class ErlEstimator { explicit ErlEstimator(size_t startup_phase_length_blocks_); ~ErlEstimator(); - ErlEstimator(const ErlEstimator&) = delete; - ErlEstimator& operator=(const ErlEstimator&) = delete; - // Resets the ERL estimation. void Reset(); @@ -51,6 +49,7 @@ class ErlEstimator { float erl_time_domain_; int hold_counter_time_domain_; size_t blocks_since_reset_ = 0; + RTC_DISALLOW_COPY_AND_ASSIGN(ErlEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc index c07e5c864..be954d3a1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.cc @@ -170,16 +170,11 @@ void FilterAnalyzer::PreProcessFilters( std::fill(h_highpass_[ch].begin() + region_.start_sample_, h_highpass_[ch].begin() + region_.end_sample_ + 1, 0.f); - float* h_highpass_ch = h_highpass_[ch].data(); - const float* filters_time_domain_ch = filters_time_domain[ch].data(); - const size_t region_end = region_.end_sample_; for (size_t k = std::max(h.size() - 1, region_.start_sample_); - k <= region_end; ++k) { - float tmp = h_highpass_ch[k]; + k <= region_.end_sample_; ++k) { for (size_t j = 0; j < h.size(); ++j) { - tmp += filters_time_domain_ch[k - j] * h[j]; + h_highpass_[ch][k] += filters_time_domain[ch][k - j] * h[j]; } - h_highpass_ch[k] = tmp; } } } @@ -235,23 +230,19 @@ bool FilterAnalyzer::ConsistentFilterDetector::Detect( peak_index > filter_to_analyze.size() - 129 ? 
0 : peak_index + 128; } - float filter_floor_accum = filter_floor_accum_; - float filter_secondary_peak = filter_secondary_peak_; for (size_t k = region.start_sample_; k < std::min(region.end_sample_ + 1, filter_floor_low_limit_); ++k) { float abs_h = fabsf(filter_to_analyze[k]); - filter_floor_accum += abs_h; - filter_secondary_peak = std::max(filter_secondary_peak, abs_h); + filter_floor_accum_ += abs_h; + filter_secondary_peak_ = std::max(filter_secondary_peak_, abs_h); } for (size_t k = std::max(filter_floor_high_limit_, region.start_sample_); k <= region.end_sample_; ++k) { float abs_h = fabsf(filter_to_analyze[k]); - filter_floor_accum += abs_h; - filter_secondary_peak = std::max(filter_secondary_peak, abs_h); + filter_floor_accum_ += abs_h; + filter_secondary_peak_ = std::max(filter_secondary_peak_, abs_h); } - filter_floor_accum_ = filter_floor_accum; - filter_secondary_peak_ = filter_secondary_peak; if (region.end_sample_ == filter_to_analyze.size() - 1) { float filter_floor = filter_floor_accum_ / diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h index e05fb7113..b0b707011 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/filter_analyzer.h @@ -20,6 +20,7 @@ #include "api/array_view.h" #include "api/audio/echo_canceller3_config.h" #include "modules/audio_processing/aec3/aec3_common.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h index 7a082176d..2b720a4de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/fullband_erle_estimator.h @@ -67,7 +67,7 @@ class FullBandErleEstimator { // Updates the estimator with a new point, returns true // if the instantaneous ERLE was updated due to having enough // points for performing the estimate. - bool Update(float Y2_sum, float E2_sum); + bool Update(const float Y2_sum, const float E2_sum); // Resets the instantaneous ERLE estimator to its initial state. void Reset(); // Resets the members related with an instantaneous estimate. diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc index 6613d143c..7d988f22e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.cc @@ -166,9 +166,7 @@ void MatchedFilterCore_SSE2(size_t x_start_index, // Initialize values for the accumulation. __m128 s_128 = _mm_set1_ps(0); - __m128 s_128_4 = _mm_set1_ps(0); __m128 x2_sum_128 = _mm_set1_ps(0); - __m128 x2_sum_128_4 = _mm_set1_ps(0); float x2_sum = 0.f; float s = 0; @@ -181,26 +179,20 @@ void MatchedFilterCore_SSE2(size_t x_start_index, const int chunk2 = h_size - chunk1; for (int limit : {chunk1, chunk2}) { // Perform 128 bit vector operations. - const int limit_by_8 = limit >> 3; - for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { + const int limit_by_4 = limit >> 2; + for (int k = limit_by_4; k > 0; --k, h_p += 4, x_p += 4) { // Load the data into 128 bit vectors. 
const __m128 x_k = _mm_loadu_ps(x_p); const __m128 h_k = _mm_loadu_ps(h_p); - const __m128 x_k_4 = _mm_loadu_ps(x_p + 4); - const __m128 h_k_4 = _mm_loadu_ps(h_p + 4); const __m128 xx = _mm_mul_ps(x_k, x_k); - const __m128 xx_4 = _mm_mul_ps(x_k_4, x_k_4); // Compute and accumulate x * x and h * x. x2_sum_128 = _mm_add_ps(x2_sum_128, xx); - x2_sum_128_4 = _mm_add_ps(x2_sum_128_4, xx_4); const __m128 hx = _mm_mul_ps(h_k, x_k); - const __m128 hx_4 = _mm_mul_ps(h_k_4, x_k_4); s_128 = _mm_add_ps(s_128, hx); - s_128_4 = _mm_add_ps(s_128_4, hx_4); } // Perform non-vector operations for any remaining items. - for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { + for (int k = limit - limit_by_4 * 4; k > 0; --k, ++h_p, ++x_p) { const float x_k = *x_p; x2_sum += x_k * x_k; s += *h_p * x_k; @@ -210,10 +202,8 @@ void MatchedFilterCore_SSE2(size_t x_start_index, } // Combine the accumulated vector and scalar values. - x2_sum_128 = _mm_add_ps(x2_sum_128, x2_sum_128_4); float* v = reinterpret_cast(&x2_sum_128); x2_sum += v[0] + v[1] + v[2] + v[3]; - s_128 = _mm_add_ps(s_128, s_128_4); v = reinterpret_cast(&s_128); s += v[0] + v[1] + v[2] + v[3]; @@ -308,41 +298,6 @@ void MatchedFilterCore(size_t x_start_index, } } -size_t MaxSquarePeakIndex(rtc::ArrayView h) { - if (h.size() < 2) { - return 0; - } - float max_element1 = h[0] * h[0]; - float max_element2 = h[1] * h[1]; - size_t lag_estimate1 = 0; - size_t lag_estimate2 = 1; - const size_t last_index = h.size() - 1; - // Keeping track of even & odd max elements separately typically allows the - // compiler to produce more efficient code. - for (size_t k = 2; k < last_index; k += 2) { - float element1 = h[k] * h[k]; - float element2 = h[k + 1] * h[k + 1]; - if (element1 > max_element1) { - max_element1 = element1; - lag_estimate1 = k; - } - if (element2 > max_element2) { - max_element2 = element2; - lag_estimate2 = k + 1; - } - } - if (max_element2 > max_element1) { - max_element1 = max_element2; - lag_estimate1 = lag_estimate2; - } - // In case of odd h size, we have not yet checked the last element. - float last_element = h[last_index] * h[last_index]; - if (last_element > max_element1) { - return last_index; - } - return lag_estimate1; -} - } // namespace aec3 MatchedFilter::MatchedFilter(ApmDataDumper* data_dumper, @@ -430,15 +385,17 @@ void MatchedFilter::Update(const DownsampledRenderBuffer& render_buffer, } // Compute anchor for the matched filter error. - float error_sum_anchor = 0.0f; - for (size_t k = 0; k < y.size(); ++k) { - error_sum_anchor += y[k] * y[k]; - } + const float error_sum_anchor = + std::inner_product(y.begin(), y.end(), y.begin(), 0.f); // Estimate the lag in the matched filter as the distance to the portion in // the filter that contributes the most to the matched filter output. This // is detected as the peak of the matched filter. - const size_t lag_estimate = aec3::MaxSquarePeakIndex(filters_[n]); + const size_t lag_estimate = std::distance( + filters_[n].begin(), + std::max_element( + filters_[n].begin(), filters_[n].end(), + [](float a, float b) -> bool { return a * a < b * b; })); // Update the lag estimates for the matched filter. 
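The MatchedFilter::Update hunk above restores the older formulation: the error anchor is computed with std::inner_product, and the lag is the index of the largest squared coefficient found via std::max_element with a squaring comparator, replacing the hand-unrolled MaxSquarePeakIndex helper that this patch removes. A minimal, self-contained sketch of the restored approach; the coefficient values are illustrative.

    #include <algorithm>
    #include <cstddef>
    #include <iterator>
    #include <numeric>
    #include <vector>

    int main() {
      std::vector<float> h = {0.1f, -0.9f, 0.4f, 0.8f};

      // Signal energy: sum of squares, accumulated in float.
      const float energy =
          std::inner_product(h.begin(), h.end(), h.begin(), 0.f);

      // Index of the element with the largest squared magnitude.
      const size_t peak = std::distance(
          h.begin(),
          std::max_element(h.begin(), h.end(),
                           [](float a, float b) { return a * a < b * b; }));

      // energy is roughly 1.62f and peak is 1 (the -0.9 coefficient).
      return (energy > 0.f && peak == 1) ? 0 : 1;
    }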
lag_estimates_[n] = LagEstimate( diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h index dd4a67839..c6410ab4e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter.h @@ -74,9 +74,6 @@ void MatchedFilterCore(size_t x_start_index, bool* filters_updated, float* error_sum); -// Find largest peak of squared values in array. -size_t MaxSquarePeakIndex(rtc::ArrayView h); - } // namespace aec3 // Produces recursively updated cross-correlation estimates for several signal diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc index 8b7010f1d..ed32102aa 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/matched_filter_avx2.cc @@ -39,9 +39,7 @@ void MatchedFilterCore_AVX2(size_t x_start_index, // Initialize values for the accumulation. __m256 s_256 = _mm256_set1_ps(0); - __m256 s_256_8 = _mm256_set1_ps(0); __m256 x2_sum_256 = _mm256_set1_ps(0); - __m256 x2_sum_256_8 = _mm256_set1_ps(0); float x2_sum = 0.f; float s = 0; @@ -54,22 +52,18 @@ void MatchedFilterCore_AVX2(size_t x_start_index, const int chunk2 = h_size - chunk1; for (int limit : {chunk1, chunk2}) { // Perform 256 bit vector operations. - const int limit_by_16 = limit >> 4; - for (int k = limit_by_16; k > 0; --k, h_p += 16, x_p += 16) { + const int limit_by_8 = limit >> 3; + for (int k = limit_by_8; k > 0; --k, h_p += 8, x_p += 8) { // Load the data into 256 bit vectors. __m256 x_k = _mm256_loadu_ps(x_p); __m256 h_k = _mm256_loadu_ps(h_p); - __m256 x_k_8 = _mm256_loadu_ps(x_p + 8); - __m256 h_k_8 = _mm256_loadu_ps(h_p + 8); // Compute and accumulate x * x and h * x. x2_sum_256 = _mm256_fmadd_ps(x_k, x_k, x2_sum_256); - x2_sum_256_8 = _mm256_fmadd_ps(x_k_8, x_k_8, x2_sum_256_8); s_256 = _mm256_fmadd_ps(h_k, x_k, s_256); - s_256_8 = _mm256_fmadd_ps(h_k_8, x_k_8, s_256_8); } // Perform non-vector operations for any remaining items. - for (int k = limit - limit_by_16 * 16; k > 0; --k, ++h_p, ++x_p) { + for (int k = limit - limit_by_8 * 8; k > 0; --k, ++h_p, ++x_p) { const float x_k = *x_p; x2_sum += x_k * x_k; s += *h_p * x_k; @@ -79,8 +73,6 @@ void MatchedFilterCore_AVX2(size_t x_start_index, } // Sum components together. 
- x2_sum_256 = _mm256_add_ps(x2_sum_256, x2_sum_256_8); - s_256 = _mm256_add_ps(s_256, s_256_8); __m128 x2_sum_128 = _mm_add_ps(_mm256_extractf128_ps(x2_sum_256, 0), _mm256_extractf128_ps(x2_sum_256, 1)); __m128 s_128 = _mm_add_ps(_mm256_extractf128_ps(s_256, 0), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h index 309122d80..8c527a142 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_delay_controller_metrics.h @@ -15,6 +15,7 @@ #include "absl/types/optional.h" #include "modules/audio_processing/aec3/clockdrift_detector.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -23,10 +24,6 @@ class RenderDelayControllerMetrics { public: RenderDelayControllerMetrics(); - RenderDelayControllerMetrics(const RenderDelayControllerMetrics&) = delete; - RenderDelayControllerMetrics& operator=(const RenderDelayControllerMetrics&) = - delete; - // Updates the metric with new data. void Update(absl::optional delay_samples, size_t buffer_delay_blocks, @@ -49,6 +46,8 @@ class RenderDelayControllerMetrics { bool metrics_reported_ = false; bool initial_update = true; int skew_shift_count_ = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(RenderDelayControllerMetrics); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h index 2e4aaa4ba..c7a3d8b7a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/render_signal_analyzer.h @@ -20,6 +20,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/render_buffer.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -29,9 +30,6 @@ class RenderSignalAnalyzer { explicit RenderSignalAnalyzer(const EchoCanceller3Config& config); ~RenderSignalAnalyzer(); - RenderSignalAnalyzer(const RenderSignalAnalyzer&) = delete; - RenderSignalAnalyzer& operator=(const RenderSignalAnalyzer&) = delete; - // Updates the render signal analysis with the most recent render signal. 
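The SSE2 and AVX2 matched-filter hunks above undo a two-accumulator unrolling (s_128/s_128_4 and s_256/s_256_8, presumably kept separate to hide instruction latency) and return to a single vector accumulator per quantity, halving the loop step. A minimal SSE2 sketch of the single-accumulator dot-product-plus-energy loop and its horizontal reduction; DotAndEnergySse2 is an illustrative name, and the input length is assumed to be a multiple of 4.

    #include <xmmintrin.h>

    // Accumulates s = sum(h[i] * x[i]) and x2 = sum(x[i] * x[i]) with one
    // SSE vector accumulator each, then reduces the four lanes to scalars.
    static void DotAndEnergySse2(const float* h, const float* x, int n,
                                 float* s_out, float* x2_out) {
      __m128 s_128 = _mm_set1_ps(0.f);
      __m128 x2_128 = _mm_set1_ps(0.f);
      for (int k = 0; k < n; k += 4) {
        const __m128 x_k = _mm_loadu_ps(x + k);
        const __m128 h_k = _mm_loadu_ps(h + k);
        x2_128 = _mm_add_ps(x2_128, _mm_mul_ps(x_k, x_k));
        s_128 = _mm_add_ps(s_128, _mm_mul_ps(h_k, x_k));
      }
      // Horizontal sum of the four lanes of each accumulator.
      float lanes[4];
      _mm_storeu_ps(lanes, s_128);
      *s_out = lanes[0] + lanes[1] + lanes[2] + lanes[3];
      _mm_storeu_ps(lanes, x2_128);
      *x2_out = lanes[0] + lanes[1] + lanes[2] + lanes[3];
    }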
void Update(const RenderBuffer& render_buffer, const absl::optional& delay_partitions); @@ -55,6 +53,8 @@ class RenderSignalAnalyzer { std::array narrow_band_counters_; absl::optional narrow_peak_band_; size_t narrow_peak_counter_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RenderSignalAnalyzer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h index 375bfda5a..dcf2292c7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_filter.h @@ -17,6 +17,7 @@ #include "modules/audio_processing/aec3/aec3_common.h" #include "modules/audio_processing/aec3/aec3_fft.h" #include "modules/audio_processing/aec3/fft_data.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -26,10 +27,6 @@ class SuppressionFilter { int sample_rate_hz, size_t num_capture_channels_); ~SuppressionFilter(); - - SuppressionFilter(const SuppressionFilter&) = delete; - SuppressionFilter& operator=(const SuppressionFilter&) = delete; - void ApplyGain(rtc::ArrayView comfort_noise, rtc::ArrayView comfort_noise_high_bands, const std::array& suppression_gain, @@ -43,6 +40,7 @@ class SuppressionFilter { const size_t num_capture_channels_; const Aec3Fft fft_; std::vector>> e_output_old_; + RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionFilter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h index c8e13f7cf..7c4a1c9f7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aec3/suppression_gain.h @@ -25,6 +25,7 @@ #include "modules/audio_processing/aec3/nearend_detector.h" #include "modules/audio_processing/aec3/render_signal_analyzer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -35,10 +36,6 @@ class SuppressionGain { int sample_rate_hz, size_t num_capture_channels); ~SuppressionGain(); - - SuppressionGain(const SuppressionGain&) = delete; - SuppressionGain& operator=(const SuppressionGain&) = delete; - void GetGain( rtc::ArrayView> nearend_spectrum, @@ -137,6 +134,8 @@ class SuppressionGain { // echo spectrum. 
const bool use_unbounded_echo_spectrum_; std::unique_ptr dominant_nearend_detector_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SuppressionGain); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h index 3de49315c..d6d0d8daf 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/aecm/aecm_core.h @@ -248,7 +248,7 @@ int WebRtcAecm_ProcessBlock(AecmCore* aecm, // void WebRtcAecm_BufferFarFrame(AecmCore* const aecm, const int16_t* const farend, - int farLen); + const int farLen); //////////////////////////////////////////////////////////////////////////////// // WebRtcAecm_FetchFarFrame() @@ -263,8 +263,8 @@ void WebRtcAecm_BufferFarFrame(AecmCore* const aecm, // void WebRtcAecm_FetchFarFrame(AecmCore* const aecm, int16_t* const farend, - int farLen, - int knownDelay); + const int farLen, + const int knownDelay); // All the functions below are intended to be private @@ -339,8 +339,8 @@ int16_t WebRtcAecm_CalcSuppressionGain(AecmCore* const aecm); // void WebRtcAecm_CalcEnergies(AecmCore* aecm, const uint16_t* far_spectrum, - int16_t far_q, - uint32_t nearEner, + const int16_t far_q, + const uint32_t nearEner, int32_t* echoEst); /////////////////////////////////////////////////////////////////////////////// @@ -374,9 +374,9 @@ int16_t WebRtcAecm_CalcStepSize(AecmCore* const aecm); // void WebRtcAecm_UpdateChannel(AecmCore* aecm, const uint16_t* far_spectrum, - int16_t far_q, + const int16_t far_q, const uint16_t* const dfa, - int16_t mu, + const int16_t mu, int32_t* echoEst); extern const int16_t WebRtcAecm_kCosTable[]; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc index a018ff9f9..e36d32c87 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.cc @@ -21,11 +21,9 @@ namespace webrtc { namespace { -constexpr int kDefaultLevelDbfs = -18; -constexpr int kNumAnalysisFrames = 100; -constexpr double kActivityThreshold = 0.3; -constexpr int kNum10msFramesInOneSecond = 100; -constexpr int kMaxSampleRateHz = 384000; +const int kDefaultLevelDbfs = -18; +const int kNumAnalysisFrames = 100; +const double kActivityThreshold = 0.3; } // namespace @@ -37,10 +35,8 @@ Agc::Agc() Agc::~Agc() = default; -void Agc::Process(rtc::ArrayView audio) { - const int sample_rate_hz = audio.size() * kNum10msFramesInOneSecond; - RTC_DCHECK_LE(sample_rate_hz, kMaxSampleRateHz); - vad_.ProcessChunk(audio.data(), audio.size(), sample_rate_hz); +void Agc::Process(const int16_t* audio, size_t length, int sample_rate_hz) { + vad_.ProcessChunk(audio, length, sample_rate_hz); const std::vector& rms = vad_.chunkwise_rms(); const std::vector& probabilities = vad_.chunkwise_voice_probabilities(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h index da4280822..2693d9488 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc.h @@ -13,7 +13,6 @@ #include -#include "api/array_view.h" #include "modules/audio_processing/vad/voice_activity_detector.h" namespace webrtc { @@ -27,7 +26,7 @@ class Agc { // `audio` must be mono; in a multi-channel stream, 
provide the first (usually // left) channel. - virtual void Process(rtc::ArrayView audio); + virtual void Process(const int16_t* audio, size_t length, int sample_rate_hz); // Retrieves the difference between the target RMS level and the current // signal RMS level in dB. Returns true if an update is available and false diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc index 8bce7690a..b2b8a51ac 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.cc @@ -13,7 +13,6 @@ #include #include -#include "api/array_view.h" #include "common_audio/include/audio_util.h" #include "modules/audio_processing/agc/gain_control.h" #include "modules/audio_processing/agc/gain_map_internal.h" @@ -63,27 +62,28 @@ bool UseMaxAnalogChannelLevel() { return field_trial::IsEnabled("WebRTC-UseMaxAnalogAgcChannelLevel"); } -// If the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial is specified, -// parses it and returns a value between 0 and 255 depending on the field-trial -// string. Returns an unspecified value if the field trial is not specified, if -// disabled or if it cannot be parsed. Example: -// 'WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-80' => returns 80. -absl::optional GetMinMicLevelOverride() { +// Returns kMinMicLevel if no field trial exists or if it has been disabled. +// Returns a value between 0 and 255 depending on the field-trial string. +// Example: 'WebRTC-Audio-AgcMinMicLevelExperiment/Enabled-80' => returns 80. +int GetMinMicLevel() { + RTC_LOG(LS_INFO) << "[agc] GetMinMicLevel"; constexpr char kMinMicLevelFieldTrial[] = "WebRTC-Audio-AgcMinMicLevelExperiment"; if (!webrtc::field_trial::IsEnabled(kMinMicLevelFieldTrial)) { - return absl::nullopt; + RTC_LOG(LS_INFO) << "[agc] Using default min mic level: " << kMinMicLevel; + return kMinMicLevel; } const auto field_trial_string = webrtc::field_trial::FindFullName(kMinMicLevelFieldTrial); int min_mic_level = -1; sscanf(field_trial_string.c_str(), "Enabled-%d", &min_mic_level); if (min_mic_level >= 0 && min_mic_level <= 255) { + RTC_LOG(LS_INFO) << "[agc] Experimental min mic level: " << min_mic_level; return min_mic_level; } else { RTC_LOG(LS_WARNING) << "[agc] Invalid parameter for " << kMinMicLevelFieldTrial << ", ignored."; - return absl::nullopt; + return kMinMicLevel; } } @@ -124,7 +124,7 @@ float ComputeClippedRatio(const float* const* audio, int num_clipped_in_ch = 0; for (size_t i = 0; i < samples_per_channel; ++i) { RTC_DCHECK(audio[ch]); - if (audio[ch][i] >= 32767.0f || audio[ch][i] <= -32768.0f) { + if (audio[ch][i] >= 32767.f || audio[ch][i] <= -32768.f) { ++num_clipped_in_ch; } } @@ -204,7 +204,9 @@ void MonoAgc::Initialize() { check_volume_on_next_process_ = true; } -void MonoAgc::Process(rtc::ArrayView audio) { +void MonoAgc::Process(const int16_t* audio, + size_t samples_per_channel, + int sample_rate_hz) { new_compression_to_set_ = absl::nullopt; if (check_volume_on_next_process_) { @@ -214,7 +216,7 @@ void MonoAgc::Process(rtc::ArrayView audio) { CheckVolumeAndReset(); } - agc_->Process(audio); + agc_->Process(audio, samples_per_channel, sample_rate_hz); UpdateGain(); if (!disable_digital_adaptive_) { @@ -445,6 +447,7 @@ AgcManagerDirect::AgcManagerDirect( Agc* agc, int startup_min_level, int clipped_level_min, + int sample_rate_hz, int clipped_level_step, float 
clipped_ratio_threshold, int clipped_wait_frames, @@ -453,6 +456,7 @@ AgcManagerDirect::AgcManagerDirect( startup_min_level, clipped_level_min, /*disable_digital_adaptive*/ false, + sample_rate_hz, clipped_level_step, clipped_ratio_threshold, clipped_wait_frames, @@ -467,14 +471,15 @@ AgcManagerDirect::AgcManagerDirect( int startup_min_level, int clipped_level_min, bool disable_digital_adaptive, + int sample_rate_hz, int clipped_level_step, float clipped_ratio_threshold, int clipped_wait_frames, const ClippingPredictorConfig& clipping_config) - : min_mic_level_override_(GetMinMicLevelOverride()), - data_dumper_( + : data_dumper_( new ApmDataDumper(rtc::AtomicOps::Increment(&instance_counter_))), use_min_channel_level_(!UseMaxAnalogChannelLevel()), + sample_rate_hz_(sample_rate_hz), num_capture_channels_(num_capture_channels), disable_digital_adaptive_(disable_digital_adaptive), frames_since_clipped_(clipped_wait_frames), @@ -492,11 +497,7 @@ AgcManagerDirect::AgcManagerDirect( clipping_predictor_log_counter_(0), clipping_rate_log_(0.0f), clipping_rate_log_counter_(0) { - const int min_mic_level = min_mic_level_override_.value_or(kMinMicLevel); - RTC_LOG(LS_INFO) << "[agc] Min mic level: " << min_mic_level - << " (overridden: " - << (min_mic_level_override_.has_value() ? "yes" : "no") - << ")"; + const int min_mic_level = GetMinMicLevel(); for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { ApmDataDumper* data_dumper_ch = ch == 0 ? data_dumper_.get() : nullptr; @@ -651,20 +652,27 @@ void AgcManagerDirect::AnalyzePreProcess(const float* const* audio, } void AgcManagerDirect::Process(const AudioBuffer* audio) { - RTC_DCHECK(audio); AggregateChannelLevels(); if (!capture_output_used_) { return; } - const size_t num_frames_per_band = audio->num_frames_per_band(); for (size_t ch = 0; ch < channel_agcs_.size(); ++ch) { + int16_t* audio_use = nullptr; std::array audio_data; - int16_t* audio_use = audio_data.data(); - FloatS16ToS16(audio->split_bands_const_f(ch)[0], num_frames_per_band, - audio_use); - channel_agcs_[ch]->Process({audio_use, num_frames_per_band}); + int num_frames_per_band; + if (audio) { + FloatS16ToS16(audio->split_bands_const_f(ch)[0], + audio->num_frames_per_band(), audio_data.data()); + audio_use = audio_data.data(); + num_frames_per_band = audio->num_frames_per_band(); + } else { + // Only used for testing. + // TODO(peah): Change unittests to only allow on non-null audio input. 
+ num_frames_per_band = 320; + } + channel_agcs_[ch]->Process(audio_use, num_frames_per_band, sample_rate_hz_); new_compressions_to_set_[ch] = channel_agcs_[ch]->new_compression(); } @@ -719,10 +727,6 @@ void AgcManagerDirect::AggregateChannelLevels() { } } } - if (min_mic_level_override_.has_value()) { - stream_analog_level_ = - std::max(stream_analog_level_, *min_mic_level_override_); - } } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h index ce67a971b..a452ee1c4 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/agc_manager_direct.h @@ -14,7 +14,6 @@ #include #include "absl/types/optional.h" -#include "api/array_view.h" #include "modules/audio_processing/agc/agc.h" #include "modules/audio_processing/agc/clipping_predictor.h" #include "modules/audio_processing/agc/clipping_predictor_evaluator.h" @@ -48,6 +47,7 @@ class AgcManagerDirect final { int startup_min_level, int clipped_level_min, bool disable_digital_adaptive, + int sample_rate_hz, int clipped_level_step, float clipped_ratio_threshold, int clipped_wait_frames, @@ -72,6 +72,7 @@ class AgcManagerDirect final { int stream_analog_level() const { return stream_analog_level_; } void set_stream_analog_level(int level); int num_channels() const { return num_capture_channels_; } + int sample_rate_hz() const { return sample_rate_hz_; } // If available, returns a new compression gain for the digital gain control. absl::optional GetDigitalComressionGain(); @@ -116,6 +117,7 @@ class AgcManagerDirect final { Agc* agc, int startup_min_level, int clipped_level_min, + int sample_rate_hz, int clipped_level_step, float clipped_ratio_threshold, int clipped_wait_frames, @@ -126,10 +128,10 @@ class AgcManagerDirect final { void AggregateChannelLevels(); - const absl::optional min_mic_level_override_; std::unique_ptr data_dumper_; static int instance_counter_; const bool use_min_channel_level_; + const int sample_rate_hz_; const int num_capture_channels_; const bool disable_digital_adaptive_; @@ -169,7 +171,9 @@ class MonoAgc { void HandleClipping(int clipped_level_step); - void Process(rtc::ArrayView audio); + void Process(const int16_t* audio, + size_t samples_per_channel, + int sample_rate_hz); void set_stream_analog_level(int level) { stream_analog_level_ = level; } int stream_analog_level() const { return stream_analog_level_; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h index 3080e1563..0ef41c6e5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc/mock_agc.h @@ -11,7 +11,6 @@ #ifndef MODULES_AUDIO_PROCESSING_AGC_MOCK_AGC_H_ #define MODULES_AUDIO_PROCESSING_AGC_MOCK_AGC_H_ -#include "api/array_view.h" #include "modules/audio_processing/agc/agc.h" #include "test/gmock.h" @@ -20,7 +19,10 @@ namespace webrtc { class MockAgc : public Agc { public: virtual ~MockAgc() {} - MOCK_METHOD(void, Process, (rtc::ArrayView audio), (override)); + MOCK_METHOD(void, + Process, + (const int16_t* audio, size_t length, int sample_rate_hz), + (override)); MOCK_METHOD(bool, GetRmsErrorDb, (int* error), (override)); MOCK_METHOD(void, Reset, (), (override)); MOCK_METHOD(int, set_target_level_dbfs, (int level), 
(override)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h index d26b55950..d96aedaf9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/fixed_digital_level_estimator.h @@ -16,6 +16,7 @@ #include "modules/audio_processing/agc2/agc2_common.h" #include "modules/audio_processing/include/audio_frame_view.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -33,10 +34,6 @@ class FixedDigitalLevelEstimator { FixedDigitalLevelEstimator(int sample_rate_hz, ApmDataDumper* apm_data_dumper); - FixedDigitalLevelEstimator(const FixedDigitalLevelEstimator&) = delete; - FixedDigitalLevelEstimator& operator=(const FixedDigitalLevelEstimator&) = - delete; - // The input is assumed to be in FloatS16 format. Scaled input will // produce similarly scaled output. A frame of with kFrameDurationMs // ms of audio produces a level estimates in the same scale. The @@ -60,6 +57,8 @@ class FixedDigitalLevelEstimator { float filter_state_level_; int samples_in_frame_; int samples_in_sub_frame_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FixedDigitalLevelEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h index b1a5cf473..af993204c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/interpolated_gain_curve.h @@ -15,6 +15,7 @@ #include #include "modules/audio_processing/agc2/agc2_common.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/gtest_prod_util.h" #include "system_wrappers/include/metrics.h" @@ -63,9 +64,6 @@ class InterpolatedGainCurve { const std::string& histogram_name_prefix); ~InterpolatedGainCurve(); - InterpolatedGainCurve(const InterpolatedGainCurve&) = delete; - InterpolatedGainCurve& operator=(const InterpolatedGainCurve&) = delete; - Stats get_stats() const { return stats_; } // Given a non-negative input level (linear scale), a scalar factor to apply @@ -145,6 +143,8 @@ class InterpolatedGainCurve { // Stats. 
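The agc_manager_direct.cc hunks earlier in this patch fold the minimum mic level back into GetMinMicLevel(): the value comes from the "WebRTC-Audio-AgcMinMicLevelExperiment" field trial, whose "Enabled-<value>" payload is parsed with sscanf and accepted only in the 0-255 range, falling back to kMinMicLevel otherwise. A minimal sketch of that parsing step; ParseMinMicLevel is an illustrative name and a plain std::string stands in for the field-trial lookup.

    #include <cstdio>
    #include <string>

    // Returns the level encoded in an "Enabled-<value>" field-trial string,
    // or default_level if the payload is missing or out of range.
    static int ParseMinMicLevel(const std::string& trial, int default_level) {
      int min_mic_level = -1;
      std::sscanf(trial.c_str(), "Enabled-%d", &min_mic_level);
      if (min_mic_level >= 0 && min_mic_level <= 255) {
        return min_mic_level;
      }
      return default_level;
    }

    // ParseMinMicLevel("Enabled-80", 64) == 80
    // ParseMinMicLevel("Enabled-999", 64) == 64   (out of range)
    // ParseMinMicLevel("Disabled", 64) == 64      (no match)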
mutable Stats stats_; + + RTC_DISALLOW_COPY_AND_ASSIGN(InterpolatedGainCurve); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h index 669e202c2..f8894a308 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/agc2/limiter.h @@ -17,6 +17,7 @@ #include "modules/audio_processing/agc2/fixed_digital_level_estimator.h" #include "modules/audio_processing/agc2/interpolated_gain_curve.h" #include "modules/audio_processing/include/audio_frame_view.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { class ApmDataDumper; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc index 9a1aaee82..8a3ddf5ba 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.cc @@ -24,11 +24,13 @@ #include "common_audio/include/audio_util.h" #include "modules/audio_processing/aec_dump/aec_dump_factory.h" #include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/common.h" #include "modules/audio_processing/include/audio_frame_view.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/time_utils.h" @@ -49,6 +51,20 @@ namespace webrtc { namespace { +static bool LayoutHasKeyboard(AudioProcessing::ChannelLayout layout) { + switch (layout) { + case AudioProcessing::kMono: + case AudioProcessing::kStereo: + return false; + case AudioProcessing::kMonoAndKeyboard: + case AudioProcessing::kStereoAndKeyboard: + return true; + } + + RTC_DCHECK_NOTREACHED(); + return false; +} + bool SampleRateSupportsMultiBand(int sample_rate_hz) { return sample_rate_hz == AudioProcessing::kSampleRate32kHz || sample_rate_hz == AudioProcessing::kSampleRate48kHz; @@ -113,13 +129,6 @@ static const size_t kMaxAllowedValuesOfSamplesPerFrame = 480; // reverse and forward call numbers. static const size_t kMaxNumFramesToBuffer = 100; -void PackRenderAudioBufferForEchoDetector(const AudioBuffer& audio, - std::vector& packed_buffer) { - packed_buffer.clear(); - packed_buffer.insert(packed_buffer.end(), audio.channels_const()[0], - audio.channels_const()[0] + audio.num_frames()); -} - } // namespace // Throughout webrtc, it's assumed that success is represented by zero. 
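The audio_processing_impl.cc hunk above reintroduces LayoutHasKeyboard(), and the restored common.h later in this patch adds the matching ChannelsFromLayout(); together they map the four legacy ChannelLayout values to a (channel count, keyboard flag) pair. A minimal sketch of that mapping as it is typically consumed when building a StreamConfig; the enum is reproduced locally and Describe is an illustrative name, so the sketch stands alone.

    #include <cstddef>
    #include <utility>

    // Local stand-in for AudioProcessing::ChannelLayout.
    enum class ChannelLayout { kMono, kStereo, kMonoAndKeyboard, kStereoAndKeyboard };

    // Returns {num_channels (excluding keyboard), has_keyboard}.
    static std::pair<size_t, bool> Describe(ChannelLayout layout) {
      switch (layout) {
        case ChannelLayout::kMono:
          return {1, false};
        case ChannelLayout::kStereo:
          return {2, false};
        case ChannelLayout::kMonoAndKeyboard:
          return {1, true};
        case ChannelLayout::kStereoAndKeyboard:
          return {2, true};
      }
      return {0, false};
    }

    // Describe(ChannelLayout::kStereoAndKeyboard) == {2, true}, so the
    // corresponding float* array passed to ProcessStream() holds three
    // pointers: left, right, keyboard.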
@@ -136,31 +145,38 @@ AudioProcessingImpl::SubmoduleStates::SubmoduleStates( bool AudioProcessingImpl::SubmoduleStates::Update( bool high_pass_filter_enabled, bool mobile_echo_controller_enabled, + bool residual_echo_detector_enabled, bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, bool gain_adjustment_enabled, bool echo_controller_enabled, + bool voice_detector_enabled, bool transient_suppressor_enabled) { bool changed = false; changed |= (high_pass_filter_enabled != high_pass_filter_enabled_); changed |= (mobile_echo_controller_enabled != mobile_echo_controller_enabled_); + changed |= + (residual_echo_detector_enabled != residual_echo_detector_enabled_); changed |= (noise_suppressor_enabled != noise_suppressor_enabled_); changed |= (adaptive_gain_controller_enabled != adaptive_gain_controller_enabled_); changed |= (gain_controller2_enabled != gain_controller2_enabled_); changed |= (gain_adjustment_enabled != gain_adjustment_enabled_); changed |= (echo_controller_enabled != echo_controller_enabled_); + changed |= (voice_detector_enabled != voice_detector_enabled_); changed |= (transient_suppressor_enabled != transient_suppressor_enabled_); if (changed) { high_pass_filter_enabled_ = high_pass_filter_enabled; mobile_echo_controller_enabled_ = mobile_echo_controller_enabled; + residual_echo_detector_enabled_ = residual_echo_detector_enabled; noise_suppressor_enabled_ = noise_suppressor_enabled; adaptive_gain_controller_enabled_ = adaptive_gain_controller_enabled; gain_controller2_enabled_ = gain_controller2_enabled; gain_adjustment_enabled_ = gain_adjustment_enabled; echo_controller_enabled_ = echo_controller_enabled; + voice_detector_enabled_ = voice_detector_enabled; transient_suppressor_enabled_ = transient_suppressor_enabled; } @@ -171,7 +187,7 @@ bool AudioProcessingImpl::SubmoduleStates::Update( bool AudioProcessingImpl::SubmoduleStates::CaptureMultiBandSubModulesActive() const { - return CaptureMultiBandProcessingPresent(); + return CaptureMultiBandProcessingPresent() || voice_detector_enabled_; } bool AudioProcessingImpl::SubmoduleStates::CaptureMultiBandProcessingPresent() @@ -280,6 +296,11 @@ AudioProcessingImpl::AudioProcessingImpl( capture_nonlocked_.echo_controller_enabled = static_cast(echo_control_factory_); + // If no echo detector is injected, use the ResidualEchoDetector. + if (!submodules_.echo_detector) { + submodules_.echo_detector = rtc::make_ref_counted(); + } + Initialize(); } @@ -293,6 +314,26 @@ int AudioProcessingImpl::Initialize() { return kNoError; } +int AudioProcessingImpl::Initialize(int capture_input_sample_rate_hz, + int capture_output_sample_rate_hz, + int render_input_sample_rate_hz, + ChannelLayout capture_input_layout, + ChannelLayout capture_output_layout, + ChannelLayout render_input_layout) { + const ProcessingConfig processing_config = { + {{capture_input_sample_rate_hz, ChannelsFromLayout(capture_input_layout), + LayoutHasKeyboard(capture_input_layout)}, + {capture_output_sample_rate_hz, + ChannelsFromLayout(capture_output_layout), + LayoutHasKeyboard(capture_output_layout)}, + {render_input_sample_rate_hz, ChannelsFromLayout(render_input_layout), + LayoutHasKeyboard(render_input_layout)}, + {render_input_sample_rate_hz, ChannelsFromLayout(render_input_layout), + LayoutHasKeyboard(render_input_layout)}}}; + + return Initialize(processing_config); +} + int AudioProcessingImpl::Initialize(const ProcessingConfig& processing_config) { // Run in a single-threaded manner during initialization. 
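SubmoduleStates::Update above grows two more flags (residual_echo_detector_enabled, voice_detector_enabled) but keeps the same shape: OR together per-field comparisons into a single changed bit, and only commit the new values and report a change when something actually differs. A minimal sketch of that pattern with two illustrative flags; FeatureStates is a made-up name.

    // Mirrors the changed |= ... pattern used by SubmoduleStates::Update.
    struct FeatureStates {
      bool Update(bool hp_enabled, bool vad_enabled) {
        bool changed = false;
        changed |= (hp_enabled != hp_enabled_);
        changed |= (vad_enabled != vad_enabled_);
        if (changed) {
          hp_enabled_ = hp_enabled;
          vad_enabled_ = vad_enabled;
        }
        return changed;
      }

     private:
      bool hp_enabled_ = false;
      bool vad_enabled_ = false;
    };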
MutexLock lock_render(&mutex_render_); @@ -368,6 +409,7 @@ void AudioProcessingImpl::InitializeLocked() { InitializeGainController1(); InitializeTransientSuppressor(); InitializeHighPassFilter(true); + InitializeVoiceDetector(); InitializeResidualEchoDetector(); InitializeEchoController(); InitializeGainController2(/*config_has_changed=*/true); @@ -502,6 +544,9 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { const bool agc2_config_changed = config_.gain_controller2 != config.gain_controller2; + const bool voice_detection_config_changed = + config_.voice_detection.enabled != config.voice_detection.enabled; + const bool ns_config_changed = config_.noise_suppression.enabled != config.noise_suppression.enabled || config_.noise_suppression.level != config.noise_suppression.level; @@ -550,6 +595,10 @@ void AudioProcessingImpl::ApplyConfig(const AudioProcessing::Config& config) { InitializeCaptureLevelsAdjuster(); } + if (voice_detection_config_changed) { + InitializeVoiceDetector(); + } + // Reinitialization must happen after all submodule configuration to avoid // additional reinitializations on the next capture / render processing call. if (pipeline_config_changed) { @@ -732,6 +781,7 @@ int AudioProcessingImpl::ProcessStream(const float* const* src, RecordUnprocessedCaptureStream(src); } + capture_.keyboard_info.Extract(src, formats_.api_format.input_stream()); capture_.capture_audio->CopyFrom(src, formats_.api_format.input_stream()); if (capture_.capture_fullband_audio) { capture_.capture_fullband_audio->CopyFrom( @@ -919,18 +969,16 @@ void AudioProcessingImpl::QueueBandedRenderAudio(AudioBuffer* audio) { } void AudioProcessingImpl::QueueNonbandedRenderAudio(AudioBuffer* audio) { - if (submodules_.echo_detector) { - PackRenderAudioBufferForEchoDetector(*audio, red_render_queue_buffer_); - RTC_DCHECK(red_render_signal_queue_); - // Insert the samples into the queue. - if (!red_render_signal_queue_->Insert(&red_render_queue_buffer_)) { - // The data queue is full and needs to be emptied. - EmptyQueuedRenderAudio(); + ResidualEchoDetector::PackRenderAudioBuffer(audio, &red_render_queue_buffer_); - // Retry the insert (should always work). - bool result = red_render_signal_queue_->Insert(&red_render_queue_buffer_); - RTC_DCHECK(result); - } + // Insert the samples into the queue. + if (!red_render_signal_queue_->Insert(&red_render_queue_buffer_)) { + // The data queue is full and needs to be emptied. + EmptyQueuedRenderAudio(); + + // Retry the insert (should always work). 
+ bool result = red_render_signal_queue_->Insert(&red_render_queue_buffer_); + RTC_DCHECK(result); } } @@ -963,26 +1011,23 @@ void AudioProcessingImpl::AllocateRenderQueue() { agc_render_signal_queue_->Clear(); } - if (submodules_.echo_detector) { - if (red_render_queue_element_max_size_ < - new_red_render_queue_element_max_size) { - red_render_queue_element_max_size_ = - new_red_render_queue_element_max_size; + if (red_render_queue_element_max_size_ < + new_red_render_queue_element_max_size) { + red_render_queue_element_max_size_ = new_red_render_queue_element_max_size; - std::vector template_queue_element( - red_render_queue_element_max_size_); + std::vector template_queue_element( + red_render_queue_element_max_size_); - red_render_signal_queue_.reset( - new SwapQueue, RenderQueueItemVerifier>( - kMaxNumFramesToBuffer, template_queue_element, - RenderQueueItemVerifier( - red_render_queue_element_max_size_))); + red_render_signal_queue_.reset( + new SwapQueue, RenderQueueItemVerifier>( + kMaxNumFramesToBuffer, template_queue_element, + RenderQueueItemVerifier( + red_render_queue_element_max_size_))); - red_render_queue_buffer_.resize(red_render_queue_element_max_size_); - red_capture_queue_buffer_.resize(red_render_queue_element_max_size_); - } else { - red_render_signal_queue_->Clear(); - } + red_render_queue_buffer_.resize(red_render_queue_element_max_size_); + red_capture_queue_buffer_.resize(red_render_queue_element_max_size_); + } else { + red_render_signal_queue_->Clear(); } } @@ -1006,10 +1051,9 @@ void AudioProcessingImpl::EmptyQueuedRenderAudioLocked() { } } - if (submodules_.echo_detector) { - while (red_render_signal_queue_->Remove(&red_capture_queue_buffer_)) { - submodules_.echo_detector->AnalyzeRenderAudio(red_capture_queue_buffer_); - } + while (red_render_signal_queue_->Remove(&red_capture_queue_buffer_)) { + RTC_DCHECK(submodules_.echo_detector); + submodules_.echo_detector->AnalyzeRenderAudio(red_capture_queue_buffer_); } } @@ -1204,6 +1248,13 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } } + if (config_.voice_detection.enabled) { + capture_.stats.voice_detected = + submodules_.voice_detector->ProcessCaptureAudio(capture_buffer); + } else { + capture_.stats.voice_detected = absl::nullopt; + } + if (submodules_.agc_manager) { submodules_.agc_manager->Process(capture_buffer); @@ -1238,7 +1289,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_buffer = capture_.capture_fullband_audio.get(); } - if (submodules_.echo_detector) { + if (config_.residual_echo_detector.enabled) { + RTC_DCHECK(submodules_.echo_detector); submodules_.echo_detector->AnalyzeCaptureAudio( rtc::ArrayView(capture_buffer->channels()[0], capture_buffer->num_frames())); @@ -1257,7 +1309,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { capture_buffer->num_channels(), capture_buffer->split_bands_const(0)[kBand0To8kHz], capture_buffer->num_frames_per_band(), - /*reference_data=*/nullptr, /*reference_length=*/0, voice_probability, + capture_.keyboard_info.keyboard_data, + capture_.keyboard_info.num_keyboard_frames, voice_probability, capture_.key_pressed); } @@ -1295,7 +1348,8 @@ int AudioProcessingImpl::ProcessCaptureStreamLocked() { } // Compute echo-detector stats. 
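The render-queue hunks above return to unconditionally allocating and feeding red_render_signal_queue_ (the newer code only did so when an echo detector was injected). The queue is a fixed-capacity SwapQueue, and the producer side follows an insert / drain-on-full / retry pattern guarded by RTC_DCHECK. A minimal sketch of that pattern with a plain bounded queue standing in for SwapQueue; BoundedQueue and QueueRenderAudio are illustrative names.

    #include <cstddef>
    #include <deque>
    #include <utility>
    #include <vector>

    // Stand-in for the fixed-capacity queue; Insert() fails when full.
    class BoundedQueue {
     public:
      explicit BoundedQueue(size_t capacity) : capacity_(capacity) {}
      bool Insert(std::vector<float>* item) {
        if (items_.size() >= capacity_) return false;
        items_.push_back(std::move(*item));
        return true;
      }
      bool Remove(std::vector<float>* item) {
        if (items_.empty()) return false;
        *item = std::move(items_.front());
        items_.pop_front();
        return true;
      }

     private:
      const size_t capacity_;
      std::deque<std::vector<float>> items_;
    };

    void QueueRenderAudio(BoundedQueue& queue, std::vector<float> frame) {
      if (!queue.Insert(&frame)) {
        // Queue full: drain it (EmptyQueuedRenderAudio() in the patch above),
        // then retry; after draining the retry must succeed.
        std::vector<float> scratch;
        while (queue.Remove(&scratch)) {
          // ...hand `scratch` to the consumer, e.g. the echo detector...
        }
        (void)queue.Insert(&frame);
      }
    }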
- if (submodules_.echo_detector) { + if (config_.residual_echo_detector.enabled) { + RTC_DCHECK(submodules_.echo_detector); auto ed_metrics = submodules_.echo_detector->GetMetrics(); capture_.stats.residual_echo_likelihood = ed_metrics.echo_likelihood; capture_.stats.residual_echo_likelihood_recent_max = @@ -1660,11 +1714,11 @@ AudioProcessing::Config AudioProcessingImpl::GetConfig() const { bool AudioProcessingImpl::UpdateActiveSubmoduleStates() { return submodule_states_.Update( config_.high_pass_filter.enabled, !!submodules_.echo_control_mobile, - !!submodules_.noise_suppressor, !!submodules_.gain_control, - !!submodules_.gain_controller2, + config_.residual_echo_detector.enabled, !!submodules_.noise_suppressor, + !!submodules_.gain_control, !!submodules_.gain_controller2, config_.pre_amplifier.enabled || config_.capture_level_adjustment.enabled, capture_nonlocked_.echo_controller_enabled, - !!submodules_.transient_suppressor); + config_.voice_detection.enabled, !!submodules_.transient_suppressor); } void AudioProcessingImpl::InitializeTransientSuppressor() { @@ -1714,6 +1768,14 @@ void AudioProcessingImpl::InitializeHighPassFilter(bool forced_reset) { } } +void AudioProcessingImpl::InitializeVoiceDetector() { + if (config_.voice_detection.enabled) { + submodules_.voice_detector = std::make_unique( + proc_split_sample_rate_hz(), VoiceDetection::kVeryLowLikelihood); + } else { + submodules_.voice_detector.reset(); + } +} void AudioProcessingImpl::InitializeEchoController() { bool use_echo_controller = echo_control_factory_ || @@ -1831,7 +1893,9 @@ void AudioProcessingImpl::InitializeGainController1() { if (!submodules_.agc_manager.get() || submodules_.agc_manager->num_channels() != - static_cast(num_proc_channels())) { + static_cast(num_proc_channels()) || + submodules_.agc_manager->sample_rate_hz() != + capture_nonlocked_.split_rate) { int stream_analog_level = -1; const bool re_creation = !!submodules_.agc_manager; if (re_creation) { @@ -1843,6 +1907,7 @@ void AudioProcessingImpl::InitializeGainController1() { config_.gain_controller1.analog_gain_controller.clipped_level_min, !config_.gain_controller1.analog_gain_controller .enable_digital_adaptive, + capture_nonlocked_.split_rate, config_.gain_controller1.analog_gain_controller.clipped_level_step, config_.gain_controller1.analog_gain_controller.clipped_ratio_threshold, config_.gain_controller1.analog_gain_controller.clipped_wait_frames, @@ -1926,11 +1991,10 @@ void AudioProcessingImpl::InitializeCaptureLevelsAdjuster() { } void AudioProcessingImpl::InitializeResidualEchoDetector() { - if (submodules_.echo_detector) { - submodules_.echo_detector->Initialize( - proc_fullband_sample_rate_hz(), 1, - formats_.render_processing_format.sample_rate_hz(), 1); - } + RTC_DCHECK(submodules_.echo_detector); + submodules_.echo_detector->Initialize( + proc_fullband_sample_rate_hz(), 1, + formats_.render_processing_format.sample_rate_hz(), 1); } void AudioProcessingImpl::InitializeAnalyzer() { @@ -2095,6 +2159,17 @@ AudioProcessingImpl::ApmCaptureState::ApmCaptureState() AudioProcessingImpl::ApmCaptureState::~ApmCaptureState() = default; +void AudioProcessingImpl::ApmCaptureState::KeyboardInfo::Extract( + const float* const* data, + const StreamConfig& stream_config) { + if (stream_config.has_keyboard()) { + keyboard_data = data[stream_config.num_channels()]; + } else { + keyboard_data = NULL; + } + num_keyboard_frames = stream_config.num_frames(); +} + AudioProcessingImpl::ApmRenderState::ApmRenderState() = default; 
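KeyboardInfo::Extract above relies on the channel layout convention restored throughout this patch: when a stream has a keyboard channel, it is the last pointer in the deinterleaved float channel array, at index num_channels() (which excludes the keyboard channel). A minimal sketch of that convention; buffer sizes and values are illustrative.

    #include <array>
    #include <cstddef>
    #include <vector>

    int main() {
      constexpr size_t kNumFrames = 160;  // 10 ms at 16 kHz.
      std::vector<float> left(kNumFrames), right(kNumFrames), keyboard(kNumFrames);

      // Stereo-and-keyboard layout: num_channels() == 2, has_keyboard() == true,
      // so the array handed to ProcessStream() holds three pointers and the
      // keyboard channel sits at index 2 (== num_channels()).
      std::array<const float*, 3> channels = {left.data(), right.data(),
                                              keyboard.data()};
      const size_t num_channels = 2;
      const float* keyboard_data = channels[num_channels];
      return keyboard_data == keyboard.data() ? 0 : 1;
    }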
AudioProcessingImpl::ApmRenderState::~ApmRenderState() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h index 344b8c595..22cdaddb2 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/audio_processing_impl.h @@ -18,7 +18,6 @@ #include #include -#include "api/array_view.h" #include "api/function_view.h" #include "modules/audio_processing/aec3/echo_canceller3.h" #include "modules/audio_processing/agc/agc_manager_direct.h" @@ -37,8 +36,10 @@ #include "modules/audio_processing/ns/noise_suppressor.h" #include "modules/audio_processing/optionally_built_submodule_creators.h" #include "modules/audio_processing/render_queue_item_verifier.h" +#include "modules/audio_processing/residual_echo_detector.h" #include "modules/audio_processing/rms_level.h" #include "modules/audio_processing/transient/transient_suppressor.h" +#include "modules/audio_processing/voice_detection.h" #include "rtc_base/gtest_prod_util.h" #include "rtc_base/ignore_wundef.h" #include "rtc_base/swap_queue.h" @@ -50,10 +51,6 @@ namespace webrtc { class ApmDataDumper; class AudioConverter; -constexpr int RuntimeSettingQueueSize() { - return 100; -} - class AudioProcessingImpl : public AudioProcessing { public: // Methods forcing APM to run in a single-threaded manner. @@ -67,6 +64,12 @@ class AudioProcessingImpl : public AudioProcessing { std::unique_ptr capture_analyzer); ~AudioProcessingImpl() override; int Initialize() override; + int Initialize(int capture_input_sample_rate_hz, + int capture_output_sample_rate_hz, + int render_sample_rate_hz, + ChannelLayout capture_input_layout, + ChannelLayout capture_output_layout, + ChannelLayout render_input_layout) override; int Initialize(const ProcessingConfig& processing_config) override; void ApplyConfig(const AudioProcessing::Config& config) override; bool CreateAndAttachAecDump(const std::string& file_name, @@ -179,7 +182,7 @@ class AudioProcessingImpl : public AudioProcessing { SwapQueue& runtime_settings_; }; - const std::unique_ptr data_dumper_; + std::unique_ptr data_dumper_; static int instance_count_; const bool use_setup_specific_default_aec3_config_; @@ -192,7 +195,7 @@ class AudioProcessingImpl : public AudioProcessing { RuntimeSettingEnqueuer render_runtime_settings_enqueuer_; // EchoControl factory. - const std::unique_ptr echo_control_factory_; + std::unique_ptr echo_control_factory_; class SubmoduleStates { public: @@ -202,11 +205,13 @@ class AudioProcessingImpl : public AudioProcessing { // Updates the submodule state and returns true if it has changed. 
bool Update(bool high_pass_filter_enabled, bool mobile_echo_controller_enabled, + bool residual_echo_detector_enabled, bool noise_suppressor_enabled, bool adaptive_gain_controller_enabled, bool gain_controller2_enabled, bool gain_adjustment_enabled, bool echo_controller_enabled, + bool voice_detector_enabled, bool transient_suppressor_enabled); bool CaptureMultiBandSubModulesActive() const; bool CaptureMultiBandProcessingPresent() const; @@ -224,11 +229,13 @@ class AudioProcessingImpl : public AudioProcessing { const bool capture_analyzer_enabled_ = false; bool high_pass_filter_enabled_ = false; bool mobile_echo_controller_enabled_ = false; + bool residual_echo_detector_enabled_ = false; bool noise_suppressor_enabled_ = false; bool adaptive_gain_controller_enabled_ = false; bool gain_controller2_enabled_ = false; bool gain_adjustment_enabled_ = false; bool echo_controller_enabled_ = false; + bool voice_detector_enabled_ = false; bool transient_suppressor_enabled_ = false; bool first_update_ = true; }; @@ -264,6 +271,7 @@ class AudioProcessingImpl : public AudioProcessing { // already acquired. void InitializeHighPassFilter(bool forced_reset) RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); + void InitializeVoiceDetector() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeGainController1() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); void InitializeTransientSuppressor() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_capture_); @@ -384,18 +392,19 @@ class AudioProcessingImpl : public AudioProcessing { render_pre_processor(std::move(render_pre_processor)), capture_analyzer(std::move(capture_analyzer)) {} // Accessed internally from capture or during initialization. - const rtc::scoped_refptr echo_detector; - const std::unique_ptr capture_post_processor; - const std::unique_ptr render_pre_processor; - const std::unique_ptr capture_analyzer; std::unique_ptr agc_manager; std::unique_ptr gain_control; std::unique_ptr gain_controller2; std::unique_ptr high_pass_filter; + rtc::scoped_refptr echo_detector; std::unique_ptr echo_controller; std::unique_ptr echo_control_mobile; std::unique_ptr noise_suppressor; std::unique_ptr transient_suppressor; + std::unique_ptr capture_post_processor; + std::unique_ptr render_pre_processor; + std::unique_ptr capture_analyzer; + std::unique_ptr voice_detector; std::unique_ptr capture_levels_adjuster; } submodules_; @@ -407,10 +416,10 @@ class AudioProcessingImpl : public AudioProcessing { struct ApmFormatState { ApmFormatState() : // Format of processing streams at input/output call sites. 
- api_format({{{kSampleRate16kHz, 1}, - {kSampleRate16kHz, 1}, - {kSampleRate16kHz, 1}, - {kSampleRate16kHz, 1}}}), + api_format({{{kSampleRate16kHz, 1, false}, + {kSampleRate16kHz, 1, false}, + {kSampleRate16kHz, 1, false}, + {kSampleRate16kHz, 1, false}}}), render_processing_format(kSampleRate16kHz, 1) {} ProcessingConfig api_format; StreamConfig render_processing_format; @@ -457,6 +466,11 @@ class AudioProcessingImpl : public AudioProcessing { int playout_volume; int prev_playout_volume; AudioProcessingStats stats; + struct KeyboardInfo { + void Extract(const float* const* data, const StreamConfig& stream_config); + size_t num_keyboard_frames = 0; + const float* keyboard_data = nullptr; + } keyboard_info; int cached_stream_analog_level_ = 0; } capture_ RTC_GUARDED_BY(mutex_capture_); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h new file mode 100644 index 000000000..e14279ec1 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/common.h @@ -0,0 +1,38 @@ +/* + * Copyright (c) 2014 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_COMMON_H_ +#define MODULES_AUDIO_PROCESSING_COMMON_H_ + +#include "modules/audio_processing/include/audio_processing.h" +#include "rtc_base/checks.h" + +namespace webrtc { + +constexpr int RuntimeSettingQueueSize() { + return 100; +} + +static inline size_t ChannelsFromLayout(AudioProcessing::ChannelLayout layout) { + switch (layout) { + case AudioProcessing::kMono: + case AudioProcessing::kMonoAndKeyboard: + return 1; + case AudioProcessing::kStereo: + case AudioProcessing::kStereoAndKeyboard: + return 2; + } + RTC_DCHECK_NOTREACHED(); + return 0; +} + +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_COMMON_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc index fa5cb8ffe..667d6bfec 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/echo_control_mobile_impl.cc @@ -18,6 +18,7 @@ #include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/include/audio_processing.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -84,9 +85,6 @@ class EchoControlMobileImpl::Canceller { WebRtcAecm_Free(state_); } - Canceller(const Canceller&) = delete; - Canceller& operator=(const Canceller&) = delete; - void* state() { RTC_DCHECK(state_); return state_; @@ -100,6 +98,7 @@ class EchoControlMobileImpl::Canceller { private: void* state_; + RTC_DISALLOW_COPY_AND_ASSIGN(Canceller); }; EchoControlMobileImpl::EchoControlMobileImpl() diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc index 7cc4fb75e..b960e72e8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_frame_proxies.cc @@ -20,8 +20,10 @@ int ProcessAudioFrame(AudioProcessing* ap, AudioFrame* frame) { return AudioProcessing::Error::kNullPointerError; } - StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_); - StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_); + StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_, + /*has_keyboard=*/false); + StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_, + /*has_keyboard=*/false); RTC_DCHECK_EQ(frame->samples_per_channel(), input_config.num_frames()); int result = ap->ProcessStream(frame->data(), input_config, output_config, @@ -55,8 +57,10 @@ int ProcessReverseAudioFrame(AudioProcessing* ap, AudioFrame* frame) { return AudioProcessing::Error::kBadNumberChannelsError; } - StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_); - StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_); + StreamConfig input_config(frame->sample_rate_hz_, frame->num_channels_, + /*has_keyboard=*/false); + StreamConfig output_config(frame->sample_rate_hz_, frame->num_channels_, + /*has_keyboard=*/false); int result = ap->ProcessReverseStream(frame->data(), input_config, output_config, frame->mutable_data()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc index 86edaee08..0fd18fd95 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.cc @@ -145,6 +145,7 @@ std::string AudioProcessing::Config::ToString() const { << NoiseSuppressionLevelToString(noise_suppression.level) << " }, transient_suppression: { enabled: " << transient_suppression.enabled + << " }, voice_detection: { enabled: " << voice_detection.enabled << " }, gain_controller1: { enabled: " << gain_controller1.enabled << ", mode: " << GainController1ModeToString(gain_controller1.mode) << ", target_level_dbfs: " << gain_controller1.target_level_dbfs @@ -204,7 +205,8 @@ std::string AudioProcessing::Config::ToString() const { << gain_controller2.adaptive_digital.max_gain_change_db_per_second << ", max_output_noise_level_dbfs: " << gain_controller2.adaptive_digital.max_output_noise_level_dbfs - << "}}"; + << "}}, residual_echo_detector: { enabled: " + << residual_echo_detector.enabled << " }}"; return builder.str(); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h index 9d6824c03..b3ef3af9b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing.h @@ -30,6 +30,7 @@ #include "api/scoped_refptr.h" #include "modules/audio_processing/include/audio_processing_statistics.h" #include "rtc_base/arraysize.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/system/rtc_export.h" @@ -113,6 +114,8 @@ static constexpr int kClippedLevelMin = 70; // // config.high_pass_filter.enabled = true; // +// config.voice_detection.enabled = true; +// // apm->ApplyConfig(config) // // apm->noise_reduction()->set_level(kHighSuppression); @@ -158,6 +161,7 @@ class 
RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // submodule resets, affecting the audio quality. Use the RuntimeSetting // construct for runtime configuration. struct RTC_EXPORT Config { + // Sets the properties of the audio processing pipeline. struct RTC_EXPORT Pipeline { // Maximum allowed processing rate used internally. May only be set to @@ -230,6 +234,11 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { bool enabled = false; } transient_suppression; + // Enables reporting of `voice_detected` in webrtc::AudioProcessingStats. + struct VoiceDetection { + bool enabled = false; + } voice_detection; + // Enables automatic gain control (AGC) functionality. // The automatic gain control (AGC) component brings the signal to an // appropriate range. This is done by applying a digital gain directly and, @@ -369,15 +378,24 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { } adaptive_digital; } gain_controller2; - // TODO(bugs.webrtc.org/11539): Deprecated. Delete this flag. Replaced by - // injectable submodule. struct ResidualEchoDetector { - bool enabled = false; + bool enabled = true; } residual_echo_detector; std::string ToString() const; }; + // TODO(mgraczyk): Remove once all methods that use ChannelLayout are gone. + enum ChannelLayout { + kMono, + // Left, right. + kStereo, + // Mono, keyboard, and mic. + kMonoAndKeyboard, + // Left, right, keyboard, and mic. + kStereoAndKeyboard + }; + // Specifies the properties of a setting to be passed to AudioProcessing at // runtime. class RuntimeSetting { @@ -512,6 +530,16 @@ class RTC_EXPORT AudioProcessing : public rtc::RefCountInterface { // number of channels as the input. virtual int Initialize(const ProcessingConfig& processing_config) = 0; + // Initialize with unpacked parameters. See Initialize() above for details. + // + // TODO(mgraczyk): Remove once clients are updated to use the new interface. + virtual int Initialize(int capture_input_sample_rate_hz, + int capture_output_sample_rate_hz, + int render_sample_rate_hz, + ChannelLayout capture_input_layout, + ChannelLayout capture_output_layout, + ChannelLayout render_input_layout) = 0; + // TODO(peah): This method is a temporary solution used to take control // over the parameters in the audio processing module and is likely to change. virtual void ApplyConfig(const Config& config) = 0; @@ -769,10 +797,23 @@ class RTC_EXPORT AudioProcessingBuilder { class StreamConfig { public: // sample_rate_hz: The sampling rate of the stream. - // num_channels: The number of audio channels in the stream. - StreamConfig(int sample_rate_hz = 0, size_t num_channels = 0) + // + // num_channels: The number of audio channels in the stream, excluding the + // keyboard channel if it is present. When passing a + // StreamConfig with an array of arrays T*[N], + // + // N == {num_channels + 1 if has_keyboard + // {num_channels if !has_keyboard + // + // has_keyboard: True if the stream has a keyboard channel. When has_keyboard + // is true, the last channel in any corresponding list of + // channels is the keyboard channel. 
+ StreamConfig(int sample_rate_hz = 0, + size_t num_channels = 0, + bool has_keyboard = false) : sample_rate_hz_(sample_rate_hz), num_channels_(num_channels), + has_keyboard_(has_keyboard), num_frames_(calculate_frames(sample_rate_hz)) {} void set_sample_rate_hz(int value) { @@ -780,18 +821,22 @@ class StreamConfig { num_frames_ = calculate_frames(value); } void set_num_channels(size_t value) { num_channels_ = value; } + void set_has_keyboard(bool value) { has_keyboard_ = value; } int sample_rate_hz() const { return sample_rate_hz_; } - // The number of channels in the stream. + // The number of channels in the stream, not including the keyboard channel if + // present. size_t num_channels() const { return num_channels_; } + bool has_keyboard() const { return has_keyboard_; } size_t num_frames() const { return num_frames_; } size_t num_samples() const { return num_channels_ * num_frames_; } bool operator==(const StreamConfig& other) const { return sample_rate_hz_ == other.sample_rate_hz_ && - num_channels_ == other.num_channels_; + num_channels_ == other.num_channels_ && + has_keyboard_ == other.has_keyboard_; } bool operator!=(const StreamConfig& other) const { return !(*this == other); } @@ -804,6 +849,7 @@ class StreamConfig { int sample_rate_hz_; size_t num_channels_; + bool has_keyboard_; size_t num_frames_; }; @@ -893,13 +939,17 @@ class EchoDetector : public rtc::RefCountInterface { int render_sample_rate_hz, int num_render_channels) = 0; - // Analysis (not changing) of the first channel of the render signal. + // Analysis (not changing) of the render signal. virtual void AnalyzeRenderAudio(rtc::ArrayView render_audio) = 0; // Analysis (not changing) of the capture signal. virtual void AnalyzeCaptureAudio( rtc::ArrayView capture_audio) = 0; + // Pack an AudioBuffer into a vector. + static void PackRenderAudioBuffer(AudioBuffer* audio, + std::vector* packed_buffer); + struct Metrics { absl::optional echo_likelihood; absl::optional echo_likelihood_recent_max; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h index 3b4331995..a31dafe49 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/audio_processing_statistics.h @@ -24,8 +24,6 @@ struct RTC_EXPORT AudioProcessingStats { AudioProcessingStats(const AudioProcessingStats& other); ~AudioProcessingStats(); - // Deprecated. - // TODO(bugs.webrtc.org/11226): Remove. // True if voice is detected in the last capture frame, after processing. // It is conservative in flagging audio as speech, with low likelihood of // incorrectly flagging a frame as voice. 
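The hunks above restore the voice_detection submodule config, the keyboard-aware StreamConfig and the voice_detected stat. As a rough, illustrative sketch only (not part of the patch), this is how a caller would typically exercise that restored surface; `apm` is assumed to be an already created and initialized AudioProcessing instance:

// Illustrative sketch, not part of the patch: enable the restored VAD config
// and read back the voice_detected statistic.
#include "modules/audio_processing/include/audio_processing.h"

void EnableVadAndQuery(webrtc::AudioProcessing* apm) {
  webrtc::AudioProcessing::Config config = apm->GetConfig();
  config.voice_detection.enabled = true;  // field re-added by this patch
  apm->ApplyConfig(config);

  // StreamConfig regains the has_keyboard flag: when true, the keyboard
  // channel is the last channel and is not counted in num_channels().
  webrtc::StreamConfig capture(/*sample_rate_hz=*/48000,
                               /*num_channels=*/1,
                               /*has_keyboard=*/false);
  // ... apm->ProcessStream(src, capture, capture, dest) per 10 ms frame ...

  // After processing, the VAD decision is reported through the stats.
  webrtc::AudioProcessingStats stats = apm->GetStatistics();
  if (stats.voice_detected.value_or(false)) {
    // Voice was detected in the last processed capture frame.
  }
}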
diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h index f88094f5e..46c5f0efb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/include/mock_audio_processing.h @@ -67,27 +67,6 @@ class MockEchoControl : public EchoControl { MOCK_METHOD(bool, ActiveProcessing, (), (const, override)); }; -class MockEchoDetector : public EchoDetector { - public: - virtual ~MockEchoDetector() {} - MOCK_METHOD(void, - Initialize, - (int capture_sample_rate_hz, - int num_capture_channels, - int render_sample_rate_hz, - int num_render_channels), - (override)); - MOCK_METHOD(void, - AnalyzeRenderAudio, - (rtc::ArrayView render_audio), - (override)); - MOCK_METHOD(void, - AnalyzeCaptureAudio, - (rtc::ArrayView capture_audio), - (override)); - MOCK_METHOD(Metrics, GetMetrics, (), (const, override)); -}; - class MockAudioProcessing : public AudioProcessing { public: MockAudioProcessing() {} @@ -95,6 +74,15 @@ class MockAudioProcessing : public AudioProcessing { virtual ~MockAudioProcessing() {} MOCK_METHOD(int, Initialize, (), (override)); + MOCK_METHOD(int, + Initialize, + (int capture_input_sample_rate_hz, + int capture_output_sample_rate_hz, + int render_sample_rate_hz, + ChannelLayout capture_input_layout, + ChannelLayout capture_output_layout, + ChannelLayout render_input_layout), + (override)); MOCK_METHOD(int, Initialize, (const ProcessingConfig& processing_config), diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc index fe1149a89..618888361 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/residual_echo_detector.cc @@ -14,6 +14,7 @@ #include #include "absl/types/optional.h" +#include "modules/audio_processing/audio_buffer.h" #include "modules/audio_processing/logging/apm_data_dumper.h" #include "rtc_base/atomic_ops.h" #include "rtc_base/checks.h" @@ -198,6 +199,13 @@ void ResidualEchoDetector::Initialize(int /*capture_sample_rate_hz*/, reliability_ = 0.f; } +void EchoDetector::PackRenderAudioBuffer(AudioBuffer* audio, + std::vector* packed_buffer) { + packed_buffer->clear(); + packed_buffer->insert(packed_buffer->end(), audio->channels()[0], + audio->channels()[0] + audio->num_frames()); +} + EchoDetector::Metrics ResidualEchoDetector::GetMetrics() const { EchoDetector::Metrics metrics; metrics.echo_likelihood = echo_likelihood_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc index bd1c50477..fc665efcc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/three_band_filter_bank.cc @@ -211,9 +211,8 @@ void ThreeBandFilterBank::Analysis( // Band and modulate the output. 
for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { - float* out_band = out[band].data(); for (int n = 0; n < kSplitBandSize; ++n) { - out_band[n] += dct_modulation[band] * out_subsampled[n]; + out[band][n] += dct_modulation[band] * out_subsampled[n]; } } } @@ -255,9 +254,8 @@ void ThreeBandFilterBank::Synthesis( std::fill(in_subsampled.begin(), in_subsampled.end(), 0.f); for (int band = 0; band < ThreeBandFilterBank::kNumBands; ++band) { RTC_DCHECK_EQ(in[band].size(), kSplitBandSize); - const float* in_band = in[band].data(); for (int n = 0; n < kSplitBandSize; ++n) { - in_subsampled[n] += dct_modulation[band] * in_band[n]; + in_subsampled[n] += dct_modulation[band] * in[band][n]; } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc index 0d236ce0b..08b946438 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/utility/cascaded_biquad_filter.cc @@ -99,28 +99,19 @@ void CascadedBiQuadFilter::ApplyBiQuad(rtc::ArrayView x, rtc::ArrayView y, CascadedBiQuadFilter::BiQuad* biquad) { RTC_DCHECK_EQ(x.size(), y.size()); - const float c_a_0 = biquad->coefficients.a[0]; - const float c_a_1 = biquad->coefficients.a[1]; - const float c_b_0 = biquad->coefficients.b[0]; - const float c_b_1 = biquad->coefficients.b[1]; - const float c_b_2 = biquad->coefficients.b[2]; - float m_x_0 = biquad->x[0]; - float m_x_1 = biquad->x[1]; - float m_y_0 = biquad->y[0]; - float m_y_1 = biquad->y[1]; + const auto* c_b = biquad->coefficients.b; + const auto* c_a = biquad->coefficients.a; + auto* m_x = biquad->x; + auto* m_y = biquad->y; for (size_t k = 0; k < x.size(); ++k) { const float tmp = x[k]; - y[k] = c_b_0 * tmp + c_b_1 * m_x_0 + c_b_2 * m_x_1 - c_a_0 * m_y_0 - - c_a_1 * m_y_1; - m_x_1 = m_x_0; - m_x_0 = tmp; - m_y_1 = m_y_0; - m_y_0 = y[k]; + y[k] = c_b[0] * tmp + c_b[1] * m_x[0] + c_b[2] * m_x[1] - c_a[0] * m_y[0] - + c_a[1] * m_y[1]; + m_x[1] = m_x[0]; + m_x[0] = tmp; + m_y[1] = m_y[0]; + m_y[0] = y[k]; } - biquad->x[0] = m_x_0; - biquad->x[1] = m_x_1; - biquad->y[0] = m_y_0; - biquad->y[1] = m_y_1; } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc index 02023d6a7..ce4d46b9a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.cc @@ -38,7 +38,6 @@ void VoiceActivityDetector::ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz) { RTC_DCHECK_EQ(length, sample_rate_hz / 100); - // TODO(bugs.webrtc.org/7494): Remove resampling and force 16 kHz audio. // Resample to the required rate. 
const int16_t* resampled_ptr = audio; if (sample_rate_hz != kSampleRateHz) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h index 92b9a8c20..a19883d51 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/vad/voice_activity_detector.h @@ -33,8 +33,6 @@ class VoiceActivityDetector { ~VoiceActivityDetector(); // Processes each audio chunk and estimates the voice probability. - // TODO(bugs.webrtc.org/7494): Switch to rtc::ArrayView and remove - // `sample_rate_hz`. void ProcessChunk(const int16_t* audio, size_t length, int sample_rate_hz); // Returns a vector of voice probabilities for each chunk. It can be empty for diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc new file mode 100644 index 000000000..1a633e228 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.cc @@ -0,0 +1,92 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#include "modules/audio_processing/voice_detection.h" + +#include "common_audio/vad/include/webrtc_vad.h" +#include "modules/audio_processing/audio_buffer.h" +#include "rtc_base/checks.h" + +namespace webrtc { +class VoiceDetection::Vad { + public: + Vad() { + state_ = WebRtcVad_Create(); + RTC_CHECK(state_); + int error = WebRtcVad_Init(state_); + RTC_DCHECK_EQ(0, error); + } + ~Vad() { WebRtcVad_Free(state_); } + + Vad(Vad&) = delete; + Vad& operator=(Vad&) = delete; + + VadInst* state() { return state_; } + + private: + VadInst* state_ = nullptr; +}; + +VoiceDetection::VoiceDetection(int sample_rate_hz, Likelihood likelihood) + : sample_rate_hz_(sample_rate_hz), + frame_size_samples_(static_cast<size_t>(sample_rate_hz_ / 100)), + likelihood_(likelihood), + vad_(new Vad()) { + int mode = 2; + switch (likelihood) { + case VoiceDetection::kVeryLowLikelihood: + mode = 3; + break; + case VoiceDetection::kLowLikelihood: + mode = 2; + break; + case VoiceDetection::kModerateLikelihood: + mode = 1; + break; + case VoiceDetection::kHighLikelihood: + mode = 0; + break; + default: + RTC_DCHECK_NOTREACHED(); + break; + } + int error = WebRtcVad_set_mode(vad_->state(), mode); + RTC_DCHECK_EQ(0, error); +} + +VoiceDetection::~VoiceDetection() {} + +bool VoiceDetection::ProcessCaptureAudio(AudioBuffer* audio) { + RTC_DCHECK_GE(AudioBuffer::kMaxSplitFrameLength, + audio->num_frames_per_band()); + std::array<int16_t, AudioBuffer::kMaxSplitFrameLength> mixed_low_pass_data; + rtc::ArrayView<const int16_t> mixed_low_pass(mixed_low_pass_data.data(), + audio->num_frames_per_band()); + if (audio->num_channels() == 1) { + FloatS16ToS16(audio->split_bands_const(0)[kBand0To8kHz], + audio->num_frames_per_band(), mixed_low_pass_data.data()); + } else { + const int num_channels = static_cast<int>(audio->num_channels()); + for (size_t i = 0; i < audio->num_frames_per_band(); ++i) { + int32_t value = + FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[0][i]); + for (int j = 1; j < num_channels; ++j) { + 
value += FloatS16ToS16(audio->split_channels_const(kBand0To8kHz)[j][i]); + } + mixed_low_pass_data[i] = value / num_channels; + } + } + + int vad_ret = WebRtcVad_Process(vad_->state(), sample_rate_hz_, + mixed_low_pass.data(), frame_size_samples_); + RTC_DCHECK(vad_ret == 0 || vad_ret == 1); + return vad_ret == 0 ? false : true; +} +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h new file mode 100644 index 000000000..79d44e647 --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/audio_processing/voice_detection.h @@ -0,0 +1,59 @@ +/* + * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ +#define MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ + +#include <stddef.h> + +#include <memory> + +#include "modules/audio_processing/include/audio_processing.h" + +namespace webrtc { + +class AudioBuffer; + +// The voice activity detection (VAD) component analyzes the stream to +// determine if voice is present. +class VoiceDetection { + public: + // Specifies the likelihood that a frame will be declared to contain voice. + // A higher value makes it more likely that speech will not be clipped, at + // the expense of more noise being detected as voice. + enum Likelihood { + kVeryLowLikelihood, + kLowLikelihood, + kModerateLikelihood, + kHighLikelihood + }; + + VoiceDetection(int sample_rate_hz, Likelihood likelihood); + ~VoiceDetection(); + + VoiceDetection(VoiceDetection&) = delete; + VoiceDetection& operator=(VoiceDetection&) = delete; + + // Returns true if voice is detected in the current frame. 
+ bool ProcessCaptureAudio(AudioBuffer* audio); + + Likelihood likelihood() const { return likelihood_; } + + private: + class Vad; + + int sample_rate_hz_; + size_t frame_size_samples_; + Likelihood likelihood_; + std::unique_ptr<Vad> vad_; +}; +} // namespace webrtc + +#endif // MODULES_AUDIO_PROCESSING_VOICE_DETECTION_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS index c74790f87..3304c672c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/OWNERS @@ -1,6 +1,7 @@ +srte@webrtc.org stefan@webrtc.org terelius@webrtc.org +crodbro@webrtc.org philipel@webrtc.org mflodman@webrtc.org yinwa@webrtc.org -perkj@webrtc.org diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc index 2ae5441ef..4c5bdb67a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.cc @@ -32,8 +32,21 @@ namespace webrtc { namespace { constexpr TimeDelta kStreamTimeOut = TimeDelta::Seconds(2); + +// Used with field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/" constexpr TimeDelta kSendTimeGroupLength = TimeDelta::Millis(5); +// Used unless field trial "WebRTC-Bwe-NewInterArrivalDelta/Enabled/" +constexpr int kTimestampGroupLengthMs = 5; +constexpr int kAbsSendTimeFraction = 18; +constexpr int kAbsSendTimeInterArrivalUpshift = 8; +constexpr int kInterArrivalShift = + kAbsSendTimeFraction + kAbsSendTimeInterArrivalUpshift; +constexpr int kTimestampGroupTicks = + (kTimestampGroupLengthMs << kInterArrivalShift) / 1000; +constexpr double kTimestampToMs = + 1000.0 / static_cast<double>(1 << kInterArrivalShift); + // This ssrc is used to fulfill the current API but will be removed // after the API has been changed. constexpr uint32_t kFixedSsrc = 0; @@ -82,6 +95,9 @@ DelayBasedBwe::DelayBasedBwe(const WebRtcKeyValueConfig* key_value_config, prev_bitrate_(DataRate::Zero()), has_once_detected_overuse_(false), prev_state_(BandwidthUsage::kBwNormal), + use_new_inter_arrival_delta_(!absl::StartsWith( + key_value_config->Lookup("WebRTC-Bwe-NewInterArrivalDelta"), + "Disabled")), alr_limited_backoff_enabled_(absl::StartsWith( key_value_config->Lookup("WebRTC-Bwe-AlrLimitedBackoff"), "Enabled")) { @@ -146,11 +162,17 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, // Reset if the stream has timed out. 
if (last_seen_packet_.IsInfinite() || at_time - last_seen_packet_ > kStreamTimeOut) { - video_inter_arrival_delta_ = - std::make_unique<InterArrivalDelta>(kSendTimeGroupLength); - audio_inter_arrival_delta_ = - std::make_unique<InterArrivalDelta>(kSendTimeGroupLength); - + if (use_new_inter_arrival_delta_) { + video_inter_arrival_delta_ = + std::make_unique<InterArrivalDelta>(kSendTimeGroupLength); + audio_inter_arrival_delta_ = + std::make_unique<InterArrivalDelta>(kSendTimeGroupLength); + } else { + video_inter_arrival_ = std::make_unique<InterArrival>( + kTimestampGroupTicks, kTimestampToMs, true); + audio_inter_arrival_ = std::make_unique<InterArrival>( + kTimestampGroupTicks, kTimestampToMs, true); + } video_delay_detector_.reset( new TrendlineEstimator(key_value_config_, network_state_predictor_)); audio_delay_detector_.reset( @@ -181,6 +203,7 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, } DataSize packet_size = packet_feedback.sent_packet.size; + if (use_new_inter_arrival_delta_) { TimeDelta send_delta = TimeDelta::Zero(); TimeDelta recv_delta = TimeDelta::Zero(); int size_delta = 0; @@ -198,6 +221,39 @@ void DelayBasedBwe::IncomingPacketFeedback(const PacketResult& packet_feedback, packet_feedback.sent_packet.send_time.ms(), packet_feedback.receive_time.ms(), packet_size.bytes(), calculated_deltas); + } else { + InterArrival* inter_arrival_for_packet = + (separate_audio_.enabled && packet_feedback.sent_packet.audio) + ? video_inter_arrival_.get() + : audio_inter_arrival_.get(); + + uint32_t send_time_24bits = + static_cast<uint32_t>( + ((static_cast<int64_t>(packet_feedback.sent_packet.send_time.ms()) + << kAbsSendTimeFraction) + + 500) / + 1000) & + 0x00FFFFFF; + // Shift up send time to use the full 32 bits that inter_arrival works with, + // so wrapping works properly. + uint32_t timestamp = send_time_24bits << kAbsSendTimeInterArrivalUpshift; + + uint32_t timestamp_delta = 0; + int64_t recv_delta_ms = 0; + int size_delta = 0; + + bool calculated_deltas = inter_arrival_for_packet->ComputeDeltas( + timestamp, packet_feedback.receive_time.ms(), at_time.ms(), + packet_size.bytes(), &timestamp_delta, &recv_delta_ms, &size_delta); + double send_delta_ms = + (1000.0 * timestamp_delta) / (1 << kInterArrivalShift); + + delay_detector_for_packet->Update( + recv_delta_ms, send_delta_ms, + packet_feedback.sent_packet.send_time.ms(), + packet_feedback.receive_time.ms(), packet_size.bytes(), + calculated_deltas); + } } DataRate DelayBasedBwe::TriggerOveruse(Timestamp at_time, diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h index 7823f77ab..85ce6eaa8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_based_bwe.h @@ -127,6 +127,7 @@ class DelayBasedBwe { DataRate prev_bitrate_; bool has_once_detected_overuse_; BandwidthUsage prev_state_; + const bool use_new_inter_arrival_delta_; bool alr_limited_backoff_enabled_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h index fc12cff7d..eaadb0d12 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/delay_increase_detector_interface.h @@ -13,6 +13,7 @@ #include #include 
"api/network_state_predictor.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -21,11 +22,6 @@ class DelayIncreaseDetectorInterface { DelayIncreaseDetectorInterface() {} virtual ~DelayIncreaseDetectorInterface() {} - DelayIncreaseDetectorInterface(const DelayIncreaseDetectorInterface&) = - delete; - DelayIncreaseDetectorInterface& operator=( - const DelayIncreaseDetectorInterface&) = delete; - // Update the detector with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. virtual void Update(double recv_delta_ms, @@ -36,6 +32,8 @@ class DelayIncreaseDetectorInterface { bool calculated_deltas) = 0; virtual BandwidthUsage State() const = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(DelayIncreaseDetectorInterface); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index ba656be23..2344f45a6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -22,7 +22,6 @@ #include #include "absl/strings/match.h" -#include "api/units/data_rate.h" #include "api/units/time_delta.h" #include "logging/rtc_event_log/events/rtc_event_remote_estimate.h" #include "modules/congestion_controller/goog_cc/alr_detector.h" @@ -89,9 +88,6 @@ GoogCcNetworkController::GoogCcNetworkController(NetworkControllerConfig config, RateControlSettings::ParseFromKeyValueConfig(key_value_config_)), loss_based_stable_rate_( IsEnabled(key_value_config_, "WebRTC-Bwe-LossBasedStableRate")), - pace_at_max_of_bwe_and_lower_link_capacity_( - IsEnabled(key_value_config_, - "WebRTC-Bwe-PaceAtMaxOfBweAndLowerLinkCapacity")), probe_controller_( new ProbeController(key_value_config_, config.event_log)), congestion_window_pushback_controller_( @@ -698,17 +694,9 @@ void GoogCcNetworkController::MaybeTriggerOnNetworkChanged( PacerConfig GoogCcNetworkController::GetPacingRates(Timestamp at_time) const { // Pacing rate is based on target rate before congestion window pushback, // because we don't want to build queues in the pacer when pushback occurs. 
- DataRate pacing_rate = DataRate::Zero(); - if (pace_at_max_of_bwe_and_lower_link_capacity_ && estimate_) { - pacing_rate = - std::max({min_total_allocated_bitrate_, estimate_->link_capacity_lower, - last_loss_based_target_rate_}) * - pacing_factor_; - } else { - pacing_rate = - std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * - pacing_factor_; - } + DataRate pacing_rate = + std::max(min_total_allocated_bitrate_, last_loss_based_target_rate_) * + pacing_factor_; DataRate padding_rate = std::min(max_padding_rate_, last_pushback_target_rate_); PacerConfig msg; diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h index 946c07693..6dd70c896 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/goog_cc_network_control.h @@ -94,7 +94,6 @@ class GoogCcNetworkController : public NetworkControllerInterface { const bool limit_probes_lower_than_throughput_estimate_; const RateControlSettings rate_control_settings_; const bool loss_based_stable_rate_; - const bool pace_at_max_of_bwe_and_lower_link_capacity_; const std::unique_ptr probe_controller_; const std::unique_ptr diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc index df753ed0c..cb75456fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.cc @@ -225,7 +225,7 @@ std::vector ProbeController::OnMaxTotalAllocatedBitrate( probes.push_back(second_probe_rate.bps()); } return InitiateProbing(at_time_ms, probes, - config_.allocation_allow_further_probing.Get()); + config_.allocation_allow_further_probing); } max_total_allocated_bitrate_ = max_total_allocated_bitrate; return std::vector(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h index d0f1458ec..7f24ff98c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/probe_controller.h @@ -22,6 +22,7 @@ #include "api/transport/network_control.h" #include "api/transport/webrtc_key_value_config.h" #include "api/units/data_rate.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" namespace webrtc { @@ -62,9 +63,6 @@ class ProbeController { RtcEventLog* event_log); ~ProbeController(); - ProbeController(const ProbeController&) = delete; - ProbeController& operator=(const ProbeController&) = delete; - ABSL_MUST_USE_RESULT std::vector SetBitrates( int64_t min_bitrate_bps, int64_t start_bitrate_bps, @@ -145,6 +143,8 @@ class ProbeController { int32_t next_probe_cluster_id_ = 1; ProbeControllerConfig config_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ProbeController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h index 16fa657e7..3eee7814c 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/test/goog_cc_printer.h @@ -61,7 +61,7 @@ class GoogCcDebugFactory : public GoogCcNetworkControllerFactory { std::unique_ptr Create( NetworkControllerConfig config) override; - void PrintState(Timestamp at_time); + void PrintState(const Timestamp at_time); void AttachWriter(std::unique_ptr log_writer); diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h index 6fd442498..75b971d18 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/goog_cc/trendline_estimator.h @@ -20,6 +20,7 @@ #include "api/network_state_predictor.h" #include "api/transport/webrtc_key_value_config.h" #include "modules/congestion_controller/goog_cc/delay_increase_detector_interface.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/struct_parameters_parser.h" namespace webrtc { @@ -56,9 +57,6 @@ class TrendlineEstimator : public DelayIncreaseDetectorInterface { ~TrendlineEstimator() override; - TrendlineEstimator(const TrendlineEstimator&) = delete; - TrendlineEstimator& operator=(const TrendlineEstimator&) = delete; - // Update the estimator with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. void Update(double recv_delta_ms, @@ -120,6 +118,8 @@ class TrendlineEstimator : public DelayIncreaseDetectorInterface { BandwidthUsage hypothesis_; BandwidthUsage hypothesis_predicted_; NetworkStatePredictor* network_state_predictor_; + + RTC_DISALLOW_COPY_AND_ASSIGN(TrendlineEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h index 16ffc32a4..1da646321 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/control_handler.h @@ -19,6 +19,7 @@ #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "modules/pacing/paced_sender.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/no_unique_address.h" namespace webrtc { @@ -32,9 +33,6 @@ class CongestionControlHandler { CongestionControlHandler(); ~CongestionControlHandler(); - CongestionControlHandler(const CongestionControlHandler&) = delete; - CongestionControlHandler& operator=(const CongestionControlHandler&) = delete; - void SetTargetRate(TargetTransferRate new_target_rate); void SetNetworkAvailability(bool network_available); void SetPacerQueue(TimeDelta expected_queue_time); @@ -50,6 +48,7 @@ class CongestionControlHandler { int64_t pacer_expected_queue_ms_ = 0; RTC_NO_UNIQUE_ADDRESS SequenceChecker sequenced_checker_; + RTC_DISALLOW_COPY_AND_ASSIGN(CongestionControlHandler); }; } // namespace webrtc #endif // MODULES_CONGESTION_CONTROLLER_RTP_CONTROL_HANDLER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc index 50987b230..6ab3ad80f 100644 --- 
a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.cc @@ -15,17 +15,10 @@ namespace webrtc { namespace { static const size_t kMaxPacketsInHistory = 5000; } - -TransportFeedbackDemuxer::TransportFeedbackDemuxer() { - // In case the construction thread is different from where the registration - // and callbacks occur, detach from the construction thread. - observer_checker_.Detach(); -} - void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( std::vector ssrcs, StreamFeedbackObserver* observer) { - RTC_DCHECK_RUN_ON(&observer_checker_); + MutexLock lock(&observers_lock_); RTC_DCHECK(observer); RTC_DCHECK(absl::c_find_if(observers_, [=](const auto& pair) { return pair.second == observer; @@ -35,7 +28,7 @@ void TransportFeedbackDemuxer::RegisterStreamFeedbackObserver( void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( StreamFeedbackObserver* observer) { - RTC_DCHECK_RUN_ON(&observer_checker_); + MutexLock lock(&observers_lock_); RTC_DCHECK(observer); const auto it = absl::c_find_if( observers_, [=](const auto& pair) { return pair.second == observer; }); @@ -44,7 +37,7 @@ void TransportFeedbackDemuxer::DeRegisterStreamFeedbackObserver( } void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { - RTC_DCHECK_RUN_ON(&observer_checker_); + MutexLock lock(&lock_); StreamFeedbackObserver::StreamPacketInfo info; info.ssrc = packet_info.media_ssrc; @@ -62,22 +55,24 @@ void TransportFeedbackDemuxer::AddPacket(const RtpPacketSendInfo& packet_info) { void TransportFeedbackDemuxer::OnTransportFeedback( const rtcp::TransportFeedback& feedback) { - RTC_DCHECK_RUN_ON(&observer_checker_); - std::vector stream_feedbacks; - for (const auto& packet : feedback.GetAllPackets()) { - int64_t seq_num = - seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); - auto it = history_.find(seq_num); - if (it != history_.end()) { - auto packet_info = it->second; - packet_info.received = packet.received(); - stream_feedbacks.push_back(std::move(packet_info)); - if (packet.received()) - history_.erase(it); + { + MutexLock lock(&lock_); + for (const auto& packet : feedback.GetAllPackets()) { + int64_t seq_num = + seq_num_unwrapper_.UnwrapWithoutUpdate(packet.sequence_number()); + auto it = history_.find(seq_num); + if (it != history_.end()) { + auto packet_info = it->second; + packet_info.received = packet.received(); + stream_feedbacks.push_back(packet_info); + if (packet.received()) + history_.erase(it); + } } } + MutexLock lock(&observers_lock_); for (auto& observer : observers_) { std::vector selected_feedback; for (const auto& packet_info : stream_feedbacks) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h index 7f4f5750d..634a37ea1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/congestion_controller/rtp/transport_feedback_demuxer.h @@ -14,26 +14,14 @@ #include #include -#include "api/sequence_checker.h" #include "modules/include/module_common_types_public.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" -#include "rtc_base/system/no_unique_address.h" +#include "rtc_base/synchronization/mutex.h" namespace webrtc { -// Implementation of StreamFeedbackProvider that 
provides a way for -// implementations of StreamFeedbackObserver to register for feedback callbacks -// for a given set of SSRCs. -// Registration methods need to be called from the same execution context -// (thread or task queue) and callbacks to -// StreamFeedbackObserver::OnPacketFeedbackVector will be made in that same -// context. -// TODO(tommi): This appears to be the only implementation of this interface. -// Do we need the interface? -class TransportFeedbackDemuxer final : public StreamFeedbackProvider { +class TransportFeedbackDemuxer : public StreamFeedbackProvider { public: - TransportFeedbackDemuxer(); - // Implements StreamFeedbackProvider interface void RegisterStreamFeedbackObserver( std::vector ssrcs, @@ -44,16 +32,17 @@ class TransportFeedbackDemuxer final : public StreamFeedbackProvider { void OnTransportFeedback(const rtcp::TransportFeedback& feedback); private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker observer_checker_; - SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&observer_checker_); + Mutex lock_; + SequenceNumberUnwrapper seq_num_unwrapper_ RTC_GUARDED_BY(&lock_); std::map history_ - RTC_GUARDED_BY(&observer_checker_); + RTC_GUARDED_BY(&lock_); // Maps a set of ssrcs to corresponding observer. Vectors are used rather than // set/map to ensure that the processing order is consistent independently of // the randomized ssrcs. + Mutex observers_lock_; std::vector, StreamFeedbackObserver*>> - observers_ RTC_GUARDED_BY(&observer_checker_); + observers_ RTC_GUARDED_BY(&observers_lock_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc index 2b2d064ef..acc492db9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.cc @@ -124,7 +124,7 @@ void PacedSender::EnqueuePackets( packet->SequenceNumber(), "rtp_timestamp", packet->Timestamp()); - RTC_DCHECK_GE(packet->capture_time(), Timestamp::Zero()); + RTC_DCHECK_GE(packet->capture_time_ms(), 0); pacing_controller_.EnqueuePacket(std::move(packet)); } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h index 4a53e0f9b..fe29bc567 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/paced_sender.h @@ -39,7 +39,11 @@ namespace webrtc { class Clock; class RtcEventLog; -class PacedSender : public RtpPacketPacer, public RtpPacketSender { +// TODO(bugs.webrtc.org/10937): Remove the inheritance from Module after +// updating dependencies. +class PacedSender : public Module, + public RtpPacketPacer, + public RtpPacketSender { public: // Expected max pacer delay in ms. If ExpectedQueueTime() is higher than // this value, the packet producers should wait (eg drop frames rather than @@ -113,13 +117,24 @@ class PacedSender : public RtpPacketPacer, public RtpPacketSender { // to module processing thread specifics or methods exposed for test. private: + // Methods implementing Module. + // TODO(bugs.webrtc.org/10937): Remove the inheritance from Module once all + // use of it has been cleared up. + // Returns the number of milliseconds until the module want a worker thread // to call Process. - int64_t TimeUntilNextProcess(); - // Called when the prober is associated with a process thread. 
- void ProcessThreadAttached(ProcessThread* process_thread); + int64_t TimeUntilNextProcess() override; + + // TODO(bugs.webrtc.org/10937): Make this private (and non virtual) once + // dependencies have been updated to not call this via the PacedSender + // interface. + public: // Process any pending packets in the queue(s). - void Process(); + void Process() override; + + private: + // Called when the prober is associated with a process thread. + void ProcessThreadAttached(ProcessThread* process_thread) override; // In dynamic process mode, refreshes the next process time. void MaybeWakupProcessThread(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc index 921546223..548540a20 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/pacing_controller.cc @@ -18,6 +18,7 @@ #include "absl/strings/match.h" #include "modules/pacing/bitrate_prober.h" #include "modules/pacing/interval_budget.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h index 11d897905..9958a50b6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/packet_router.h @@ -25,6 +25,7 @@ #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtcp_packet.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -43,9 +44,6 @@ class PacketRouter : public PacingController::PacketSender { explicit PacketRouter(uint16_t start_transport_seq); ~PacketRouter() override; - PacketRouter(const PacketRouter&) = delete; - PacketRouter& operator=(const PacketRouter&) = delete; - void AddSendRtpModule(RtpRtcpInterface* rtp_module, bool remb_candidate); void RemoveSendRtpModule(RtpRtcpInterface* rtp_module); @@ -109,6 +107,8 @@ class PacketRouter : public PacingController::PacketSender { // process thread is gone. std::vector> pending_fec_packets_ RTC_GUARDED_BY(modules_mutex_); + + RTC_DISALLOW_COPY_AND_ASSIGN(PacketRouter); }; } // namespace webrtc #endif // MODULES_PACING_PACKET_ROUTER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc index c392a8872..16c2de58c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.cc @@ -141,7 +141,7 @@ void TaskQueuePacedSender::EnqueuePackets( RTC_DCHECK_RUN_ON(&task_queue_); for (auto& packet : packets_) { packet_size_.Apply(1, packet->size()); - RTC_DCHECK_GE(packet->capture_time(), Timestamp::Zero()); + RTC_DCHECK_GE(packet->capture_time_ms(), 0); pacing_controller_.EnqueuePacket(std::move(packet)); } MaybeProcessPackets(Timestamp::MinusInfinity()); @@ -271,7 +271,7 @@ void TaskQueuePacedSender::MaybeProcessPackets( // Set a new scheduled process time and post a delayed task. 
next_process_time_ = next_process_time; - task_queue_.PostDelayedHighPrecisionTask( + task_queue_.PostDelayedTask( [this, next_process_time]() { MaybeProcessPackets(next_process_time); }, time_to_next_process->ms()); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h index 353f13796..cb7ca4def 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h +++ b/TMessagesProj/jni/voip/webrtc/modules/pacing/task_queue_paced_sender.h @@ -25,6 +25,7 @@ #include "api/units/data_size.h" #include "api/units/time_delta.h" #include "api/units/timestamp.h" +#include "modules/include/module.h" #include "modules/pacing/pacing_controller.h" #include "modules/pacing/rtp_packet_pacer.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h index 179e290c2..d1c6aa8d3 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_detector.h @@ -14,6 +14,7 @@ #include "api/network_state_predictor.h" #include "api/transport/webrtc_key_value_config.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -25,9 +26,6 @@ class OveruseDetector { explicit OveruseDetector(const WebRtcKeyValueConfig* key_value_config); virtual ~OveruseDetector(); - OveruseDetector(const OveruseDetector&) = delete; - OveruseDetector& operator=(const OveruseDetector&) = delete; - // Update the detection state based on the estimated inter-arrival time delta // offset. `timestamp_delta` is the delta between the last timestamp which the // estimated offset is based on and the last timestamp on which the last @@ -56,6 +54,8 @@ class OveruseDetector { double time_over_using_; int overuse_counter_; BandwidthUsage hypothesis_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OveruseDetector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h index c021f00da..d023b36d8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/overuse_estimator.h @@ -15,6 +15,7 @@ #include #include "api/network_state_predictor.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -38,9 +39,6 @@ class OveruseEstimator { explicit OveruseEstimator(const OverUseDetectorOptions& options); ~OveruseEstimator(); - OveruseEstimator(const OveruseEstimator&) = delete; - OveruseEstimator& operator=(const OveruseEstimator&) = delete; - // Update the estimator with a new sample. The deltas should represent deltas // between timestamp groups as defined by the InterArrival class. 
// `current_hypothesis` should be the hypothesis of the over-use detector at @@ -77,6 +75,8 @@ class OveruseEstimator { double avg_noise_; double var_noise_; std::deque ts_delta_hist_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OveruseEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc index 0bc4f6dd1..ae960ab96 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_abs_send_time.cc @@ -278,7 +278,13 @@ void RemoteBitrateEstimatorAbsSendTime::IncomingPacketInfo( TimeoutStreams(now); RTC_DCHECK(inter_arrival_); RTC_DCHECK(estimator_); - ssrcs_.insert_or_assign(ssrc, now); + // TODO(danilchap): Replace 5 lines below with insert_or_assign when that + // c++17 function is available. + auto inserted = ssrcs_.insert(std::make_pair(ssrc, now)); + if (!inserted.second) { + // Already inserted, update. + inserted.first->second = now; + } // For now only try to detect probes while we don't have a valid estimate. // We currently assume that only packets larger than 200 bytes are paced by diff --git a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h index 49e1e716b..e85565596 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h +++ b/TMessagesProj/jni/voip/webrtc/modules/remote_bitrate_estimator/test/bwe_test_logging.h @@ -128,6 +128,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #define BWE_TEST_LOGGING_GLOBAL_CONTEXT(name) \ @@ -340,10 +341,6 @@ class Logging { Logging(); ~Logging(); - - Logging(const Logging&) = delete; - Logging& operator=(const Logging&) = delete; - void PushState(const std::string& append_to_tag, int64_t timestamp_ms, bool enabled); @@ -351,6 +348,8 @@ class Logging { Mutex mutex_; ThreadMap thread_map_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Logging); }; } // namespace bwe } // namespace testing diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h index f31503dc4..5734a50e1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/remote_ntp_time_estimator.h @@ -14,6 +14,7 @@ #include #include "absl/types/optional.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/moving_median_filter.h" #include "system_wrappers/include/rtp_to_ntp_estimator.h" @@ -31,9 +32,6 @@ class RemoteNtpTimeEstimator { ~RemoteNtpTimeEstimator(); - RemoteNtpTimeEstimator(const RemoteNtpTimeEstimator&) = delete; - RemoteNtpTimeEstimator& operator=(const RemoteNtpTimeEstimator&) = delete; - // Updates the estimator with round trip time `rtt`, NTP seconds `ntp_secs`, // NTP fraction `ntp_frac` and RTP timestamp `rtp_timestamp`. 
bool UpdateRtcpTimestamp(int64_t rtt, @@ -54,6 +52,7 @@ class RemoteNtpTimeEstimator { MovingMedianFilter ntp_clocks_offset_estimator_; RtpToNtpEstimator rtp_to_ntp_; int64_t last_timing_log_ms_; + RTC_DISALLOW_COPY_AND_ASSIGN(RemoteNtpTimeEstimator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h index 79626255e..45cb659b5 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/include/rtp_rtcp_defines.h @@ -103,12 +103,6 @@ enum RTCPPacketType : uint32_t { kRtcpXrTargetBitrate = 0x200000 }; -enum class KeyFrameReqMethod : uint8_t { - kNone, // Don't request keyframes. - kPliRtcp, // Request keyframes through Picture Loss Indication. - kFirRtcp // Request keyframes through Full Intra-frame Request. -}; - enum RtxMode { kRtxOff = 0x0, kRtxRetransmitted = 0x1, // Only send retransmissions over RTX. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h index 2f1c89494..e1ea45ed1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/mocks/mock_rtp_rtcp.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "api/video/video_bitrate_allocation.h" +#include "modules/include/module.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_rtcp_interface.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc index 9cef33165..c5c06840d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/deprecated/deprecated_rtp_sender_egress.cc @@ -156,7 +156,7 @@ void DEPRECATED_RtpSenderEgress::SendPacket( // In case of VideoTimingExtension, since it's present not in every packet, // data after rtp header may be corrupted if these packets are protected by // the FEC. 
- int64_t diff_ms = now_ms - packet->capture_time().ms(); + int64_t diff_ms = now_ms - packet->capture_time_ms(); if (packet->HasExtension()) { packet->SetExtension(kTimestampTicksPerMs * diff_ms); } @@ -167,9 +167,9 @@ void DEPRECATED_RtpSenderEgress::SendPacket( if (packet->HasExtension()) { if (populate_network2_timestamp_) { - packet->set_network2_time(Timestamp::Millis(now_ms)); + packet->set_network2_time_ms(now_ms); } else { - packet->set_pacer_exit_time(Timestamp::Millis(now_ms)); + packet->set_pacer_exit_time_ms(now_ms); } } @@ -190,8 +190,8 @@ void DEPRECATED_RtpSenderEgress::SendPacket( if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time().ms(), now_ms, packet_ssrc); - UpdateOnSendPacket(options.packet_id, packet->capture_time().ms(), + UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); + UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), packet_ssrc); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc index f6fe06e0e..071829f1c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/flexfec_sender.cc @@ -142,7 +142,7 @@ std::vector> FlexfecSender::GetFecPackets() { clock_->TimeInMilliseconds())); // Set "capture time" so that the TransmissionOffset header extension // can be set by the RTPSender. - fec_packet_to_send->set_capture_time(clock_->CurrentTime()); + fec_packet_to_send->set_capture_time_ms(clock_->TimeInMilliseconds()); fec_packet_to_send->SetSsrc(ssrc_); // Reserve extensions, if registered. These will be set by the RTPSender. fec_packet_to_send->ReserveExtension(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc index 55edd768a..037542ddb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/packet_sequencer.cc @@ -99,7 +99,7 @@ void PacketSequencer::UpdateLastPacketState(const RtpPacketToSend& packet) { // Save timestamps to generate timestamp field and extensions for the padding. last_rtp_timestamp_ = packet.Timestamp(); last_timestamp_time_ms_ = clock_->TimeInMilliseconds(); - last_capture_time_ms_ = packet.capture_time().ms(); + last_capture_time_ms_ = packet.capture_time_ms(); } void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { @@ -107,7 +107,7 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { RTC_DCHECK(CanSendPaddingOnMediaSsrc()); packet.SetTimestamp(last_rtp_timestamp_); - packet.set_capture_time(Timestamp::Millis(last_capture_time_ms_)); + packet.set_capture_time_ms(last_capture_time_ms_); packet.SetPayloadType(last_payload_type_); return; } @@ -119,7 +119,7 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { } packet.SetTimestamp(last_rtp_timestamp_); - packet.set_capture_time(Timestamp::Millis(last_capture_time_ms_)); + packet.set_capture_time_ms(last_capture_time_ms_); // Only change the timestamp of padding packets sent over RTX. 
// Padding only packets over RTP has to be sent as part of a media @@ -129,10 +129,9 @@ void PacketSequencer::PopulatePaddingFields(RtpPacketToSend& packet) { packet.SetTimestamp(packet.Timestamp() + (now_ms - last_timestamp_time_ms_) * kTimestampTicksPerMs); - if (packet.capture_time() > Timestamp::Zero()) { - packet.set_capture_time( - packet.capture_time() + - TimeDelta::Millis(now_ms - last_timestamp_time_ms_)); + if (packet.capture_time_ms() > 0) { + packet.set_capture_time_ms(packet.capture_time_ms() + + (now_ms - last_timestamp_time_ms_)); } } } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc index d2dba66ec..b16f122ee 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/receive_statistics_impl.cc @@ -22,13 +22,16 @@ #include "modules/rtp_rtcp/source/rtp_rtcp_config.h" #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/logging.h" -#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" namespace webrtc { namespace { constexpr int64_t kStatisticsTimeoutMs = 8000; constexpr int64_t kStatisticsProcessIntervalMs = 1000; + +// Number of seconds since 1900 January 1 00:00 GMT (see +// https://tools.ietf.org/html/rfc868). +constexpr int64_t kNtpJan1970Millisecs = 2'208'988'800'000; } // namespace StreamStatistician::~StreamStatistician() {} @@ -40,7 +43,7 @@ StreamStatisticianImpl::StreamStatisticianImpl(uint32_t ssrc, clock_(clock), delta_internal_unix_epoch_ms_(clock_->CurrentNtpInMilliseconds() - clock_->TimeInMilliseconds() - - rtc::kNtpJan1970Millisecs), + kNtpJan1970Millisecs), incoming_bitrate_(kStatisticsProcessIntervalMs, RateStatistics::kBpsScale), max_reordering_threshold_(max_reordering_threshold), diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h index d98dbd088..8bee60069 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/compound_packet.h @@ -16,6 +16,7 @@ #include #include "modules/rtp_rtcp/source/rtcp_packet.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { namespace rtcp { @@ -25,9 +26,6 @@ class CompoundPacket : public RtcpPacket { CompoundPacket(); ~CompoundPacket() override; - CompoundPacket(const CompoundPacket&) = delete; - CompoundPacket& operator=(const CompoundPacket&) = delete; - void Append(std::unique_ptr packet); // Size of this packet in bytes (i.e. total size of nested packets). 
@@ -40,6 +38,9 @@ class CompoundPacket : public RtcpPacket { protected: std::vector> appended_packets_; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(CompoundPacket); }; } // namespace rtcp diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h index ad91dfdcc..6fe2099fd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/dlrr.h @@ -24,21 +24,11 @@ struct ReceiveTimeInfo { ReceiveTimeInfo() : ssrc(0), last_rr(0), delay_since_last_rr(0) {} ReceiveTimeInfo(uint32_t ssrc, uint32_t last_rr, uint32_t delay) : ssrc(ssrc), last_rr(last_rr), delay_since_last_rr(delay) {} - uint32_t ssrc; uint32_t last_rr; uint32_t delay_since_last_rr; }; -inline bool operator==(const ReceiveTimeInfo& lhs, const ReceiveTimeInfo& rhs) { - return lhs.ssrc == rhs.ssrc && lhs.last_rr == rhs.last_rr && - lhs.delay_since_last_rr == rhs.delay_since_last_rr; -} - -inline bool operator!=(const ReceiveTimeInfo& lhs, const ReceiveTimeInfo& rhs) { - return !(lhs == rhs); -} - // DLRR Report Block: Delay since the Last Receiver Report (RFC 3611). class Dlrr { public: diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h index 827bd7439..8eb4ce62a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_packet/rrtr.h @@ -46,14 +46,6 @@ class Rrtr { NtpTime ntp_; }; -inline bool operator==(const Rrtr& rrtr1, const Rrtr& rrtr2) { - return rrtr1.ntp() == rrtr2.ntp(); -} - -inline bool operator!=(const Rrtr& rrtr1, const Rrtr& rrtr2) { - return !(rrtr1 == rrtr2); -} - } // namespace rtcp } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTCP_PACKET_RRTR_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc index d0f959683..47843be81 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_receiver.cc @@ -662,12 +662,7 @@ void RTCPReceiver::HandleReportBlock(const ReportBlock& report_block, rtcp_report_block.delay_since_last_sender_report = report_block.delay_since_last_sr(); rtcp_report_block.last_sender_report_timestamp = report_block.last_sr(); - // Number of seconds since 1900 January 1 00:00 GMT (see - // https://tools.ietf.org/html/rfc868). 
- report_block_data->SetReportBlock( - rtcp_report_block, - (clock_->CurrentNtpInMilliseconds() - rtc::kNtpJan1970Millisecs) * - rtc::kNumMicrosecsPerMillisec); + report_block_data->SetReportBlock(rtcp_report_block, rtc::TimeUTCMicros()); int64_t rtt_ms = 0; uint32_t send_time_ntp = report_block.last_sr(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc index dea628609..02c0fef9f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.cc @@ -48,13 +48,13 @@ bool RtcpTransceiverConfig::Validate() const { RTC_LOG(LS_ERROR) << debug_id << "outgoing transport must be set"; return false; } - if (initial_report_delay < TimeDelta::Zero()) { - RTC_LOG(LS_ERROR) << debug_id << "delay " << initial_report_delay.ms() + if (initial_report_delay_ms < 0) { + RTC_LOG(LS_ERROR) << debug_id << "delay " << initial_report_delay_ms << "ms before first report shouldn't be negative."; return false; } - if (report_period <= TimeDelta::Zero()) { - RTC_LOG(LS_ERROR) << debug_id << "period " << report_period.ms() + if (report_period_ms <= 0) { + RTC_LOG(LS_ERROR) << debug_id << "period " << report_period_ms << "ms between reports should be positive."; return false; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h index 34789dbd0..73b933d0a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_config.h @@ -64,41 +64,6 @@ class MediaReceiverRtcpObserver { const VideoBitrateAllocation& allocation) {} }; -// Handles RTCP related messages for a single RTP stream (i.e. single SSRC) -class RtpStreamRtcpHandler { - public: - virtual ~RtpStreamRtcpHandler() = default; - - // Statistic about sent RTP packets to propagate to RTCP sender report. - class RtpStats { - public: - RtpStats() = default; - RtpStats(const RtpStats&) = default; - RtpStats& operator=(const RtpStats&) = default; - ~RtpStats() = default; - - size_t num_sent_packets() const { return num_sent_packets_; } - size_t num_sent_bytes() const { return num_sent_bytes_; } - Timestamp last_capture_time() const { return last_capture_time_; } - uint32_t last_rtp_timestamp() const { return last_rtp_timestamp_; } - int last_clock_rate() const { return last_clock_rate_; } - - void set_num_sent_packets(size_t v) { num_sent_packets_ = v; } - void set_num_sent_bytes(size_t v) { num_sent_bytes_ = v; } - void set_last_capture_time(Timestamp v) { last_capture_time_ = v; } - void set_last_rtp_timestamp(uint32_t v) { last_rtp_timestamp_ = v; } - void set_last_clock_rate(int v) { last_clock_rate_ = v; } - - private: - size_t num_sent_packets_ = 0; - size_t num_sent_bytes_ = 0; - Timestamp last_capture_time_ = Timestamp::Zero(); - uint32_t last_rtp_timestamp_ = 0; - int last_clock_rate_ = 90'000; - }; - virtual RtpStats SentStats() = 0; -}; - struct RtcpTransceiverConfig { RtcpTransceiverConfig(); RtcpTransceiverConfig(const RtcpTransceiverConfig&); @@ -149,10 +114,10 @@ struct RtcpTransceiverConfig { // Initial state if `outgoing_transport` ready to accept packets. bool initial_ready_to_send = true; // Delay before 1st periodic compound packet. 
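With the change above, RtcpTransceiverConfig goes back to plain millisecond integers for its report timing, and Validate() checks them directly. A minimal sketch of populating the reverted fields (clock, task_queue and outgoing_transport are omitted, so this alone would not pass Validate()):

#include "modules/rtp_rtcp/source/rtcp_transceiver_config.h"

webrtc::RtcpTransceiverConfig BuildReportTimingConfig() {
  webrtc::RtcpTransceiverConfig config;
  config.initial_report_delay_ms = 500;  // delay before the first compound packet
  config.report_period_ms = 1000;        // period between periodic compound packets
  return config;
}
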
- TimeDelta initial_report_delay = TimeDelta::Millis(500); + int initial_report_delay_ms = 500; // Period between periodic compound packets. - TimeDelta report_period = TimeDelta::Seconds(1); + int report_period_ms = 1000; // // Flags for features and experiments. @@ -162,13 +127,6 @@ struct RtcpTransceiverConfig { // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 bool non_sender_rtt_measurement = false; - // Reply to incoming RRTR messages so that remote endpoint may estimate RTT as - // non-sender as described in https://tools.ietf.org/html/rfc3611#section-4.4 - // and #section-4.5 - // TODO(danilchap): Make it true by default after users got enough time to - // turn it off if not needed. - bool reply_to_non_sender_rtt_measurement = false; - // Allows a REMB message to be sent immediately when SetRemb is called without // having to wait for the next compount message to be sent. bool send_remb_on_change = false; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc index 382e9f19f..c7e11981e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.cc @@ -10,7 +10,6 @@ #include "modules/rtp_rtcp/source/rtcp_transceiver_impl.h" -#include #include #include "absl/algorithm/container.h" @@ -33,7 +32,6 @@ #include "modules/rtp_rtcp/source/time_util.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include "rtc_base/numerics/divide_round.h" #include "rtc_base/task_utils/repeating_task.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" @@ -54,12 +52,6 @@ struct RtcpTransceiverImpl::RemoteSenderState { std::vector observers; }; -struct RtcpTransceiverImpl::LocalSenderState { - uint32_t ssrc; - size_t last_num_sent_bytes = 0; - RtpStreamRtcpHandler* handler = nullptr; -}; - // Helper to put several RTCP packets into lower layer datagram composing // Compound or Reduced-Size RTCP packet, as defined by RFC 5506 section 2. 
// TODO(danilchap): When in compound mode and packets are so many that several @@ -100,7 +92,7 @@ RtcpTransceiverImpl::RtcpTransceiverImpl(const RtcpTransceiverConfig& config) : config_(config), ready_to_send_(config.initial_ready_to_send) { RTC_CHECK(config_.Validate()); if (ready_to_send_ && config_.schedule_periodic_compound_packets) { - SchedulePeriodicCompoundPackets(config_.initial_report_delay); + SchedulePeriodicCompoundPackets(config_.initial_report_delay_ms); } } @@ -127,39 +119,13 @@ void RtcpTransceiverImpl::RemoveMediaReceiverRtcpObserver( stored.erase(it); } -bool RtcpTransceiverImpl::AddMediaSender(uint32_t local_ssrc, - RtpStreamRtcpHandler* handler) { - RTC_DCHECK(handler != nullptr); - LocalSenderState state; - state.ssrc = local_ssrc; - state.handler = handler; - local_senders_.push_back(state); - auto it = std::prev(local_senders_.end()); - auto [unused, inserted] = local_senders_by_ssrc_.emplace(local_ssrc, it); - if (!inserted) { - local_senders_.pop_back(); - return false; - } - return true; -} - -bool RtcpTransceiverImpl::RemoveMediaSender(uint32_t local_ssrc) { - auto index_it = local_senders_by_ssrc_.find(local_ssrc); - if (index_it == local_senders_by_ssrc_.end()) { - return false; - } - local_senders_.erase(index_it->second); - local_senders_by_ssrc_.erase(index_it); - return true; -} - void RtcpTransceiverImpl::SetReadyToSend(bool ready) { if (config_.schedule_periodic_compound_packets) { if (ready_to_send_ && !ready) periodic_task_handle_.Stop(); if (!ready_to_send_ && ready) // Restart periodic sending. - SchedulePeriodicCompoundPackets(config_.report_period / 2); + SchedulePeriodicCompoundPackets(config_.report_period_ms / 2); } ready_to_send_ = ready; } @@ -370,14 +336,6 @@ void RtcpTransceiverImpl::HandleExtendedReports( if (!extended_reports.Parse(rtcp_packet_header)) return; - if (config_.reply_to_non_sender_rtt_measurement && extended_reports.rrtr()) { - RrtrTimes& rrtr = received_rrtrs_[extended_reports.sender_ssrc()]; - rrtr.received_remote_mid_ntp_time = - CompactNtp(extended_reports.rrtr()->ntp()); - rrtr.local_receive_mid_ntp_time = - CompactNtp(config_.clock->ConvertTimestampToNtpTime(now)); - } - if (extended_reports.dlrr()) HandleDlrr(extended_reports.dlrr(), now); @@ -470,211 +428,54 @@ void RtcpTransceiverImpl::ReschedulePeriodicCompoundPackets() { return; periodic_task_handle_.Stop(); RTC_DCHECK(ready_to_send_); - SchedulePeriodicCompoundPackets(config_.report_period); + SchedulePeriodicCompoundPackets(config_.report_period_ms); } -void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(TimeDelta delay) { - periodic_task_handle_ = - RepeatingTaskHandle::DelayedStart(config_.task_queue, delay, [this] { +void RtcpTransceiverImpl::SchedulePeriodicCompoundPackets(int64_t delay_ms) { + periodic_task_handle_ = RepeatingTaskHandle::DelayedStart( + config_.task_queue, TimeDelta::Millis(delay_ms), [this] { RTC_DCHECK(config_.schedule_periodic_compound_packets); RTC_DCHECK(ready_to_send_); SendPeriodicCompoundPacket(); - return config_.report_period; + return TimeDelta::Millis(config_.report_period_ms); }); } -RtcpTransceiverImpl::CompoundPacketInfo RtcpTransceiverImpl::FillReports( - Timestamp now, - size_t reserved_bytes, - PacketSender& rtcp_sender) { - // Sender/receiver reports should be first in the RTCP packet. 
- RTC_DCHECK(rtcp_sender.IsEmpty()); - - size_t available_bytes = config_.max_packet_size; - if (reserved_bytes > available_bytes) { - // Because reserved_bytes is unsigned, substracting would underflow and will - // not produce desired result. - available_bytes = 0; - } else { - available_bytes -= reserved_bytes; +void RtcpTransceiverImpl::CreateCompoundPacket(PacketSender* sender) { + RTC_DCHECK(sender->IsEmpty()); + const uint32_t sender_ssrc = config_.feedback_ssrc; + Timestamp now = config_.clock->CurrentTime(); + rtcp::ReceiverReport receiver_report; + receiver_report.SetSenderSsrc(sender_ssrc); + receiver_report.SetReportBlocks(CreateReportBlocks(now)); + if (config_.rtcp_mode == RtcpMode::kCompound || + !receiver_report.report_blocks().empty()) { + sender->AppendPacket(receiver_report); } - CompoundPacketInfo result; - result.sender_ssrc = config_.feedback_ssrc; - result.has_sender_report = false; - - static constexpr size_t kSenderReportSizeBytes = 28; - static constexpr size_t kFullSenderReportSizeBytes = - kSenderReportSizeBytes + - rtcp::SenderReport::kMaxNumberOfReportBlocks * rtcp::ReportBlock::kLength; - size_t max_full_sender_reports = available_bytes / kFullSenderReportSizeBytes; - size_t max_report_blocks = - max_full_sender_reports * rtcp::SenderReport::kMaxNumberOfReportBlocks; - size_t available_bytes_for_last_sender_report = - available_bytes - max_full_sender_reports * kFullSenderReportSizeBytes; - if (available_bytes_for_last_sender_report >= kSenderReportSizeBytes) { - max_report_blocks += - (available_bytes_for_last_sender_report - kSenderReportSizeBytes) / - rtcp::ReportBlock::kLength; + if (!config_.cname.empty() && !sender->IsEmpty()) { + rtcp::Sdes sdes; + bool added = sdes.AddCName(config_.feedback_ssrc, config_.cname); + RTC_DCHECK(added) << "Failed to add cname " << config_.cname + << " to rtcp sdes packet."; + sender->AppendPacket(sdes); } - - std::vector report_blocks = - CreateReportBlocks(now, max_report_blocks); - // Previous calculation of max number of sender report made space for max - // number of report blocks per sender report, but if number of report blocks - // is low, more sender reports may fit in. - size_t max_sender_reports = - (available_bytes - report_blocks.size() * rtcp::ReportBlock::kLength) / - kSenderReportSizeBytes; - - auto last_handled_sender_it = local_senders_.end(); - auto report_block_it = report_blocks.begin(); - size_t num_sender_reports = 0; - for (auto it = local_senders_.begin(); - it != local_senders_.end() && num_sender_reports < max_sender_reports; - ++it) { - LocalSenderState& rtp_sender = *it; - RtpStreamRtcpHandler::RtpStats stats = rtp_sender.handler->SentStats(); - - if (stats.num_sent_bytes() < rtp_sender.last_num_sent_bytes) { - RTC_LOG(LS_ERROR) << "Inconsistent SR for SSRC " << rtp_sender.ssrc - << ". Number of total sent bytes decreased."; - rtp_sender.last_num_sent_bytes = 0; - } - if (stats.num_sent_bytes() == rtp_sender.last_num_sent_bytes) { - // Skip because no RTP packet was send for this SSRC since last report. 
- continue; - } - rtp_sender.last_num_sent_bytes = stats.num_sent_bytes(); - - last_handled_sender_it = it; - rtcp::SenderReport sender_report; - sender_report.SetSenderSsrc(rtp_sender.ssrc); - sender_report.SetPacketCount(stats.num_sent_packets()); - sender_report.SetOctetCount(stats.num_sent_bytes()); - sender_report.SetNtp(config_.clock->ConvertTimestampToNtpTime(now)); - RTC_DCHECK_GE(now, stats.last_capture_time()); - sender_report.SetRtpTimestamp( - stats.last_rtp_timestamp() + - ((now - stats.last_capture_time()) * stats.last_clock_rate()) - .seconds()); - if (report_block_it != report_blocks.end()) { - size_t num_blocks = - std::min(rtcp::SenderReport::kMaxNumberOfReportBlocks, - report_blocks.end() - report_block_it); - std::vector sub_blocks(report_block_it, - report_block_it + num_blocks); - sender_report.SetReportBlocks(std::move(sub_blocks)); - report_block_it += num_blocks; - } - rtcp_sender.AppendPacket(sender_report); - ++num_sender_reports; - - if (!result.has_sender_report) { - result.has_sender_report = true; - result.sender_ssrc = rtp_sender.ssrc; - } - } - if (last_handled_sender_it != local_senders_.end()) { - // Rotate `local_senders_` so that the 1st unhandled sender become first in - // the list, and thus will be first to generate rtcp sender report for on - // the next call to `FillReports`. - local_senders_.splice(local_senders_.end(), local_senders_, - local_senders_.begin(), - std::next(last_handled_sender_it)); - } - - // Calculcate number of receiver reports to attach remaining report blocks to. - size_t num_receiver_reports = - DivideRoundUp(report_blocks.end() - report_block_it, - rtcp::ReceiverReport::kMaxNumberOfReportBlocks); - - // In compound mode each RTCP packet has to start with a sender or receiver - // report. - if (config_.rtcp_mode == RtcpMode::kCompound && num_sender_reports == 0 && - num_receiver_reports == 0) { - num_receiver_reports = 1; - } - - for (size_t i = 0; i < num_receiver_reports; ++i) { - rtcp::ReceiverReport receiver_report; - receiver_report.SetSenderSsrc(result.sender_ssrc); - size_t num_blocks = - std::min(rtcp::ReceiverReport::kMaxNumberOfReportBlocks, - report_blocks.end() - report_block_it); - std::vector sub_blocks(report_block_it, - report_block_it + num_blocks); - receiver_report.SetReportBlocks(std::move(sub_blocks)); - report_block_it += num_blocks; - rtcp_sender.AppendPacket(receiver_report); - } - // All report blocks should be attached at this point. 
- RTC_DCHECK_EQ(report_blocks.end() - report_block_it, 0); - return result; -} - -void RtcpTransceiverImpl::CreateCompoundPacket(Timestamp now, - size_t reserved_bytes, - PacketSender& sender) { - RTC_DCHECK(sender.IsEmpty()); - absl::optional sdes; - if (!config_.cname.empty()) { - sdes.emplace(); - bool added = sdes->AddCName(config_.feedback_ssrc, config_.cname); - RTC_DCHECK(added) << "Failed to add CNAME " << config_.cname - << " to RTCP SDES packet."; - reserved_bytes += sdes->BlockLength(); - } - if (remb_.has_value()) { - reserved_bytes += remb_->BlockLength(); - } - absl::optional xr; - if (!received_rrtrs_.empty()) { - RTC_DCHECK(config_.reply_to_non_sender_rtt_measurement); - xr.emplace(); - uint32_t now_ntp = - CompactNtp(config_.clock->ConvertTimestampToNtpTime(now)); - for (const auto& [ssrc, rrtr_info] : received_rrtrs_) { - rtcp::ReceiveTimeInfo reply; - reply.ssrc = ssrc; - reply.last_rr = rrtr_info.received_remote_mid_ntp_time; - reply.delay_since_last_rr = - now_ntp - rrtr_info.local_receive_mid_ntp_time; - xr->AddDlrrItem(reply); - } - reserved_bytes += xr->BlockLength(); + if (remb_) { + remb_->SetSenderSsrc(sender_ssrc); + sender->AppendPacket(*remb_); } + // TODO(bugs.webrtc.org/8239): Do not send rrtr if this packet starts with + // SenderReport instead of ReceiverReport + // when RtcpTransceiver supports rtp senders. if (config_.non_sender_rtt_measurement) { - // It looks like bytes for ExtendedReport header are reserved twice, but in - // practice the same RtcpTransceiver won't both produce RRTR (i.e. it is a - // receiver-only) and reply to RRTR (i.e. remote participant is a receiver - // only). If that happen, then `reserved_bytes` would be slightly larger - // than it should, which is not an issue. + rtcp::ExtendedReports xr; - // 4 bytes for common RTCP header + 4 bytes for the ExtenedReports header. - reserved_bytes += (4 + 4 + rtcp::Rrtr::kLength); - } - - CompoundPacketInfo result = FillReports(now, reserved_bytes, sender); - - if (sdes.has_value() && !sender.IsEmpty()) { - sender.AppendPacket(*sdes); - } - if (remb_.has_value()) { - remb_->SetSenderSsrc(result.sender_ssrc); - sender.AppendPacket(*remb_); - } - if (!result.has_sender_report && config_.non_sender_rtt_measurement) { - if (!xr.has_value()) { - xr.emplace(); - } rtcp::Rrtr rrtr; rrtr.SetNtp(config_.clock->ConvertTimestampToNtpTime(now)); - xr->SetRrtr(rrtr); - } - if (xr.has_value()) { - xr->SetSenderSsrc(result.sender_ssrc); - sender.AppendPacket(*xr); + xr.SetRrtr(rrtr); + + xr.SetSenderSsrc(sender_ssrc); + sender->AppendPacket(xr); } } @@ -682,9 +483,8 @@ void RtcpTransceiverImpl::SendPeriodicCompoundPacket() { auto send_packet = [this](rtc::ArrayView packet) { config_.outgoing_transport->SendRtcp(packet.data(), packet.size()); }; - Timestamp now = config_.clock->CurrentTime(); PacketSender sender(send_packet, config_.max_packet_size); - CreateCompoundPacket(now, /*reserved_bytes=*/0, sender); + CreateCompoundPacket(&sender); sender.Send(); } @@ -710,11 +510,8 @@ void RtcpTransceiverImpl::SendImmediateFeedback( PacketSender sender(send_packet, config_.max_packet_size); // Compound mode requires every sent rtcp packet to be compound, i.e. start // with a sender or receiver report. 
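The compound-mode rule referenced above comes from RFC 4585/5506: every RTCP datagram must begin with a sender or receiver report, with SDES and other blocks appended after it. A sketch of that layout using the rtcp::CompoundPacket helper from earlier in this diff (the ssrc and cname values are placeholders):

#include <cstdint>
#include <memory>
#include <string>
#include <utility>

#include "modules/rtp_rtcp/source/rtcp_packet/compound_packet.h"
#include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h"
#include "modules/rtp_rtcp/source/rtcp_packet/sdes.h"
#include "rtc_base/buffer.h"

rtc::Buffer BuildMinimalCompound(uint32_t sender_ssrc, const std::string& cname) {
  auto receiver_report = std::make_unique<webrtc::rtcp::ReceiverReport>();
  receiver_report->SetSenderSsrc(sender_ssrc);

  auto sdes = std::make_unique<webrtc::rtcp::Sdes>();
  sdes->AddCName(sender_ssrc, cname);

  webrtc::rtcp::CompoundPacket compound;
  compound.Append(std::move(receiver_report));  // report block comes first
  compound.Append(std::move(sdes));
  return compound.Build();  // serialized compound RTCP packet
}
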
- if (config_.rtcp_mode == RtcpMode::kCompound) { - Timestamp now = config_.clock->CurrentTime(); - CreateCompoundPacket(now, /*reserved_bytes=*/rtcp_packet.BlockLength(), - sender); - } + if (config_.rtcp_mode == RtcpMode::kCompound) + CreateCompoundPacket(&sender); sender.AppendPacket(rtcp_packet); sender.Send(); @@ -725,12 +522,14 @@ void RtcpTransceiverImpl::SendImmediateFeedback( } std::vector RtcpTransceiverImpl::CreateReportBlocks( - Timestamp now, - size_t num_max_blocks) { + Timestamp now) { if (!config_.receive_statistics) return {}; + // TODO(danilchap): Support sending more than + // `ReceiverReport::kMaxNumberOfReportBlocks` per compound rtcp packet. std::vector report_blocks = - config_.receive_statistics->RtcpReportBlocks(num_max_blocks); + config_.receive_statistics->RtcpReportBlocks( + rtcp::ReceiverReport::kMaxNumberOfReportBlocks); uint32_t last_sr = 0; uint32_t last_delay = 0; for (rtcp::ReportBlock& report_block : report_blocks) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h index be9a98188..b03db7d78 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtcp_transceiver_impl.h @@ -11,7 +11,7 @@ #ifndef MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ #define MODULES_RTP_RTCP_SOURCE_RTCP_TRANSCEIVER_IMPL_H_ -#include +#include #include #include #include @@ -48,11 +48,6 @@ class RtcpTransceiverImpl { void RemoveMediaReceiverRtcpObserver(uint32_t remote_ssrc, MediaReceiverRtcpObserver* observer); - // Returns false on failure, e.g. when there is already an handler for the - // `local_ssrc`. - bool AddMediaSender(uint32_t local_ssrc, RtpStreamRtcpHandler* handler); - bool RemoveMediaSender(uint32_t local_ssrc); - void SetReadyToSend(bool ready); void ReceivePacket(rtc::ArrayView packet, Timestamp now); @@ -81,14 +76,6 @@ class RtcpTransceiverImpl { private: class PacketSender; struct RemoteSenderState; - struct LocalSenderState; - struct RrtrTimes { - // Received remote NTP timestamp in compact representation. - uint32_t received_remote_mid_ntp_time; - - // Local NTP time when the report was received in compact representation. - uint32_t local_receive_mid_ntp_time; - }; void HandleReceivedPacket(const rtcp::CommonHeader& rtcp_packet_header, Timestamp now, @@ -116,30 +103,15 @@ class RtcpTransceiverImpl { rtc::ArrayView report_blocks); void ReschedulePeriodicCompoundPackets(); - void SchedulePeriodicCompoundPackets(TimeDelta delay); - // Appends RTCP sender and receiver reports to the `sender`. - // Both sender and receiver reports may have attached report blocks. - // Uses up to `config_.max_packet_size - reserved_bytes` - struct CompoundPacketInfo { - uint32_t sender_ssrc; - bool has_sender_report; - }; - CompoundPacketInfo FillReports(Timestamp now, - size_t reserved_bytes, - PacketSender& rtcp_sender); - + void SchedulePeriodicCompoundPackets(int64_t delay_ms); // Creates compound RTCP packet, as defined in // https://tools.ietf.org/html/rfc5506#section-2 - void CreateCompoundPacket(Timestamp now, - size_t reserved_bytes, - PacketSender& rtcp_sender); - + void CreateCompoundPacket(PacketSender* sender); // Sends RTCP packets. void SendPeriodicCompoundPacket(); void SendImmediateFeedback(const rtcp::RtcpPacket& rtcp_packet); - // Generate Report Blocks to be send in Sender or Receiver Reports. 
- std::vector CreateReportBlocks(Timestamp now, - size_t num_max_blocks); + // Generate Report Blocks to be send in Sender or Receiver Report. + std::vector CreateReportBlocks(Timestamp now); const RtcpTransceiverConfig config_; @@ -148,10 +120,6 @@ class RtcpTransceiverImpl { // TODO(danilchap): Remove entries from remote_senders_ that are no longer // needed. flat_map remote_senders_; - std::list local_senders_; - flat_map::iterator> - local_senders_by_ssrc_; - flat_map received_rrtrs_; RepeatingTaskHandle periodic_task_handle_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h index 283beacb1..f65859424 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_h264.h @@ -23,6 +23,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/h264/include/h264_globals.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -36,9 +37,6 @@ class RtpPacketizerH264 : public RtpPacketizer { ~RtpPacketizerH264() override; - RtpPacketizerH264(const RtpPacketizerH264&) = delete; - RtpPacketizerH264& operator=(const RtpPacketizerH264&) = delete; - size_t NumPackets() const override; // Get the next payload with H264 payload header. @@ -84,6 +82,8 @@ class RtpPacketizerH264 : public RtpPacketizer { size_t num_packets_left_; std::deque> input_fragments_; std::queue packets_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerH264); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_H264_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h index fd44bd198..5acd69116 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_video_generic.h @@ -16,6 +16,7 @@ #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_format.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -47,9 +48,6 @@ class RtpPacketizerGeneric : public RtpPacketizer { ~RtpPacketizerGeneric() override; - RtpPacketizerGeneric(const RtpPacketizerGeneric&) = delete; - RtpPacketizerGeneric& operator=(const RtpPacketizerGeneric&) = delete; - size_t NumPackets() const override; // Get the next payload. 
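The packetizer headers above (and compound_packet.h earlier) swap explicitly deleted copy operations for the older rtc_base/constructor_magic.h macro; the two spellings are equivalent ways of making a class non-copyable. A standalone illustration:

#include "rtc_base/constructor_magic.h"

class NonCopyableWithMacro {
 public:
  NonCopyableWithMacro() = default;

 private:
  // Declares the copy constructor and copy assignment operator as deleted.
  RTC_DISALLOW_COPY_AND_ASSIGN(NonCopyableWithMacro);
};

class NonCopyableWithDelete {
 public:
  NonCopyableWithDelete() = default;
  NonCopyableWithDelete(const NonCopyableWithDelete&) = delete;
  NonCopyableWithDelete& operator=(const NonCopyableWithDelete&) = delete;
};
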
@@ -66,6 +64,8 @@ class RtpPacketizerGeneric : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerGeneric); }; } // namespace webrtc #endif // MODULES_RTP_RTCP_SOURCE_RTP_FORMAT_VIDEO_GENERIC_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h index d1f569a94..21009280e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8.h @@ -35,6 +35,7 @@ #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -49,9 +50,6 @@ class RtpPacketizerVp8 : public RtpPacketizer { ~RtpPacketizerVp8() override; - RtpPacketizerVp8(const RtpPacketizerVp8&) = delete; - RtpPacketizerVp8& operator=(const RtpPacketizerVp8&) = delete; - size_t NumPackets() const override; // Get the next payload with VP8 payload header. @@ -68,6 +66,8 @@ class RtpPacketizerVp8 : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerVp8); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h index 3ecaa476d..916d6577f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp8_test_helper.h @@ -21,6 +21,7 @@ #include "modules/rtp_rtcp/source/rtp_format_vp8.h" #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -28,10 +29,6 @@ class RtpFormatVp8TestHelper { public: RtpFormatVp8TestHelper(const RTPVideoHeaderVP8* hdr, size_t payload_len); ~RtpFormatVp8TestHelper(); - - RtpFormatVp8TestHelper(const RtpFormatVp8TestHelper&) = delete; - RtpFormatVp8TestHelper& operator=(const RtpFormatVp8TestHelper&) = delete; - void GetAllPacketsAndCheck(RtpPacketizerVp8* packetizer, rtc::ArrayView expected_sizes); @@ -49,6 +46,8 @@ class RtpFormatVp8TestHelper { const RTPVideoHeaderVP8* const hdr_info_; rtc::Buffer payload_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpFormatVp8TestHelper); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h index 3cf4dd56e..02458aea6 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_format_vp9.h @@ -30,6 +30,7 @@ #include "modules/rtp_rtcp/source/rtp_format.h" #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" +#include "rtc_base/constructor_magic.h" namespace webrtc { @@ -42,9 +43,6 @@ class RtpPacketizerVp9 : public RtpPacketizer { ~RtpPacketizerVp9() override; - RtpPacketizerVp9(const RtpPacketizerVp9&) = delete; - RtpPacketizerVp9& operator=(const RtpPacketizerVp9&) = delete; - size_t NumPackets() const override; // Gets the next payload with VP9 
payload header. @@ -66,6 +64,8 @@ class RtpPacketizerVp9 : public RtpPacketizer { rtc::ArrayView remaining_payload_; std::vector payload_sizes_; std::vector::const_iterator current_packet_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RtpPacketizerVp9); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc index 0bfb04344..fe5ccc708 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_history.cc @@ -492,7 +492,7 @@ RtpPacketHistory::PacketState RtpPacketHistory::StoredPacketToPacketState( RtpPacketHistory::PacketState state; state.rtp_sequence_number = stored_packet.packet_->SequenceNumber(); state.send_time_ms = stored_packet.send_time_ms_; - state.capture_time_ms = stored_packet.packet_->capture_time().ms(); + state.capture_time_ms = stored_packet.packet_->capture_time_ms(); state.ssrc = stored_packet.packet_->Ssrc(); state.packet_size = stored_packet.packet_->size(); state.times_retransmitted = stored_packet.times_retransmitted(); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h index f290a643a..431d3f52b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_received.h @@ -14,6 +14,7 @@ #include +#include "absl/base/attributes.h" #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/rtp_headers.h" @@ -48,6 +49,15 @@ class RtpPacketReceived : public RtpPacket { webrtc::Timestamp arrival_time() const { return arrival_time_; } void set_arrival_time(webrtc::Timestamp time) { arrival_time_ = time; } + ABSL_DEPRECATED("Use arrival_time() instead") + int64_t arrival_time_ms() const { + return arrival_time_.IsMinusInfinity() ? -1 : arrival_time_.ms(); + } + ABSL_DEPRECATED("Use set_arrival_time() instead") + void set_arrival_time_ms(int64_t time) { + arrival_time_ = webrtc::Timestamp::Millis(time); + } + // Flag if packet was recovered via RTX or FEC. bool recovered() const { return recovered_; } void set_recovered(bool value) { recovered_ = value; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h index 9b3bbf673..12341ef6c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_packet_to_send.h @@ -15,12 +15,10 @@ #include -#include "absl/base/attributes.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" -#include "api/units/timestamp.h" #include "api/video/video_timing.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "modules/rtp_rtcp/source/rtp_header_extensions.h" @@ -46,15 +44,9 @@ class RtpPacketToSend : public RtpPacket { ~RtpPacketToSend(); // Time in local time base as close as it can to frame capture time. 
- webrtc::Timestamp capture_time() const { return capture_time_; } - void set_capture_time(webrtc::Timestamp time) { capture_time_ = time; } + int64_t capture_time_ms() const { return capture_time_ms_; } - ABSL_DEPRECATED("Use capture_time() instead") - int64_t capture_time_ms() const { return capture_time_.ms_or(-1); } - ABSL_DEPRECATED("Use set_capture_time() instead") - void set_capture_time_ms(int64_t time) { - capture_time_ = webrtc::Timestamp::Millis(time); - } + void set_capture_time_ms(int64_t time) { capture_time_ms_ = time; } void set_packet_type(RtpPacketMediaType type) { packet_type_ = type; } absl::optional packet_type() const { @@ -85,55 +77,27 @@ class RtpPacketToSend : public RtpPacket { additional_data_ = std::move(data); } - void set_packetization_finish_time(webrtc::Timestamp time) { - SetExtension( - VideoSendTiming::GetDeltaCappedMs(time - capture_time_), - VideoTimingExtension::kPacketizationFinishDeltaOffset); - } - - void set_pacer_exit_time(webrtc::Timestamp time) { - SetExtension( - VideoSendTiming::GetDeltaCappedMs(time - capture_time_), - VideoTimingExtension::kPacerExitDeltaOffset); - } - - void set_network_time(webrtc::Timestamp time) { - SetExtension( - VideoSendTiming::GetDeltaCappedMs(time - capture_time_), - VideoTimingExtension::kNetworkTimestampDeltaOffset); - } - - void set_network2_time(webrtc::Timestamp time) { - SetExtension( - VideoSendTiming::GetDeltaCappedMs(time - capture_time_), - VideoTimingExtension::kNetwork2TimestampDeltaOffset); - } - - ABSL_DEPRECATED("Use set_packetization_finish_time() instead") void set_packetization_finish_time_ms(int64_t time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_.ms_or(0), time), + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), VideoTimingExtension::kPacketizationFinishDeltaOffset); } - ABSL_DEPRECATED("Use set_pacer_exit_time() instead") void set_pacer_exit_time_ms(int64_t time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_.ms_or(0), time), + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), VideoTimingExtension::kPacerExitDeltaOffset); } - ABSL_DEPRECATED("Use set_network_time() instead") void set_network_time_ms(int64_t time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_.ms_or(0), time), + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), VideoTimingExtension::kNetworkTimestampDeltaOffset); } - ABSL_DEPRECATED("Use set_network2_time() instead") void set_network2_time_ms(int64_t time) { SetExtension( - VideoSendTiming::GetDeltaCappedMs(capture_time_.ms_or(0), time), + VideoSendTiming::GetDeltaCappedMs(capture_time_ms_, time), VideoTimingExtension::kNetwork2TimestampDeltaOffset); } @@ -157,7 +121,7 @@ class RtpPacketToSend : public RtpPacket { bool is_red() const { return is_red_; } private: - webrtc::Timestamp capture_time_ = webrtc::Timestamp::Zero(); + int64_t capture_time_ms_ = 0; absl::optional packet_type_; bool allow_retransmission_ = false; absl::optional retransmitted_sequence_number_; diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h index a411b237a..f3cb8d2c8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_rtcp_interface.h @@ -27,6 +27,7 @@ #include "modules/rtp_rtcp/source/rtp_packet_to_send.h" #include "modules/rtp_rtcp/source/rtp_sequence_number_map.h" #include 
"modules/rtp_rtcp/source/video_fec_generator.h" +#include "rtc_base/constructor_magic.h" #include "system_wrappers/include/ntp_time.h" namespace webrtc { @@ -46,9 +47,6 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { Configuration() = default; Configuration(Configuration&& rhs) = default; - Configuration(const Configuration&) = delete; - Configuration& operator=(const Configuration&) = delete; - // True for a audio version of the RTP/RTCP module object false will create // a video version. bool audio = false; @@ -147,6 +145,9 @@ class RtpRtcpInterface : public RtcpFeedbackSenderInterface { // Estimate RTT as non-sender as described in // https://tools.ietf.org/html/rfc3611#section-4.4 and #section-4.5 bool non_sender_rtt_measurement = false; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(Configuration); }; // Stats for RTCP sender reports (SR) for a specific SSRC. diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc index a9bc57f7c..feda738d0 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender.cc @@ -258,12 +258,6 @@ size_t RTPSender::MaxRtpPacketSize() const { void RTPSender::SetRtxStatus(int mode) { MutexLock lock(&send_mutex_); - if (mode != kRtxOff && - (!rtx_ssrc_.has_value() || rtx_payload_type_map_.empty())) { - RTC_LOG(LS_ERROR) - << "Failed to enable RTX without RTX SSRC or payload types."; - return; - } rtx_ = mode; } @@ -459,7 +453,6 @@ std::vector> RTPSender::GeneratePadding( } RTC_DCHECK(rtx_ssrc_); - RTC_DCHECK(!rtx_payload_type_map_.empty()); padding_packet->SetSsrc(*rtx_ssrc_); padding_packet->SetPayloadType(rtx_payload_type_map_.begin()->second); } @@ -484,11 +477,13 @@ std::vector> RTPSender::GeneratePadding( bool RTPSender::SendToNetwork(std::unique_ptr packet) { RTC_DCHECK(packet); + int64_t now_ms = clock_->TimeInMilliseconds(); + auto packet_type = packet->packet_type(); RTC_CHECK(packet_type) << "Packet type must be set before sending."; - if (packet->capture_time() <= Timestamp::Zero()) { - packet->set_capture_time(clock_->CurrentTime()); + if (packet->capture_time_ms() <= 0) { + packet->set_capture_time_ms(now_ms); } std::vector> packets; @@ -501,13 +496,13 @@ bool RTPSender::SendToNetwork(std::unique_ptr packet) { void RTPSender::EnqueuePackets( std::vector> packets) { RTC_DCHECK(!packets.empty()); - Timestamp now = clock_->CurrentTime(); + int64_t now_ms = clock_->TimeInMilliseconds(); for (auto& packet : packets) { RTC_DCHECK(packet); RTC_CHECK(packet->packet_type().has_value()) << "Packet type must be set before sending."; - if (packet->capture_time() <= Timestamp::Zero()) { - packet->set_capture_time(now); + if (packet->capture_time_ms() <= 0) { + packet->set_capture_time_ms(now_ms); } } @@ -724,7 +719,7 @@ std::unique_ptr RTPSender::BuildRtxPacket( rtx_packet->set_additional_data(packet.additional_data()); // Copy capture time so e.g. TransmissionOffset is correctly set. 
- rtx_packet->set_capture_time(packet.capture_time()); + rtx_packet->set_capture_time_ms(packet.capture_time_ms()); return rtx_packet; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc index c0a807530..207d1ca04 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_audio.cc @@ -272,7 +272,7 @@ bool RTPSenderAudio::SendAudio(AudioFrameType frame_type, packet->SetMarker(MarkerBit(frame_type, payload_type)); packet->SetPayloadType(payload_type); packet->SetTimestamp(rtp_timestamp); - packet->set_capture_time(clock_->CurrentTime()); + packet->set_capture_time_ms(clock_->TimeInMilliseconds()); // Update audio level extension, if included. packet->SetExtension( frame_type == AudioFrameType::kAudioFrameSpeech, audio_level_dbov); @@ -370,7 +370,7 @@ bool RTPSenderAudio::SendTelephoneEventPacket(bool ended, packet->SetMarker(marker_bit); packet->SetSsrc(rtp_sender_->SSRC()); packet->SetTimestamp(dtmf_timestamp); - packet->set_capture_time(clock_->CurrentTime()); + packet->set_capture_time_ms(clock_->TimeInMilliseconds()); // Create DTMF data. uint8_t* dtmfbuffer = packet->AllocatePayload(kDtmfSize); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc index b34b54c1f..eb5537808 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_egress.cc @@ -225,7 +225,7 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, // In case of VideoTimingExtension, since it's present not in every packet, // data after rtp header may be corrupted if these packets are protected by // the FEC. 
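The set_pacer_exit_time_ms() and set_network2_time_ms() calls below store video-timing values as small unsigned millisecond deltas from capture time via VideoSendTiming::GetDeltaCappedMs(). A sketch of the idea behind that capping (not the actual implementation):

#include <algorithm>
#include <cstdint>
#include <limits>

// Deltas are carried in the VideoTiming header extension as uint16 millisecond
// offsets from capture time, so they are clamped to the representable range.
uint16_t DeltaCappedMs(int64_t capture_time_ms, int64_t now_ms) {
  int64_t delta_ms = now_ms - capture_time_ms;
  return static_cast<uint16_t>(std::clamp<int64_t>(
      delta_ms, 0, std::numeric_limits<uint16_t>::max()));
}
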
- int64_t diff_ms = now_ms - packet->capture_time().ms(); + int64_t diff_ms = now_ms - packet->capture_time_ms(); if (packet->HasExtension()) { packet->SetExtension(kTimestampTicksPerMs * diff_ms); } @@ -236,9 +236,9 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, if (packet->HasExtension()) { if (populate_network2_timestamp_) { - packet->set_network2_time(Timestamp::Millis(now_ms)); + packet->set_network2_time_ms(now_ms); } else { - packet->set_pacer_exit_time(Timestamp::Millis(now_ms)); + packet->set_pacer_exit_time_ms(now_ms); } } @@ -265,8 +265,8 @@ void RtpSenderEgress::SendPacket(RtpPacketToSend* packet, if (packet->packet_type() != RtpPacketMediaType::kPadding && packet->packet_type() != RtpPacketMediaType::kRetransmission) { - UpdateDelayStatistics(packet->capture_time().ms(), now_ms, packet_ssrc); - UpdateOnSendPacket(options.packet_id, packet->capture_time().ms(), + UpdateDelayStatistics(packet->capture_time_ms(), now_ms, packet_ssrc); + UpdateOnSendPacket(options.packet_id, packet->capture_time_ms(), packet_ssrc); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc index b681f3d55..ca8030f74 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video.cc @@ -534,7 +534,7 @@ bool RTPSenderVideo::SendVideo( RTC_DCHECK_LE(packet_capacity, single_packet->capacity()); single_packet->SetPayloadType(payload_type); single_packet->SetTimestamp(rtp_timestamp); - single_packet->set_capture_time(Timestamp::Millis(capture_time_ms)); + single_packet->set_capture_time_ms(capture_time_ms); // Construct the absolute capture time extension if not provided. if (!video_header.absolute_capture_time.has_value()) { @@ -695,7 +695,7 @@ bool RTPSenderVideo::SendVideo( // Put packetization finish timestamp into extension. if (packet->HasExtension()) { - packet->set_packetization_finish_time(clock_->CurrentTime()); + packet->set_packetization_finish_time_ms(clock_->TimeInMilliseconds()); } packet->set_fec_protect_packet(use_fec); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index c3295c68b..377f6c4fb 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -139,7 +139,7 @@ void RTPSenderVideoFrameTransformerDelegate::OnTransformedFrame( // arrives. 
if (!sender_ || !encoder_queue_) return; - rtc::scoped_refptr delegate(this); + rtc::scoped_refptr delegate = this; encoder_queue_->PostTask(ToQueuedTask( [delegate = std::move(delegate), frame = std::move(frame)]() mutable { delegate->SendVideo(std::move(frame)); diff --git a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc index ee4d74457..e87be031a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/rtp_rtcp/source/video_rtp_depacketizer_h264.cc @@ -15,6 +15,7 @@ #include #include +#include "absl/base/macros.h" #include "absl/types/optional.h" #include "absl/types/variant.h" #include "common_video/h264/h264_common.h" @@ -196,7 +197,7 @@ absl::optional ProcessStapAOrSingleNalu( case H264::NaluType::kIdr: parsed_payload->video_header.frame_type = VideoFrameType::kVideoFrameKey; - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case H264::NaluType::kSlice: { absl::optional pps_id = PpsParser::ParsePpsIdFromSlice( &payload_data[start_offset], end_offset - start_offset); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h index 546265049..4b4738960 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/device_info_impl.h @@ -29,7 +29,7 @@ class DeviceInfoImpl : public VideoCaptureModule::DeviceInfo { ~DeviceInfoImpl(void) override; int32_t NumberOfCapabilities(const char* deviceUniqueIdUTF8) override; int32_t GetCapability(const char* deviceUniqueIdUTF8, - uint32_t deviceCapabilityNumber, + const uint32_t deviceCapabilityNumber, VideoCaptureCapability& capability) override; int32_t GetBestMatchedCapability(const char* deviceUniqueIdUTF8, diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h index 3bbe217cb..0f60092d7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_capture/video_capture.h @@ -44,7 +44,7 @@ class VideoCaptureModule : public rtc::RefCountInterface { // Gets the capabilities of the named device. virtual int32_t GetCapability(const char* deviceUniqueIdUTF8, - uint32_t deviceCapabilityNumber, + const uint32_t deviceCapabilityNumber, VideoCaptureCapability& capability) = 0; // Gets clockwise angle the captured frames should be rotated in order diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc index 79a31d9ba..967ec7d45 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.cc @@ -820,8 +820,17 @@ VideoEncoder::EncoderInfo LibaomAv1Encoder::GetEncoderInfo() const { } // namespace +const bool kIsLibaomAv1EncoderSupported = true; + std::unique_ptr CreateLibaomAv1Encoder() { return std::make_unique(); } +bool LibaomAv1EncoderSupportsScalabilityMode( + absl::string_view scalability_mode) { + // For AV1, the scalability mode is supported if we can create the scalability + // structure. 
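Between libaom_av1_encoder.cc above and the new libaom_av1_encoder_absent.cc below, exactly one translation unit is linked in, so callers can branch on kIsLibaomAv1EncoderSupported or simply on the returned pointer. A minimal sketch of that usage:

#include <memory>

#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h"

std::unique_ptr<webrtc::VideoEncoder> MaybeCreateAv1Encoder() {
  if (!webrtc::kIsLibaomAv1EncoderSupported) {
    return nullptr;  // built with libaom_av1_encoder_absent.cc
  }
  return webrtc::CreateLibaomAv1Encoder();
}
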
+ return ScalabilityStructureConfig(scalability_mode) != absl::nullopt; +} + } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h index e69df9e8b..0d81c9c17 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder.h @@ -12,11 +12,18 @@ #include +#include "absl/base/attributes.h" #include "absl/strings/string_view.h" #include "api/video_codecs/video_encoder.h" namespace webrtc { + +ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported; + std::unique_ptr CreateLibaomAv1Encoder(); +bool LibaomAv1EncoderSupportsScalabilityMode( + absl::string_view scalability_mode); + } // namespace webrtc #endif // MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc new file mode 100644 index 000000000..fff1dd9ed --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_absent.cc @@ -0,0 +1,29 @@ +/* + * Copyright (c) 2020 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" + +#include + +#include "api/video_codecs/video_encoder.h" + +namespace webrtc { + +const bool kIsLibaomAv1EncoderSupported = false; + +std::unique_ptr CreateLibaomAv1Encoder() { + return nullptr; +} + +bool LibaomAv1EncoderSupportsScalabilityMode( + absl::string_view scalability_mode) { + return false; +} + +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.cc deleted file mode 100644 index 0bb31085e..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.cc +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h" - -#include "modules/video_coding/svc/create_scalability_structure.h" - -#if defined(RTC_USE_LIBAOM_AV1_ENCODER) -#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" // nogncheck -#endif - -namespace webrtc { -#if defined(RTC_USE_LIBAOM_AV1_ENCODER) -const bool kIsLibaomAv1EncoderSupported = true; -std::unique_ptr CreateLibaomAv1EncoderIfSupported() { - return CreateLibaomAv1Encoder(); -} -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode) { - // For libaom AV1, the scalability mode is supported if we can create the - // scalability structure. - return ScalabilityStructureConfig(scalability_mode) != absl::nullopt; -} -#else -const bool kIsLibaomAv1EncoderSupported = false; -std::unique_ptr CreateLibaomAv1EncoderIfSupported() { - return nullptr; -} -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode) { - return false; -} -#endif - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h deleted file mode 100644 index 84dd8d600..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h +++ /dev/null @@ -1,29 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ -#ifndef MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_SUPPORTED_H_ -#define MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_SUPPORTED_H_ - -#include - -#include "absl/base/attributes.h" -#include "absl/strings/string_view.h" -#include "api/video_codecs/video_encoder.h" - -namespace webrtc { - -ABSL_CONST_INIT extern const bool kIsLibaomAv1EncoderSupported; - -std::unique_ptr CreateLibaomAv1EncoderIfSupported(); -bool LibaomAv1EncoderSupportsScalabilityMode( - absl::string_view scalability_mode); - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_CODECS_AV1_LIBAOM_AV1_ENCODER_SUPPORTED_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc index 2ac19ba0b..8324b7c74 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264.cc @@ -80,34 +80,18 @@ std::vector SupportedH264Codecs() { // // We support both packetization modes 0 (mandatory) and 1 (optional, // preferred). 
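The comment above refers to RFC 6184 packetization modes; each entry returned by SupportedH264Codecs() maps to an SDP format carrying packetization-mode 0 or 1 plus a profile-level-id. A sketch of what one such entry looks like (the exact profile-level-id string here is illustrative):

#include "api/video_codecs/sdp_video_format.h"

webrtc::SdpVideoFormat BaselineLevel31PacketizationMode1() {
  // Roughly what CreateH264Format(kProfileBaseline, kLevel3_1, "1") produces.
  return webrtc::SdpVideoFormat(
      "H264", {{"level-asymmetry-allowed", "1"},
               {"packetization-mode", "1"},
               {"profile-level-id", "42001f"}});
}
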
- return { - CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "1"), - CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, - "0"), - CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "1"), - CreateH264Format(H264Profile::kProfileConstrainedBaseline, - H264Level::kLevel3_1, "0"), - CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "1"), - CreateH264Format(H264Profile::kProfileMain, H264Level::kLevel3_1, "0")}; -} - -std::vector SupportedH264DecoderCodecs() { - TRACE_EVENT0("webrtc", __func__); - if (!IsH264CodecSupported()) - return std::vector(); - - std::vector supportedCodecs = SupportedH264Codecs(); - - // OpenH264 doesn't yet support High Predictive 4:4:4 encoding but it does - // support decoding. - supportedCodecs.push_back(CreateH264Format( - H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "1")); - supportedCodecs.push_back(CreateH264Format( - H264Profile::kProfilePredictiveHigh444, H264Level::kLevel3_1, "0")); - - return supportedCodecs; + return {CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "1"), + CreateH264Format(H264Profile::kProfileBaseline, H264Level::kLevel3_1, + "0"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "1"), + CreateH264Format(H264Profile::kProfileConstrainedBaseline, + H264Level::kLevel3_1, "0"), + CreateH264Format(H264Profile::kProfileMain, + H264Level::kLevel3_1, "1"), + CreateH264Format(H264Profile::kProfileMain, + H264Level::kLevel3_1, "0")}; } std::unique_ptr H264Encoder::Create( diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc index 626f1d445..a494009ee 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/h264_decoder_impl.cc @@ -41,10 +41,8 @@ namespace webrtc { namespace { -constexpr std::array kPixelFormatsDefault = { - AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV444P}; -constexpr std::array kPixelFormatsFullRange = { - AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ444P}; +const AVPixelFormat kPixelFormatDefault = AV_PIX_FMT_YUV420P; +const AVPixelFormat kPixelFormatFullRange = AV_PIX_FMT_YUVJ420P; const size_t kYPlaneIndex = 0; const size_t kUPlaneIndex = 1; const size_t kVPlaneIndex = 2; @@ -78,17 +76,9 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // Necessary capability to be allowed to provide our own buffers. RTC_DCHECK(context->codec->capabilities | AV_CODEC_CAP_DR1); - // Limited or full range YUV420 or YUV444 is expected. - auto pixelFormatDefault = std::find_if( - kPixelFormatsDefault.begin(), kPixelFormatsDefault.end(), - [context](AVPixelFormat format) { return context->pix_fmt == format; }); - auto pixelFormatFullRange = std::find_if( - kPixelFormatsFullRange.begin(), kPixelFormatsFullRange.end(), - [context](AVPixelFormat format) { return context->pix_fmt == format; }); - // Limited or full range YUV420 is expected. - RTC_CHECK(pixelFormatDefault != kPixelFormatsDefault.end() || - pixelFormatFullRange != kPixelFormatsFullRange.end()); + RTC_CHECK(context->pix_fmt == kPixelFormatDefault || + context->pix_fmt == kPixelFormatFullRange); // `av_frame->width` and `av_frame->height` are set by FFmpeg. 
These are the // actual image's dimensions and may be different from `context->width` and @@ -122,43 +112,8 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, // http://crbug.com/390941. Our pool is set up to zero-initialize new buffers. // TODO(nisse): Delete that feature from the video pool, instead add // an explicit call to InitializeData here. - rtc::scoped_refptr frame_buffer; - rtc::scoped_refptr i444_buffer; - rtc::scoped_refptr i420_buffer; - switch (context->pix_fmt) { - case AV_PIX_FMT_YUV420P: - case AV_PIX_FMT_YUVJ420P: - i420_buffer = - decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); - // Set `av_frame` members as required by FFmpeg. - av_frame->data[kYPlaneIndex] = i420_buffer->MutableDataY(); - av_frame->linesize[kYPlaneIndex] = i420_buffer->StrideY(); - av_frame->data[kUPlaneIndex] = i420_buffer->MutableDataU(); - av_frame->linesize[kUPlaneIndex] = i420_buffer->StrideU(); - av_frame->data[kVPlaneIndex] = i420_buffer->MutableDataV(); - av_frame->linesize[kVPlaneIndex] = i420_buffer->StrideV(); - RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); - frame_buffer = i420_buffer; - break; - case AV_PIX_FMT_YUV444P: - case AV_PIX_FMT_YUVJ444P: - i444_buffer = - decoder->ffmpeg_buffer_pool_.CreateI444Buffer(width, height); - // Set `av_frame` members as required by FFmpeg. - av_frame->data[kYPlaneIndex] = i444_buffer->MutableDataY(); - av_frame->linesize[kYPlaneIndex] = i444_buffer->StrideY(); - av_frame->data[kUPlaneIndex] = i444_buffer->MutableDataU(); - av_frame->linesize[kUPlaneIndex] = i444_buffer->StrideU(); - av_frame->data[kVPlaneIndex] = i444_buffer->MutableDataV(); - av_frame->linesize[kVPlaneIndex] = i444_buffer->StrideV(); - frame_buffer = i444_buffer; - break; - default: - RTC_LOG(LS_ERROR) << "Unsupported buffer type " << context->pix_fmt - << ". Check supported supported pixel formats!"; - decoder->ReportError(); - return -1; - } + rtc::scoped_refptr frame_buffer = + decoder->ffmpeg_buffer_pool_.CreateI420Buffer(width, height); int y_size = width * height; int uv_size = frame_buffer->ChromaWidth() * frame_buffer->ChromaHeight(); @@ -170,6 +125,15 @@ int H264DecoderImpl::AVGetBuffer2(AVCodecContext* context, av_frame->format = context->pix_fmt; av_frame->reordered_opaque = context->reordered_opaque; + // Set `av_frame` members as required by FFmpeg. + av_frame->data[kYPlaneIndex] = frame_buffer->MutableDataY(); + av_frame->linesize[kYPlaneIndex] = frame_buffer->StrideY(); + av_frame->data[kUPlaneIndex] = frame_buffer->MutableDataU(); + av_frame->linesize[kUPlaneIndex] = frame_buffer->StrideU(); + av_frame->data[kVPlaneIndex] = frame_buffer->MutableDataV(); + av_frame->linesize[kVPlaneIndex] = frame_buffer->StrideV(); + RTC_DCHECK_EQ(av_frame->extended_data, av_frame->data); + // Create a VideoFrame object, to keep a reference to the buffer. // TODO(nisse): The VideoFrame's timestamp and rotation info is not used. // Refactor to do not use a VideoFrame object at all. 
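For reference, a minimal sketch (plain C++, independent of FFmpeg/WebRTC types) of the I420 plane geometry the simplified AVGetBuffer2 path assumes: a full-resolution Y plane and U/V planes subsampled by two in each dimension, which is where the y_size and uv_size values above come from. Strides here assume tightly packed planes; a real buffer pool may pad them.

#include <cstddef>

struct I420Geometry {
  std::size_t y_size;    // width * height
  std::size_t uv_size;   // size of each chroma plane
  int stride_y;          // tight packing assumed
  int stride_uv;
};

I420Geometry ComputeI420Geometry(int width, int height) {
  const int chroma_width = (width + 1) / 2;
  const int chroma_height = (height + 1) / 2;
  return {static_cast<std::size_t>(width) * height,
          static_cast<std::size_t>(chroma_width) * chroma_height,
          width, chroma_width};
}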
@@ -233,6 +197,7 @@ bool H264DecoderImpl::Configure(const Settings& settings) { av_context_->coded_width = resolution.Width(); av_context_->coded_height = resolution.Height(); } + av_context_->pix_fmt = kPixelFormatDefault; av_context_->extradata = nullptr; av_context_->extradata_size = 0; @@ -352,103 +317,47 @@ int32_t H264DecoderImpl::Decode(const EncodedImage& input_image, RTC_DCHECK(input_frame); rtc::scoped_refptr frame_buffer = input_frame->video_frame_buffer(); - - // Instantiate Planar YUV8 buffer according to video frame buffer type - const webrtc::PlanarYuv8Buffer* planar_yuv8_buffer = nullptr; - VideoFrameBuffer::Type video_frame_buffer_type = frame_buffer->type(); - switch (video_frame_buffer_type) { - case VideoFrameBuffer::Type::kI420: - planar_yuv8_buffer = frame_buffer->GetI420(); - break; - case VideoFrameBuffer::Type::kI444: - planar_yuv8_buffer = frame_buffer->GetI444(); - break; - default: - // If this code is changed to allow other video frame buffer type, - // make sure that the code below which wraps I420/I444 buffer and - // code which converts to NV12 is changed - // to work with new video frame buffer type - - RTC_LOG(LS_ERROR) << "frame_buffer type: " - << static_cast(video_frame_buffer_type) - << " is not supported!"; - ReportError(); - return WEBRTC_VIDEO_CODEC_ERROR; - } + const webrtc::I420BufferInterface* i420_buffer = frame_buffer->GetI420(); // When needed, FFmpeg applies cropping by moving plane pointers and adjusting // frame width/height. Ensure that cropped buffers lie within the allocated // memory. - RTC_DCHECK_LE(av_frame_->width, planar_yuv8_buffer->width()); - RTC_DCHECK_LE(av_frame_->height, planar_yuv8_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], planar_yuv8_buffer->DataY()); - RTC_DCHECK_LE(av_frame_->data[kYPlaneIndex] + - av_frame_->linesize[kYPlaneIndex] * av_frame_->height, - planar_yuv8_buffer->DataY() + planar_yuv8_buffer->StrideY() * - planar_yuv8_buffer->height()); - RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], planar_yuv8_buffer->DataU()); + RTC_DCHECK_LE(av_frame_->width, i420_buffer->width()); + RTC_DCHECK_LE(av_frame_->height, i420_buffer->height()); + RTC_DCHECK_GE(av_frame_->data[kYPlaneIndex], i420_buffer->DataY()); + RTC_DCHECK_LE( + av_frame_->data[kYPlaneIndex] + + av_frame_->linesize[kYPlaneIndex] * av_frame_->height, + i420_buffer->DataY() + i420_buffer->StrideY() * i420_buffer->height()); + RTC_DCHECK_GE(av_frame_->data[kUPlaneIndex], i420_buffer->DataU()); RTC_DCHECK_LE(av_frame_->data[kUPlaneIndex] + av_frame_->linesize[kUPlaneIndex] * av_frame_->height / 2, - planar_yuv8_buffer->DataU() + planar_yuv8_buffer->StrideU() * - planar_yuv8_buffer->height() / - 2); - RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], planar_yuv8_buffer->DataV()); + i420_buffer->DataU() + + i420_buffer->StrideU() * i420_buffer->height() / 2); + RTC_DCHECK_GE(av_frame_->data[kVPlaneIndex], i420_buffer->DataV()); RTC_DCHECK_LE(av_frame_->data[kVPlaneIndex] + av_frame_->linesize[kVPlaneIndex] * av_frame_->height / 2, - planar_yuv8_buffer->DataV() + planar_yuv8_buffer->StrideV() * - planar_yuv8_buffer->height() / - 2); + i420_buffer->DataV() + + i420_buffer->StrideV() * i420_buffer->height() / 2); - rtc::scoped_refptr cropped_buffer; - if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) { - cropped_buffer = WrapI420Buffer( - av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], - av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], - av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], 
- av_frame_->linesize[kVPlaneIndex], - // To keep reference alive. - [frame_buffer] {}); - } else { - cropped_buffer = WrapI444Buffer( - av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], - av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], - av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], - av_frame_->linesize[kVPlaneIndex], - // To keep reference alive. - [frame_buffer] {}); - } + rtc::scoped_refptr cropped_buffer = WrapI420Buffer( + av_frame_->width, av_frame_->height, av_frame_->data[kYPlaneIndex], + av_frame_->linesize[kYPlaneIndex], av_frame_->data[kUPlaneIndex], + av_frame_->linesize[kUPlaneIndex], av_frame_->data[kVPlaneIndex], + av_frame_->linesize[kVPlaneIndex], + // To keep reference alive. + [frame_buffer] {}); if (preferred_output_format_ == VideoFrameBuffer::Type::kNV12) { + const I420BufferInterface* cropped_i420 = cropped_buffer->GetI420(); auto nv12_buffer = output_buffer_pool_.CreateNV12Buffer( - cropped_buffer->width(), cropped_buffer->height()); - - const PlanarYuv8Buffer* cropped_planar_yuv8_buffer = nullptr; - if (video_frame_buffer_type == VideoFrameBuffer::Type::kI420) { - cropped_planar_yuv8_buffer = cropped_buffer->GetI420(); - libyuv::I420ToNV12(cropped_planar_yuv8_buffer->DataY(), - cropped_planar_yuv8_buffer->StrideY(), - cropped_planar_yuv8_buffer->DataU(), - cropped_planar_yuv8_buffer->StrideU(), - cropped_planar_yuv8_buffer->DataV(), - cropped_planar_yuv8_buffer->StrideV(), - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - planar_yuv8_buffer->width(), - planar_yuv8_buffer->height()); - } else { - cropped_planar_yuv8_buffer = cropped_buffer->GetI444(); - libyuv::I444ToNV12(cropped_planar_yuv8_buffer->DataY(), - cropped_planar_yuv8_buffer->StrideY(), - cropped_planar_yuv8_buffer->DataU(), - cropped_planar_yuv8_buffer->StrideU(), - cropped_planar_yuv8_buffer->DataV(), - cropped_planar_yuv8_buffer->StrideV(), - nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), - nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), - planar_yuv8_buffer->width(), - planar_yuv8_buffer->height()); - } - + cropped_i420->width(), cropped_i420->height()); + libyuv::I420ToNV12(cropped_i420->DataY(), cropped_i420->StrideY(), + cropped_i420->DataU(), cropped_i420->StrideU(), + cropped_i420->DataV(), cropped_i420->StrideV(), + nv12_buffer->MutableDataY(), nv12_buffer->StrideY(), + nv12_buffer->MutableDataUV(), nv12_buffer->StrideUV(), + i420_buffer->width(), i420_buffer->height()); cropped_buffer = nv12_buffer; } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h index 8c201d2b6..8d1eebc79 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/h264/include/h264.h @@ -38,15 +38,10 @@ CreateH264Format(H264Profile profile, // and is not thread-safe. RTC_EXPORT void DisableRtcUseH264(); -// Returns a vector with all supported internal H264 encode profiles that we can +// Returns a vector with all supported internal H264 profiles that we can // negotiate in SDP, in order of preference. std::vector SupportedH264Codecs(); -// Returns a vector with all supported internal H264 decode profiles that we can -// negotiate in SDP, in order of preference. This will be available for receive -// only connections. 
-std::vector SupportedH264DecoderCodecs(); - class RTC_EXPORT H264Encoder : public VideoEncoder { public: static std::unique_ptr Create(const cricket::VideoCodec& codec); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h index f43c90ea3..9e801e1b8 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/interface/libvpx_interface.h @@ -15,10 +15,10 @@ #include -#include -#include -#include -#include +#include "libvpx/vp8cx.h" +#include "libvpx/vpx_codec.h" +#include "libvpx/vpx_encoder.h" +#include "libvpx/vpx_image.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc index 4c786c0d1..8ab2c9649 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.cc @@ -31,9 +31,9 @@ #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" #include "third_party/libyuv/include/libyuv/convert.h" -#include -#include -#include +#include "libvpx/vp8.h" +#include "libvpx/vp8dx.h" +#include "libvpx/vpx_decoder.h" namespace webrtc { namespace { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h index 709a138c7..97e82b535 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_decoder.h @@ -19,8 +19,8 @@ #include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/vp8/include/vp8.h" #include "modules/video_coding/include/video_codec_interface.h" -#include -#include +#include "libvpx/vp8dx.h" +#include "libvpx/vpx_decoder.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc index 942c6aa01..e9a26b89b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.cc @@ -39,7 +39,7 @@ #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" #include "third_party/libyuv/include/libyuv/scale.h" -#include +#include "libvpx/vp8cx.h" namespace webrtc { namespace { @@ -591,7 +591,7 @@ int LibvpxVp8Encoder::InitEncode(const VideoCodec* inst, } // Allow the user to set the complexity for the base stream. - switch (inst->GetVideoEncoderComplexity()) { + switch (inst->VP8().complexity) { case VideoCodecComplexity::kComplexityHigh: cpu_speed_[0] = -5; break; @@ -856,19 +856,16 @@ uint32_t LibvpxVp8Encoder::MaxIntraTarget(uint32_t optimalBuffersize) { } uint32_t LibvpxVp8Encoder::FrameDropThreshold(size_t spatial_idx) const { - if (!codec_.VP8().frameDroppingOn) { - return 0; - } - + bool enable_frame_dropping = codec_.VP8().frameDroppingOn; // If temporal layers are used, they get to override the frame dropping // setting, as eg. 
ScreenshareLayers does not work as intended with frame // dropping on and DefaultTemporalLayers will have performance issues with // frame dropping off. RTC_DCHECK(frame_buffer_controller_); RTC_DCHECK_LT(spatial_idx, frame_buffer_controller_->StreamCount()); - return frame_buffer_controller_->SupportsEncoderFrameDropping(spatial_idx) - ? 30 - : 0; + enable_frame_dropping = + frame_buffer_controller_->SupportsEncoderFrameDropping(spatial_idx); + return enable_frame_dropping ? 30 : 0; } size_t LibvpxVp8Encoder::SteadyStateSize(int sid, int tid) { @@ -1187,8 +1184,6 @@ int LibvpxVp8Encoder::GetEncodedPartitions(const VideoFrame& input_image, libvpx_->codec_control(&encoders_[encoder_idx], VP8E_GET_LAST_QUANTIZER, &qp_128); encoded_images_[encoder_idx].qp_ = qp_128; - encoded_images_[encoder_idx].SetAtTargetQuality( - qp_128 <= variable_framerate_experiment_.steady_state_qp); encoded_complete_callback_->OnEncodedImage(encoded_images_[encoder_idx], &codec_specific); const size_t steady_state_size = SteadyStateSize( diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h index 80845abaa..d96fa7321 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h @@ -28,8 +28,8 @@ #include "rtc_base/experiments/cpu_speed_experiment.h" #include "rtc_base/experiments/encoder_info_settings.h" #include "rtc_base/experiments/rate_control_settings.h" -#include -#include +#include "libvpx/vp8cx.h" +#include "libvpx/vpx_encoder.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h index e6f644ec1..87dafe4cd 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/include/vp9_globals.h @@ -46,14 +46,14 @@ struct GofInfoVP9 { case kTemporalStructureMode1: num_frames_in_gof = 1; temporal_idx[0] = 0; - temporal_up_switch[0] = true; + temporal_up_switch[0] = false; num_ref_pics[0] = 1; pid_diff[0][0] = 1; break; case kTemporalStructureMode2: num_frames_in_gof = 2; temporal_idx[0] = 0; - temporal_up_switch[0] = true; + temporal_up_switch[0] = false; num_ref_pics[0] = 1; pid_diff[0][0] = 2; @@ -65,7 +65,7 @@ struct GofInfoVP9 { case kTemporalStructureMode3: num_frames_in_gof = 4; temporal_idx[0] = 0; - temporal_up_switch[0] = true; + temporal_up_switch[0] = false; num_ref_pics[0] = 1; pid_diff[0][0] = 4; @@ -87,7 +87,7 @@ struct GofInfoVP9 { case kTemporalStructureMode4: num_frames_in_gof = 8; temporal_idx[0] = 0; - temporal_up_switch[0] = true; + temporal_up_switch[0] = false; num_ref_pics[0] = 1; pid_diff[0][0] = 4; @@ -97,12 +97,12 @@ struct GofInfoVP9 { pid_diff[1][0] = 1; temporal_idx[2] = 1; - temporal_up_switch[2] = false; + temporal_up_switch[2] = true; num_ref_pics[2] = 1; pid_diff[2][0] = 2; temporal_idx[3] = 2; - temporal_up_switch[3] = true; + temporal_up_switch[3] = false; num_ref_pics[3] = 2; pid_diff[3][0] = 1; pid_diff[3][1] = 2; @@ -113,7 +113,7 @@ struct GofInfoVP9 { pid_diff[4][0] = 4; temporal_idx[5] = 2; - temporal_up_switch[5] = true; + temporal_up_switch[5] = false; num_ref_pics[5] = 2; pid_diff[5][0] = 1; pid_diff[5][1] = 2; @@ -125,7 +125,7 @@ struct GofInfoVP9 { pid_diff[6][1] = 4; 
temporal_idx[7] = 2; - temporal_up_switch[7] = true; + temporal_up_switch[7] = false; num_ref_pics[7] = 2; pid_diff[7][0] = 1; pid_diff[7][1] = 2; @@ -195,10 +195,7 @@ struct RTPVideoHeaderVP9 { uint8_t temporal_idx; // Temporal layer index, or kNoTemporalIdx. uint8_t spatial_idx; // Spatial layer index, or kNoSpatialIdx. bool temporal_up_switch; // True if upswitch to higher frame rate is possible - // meaning subsequent higher temporal layer pictures - // will not depend on any picture before the current - // picture (in coding order) with temporal layer ID - // greater than `temporal_idx` of this frame. + // starting from this frame. bool inter_layer_predicted; // Frame is dependent on directly lower spatial // layer frame. diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc index e2f351c80..f6ebc022a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.cc @@ -24,8 +24,8 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "third_party/libyuv/include/libyuv/convert.h" -#include -#include +#include "libvpx/vp8dx.h" +#include "libvpx/vpx_decoder.h" namespace webrtc { namespace { @@ -275,8 +275,8 @@ int LibvpxVp9Decoder::ReturnFrame( // This buffer contains all of `img`'s image data, a reference counted // Vp9FrameBuffer. (libvpx is done with the buffers after a few // vpx_codec_decode calls or vpx_codec_destroy). - rtc::scoped_refptr img_buffer( - static_cast(img->fb_priv)); + rtc::scoped_refptr img_buffer = + static_cast(img->fb_priv); // The buffer can be used directly by the VideoFrame (without copy) by // using a Wrapped*Buffer. 
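The `[frame_buffer] {}` capture used with WrapI420Buffer above and the Vp9FrameBuffer wrapping here follow the same idea: hand out a non-owning view while keeping the owner alive for as long as the view exists. A minimal sketch using only standard-library types (the names are illustrative, not WebRTC APIs):

#include <cstdint>
#include <memory>
#include <vector>

// A borrowed view into a reference-counted buffer. The aliasing constructor
// shares the owner's control block, so the underlying storage cannot be freed
// while any view is still outstanding.
using PlaneView = std::shared_ptr<const uint8_t>;

PlaneView WrapPlane(const std::shared_ptr<std::vector<uint8_t>>& owner) {
  return PlaneView(owner, owner->data());
}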
diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h index 5c9ae2cf9..1c86df59d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h @@ -19,7 +19,7 @@ #include "common_video/include/video_frame_buffer_pool.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" #include "modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.h" -#include +#include "libvpx/vp8cx.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc index 30107bc07..5a6cc8ff9 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.cc @@ -40,8 +40,8 @@ #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "third_party/libyuv/include/libyuv/convert.h" -#include -#include +#include "libvpx/vp8cx.h" +#include "libvpx/vpx_encoder.h" namespace webrtc { @@ -959,7 +959,7 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, const size_t gof_idx = (pics_since_key_ + 1) % gof_.num_frames_in_gof; layer_id.temporal_layer_id = gof_.temporal_idx[gof_idx]; - if (codec_.mode == VideoCodecMode::kScreensharing) { + if (VideoCodecMode::kScreensharing == codec_.mode) { const uint32_t frame_timestamp_ms = 1000 * input_image.timestamp() / kVideoPayloadTypeFrequency; @@ -1212,7 +1212,8 @@ int LibvpxVp9Encoder::Encode(const VideoFrame& input_image, bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt) { + const vpx_codec_cx_pkt& pkt, + uint32_t timestamp) { RTC_CHECK(codec_specific != nullptr); codec_specific->codecType = kVideoCodecVP9; CodecSpecificInfoVP9* vp9_info = &(codec_specific->codecSpecific.VP9); @@ -1247,6 +1248,9 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, *spatial_idx = layer_id.spatial_layer_id; } + // TODO(asapersson): this info has to be obtained from the encoder. + vp9_info->temporal_up_switch = false; + const bool is_key_pic = (pics_since_key_ == 0); const bool is_inter_layer_pred_allowed = (inter_layer_pred_ == InterLayerPredMode::kOn || @@ -1279,20 +1283,6 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, vp9_info); if (vp9_info->flexible_mode) { vp9_info->gof_idx = kNoGofIdx; - if (!svc_controller_) { - if (num_temporal_layers_ == 1) { - vp9_info->temporal_up_switch = true; - } else { - // In flexible mode with > 1 temporal layer but no SVC controller we - // can't techincally determine if a frame is an upswitch point, use - // gof-based data as proxy for now. - // TODO(sprang): Remove once SVC controller is the only choice. - vp9_info->gof_idx = - static_cast(pics_since_key_ % gof_.num_frames_in_gof); - vp9_info->temporal_up_switch = - gof_.temporal_up_switch[vp9_info->gof_idx]; - } - } } else { vp9_info->gof_idx = static_cast(pics_since_key_ % gof_.num_frames_in_gof); @@ -1363,23 +1353,6 @@ bool LibvpxVp9Encoder::PopulateCodecSpecific(CodecSpecificInfo* codec_specific, svc_params_.scaling_factor_den[sid]); } } - if (is_flexible_mode_) { - // Populate data for legacy temporal-upswitch state. 
- // We can switch up to a higher temporal layer only if all temporal layers - // higher than this (within the current spatial layer) are switch points. - vp9_info->temporal_up_switch = true; - for (int i = layer_id.temporal_layer_id + 1; i < num_temporal_layers_; - ++i) { - // Assumes decode targets are always ordered first by spatial then by - // temporal id. - size_t dti_index = - (layer_id.spatial_layer_id * num_temporal_layers_) + i; - vp9_info->temporal_up_switch &= - (codec_specific->generic_frame_info - ->decode_target_indications[dti_index] == - DecodeTargetIndication::kSwitch); - } - } } return true; } @@ -1455,6 +1428,8 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, ref_buf_list.push_back(ref_buf_.at(0)); } + size_t max_ref_temporal_layer_id = 0; + std::vector ref_pid_list; vp9_info->num_ref_pics = 0; @@ -1486,6 +1461,9 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, vp9_info->p_diff[vp9_info->num_ref_pics] = static_cast(p_diff); ++vp9_info->num_ref_pics; + + max_ref_temporal_layer_id = + std::max(max_ref_temporal_layer_id, ref_buf.temporal_layer_id); } else { RTC_DCHECK(inter_layer_predicted); // RTP spec only allows to use previous spatial layer for inter-layer @@ -1493,6 +1471,10 @@ void LibvpxVp9Encoder::FillReferenceIndices(const vpx_codec_cx_pkt& pkt, RTC_DCHECK_EQ(ref_buf.spatial_layer_id + 1, layer_id.spatial_layer_id); } } + + vp9_info->temporal_up_switch = + (max_ref_temporal_layer_id < + static_cast(layer_id.temporal_layer_id)); } void LibvpxVp9Encoder::UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, @@ -1652,19 +1634,8 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { encoded_image_.SetEncodedData(EncodedImageBuffer::Create( static_cast(pkt->data.frame.buf), pkt->data.frame.sz)); - codec_specific_ = {}; - absl::optional spatial_index; - if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt)) { - // Drop the frame. - encoded_image_.set_size(0); - return; - } - encoded_image_.SetSpatialIndex(spatial_index); - const bool is_key_frame = - ((pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false) && - !codec_specific_.codecSpecific.VP9.inter_layer_predicted; - + (pkt->data.frame.flags & VPX_FRAME_IS_KEY) ? true : false; // Ensure encoder issued key frame on request. RTC_DCHECK(is_key_frame || !force_key_frame_); @@ -1675,6 +1646,16 @@ void LibvpxVp9Encoder::GetEncodedLayerFrame(const vpx_codec_cx_pkt* pkt) { force_key_frame_ = false; } + codec_specific_ = {}; + absl::optional spatial_index; + if (!PopulateCodecSpecific(&codec_specific_, &spatial_index, *pkt, + input_image_->timestamp())) { + // Drop the frame. 
+ encoded_image_.set_size(0); + return; + } + encoded_image_.SetSpatialIndex(spatial_index); + UpdateReferenceBuffers(*pkt, pics_since_key_); TRACE_COUNTER1("webrtc", "EncodedFrameSize", encoded_image_.size()); diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h index 055393b09..430e012f1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h @@ -29,7 +29,7 @@ #include "modules/video_coding/svc/scalable_video_controller.h" #include "modules/video_coding/utility/framerate_controller_deprecated.h" #include "rtc_base/experiments/encoder_info_settings.h" -#include +#include "libvpx/vp8cx.h" namespace webrtc { @@ -67,12 +67,14 @@ class LibvpxVp9Encoder : public VP9Encoder { bool PopulateCodecSpecific(CodecSpecificInfo* codec_specific, absl::optional* spatial_idx, - const vpx_codec_cx_pkt& pkt); + const vpx_codec_cx_pkt& pkt, + uint32_t timestamp); void FillReferenceIndices(const vpx_codec_cx_pkt& pkt, - size_t pic_num, - bool inter_layer_predicted, + const size_t pic_num, + const bool inter_layer_predicted, CodecSpecificInfoVP9* vp9_info); - void UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, size_t pic_num); + void UpdateReferenceBuffers(const vpx_codec_cx_pkt& pkt, + const size_t pic_num); vpx_svc_ref_frame_config_t SetReferences( bool is_key_pic, size_t first_active_spatial_layer_id); @@ -106,7 +108,7 @@ class LibvpxVp9Encoder : public VP9Encoder { size_t SteadyStateSize(int sid, int tid); - void MaybeRewrapRawWithFormat(vpx_img_fmt fmt); + void MaybeRewrapRawWithFormat(const vpx_img_fmt fmt); // Prepares `raw_` to reference image data of `buffer`, or of mapped or scaled // versions of `buffer`. 
Returns the buffer that got referenced as a result, // allowing the caller to keep a reference to it until after encoding has diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc index fbba9dad3..2e486bb97 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9.cc @@ -18,9 +18,9 @@ #include "modules/video_coding/codecs/vp9/libvpx_vp9_decoder.h" #include "modules/video_coding/codecs/vp9/libvpx_vp9_encoder.h" #include "rtc_base/checks.h" -#include -#include -#include +#include "libvpx/vp8cx.h" +#include "libvpx/vp8dx.h" +#include "libvpx/vpx_codec.h" namespace webrtc { namespace { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc index 9ff4ea62c..808127f7b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/codecs/vp9/vp9_frame_buffer_pool.cc @@ -15,9 +15,9 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" -#include -#include -#include +#include "libvpx/vpx_codec.h" +#include "libvpx/vpx_decoder.h" +#include "libvpx/vpx_frame_buffer.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h index a97dea011..6b9e8eb8e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/fec_controller_default.h @@ -19,6 +19,7 @@ #include "api/fec_controller.h" #include "modules/video_coding/media_opt_util.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" @@ -31,10 +32,6 @@ class FecControllerDefault : public FecController { VCMProtectionCallback* protection_callback); explicit FecControllerDefault(Clock* clock); ~FecControllerDefault() override; - - FecControllerDefault(const FecControllerDefault&) = delete; - FecControllerDefault& operator=(const FecControllerDefault&) = delete; - void SetProtectionCallback( VCMProtectionCallback* protection_callback) override; void SetProtectionMethod(bool enable_fec, bool enable_nack) override; @@ -47,8 +44,9 @@ class FecControllerDefault : public FecController { uint8_t fraction_lost, std::vector loss_mask_vector, int64_t round_trip_time_ms) override; - void UpdateWithEncodedData(size_t encoded_image_length, - VideoFrameType encoded_image_frametype) override; + void UpdateWithEncodedData( + const size_t encoded_image_length, + const VideoFrameType encoded_image_frametype) override; bool UseLossVectorMask() override; float GetProtectionOverheadRateThreshold(); @@ -60,7 +58,7 @@ class FecControllerDefault : public FecController { std::unique_ptr loss_prot_logic_ RTC_GUARDED_BY(mutex_); size_t max_payload_size_ RTC_GUARDED_BY(mutex_); - + RTC_DISALLOW_COPY_AND_ASSIGN(FecControllerDefault); const float overhead_threshold_; }; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc index b15db9636..ba46327a7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc +++ 
b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.cc @@ -18,10 +18,8 @@ #include #include -#include "absl/container/inlined_vector.h" #include "api/video/encoded_image.h" #include "api/video/video_timing.h" -#include "modules/video_coding/frame_helpers.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/jitter_estimator.h" #include "modules/video_coding/timing.h" @@ -106,8 +104,7 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { RTC_DCHECK(!callback_task_.Running()); int64_t wait_ms = FindNextFrame(clock_->TimeInMilliseconds()); callback_task_ = RepeatingTaskHandle::DelayedStart( - callback_queue_->Get(), TimeDelta::Millis(wait_ms), - [this] { + callback_queue_->Get(), TimeDelta::Millis(wait_ms), [this] { RTC_DCHECK_RUN_ON(&callback_checker_); // If this task has not been cancelled, we did not get any new frames // while waiting. Continue with frame delivery. @@ -133,8 +130,7 @@ void FrameBuffer::StartWaitForNextFrameOnQueue() { // Deliver frame, if any. Otherwise signal timeout. frame_handler(std::move(frame)); return TimeDelta::Zero(); // Ignored. - }, - TaskQueueBase::DelayPrecision::kHigh); + }); } int64_t FrameBuffer::FindNextFrame(int64_t now_ms) { @@ -254,8 +250,7 @@ std::unique_ptr FrameBuffer::GetNextFrame() { int64_t render_time_ms = first_frame.RenderTime(); int64_t receive_time_ms = first_frame.ReceivedTime(); // Gracefully handle bad RTP timestamps and render time issues. - if (FrameHasBadRenderTiming(first_frame.RenderTimeMs(), now_ms, - timing_->TargetVideoDelay())) { + if (HasBadRenderTiming(first_frame, now_ms)) { jitter_estimator_.Reset(); timing_->Reset(); render_time_ms = timing_->RenderTimeMs(first_frame.Timestamp(), now_ms); @@ -323,6 +318,35 @@ std::unique_ptr FrameBuffer::GetNextFrame() { } } +bool FrameBuffer::HasBadRenderTiming(const EncodedFrame& frame, + int64_t now_ms) { + // Assume that render timing errors are due to changes in the video stream. + int64_t render_time_ms = frame.RenderTimeMs(); + // Zero render time means render immediately. + if (render_time_ms == 0) { + return false; + } + if (render_time_ms < 0) { + return true; + } + const int64_t kMaxVideoDelayMs = 10000; + if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) { + int frame_delay = static_cast(std::abs(render_time_ms - now_ms)); + RTC_LOG(LS_WARNING) + << "A frame about to be decoded is out of the configured " + "delay bounds (" + << frame_delay << " > " << kMaxVideoDelayMs + << "). 
Resetting the video jitter buffer."; + return true; + } + if (static_cast(timing_->TargetVideoDelay()) > kMaxVideoDelayMs) { + RTC_LOG(LS_WARNING) << "The video target delay has grown larger than " + << kMaxVideoDelayMs << " ms."; + return true; + } + return false; +} + void FrameBuffer::SetProtectionMode(VCMVideoProtection mode) { TRACE_EVENT0("webrtc", "FrameBuffer::SetProtectionMode"); MutexLock lock(&mutex_); @@ -636,11 +660,39 @@ void FrameBuffer::ClearFramesAndHistory() { std::unique_ptr FrameBuffer::CombineAndDeleteFrames( std::vector> frames) const { RTC_DCHECK(!frames.empty()); - absl::InlinedVector, 4> inlined; - for (auto& frame : frames) { - inlined.push_back(std::move(frame)); + size_t total_length = 0; + for (const auto& frame : frames) { + total_length += frame->size(); } - return webrtc::CombineAndDeleteFrames(std::move(inlined)); + const EncodedFrame& last_frame = *frames.back(); + std::unique_ptr first_frame = std::move(frames[0]); + auto encoded_image_buffer = EncodedImageBuffer::Create(total_length); + uint8_t* buffer = encoded_image_buffer->data(); + first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0), + first_frame->size()); + memcpy(buffer, first_frame->data(), first_frame->size()); + buffer += first_frame->size(); + + // Spatial index of combined frame is set equal to spatial index of its top + // spatial layer. + first_frame->SetSpatialIndex(last_frame.SpatialIndex().value_or(0)); + + first_frame->video_timing_mutable()->network2_timestamp_ms = + last_frame.video_timing().network2_timestamp_ms; + first_frame->video_timing_mutable()->receive_finish_ms = + last_frame.video_timing().receive_finish_ms; + + // Append all remaining frames to the first one. + for (size_t i = 1; i < frames.size(); ++i) { + // Let |next_frame| fall out of scope so it is deleted after copying. 
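The appending logic re-inlined here boils down to: sum the per-layer payload sizes, allocate one buffer, then copy each spatial layer's data back to back. A compact standalone sketch with std::vector standing in for EncodedImageBuffer (names are illustrative):

#include <cstddef>
#include <cstdint>
#include <vector>

std::vector<uint8_t> CombineLayers(
    const std::vector<std::vector<uint8_t>>& layers) {
  std::size_t total = 0;
  for (const auto& layer : layers) {
    total += layer.size();
  }
  std::vector<uint8_t> combined;
  combined.reserve(total);
  for (const auto& layer : layers) {
    // Payloads are appended in spatial-layer order, lowest layer first.
    combined.insert(combined.end(), layer.begin(), layer.end());
  }
  return combined;
}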
+ std::unique_ptr next_frame = std::move(frames[i]); + first_frame->SetSpatialLayerFrameSize( + next_frame->SpatialIndex().value_or(0), next_frame->size()); + memcpy(buffer, next_frame->data(), next_frame->size()); + buffer += next_frame->size(); + } + first_frame->SetEncodedData(encoded_image_buffer); + return first_frame; } FrameBuffer::FrameInfo::FrameInfo() = default; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h index f2e515c8e..411c69cef 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer2.h @@ -147,6 +147,9 @@ class FrameBuffer { void ClearFramesAndHistory() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + bool HasBadRenderTiming(const EncodedFrame& frame, int64_t now_ms) + RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); + // The cleaner solution would be to have the NextFrame function return a // vector of frames, but until the decoding pipeline can support decoding // multiple frames at the same time we combine all frames to one frame and diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc index d02a99749..32de3683c 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.cc @@ -65,7 +65,7 @@ bool IsLastFrameInTemporalUnit(const FrameIteratorT& it) { FrameBuffer::FrameBuffer(int max_size, int max_decode_history) : legacy_frame_id_jump_behavior_( - !field_trial::IsDisabled("WebRTC-LegacyFrameIdJumpBehavior")), + field_trial::IsEnabled("WebRTC-LegacyFrameIdJumpBehavior")), max_size_(max_size), decoded_frame_history_(max_decode_history) {} @@ -177,10 +177,6 @@ int FrameBuffer::GetTotalNumberOfDroppedFrames() const { return num_dropped_frames_; } -size_t FrameBuffer::CurrentSize() const { - return frames_.size(); -} - bool FrameBuffer::IsContinuous(const FrameIterator& it) const { for (int64_t reference : GetReferences(it)) { if (decoded_frame_history_.WasDecoded(reference)) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h index 1f3f71a4a..796c51db7 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_buffer3.h @@ -57,7 +57,6 @@ class FrameBuffer { int GetTotalNumberOfContinuousTemporalUnits() const; int GetTotalNumberOfDroppedFrames() const; - size_t CurrentSize() const; private: struct FrameInfo { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc deleted file mode 100644 index 870f198dc..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.cc +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include "modules/video_coding/frame_helpers.h" - -#include - -#include "rtc_base/logging.h" - -namespace webrtc { - -bool FrameHasBadRenderTiming(int64_t render_time_ms, - int64_t now_ms, - int target_video_delay) { - // Zero render time means render immediately. - if (render_time_ms == 0) { - return false; - } - if (render_time_ms < 0) { - return true; - } - const int64_t kMaxVideoDelayMs = 10000; - if (std::abs(render_time_ms - now_ms) > kMaxVideoDelayMs) { - int frame_delay = static_cast(std::abs(render_time_ms - now_ms)); - RTC_LOG(LS_WARNING) - << "A frame about to be decoded is out of the configured " - "delay bounds (" - << frame_delay << " > " << kMaxVideoDelayMs - << "). Resetting the video jitter buffer."; - return true; - } - if (target_video_delay > kMaxVideoDelayMs) { - RTC_LOG(LS_WARNING) << "The video target delay has grown larger than " - << kMaxVideoDelayMs << " ms."; - return true; - } - return false; -} - -std::unique_ptr CombineAndDeleteFrames( - absl::InlinedVector, 4> frames) { - RTC_DCHECK(!frames.empty()); - - if (frames.size() == 1) { - return std::move(frames[0]); - } - - size_t total_length = 0; - for (const auto& frame : frames) { - total_length += frame->size(); - } - const EncodedFrame& last_frame = *frames.back(); - std::unique_ptr first_frame = std::move(frames[0]); - auto encoded_image_buffer = EncodedImageBuffer::Create(total_length); - uint8_t* buffer = encoded_image_buffer->data(); - first_frame->SetSpatialLayerFrameSize(first_frame->SpatialIndex().value_or(0), - first_frame->size()); - memcpy(buffer, first_frame->data(), first_frame->size()); - buffer += first_frame->size(); - - // Spatial index of combined frame is set equal to spatial index of its top - // spatial layer. - first_frame->SetSpatialIndex(last_frame.SpatialIndex().value_or(0)); - - first_frame->video_timing_mutable()->network2_timestamp_ms = - last_frame.video_timing().network2_timestamp_ms; - first_frame->video_timing_mutable()->receive_finish_ms = - last_frame.video_timing().receive_finish_ms; - - // Append all remaining frames to the first one. - for (size_t i = 1; i < frames.size(); ++i) { - // Let |next_frame| fall out of scope so it is deleted after copying. - std::unique_ptr next_frame = std::move(frames[i]); - first_frame->SetSpatialLayerFrameSize( - next_frame->SpatialIndex().value_or(0), next_frame->size()); - memcpy(buffer, next_frame->data(), next_frame->size()); - buffer += next_frame->size(); - } - first_frame->SetEncodedData(encoded_image_buffer); - return first_frame; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h deleted file mode 100644 index e329d0370..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/frame_helpers.h +++ /dev/null @@ -1,31 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef MODULES_VIDEO_CODING_FRAME_HELPERS_H_ -#define MODULES_VIDEO_CODING_FRAME_HELPERS_H_ - -#include - -#include "absl/container/inlined_vector.h" -#include "api/video/encoded_frame.h" - -namespace webrtc { - -// TODO(https://bugs.webrtc.org/13589): Switch to using Timestamp and TimeDelta. -bool FrameHasBadRenderTiming(int64_t render_time_ms, - int64_t now_ms, - int target_video_delay); - -std::unique_ptr CombineAndDeleteFrames( - absl::InlinedVector, 4> frames); - -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_FRAME_HELPERS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h index 8a32f31cc..dbf27ef03 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/h264_sprop_parameter_sets.h @@ -15,15 +15,13 @@ #include #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { class H264SpropParameterSets { public: H264SpropParameterSets() {} - - H264SpropParameterSets(const H264SpropParameterSets&) = delete; - H264SpropParameterSets& operator=(const H264SpropParameterSets&) = delete; - bool DecodeSprop(const std::string& sprop); const std::vector& sps_nalu() { return sps_; } const std::vector& pps_nalu() { return pps_; } @@ -31,6 +29,7 @@ class H264SpropParameterSets { private: std::vector sps_; std::vector pps_; + RTC_DISALLOW_COPY_AND_ASSIGN(H264SpropParameterSets); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h index df7581a87..137a687de 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/jitter_buffer.h @@ -19,6 +19,7 @@ #include "modules/include/module_common_types.h" #include "modules/include/module_common_types_public.h" +#include "modules/utility/include/process_thread.h" #include "modules/video_coding/decoding_state.h" #include "modules/video_coding/event_wrapper.h" #include "modules/video_coding/include/video_coding.h" @@ -26,6 +27,7 @@ #include "modules/video_coding/inter_frame_delay.h" #include "modules/video_coding/jitter_buffer_common.h" #include "modules/video_coding/jitter_estimator.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -73,9 +75,6 @@ class VCMJitterBuffer { ~VCMJitterBuffer(); - VCMJitterBuffer(const VCMJitterBuffer&) = delete; - VCMJitterBuffer& operator=(const VCMJitterBuffer&) = delete; - // Initializes and starts jitter buffer. void Start(); @@ -266,6 +265,8 @@ class VCMJitterBuffer { // average_packets_per_frame converges fast if we have fewer than this many // frames. 
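Several headers in this patch move from explicitly deleted copy operations back to RTC_DISALLOW_COPY_AND_ASSIGN from rtc_base/constructor_magic.h. Both spellings are intended to make the class non-copyable; a minimal side-by-side sketch (the macro's expansion is paraphrased, not quoted):

class WithDeletedOps {
 public:
  WithDeletedOps() = default;
  // Newer WebRTC style: spell out the deleted operations in the public section.
  WithDeletedOps(const WithDeletedOps&) = delete;
  WithDeletedOps& operator=(const WithDeletedOps&) = delete;
};

class WithMacro {
 public:
  WithMacro() = default;

 private:
  // Older style used after this revert:
  //   RTC_DISALLOW_COPY_AND_ASSIGN(WithMacro);
  // which boils down to roughly the following deleted declarations.
  WithMacro(const WithMacro&) = delete;
  WithMacro& operator=(const WithMacro&) = delete;
};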
int frame_counter_; + + RTC_DISALLOW_COPY_AND_ASSIGN(VCMJitterBuffer); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc index 59b027e2c..4381cf095 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_seq_num_only_ref_finder.cc @@ -116,7 +116,7 @@ void RtpSeqNumOnlyRefFinder::RetryStashedFrames( case kHandOff: complete_frame = true; res.push_back(std::move(*frame_it)); - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case kDrop: frame_it = stashed_frames_.erase(frame_it); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc index 64f1bae30..66cddacd1 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp8_ref_finder.cc @@ -218,7 +218,7 @@ void RtpVp8RefFinder::RetryStashedFrames( case kHandOff: complete_frame = true; res.push_back(std::move(*frame_it)); - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case kDrop: frame_it = stashed_frames_.erase(frame_it); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc index fd271f81f..cab579278 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/rtp_vp9_ref_finder.cc @@ -314,7 +314,7 @@ void RtpVp9RefFinder::RetryStashedFrames( case kHandOff: complete_frame = true; res.push_back(std::move(*frame_it)); - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case kDrop: frame_it = stashed_frames_.erase(frame_it); } diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc index df9841690..99e525a5d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/timing.cc @@ -10,6 +10,7 @@ #include "modules/video_coding/timing.h" + #include #include "rtc_base/experiments/field_trial_parser.h" diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h deleted file mode 100644 index 83d6691b8..000000000 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_defines.h +++ /dev/null @@ -1,23 +0,0 @@ -/* - * Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -/* - * This file contains definitions that are common to the IvfFileReader and - * IvfFileWriter classes. 
- */ - -#ifndef MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ -#define MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ - -namespace webrtc { -constexpr size_t kIvfHeaderSize = 32; -} // namespace webrtc - -#endif // MODULES_VIDEO_CODING_UTILITY_IVF_DEFINES_H_ diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc index 85d1fa00d..63925702b 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.cc @@ -15,12 +15,12 @@ #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/video_coding/utility/ivf_defines.h" #include "rtc_base/logging.h" namespace webrtc { namespace { +constexpr size_t kIvfHeaderSize = 32; constexpr size_t kIvfFrameHeaderSize = 12; constexpr int kCodecTypeBytesCount = 4; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h index 75f2e3ac8..5e0634f9f 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_reader.h @@ -26,10 +26,6 @@ class IvfFileReader { // Creates IvfFileReader. Returns nullptr if error acquired. static std::unique_ptr Create(FileWrapper file); ~IvfFileReader(); - - IvfFileReader(const IvfFileReader&) = delete; - IvfFileReader& operator=(const IvfFileReader&) = delete; - // Reinitializes reader. Returns false if any error acquired. bool Reset(); @@ -75,6 +71,8 @@ class IvfFileReader { absl::optional next_frame_header_; bool has_error_; + + RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileReader); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc index 668390a78..77c90ee15 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.cc @@ -14,7 +14,6 @@ #include "api/video_codecs/video_codec.h" #include "modules/rtp_rtcp/source/byte_io.h" -#include "modules/video_coding/utility/ivf_defines.h" #include "rtc_base/checks.h" #include "rtc_base/logging.h" @@ -23,11 +22,7 @@ namespace webrtc { -namespace { - -constexpr int kDefaultWidth = 1280; -constexpr int kDefaultHeight = 720; -} // namespace +const size_t kIvfHeaderSize = 32; IvfFileWriter::IvfFileWriter(FileWrapper file, size_t byte_limit) : codec_type_(kVideoCodecGeneric), @@ -127,14 +122,10 @@ bool IvfFileWriter::WriteHeader() { bool IvfFileWriter::InitFromFirstFrame(const EncodedImage& encoded_image, VideoCodecType codec_type) { - if (encoded_image._encodedWidth == 0 || encoded_image._encodedHeight == 0) { - width_ = kDefaultWidth; - height_ = kDefaultHeight; - } else { - width_ = encoded_image._encodedWidth; - height_ = encoded_image._encodedHeight; - } - + width_ = encoded_image._encodedWidth; + height_ = encoded_image._encodedHeight; + RTC_CHECK_GT(width_, 0); + RTC_CHECK_GT(height_, 0); using_capture_timestamps_ = encoded_image.Timestamp() == 0; codec_type_ = codec_type; diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h index b53459b5d..874f60adf 100644 
--- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/ivf_file_writer.h @@ -18,6 +18,7 @@ #include "api/video/encoded_image.h" #include "api/video/video_codec_type.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/time_utils.h" @@ -33,9 +34,6 @@ class IvfFileWriter { size_t byte_limit); ~IvfFileWriter(); - IvfFileWriter(const IvfFileWriter&) = delete; - IvfFileWriter& operator=(const IvfFileWriter&) = delete; - bool WriteFrame(const EncodedImage& encoded_image, VideoCodecType codec_type); bool Close(); @@ -59,6 +57,8 @@ class IvfFileWriter { bool using_capture_timestamps_; rtc::TimestampWrapAroundHandler wrap_handler_; FileWrapper file_; + + RTC_DISALLOW_COPY_AND_ASSIGN(IvfFileWriter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h index 6f93dbde7..9b2f9696e 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_rate_allocator.h @@ -19,6 +19,7 @@ #include "api/video/video_bitrate_allocation.h" #include "api/video/video_bitrate_allocator.h" #include "api/video_codecs/video_codec.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/rate_control_settings.h" #include "rtc_base/experiments/stable_target_rate_experiment.h" @@ -29,9 +30,6 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { explicit SimulcastRateAllocator(const VideoCodec& codec); ~SimulcastRateAllocator() override; - SimulcastRateAllocator(const SimulcastRateAllocator&) = delete; - SimulcastRateAllocator& operator=(const SimulcastRateAllocator&) = delete; - VideoBitrateAllocation Allocate( VideoBitrateAllocationParameters parameters) override; const VideoCodec& GetCodec() const; @@ -63,6 +61,8 @@ class SimulcastRateAllocator : public VideoBitrateAllocator { const RateControlSettings rate_control_settings_; std::vector stream_enabled_; bool legacy_conference_mode_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SimulcastRateAllocator); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h index cdfdc609d..a3d3fc66a 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/simulcast_test_fixture_impl.h @@ -64,10 +64,10 @@ class SimulcastTestFixtureImpl final : public SimulcastTestFixture { void SetUpCodec(const int* temporal_layer_profile); void SetUpRateAllocator(); void SetRates(uint32_t bitrate_kbps, uint32_t fps); - void RunActiveStreamsTest(std::vector active_streams); - void UpdateActiveStreams(std::vector active_streams); + void RunActiveStreamsTest(const std::vector active_streams); + void UpdateActiveStreams(const std::vector active_streams); void ExpectStreams(VideoFrameType frame_type, - std::vector expected_streams_active); + const std::vector expected_streams_active); void ExpectStreams(VideoFrameType frame_type, int expected_video_streams); void VerifyTemporalIdxAndSyncForAllSpatialLayers( TestEncodedImageCallback* encoder_callback, diff --git 
a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc index bf9d51f69..867967ddc 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc +++ b/TMessagesProj/jni/voip/webrtc/modules/video_coding/utility/vp9_uncompressed_header_parser.cc @@ -158,8 +158,7 @@ void Vp9ReadQp(BitstreamReader& br, Vp9UncompressedHeader* frame_info) { void Vp9ReadSegmentationParams(BitstreamReader& br, Vp9UncompressedHeader* frame_info) { constexpr int kSegmentationFeatureBits[kVp9SegLvlMax] = {8, 6, 2, 0}; - constexpr bool kSegmentationFeatureSigned[kVp9SegLvlMax] = {true, true, false, - false}; + constexpr bool kSegmentationFeatureSigned[kVp9SegLvlMax] = {1, 1, 0, 0}; frame_info->segmentation_enabled = br.Read(); if (!frame_info->segmentation_enabled) { diff --git a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h index 7f2e17aa8..7be791f2d 100644 --- a/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h +++ b/TMessagesProj/jni/voip/webrtc/modules/video_processing/util/skin_detection.h @@ -19,11 +19,11 @@ typedef unsigned char uint8_t; bool MbHasSkinColor(const uint8_t* y_src, const uint8_t* u_src, const uint8_t* v_src, - int stride_y, - int stride_u, - int stride_v, - int mb_row, - int mb_col); + const int stride_y, + const int stride_u, + const int stride_v, + const int mb_row, + const int mb_col); } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h index 90cfa3509..f3de0722f 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/fuzzers/dcsctp_fuzzers.h @@ -16,7 +16,6 @@ #include #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_socket.h" namespace dcsctp { @@ -59,9 +58,7 @@ class FuzzerCallbacks : public DcSctpSocketCallbacks { void SendPacket(rtc::ArrayView data) override { sent_packets_.emplace_back(std::vector(data.begin(), data.end())); } - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override { - // The fuzzer timeouts don't implement |precision|. + std::unique_ptr CreateTimeout() override { return std::make_unique(active_timeouts_); } TimeMs TimeMillis() override { return TimeMs(42); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h index 700158538..2b56094ac 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/public/dcsctp_socket.h @@ -17,7 +17,6 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_handover_state.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_options.h" @@ -265,26 +264,9 @@ class DcSctpSocketCallbacks { // Called when the library wants to create a Timeout. The callback must return // an object that implements that interface. 
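After this revert the callback interface exposes a single CreateTimeout() overload again, as in the fuzzer callback above. A stand-in sketch of such a factory returning a test double that only records which timeouts are armed (types and method names here are illustrative, not the real dcsctp interfaces):

#include <cstdint>
#include <memory>
#include <set>

class FakeTimeout {
 public:
  explicit FakeTimeout(std::set<uint64_t>* active) : active_(active) {}
  void Start(uint64_t id, int duration_ms) { (void)duration_ms; active_->insert(id); }
  void Stop(uint64_t id) { active_->erase(id); }

 private:
  std::set<uint64_t>* active_;
};

std::unique_ptr<FakeTimeout> CreateTimeout(std::set<uint64_t>& active_timeouts) {
  return std::make_unique<FakeTimeout>(&active_timeouts);
}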
// - // Low precision tasks are scheduled more efficiently by using leeway to - // reduce Idle Wake Ups and is the preferred precision whenever possible. High - // precision timeouts do not have this leeway, but is still limited by OS - // timer precision. At the time of writing, kLow's additional leeway may be up - // to 17 ms, but please see webrtc::TaskQueueBase::DelayPrecision for - // up-to-date information. - // // Note that it's NOT ALLOWED to call into this library from within this // callback. - virtual std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { - // TODO(hbos): When dependencies have migrated to this new signature, make - // this pure virtual and delete the other version. - return CreateTimeout(); - } - // TODO(hbos): When dependencies have migrated to the other signature, delete - // this version. - virtual std::unique_ptr CreateTimeout() { - return CreateTimeout(webrtc::TaskQueueBase::DelayPrecision::kLow); - } + virtual std::unique_ptr CreateTimeout() = 0; // Returns the current time in milliseconds (from any epoch). // diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc index 5791d6805..36ade9230 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/reassembly_queue.cc @@ -175,7 +175,9 @@ bool ReassemblyQueue::MaybeResetStreamsDeferred(TSN cum_ack_tsn) { // https://tools.ietf.org/html/rfc6525#section-5.2.2 // "Any queued TSNs (queued at step E2) MUST now be released and processed // normally." - for (auto& [tsn, data] : deferred_chunks) { + for (auto& p : deferred_chunks) { + const TSN& tsn = p.first; + Data& data = p.second; queued_bytes_ -= data.size(); Add(tsn, std::move(data)); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc index 4ccc2e527..d00482435 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.cc @@ -105,12 +105,12 @@ TraditionalReassemblyStreams::TraditionalReassemblyStreams( int TraditionalReassemblyStreams::UnorderedStream::Add(UnwrappedTSN tsn, Data data) { int queued_bytes = data.size(); - auto [it, inserted] = chunks_.emplace(tsn, std::move(data)); - if (!inserted) { + auto p = chunks_.emplace(tsn, std::move(data)); + if (!p.second /* !inserted */) { return 0; } - queued_bytes -= TryToAssembleMessage(it); + queued_bytes -= TryToAssembleMessage(p.first); return queued_bytes; } @@ -225,8 +225,8 @@ int TraditionalReassemblyStreams::OrderedStream::Add(UnwrappedTSN tsn, int queued_bytes = data.size(); UnwrappedSSN ssn = ssn_unwrapper_.Unwrap(data.ssn); - auto [unused, inserted] = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); - if (!inserted) { + auto p = chunks_by_ssn_[ssn].emplace(tsn, std::move(data)); + if (!p.second /* !inserted */) { return 0; } @@ -275,8 +275,8 @@ size_t TraditionalReassemblyStreams::HandleForwardTsn( size_t bytes_removed = 0; // The `skipped_streams` only cover ordered messages - need to // iterate all unordered streams manually to remove those chunks. 
- for (auto& [unused, stream] : unordered_streams_) { - bytes_removed += stream.EraseTo(new_cumulative_ack_tsn); + for (auto& entry : unordered_streams_) { + bytes_removed += entry.second.EraseTo(new_cumulative_ack_tsn); } for (const auto& skipped_stream : skipped_streams) { @@ -292,7 +292,9 @@ size_t TraditionalReassemblyStreams::HandleForwardTsn( void TraditionalReassemblyStreams::ResetStreams( rtc::ArrayView stream_ids) { if (stream_ids.empty()) { - for (auto& [stream_id, stream] : ordered_streams_) { + for (auto& entry : ordered_streams_) { + const StreamID& stream_id = entry.first; + OrderedStream& stream = entry.second; RTC_DLOG(LS_VERBOSE) << log_prefix_ << "Resetting implicit stream_id=" << *stream_id; stream.Reset(); @@ -312,14 +314,14 @@ void TraditionalReassemblyStreams::ResetStreams( HandoverReadinessStatus TraditionalReassemblyStreams::GetHandoverReadiness() const { HandoverReadinessStatus status; - for (const auto& [unused, stream] : ordered_streams_) { - if (stream.has_unassembled_chunks()) { + for (const auto& entry : ordered_streams_) { + if (entry.second.has_unassembled_chunks()) { status.Add(HandoverUnreadinessReason::kOrderedStreamHasUnassembledChunks); break; } } - for (const auto& [unused, stream] : unordered_streams_) { - if (stream.has_unassembled_chunks()) { + for (const auto& entry : unordered_streams_) { + if (entry.second.has_unassembled_chunks()) { status.Add( HandoverUnreadinessReason::kUnorderedStreamHasUnassembledChunks); break; @@ -330,15 +332,15 @@ HandoverReadinessStatus TraditionalReassemblyStreams::GetHandoverReadiness() void TraditionalReassemblyStreams::AddHandoverState( DcSctpSocketHandoverState& state) { - for (const auto& [stream_id, stream] : ordered_streams_) { + for (const auto& entry : ordered_streams_) { DcSctpSocketHandoverState::OrderedStream state_stream; - state_stream.id = stream_id.value(); - state_stream.next_ssn = stream.next_ssn().value(); + state_stream.id = entry.first.value(); + state_stream.next_ssn = entry.second.next_ssn().value(); state.rx.ordered_streams.push_back(std::move(state_stream)); } - for (const auto& [stream_id, unused] : unordered_streams_) { + for (const auto& entry : unordered_streams_) { DcSctpSocketHandoverState::UnorderedStream state_stream; - state_stream.id = stream_id.value(); + state_stream.id = entry.first.value(); state.rx.unordered_streams.push_back(std::move(state_stream)); } } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h index 2fac9ff68..0c724327e 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/rx/traditional_reassembly_streams.h @@ -55,7 +55,8 @@ class TraditionalReassemblyStreams : public ReassemblyStreams { explicit StreamBase(TraditionalReassemblyStreams* parent) : parent_(*parent) {} - size_t AssembleMessage(ChunkMap::iterator start, ChunkMap::iterator end); + size_t AssembleMessage(const ChunkMap::iterator start, + const ChunkMap::iterator end); TraditionalReassemblyStreams& parent_; }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc index 34d809cbe..b4af10e88 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.cc @@ -61,10 +61,9 @@ SendPacketStatus 
CallbackDeferrer::SendPacketWithStatus( return underlying_.SendPacketWithStatus(data); } -std::unique_ptr CallbackDeferrer::CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { +std::unique_ptr CallbackDeferrer::CreateTimeout() { // Will not be deferred - call directly. - return underlying_.CreateTimeout(precision); + return underlying_.CreateTimeout(); } TimeMs CallbackDeferrer::TimeMillis() { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h index 863093c5c..918b1df32 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/callback_deferrer.h @@ -21,7 +21,6 @@ #include "api/array_view.h" #include "api/ref_counted_base.h" #include "api/scoped_refptr.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "rtc_base/ref_counted_object.h" @@ -63,8 +62,7 @@ class CallbackDeferrer : public DcSctpSocketCallbacks { // Implementation of DcSctpSocketCallbacks SendPacketStatus SendPacketWithStatus( rtc::ArrayView data) override; - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override; + std::unique_ptr CreateTimeout() override; TimeMs TimeMillis() override; uint32_t GetRandomInt(uint32_t low, uint32_t high) override; void OnMessageReceived(DcSctpMessage message) override; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc index b93584ed4..815391094 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/dcsctp_socket.cc @@ -22,7 +22,6 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/packet/chunk/abort_chunk.h" #include "net/dcsctp/packet/chunk/chunk.h" #include "net/dcsctp/packet/chunk/cookie_ack_chunk.h" @@ -163,9 +162,7 @@ DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, packet_observer_(std::move(packet_observer)), options_(options), callbacks_(callbacks), - timer_manager_([this](webrtc::TaskQueueBase::DelayPrecision precision) { - return callbacks_.CreateTimeout(precision); - }), + timer_manager_([this]() { return callbacks_.CreateTimeout(); }), t1_init_(timer_manager_.CreateTimer( "t1-init", absl::bind_front(&DcSctpSocket::OnInitTimerExpiry, this), @@ -196,7 +193,7 @@ DcSctpSocket::DcSctpSocket(absl::string_view log_prefix, [this]() { callbacks_.OnTotalBufferedAmountLow(); }) {} std::string DcSctpSocket::log_prefix() const { - return log_prefix_ + "[" + std::string(ToString(state_)) + "] "; + return log_prefix_ + "[" + std::string(ToString(state_)) + "] "; } bool DcSctpSocket::IsConsistent() const { @@ -497,13 +494,17 @@ SocketState DcSctpSocket::state() const { case State::kClosed: return SocketState::kClosed; case State::kCookieWait: + ABSL_FALLTHROUGH_INTENDED; case State::kCookieEchoed: return SocketState::kConnecting; case State::kEstablished: return SocketState::kConnected; case State::kShutdownPending: + ABSL_FALLTHROUGH_INTENDED; case State::kShutdownSent: + ABSL_FALLTHROUGH_INTENDED; case State::kShutdownReceived: + ABSL_FALLTHROUGH_INTENDED; case State::kShutdownAckSent: return SocketState::kShuttingDown; } diff --git 
a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h index 1e30777e8..a49a0b332 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/mock_dcsctp_socket_callbacks.h @@ -20,7 +20,6 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/dcsctp_message.h" #include "net/dcsctp/public/dcsctp_socket.h" #include "net/dcsctp/public/timeout.h" @@ -88,9 +87,7 @@ class MockDcSctpSocketCallbacks : public DcSctpSocketCallbacks { (rtc::ArrayView data), (override)); - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) override { - // The fake timeout manager does not implement |precision|. + std::unique_ptr CreateTimeout() override { return timeout_manager_.CreateTimeout(); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc index 0fc0d4c02..2e4e96873 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.cc @@ -140,7 +140,9 @@ void TransmissionControlBlock::SendBufferedPackets(SctpPacket::Builder& builder, auto chunks = retransmission_queue_.GetChunksToSend(now, builder.bytes_remaining()); - for (auto& [tsn, data] : chunks) { + for (auto& elem : chunks) { + TSN tsn = elem.first; + Data data = std::move(elem.second); if (capabilities_.message_interleaving) { builder.Add(IDataChunk(tsn, std::move(data), false)); } else { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h index 8cefbc65f..6d9dfc5e7 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/socket/transmission_control_block.h @@ -19,7 +19,6 @@ #include "absl/functional/bind_front.h" #include "absl/strings/string_view.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/common/sequence_numbers.h" #include "net/dcsctp/packet/chunk/cookie_echo_chunk.h" #include "net/dcsctp/packet/sctp_packet.h" @@ -79,9 +78,7 @@ class TransmissionControlBlock : public Context { this), TimerOptions(options.delayed_ack_max_timeout, TimerBackoffAlgorithm::kExponential, - /*max_restarts=*/0, - /*max_backoff_duration=*/absl::nullopt, - webrtc::TaskQueueBase::DelayPrecision::kHigh))), + /*max_restarts=*/0))), my_verification_tag_(my_verification_tag), my_initial_tsn_(my_initial_tsn), peer_verification_tag_(peer_verification_tag), diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h index f917c740a..859450b1c 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/testing/data_generator.h @@ -38,14 +38,14 @@ class DataGenerator { // "is_end" flag. 
Data Ordered(std::vector payload, absl::string_view flags = "", - DataGeneratorOptions opts = {}); + const DataGeneratorOptions opts = {}); // Generates unordered "data" with the provided `payload` and flags, which can // contain "B" for setting the "is_beginning" flag, and/or "E" for setting the // "is_end" flag. Data Unordered(std::vector payload, absl::string_view flags = "", - DataGeneratorOptions opts = {}); + const DataGeneratorOptions opts = {}); // Resets the Message ID identifier - simulating a "stream reset". void ResetStream() { message_id_ = MID(0); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h index f87227577..e8f50d93c 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/fake_timeout.h @@ -18,7 +18,6 @@ #include #include "absl/types/optional.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/timeout.h" #include "rtc_base/checks.h" #include "rtc_base/containers/flat_set.h" @@ -28,8 +27,8 @@ namespace dcsctp { // A timeout used in tests. class FakeTimeout : public Timeout { public: - FakeTimeout(std::function get_time, - std::function on_delete) + explicit FakeTimeout(std::function get_time, + std::function on_delete) : get_time_(std::move(get_time)), on_delete_(std::move(on_delete)) {} ~FakeTimeout() override { on_delete_(this); } @@ -69,17 +68,12 @@ class FakeTimeoutManager { explicit FakeTimeoutManager(std::function get_time) : get_time_(std::move(get_time)) {} - std::unique_ptr CreateTimeout() { + std::unique_ptr CreateTimeout() { auto timer = std::make_unique( get_time_, [this](FakeTimeout* timer) { timers_.erase(timer); }); timers_.insert(timer.get()); return timer; } - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision) { - // FakeTimeout does not support implement |precision|. 
- return CreateTimeout(); - } // NOTE: This can't return a vector, as calling EvaluateHasExpired requires // calling socket->HandleTimeout directly afterwards, as the owning Timer diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc index 4d1dc1907..6d3054eeb 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.cc @@ -16,10 +16,8 @@ namespace dcsctp { TaskQueueTimeoutFactory::TaskQueueTimeout::TaskQueueTimeout( - TaskQueueTimeoutFactory& parent, - webrtc::TaskQueueBase::DelayPrecision precision) + TaskQueueTimeoutFactory& parent) : parent_(parent), - precision_(precision), pending_task_safety_flag_(webrtc::PendingTaskSafetyFlag::Create()) {} TaskQueueTimeoutFactory::TaskQueueTimeout::~TaskQueueTimeout() { @@ -56,8 +54,7 @@ void TaskQueueTimeoutFactory::TaskQueueTimeout::Start(DurationMs duration_ms, } posted_task_expiration_ = timeout_expiration_; - parent_.task_queue_.PostDelayedTaskWithPrecision( - precision_, + parent_.task_queue_.PostDelayedTask( webrtc::ToQueuedTask( pending_task_safety_flag_, [timeout_id, this]() { diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h index 600b29244..e8d12df59 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/task_queue_timeout.h @@ -45,17 +45,14 @@ class TaskQueueTimeoutFactory { on_expired_(std::move(on_expired)) {} // Creates an implementation of `Timeout`. - std::unique_ptr CreateTimeout( - webrtc::TaskQueueBase::DelayPrecision precision = - webrtc::TaskQueueBase::DelayPrecision::kLow) { - return std::make_unique(*this, precision); + std::unique_ptr CreateTimeout() { + return std::make_unique(*this); } private: class TaskQueueTimeout : public Timeout { public: - TaskQueueTimeout(TaskQueueTimeoutFactory& parent, - webrtc::TaskQueueBase::DelayPrecision precision); + explicit TaskQueueTimeout(TaskQueueTimeoutFactory& parent); ~TaskQueueTimeout(); void Start(DurationMs duration_ms, TimeoutID timeout_id) override; @@ -63,7 +60,6 @@ class TaskQueueTimeoutFactory { private: TaskQueueTimeoutFactory& parent_; - const webrtc::TaskQueueBase::DelayPrecision precision_; // A safety flag to ensure that posted tasks to the task queue don't // reference these object when they go out of scope. Note that this safety // flag will be re-created if the scheduled-but-not-yet-expired task is not diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc index bde07638a..deee7db0c 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.cc @@ -144,7 +144,7 @@ std::unique_ptr TimerManager::CreateTimer(absl::string_view name, // after 800 million reconnections on a single socket. Ensure this will never // happen. 
RTC_CHECK_NE(*id, std::numeric_limits::max()); - std::unique_ptr timeout = create_timeout_(options.precision); + std::unique_ptr timeout = create_timeout_(); RTC_CHECK(timeout != nullptr); auto timer = absl::WrapUnique(new Timer( id, name, std::move(on_expired), [this, id]() { timers_.erase(id); }, diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h index 31b496dc8..49dff34e4 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/timer/timer.h @@ -21,7 +21,6 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" -#include "api/task_queue/task_queue_base.h" #include "net/dcsctp/public/timeout.h" #include "rtc_base/strong_alias.h" @@ -53,21 +52,10 @@ struct TimerOptions { TimerBackoffAlgorithm backoff_algorithm, absl::optional max_restarts, absl::optional max_backoff_duration) - : TimerOptions(duration, - backoff_algorithm, - max_restarts, - max_backoff_duration, - webrtc::TaskQueueBase::DelayPrecision::kLow) {} - TimerOptions(DurationMs duration, - TimerBackoffAlgorithm backoff_algorithm, - absl::optional max_restarts, - absl::optional max_backoff_duration, - webrtc::TaskQueueBase::DelayPrecision precision) : duration(duration), backoff_algorithm(backoff_algorithm), max_restarts(max_restarts), - max_backoff_duration(max_backoff_duration), - precision(precision) {} + max_backoff_duration(max_backoff_duration) {} // The initial timer duration. Can be overridden with `set_duration`. const DurationMs duration; @@ -79,8 +67,6 @@ struct TimerOptions { const absl::optional max_restarts; // The maximum timeout value for exponential backoff. const absl::optional max_backoff_duration; - // The precision of the webrtc::TaskQueueBase used for scheduling. - const webrtc::TaskQueueBase::DelayPrecision precision; }; // A high-level timer (in contrast to the low-level `Timeout` class). 
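For context on the two timer.h hunks here: this patch reverts dcsctp's timers to a precision-free timeout factory, so TimerOptions loses its precision member and TimerManager goes back to accepting a plain std::function<std::unique_ptr<Timeout>()>. A minimal sketch of how a socket owner could wire this up after the revert follows; MakeTimerManager is a hypothetical helper, not part of the patch, and the callbacks object is assumed to outlive the returned TimerManager.

#include <functional>
#include <memory>

#include "net/dcsctp/public/dcsctp_socket.h"  // DcSctpSocketCallbacks
#include "net/dcsctp/public/timeout.h"        // Timeout
#include "net/dcsctp/timer/timer.h"           // TimerManager

namespace dcsctp {

// Hypothetical helper: builds a TimerManager from a callbacks object using the
// no-argument CreateTimeout() restored by this patch.
std::unique_ptr<TimerManager> MakeTimerManager(DcSctpSocketCallbacks& callbacks) {
  return std::make_unique<TimerManager>(
      [&callbacks]() { return callbacks.CreateTimeout(); });
}

}  // namespace dcsctp

This mirrors the DcSctpSocket constructor change earlier in the patch, where timer_manager_ is now constructed with [this]() { return callbacks_.CreateTimeout(); }.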
@@ -186,8 +172,7 @@ class Timer { class TimerManager { public: explicit TimerManager( - std::function( - webrtc::TaskQueueBase::DelayPrecision)> create_timeout) + std::function()> create_timeout) : create_timeout_(std::move(create_timeout)) {} // Creates a timer with name `name` that will expire (when started) after @@ -200,9 +185,7 @@ class TimerManager { void HandleTimeout(TimeoutID timeout_id); private: - const std::function( - webrtc::TaskQueueBase::DelayPrecision)> - create_timeout_; + const std::function()> create_timeout_; std::map timers_; TimerID next_id_ = TimerID(0); }; diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc index 05277d058..dc998def2 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/outstanding_data.cc @@ -74,14 +74,14 @@ bool OutstandingData::IsConsistent() const { size_t actual_outstanding_items = 0; std::set actual_to_be_retransmitted; - for (const auto& [tsn, item] : outstanding_data_) { - if (item.is_outstanding()) { - actual_outstanding_bytes += GetSerializedChunkSize(item.data()); + for (const auto& elem : outstanding_data_) { + if (elem.second.is_outstanding()) { + actual_outstanding_bytes += GetSerializedChunkSize(elem.second.data()); ++actual_outstanding_items; } - if (item.should_be_retransmitted()) { - actual_to_be_retransmitted.insert(tsn); + if (elem.second.should_be_retransmitted()) { + actual_to_be_retransmitted.insert(elem.first); } } @@ -263,7 +263,10 @@ void OutstandingData::AbandonAllFor(const Item& item) { << *tsn.Wrap(); } - for (auto& [tsn, other] : outstanding_data_) { + for (auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + Item& other = elem.second; + if (!other.is_abandoned() && other.data().stream_id == item.data().stream_id && other.data().is_unordered == item.data().is_unordered && @@ -315,7 +318,10 @@ std::vector> OutstandingData::GetChunksToBeRetransmitted( } void OutstandingData::ExpireOutstandingChunks(TimeMs now) { - for (const auto& [tsn, item] : outstanding_data_) { + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const Item& item = elem.second; + // Chunks that are nacked can be expired. Care should be taken not to expire // unacked (in-flight) chunks as they might have been received, but the SACK // is either delayed or in-flight and may be received later. 
@@ -372,7 +378,9 @@ absl::optional OutstandingData::Insert( } void OutstandingData::NackAll() { - for (auto& [tsn, item] : outstanding_data_) { + for (auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + Item& item = elem.second; if (!item.is_acked()) { NackItem(tsn, item, /*retransmit_now=*/true); } @@ -398,21 +406,21 @@ std::vector> OutstandingData::GetChunkStatesForTesting() const { std::vector> states; states.emplace_back(last_cumulative_tsn_ack_.Wrap(), State::kAcked); - for (const auto& [tsn, item] : outstanding_data_) { + for (const auto& elem : outstanding_data_) { State state; - if (item.is_abandoned()) { + if (elem.second.is_abandoned()) { state = State::kAbandoned; - } else if (item.should_be_retransmitted()) { + } else if (elem.second.should_be_retransmitted()) { state = State::kToBeRetransmitted; - } else if (item.is_acked()) { + } else if (elem.second.is_acked()) { state = State::kAcked; - } else if (item.is_outstanding()) { + } else if (elem.second.is_outstanding()) { state = State::kInFlight; } else { state = State::kNacked; } - states.emplace_back(tsn.Wrap(), state); + states.emplace_back(elem.first.Wrap(), state); } return states; } @@ -430,7 +438,10 @@ ForwardTsnChunk OutstandingData::CreateForwardTsn() const { std::map skipped_per_ordered_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& [tsn, item] : outstanding_data_) { + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const Item& item = elem.second; + if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } @@ -443,8 +454,8 @@ ForwardTsnChunk OutstandingData::CreateForwardTsn() const { std::vector skipped_streams; skipped_streams.reserve(skipped_per_ordered_stream.size()); - for (const auto& [stream_id, ssn] : skipped_per_ordered_stream) { - skipped_streams.emplace_back(stream_id, ssn); + for (const auto& elem : skipped_per_ordered_stream) { + skipped_streams.emplace_back(elem.first, elem.second); } return ForwardTsnChunk(new_cumulative_ack.Wrap(), std::move(skipped_streams)); } @@ -453,7 +464,10 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::map, MID> skipped_per_stream; UnwrappedTSN new_cumulative_ack = last_cumulative_tsn_ack_; - for (const auto& [tsn, item] : outstanding_data_) { + for (const auto& elem : outstanding_data_) { + UnwrappedTSN tsn = elem.first; + const Item& item = elem.second; + if ((tsn != new_cumulative_ack.next_value()) || !item.is_abandoned()) { break; } @@ -468,7 +482,9 @@ IForwardTsnChunk OutstandingData::CreateIForwardTsn() const { std::vector skipped_streams; skipped_streams.reserve(skipped_per_stream.size()); - for (const auto& [stream, message_id] : skipped_per_stream) { + for (const auto& elem : skipped_per_stream) { + const std::pair& stream = elem.first; + MID message_id = elem.second; skipped_streams.emplace_back(stream.first, stream.second, message_id); } diff --git a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc index a20ccb244..21744cc0a 100644 --- a/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/net/dcsctp/tx/rr_send_queue.cc @@ -76,8 +76,8 @@ void RRSendQueue::OutgoingStream::AddHandoverState( bool RRSendQueue::IsConsistent() const { size_t total_buffered_amount = 0; - for (const auto& [unused, stream] : streams_) { - total_buffered_amount += stream.buffered_amount().value(); + for (const auto& stream_entry : streams_) { + 
total_buffered_amount += stream_entry.second.buffered_amount().value(); } if (previous_message_has_ended_) { @@ -391,8 +391,9 @@ void RRSendQueue::PrepareResetStreams(rtc::ArrayView streams) { bool RRSendQueue::CanResetStreams() const { // Streams can be reset if those streams that are paused don't have any // messages that are partially sent. - for (auto& [unused, stream] : streams_) { - if (stream.is_paused() && stream.has_partially_sent_message()) { + for (auto& stream : streams_) { + if (stream.second.is_paused() && + stream.second.has_partially_sent_message()) { return false; } } @@ -400,17 +401,17 @@ bool RRSendQueue::CanResetStreams() const { } void RRSendQueue::CommitResetStreams() { - for (auto& [unused, stream] : streams_) { - if (stream.is_paused()) { - stream.Reset(); + for (auto& stream_entry : streams_) { + if (stream_entry.second.is_paused()) { + stream_entry.second.Reset(); } } RTC_DCHECK(IsConsistent()); } void RRSendQueue::RollbackResetStreams() { - for (auto& [unused, stream] : streams_) { - stream.Resume(); + for (auto& stream_entry : streams_) { + stream_entry.second.Resume(); } RTC_DCHECK(IsConsistent()); } @@ -418,7 +419,8 @@ void RRSendQueue::RollbackResetStreams() { void RRSendQueue::Reset() { // Recalculate buffered amount, as partially sent messages may have been put // fully back in the queue. - for (auto& [unused, stream] : streams_) { + for (auto& stream_entry : streams_) { + OutgoingStream& stream = stream_entry.second; stream.Reset(); } previous_message_has_ended_ = true; @@ -469,10 +471,10 @@ HandoverReadinessStatus RRSendQueue::GetHandoverReadiness() const { } void RRSendQueue::AddHandoverState(DcSctpSocketHandoverState& state) { - for (const auto& [stream_id, stream] : streams_) { + for (const auto& entry : streams_) { DcSctpSocketHandoverState::OutgoingStream state_stream; - state_stream.id = stream_id.value(); - stream.AddHandoverState(state_stream); + state_stream.id = entry.first.value(); + entry.second.AddHandoverState(state_stream); state.tx.streams.push_back(std::move(state_stream)); } } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h b/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h index f0df42b52..eb4eef7cd 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/async_stun_tcp_socket.h @@ -15,6 +15,7 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" @@ -31,9 +32,6 @@ class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { explicit AsyncStunTCPSocket(rtc::Socket* socket); - AsyncStunTCPSocket(const AsyncStunTCPSocket&) = delete; - AsyncStunTCPSocket& operator=(const AsyncStunTCPSocket&) = delete; - int Send(const void* pv, size_t cb, const rtc::PacketOptions& options) override; @@ -44,6 +42,8 @@ class AsyncStunTCPSocket : public rtc::AsyncTCPSocketBase { // This method also returns the number of padding bytes needed/added to the // turn message. `pad_bytes` should be used only when `is_turn` is true. 
size_t GetExpectedLength(const void* data, size_t len, int* pad_bytes); + + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncStunTCPSocket); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc index 9025fbe2a..81fb324d1 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/basic_ice_controller.cc @@ -83,8 +83,6 @@ void BasicIceController::OnConnectionDestroyed(const Connection* connection) { pinged_connections_.erase(connection); unpinged_connections_.erase(connection); connections_.erase(absl::c_find(connections_, connection)); - if (selected_connection_ == connection) - selected_connection_ = nullptr; } bool BasicIceController::HasPingableConnection() const { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc index 1d674e2bc..e0e5c150b 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.cc @@ -18,7 +18,6 @@ #include #include "absl/algorithm/container.h" -#include "absl/memory/memory.h" #include "absl/strings/match.h" #include "p2p/base/port_allocator.h" #include "rtc_base/checks.h" @@ -168,7 +167,6 @@ ConnectionRequest::ConnectionRequest(Connection* connection) : StunRequest(new IceMessage()), connection_(connection) {} void ConnectionRequest::Prepare(StunMessage* request) { - RTC_DCHECK_RUN_ON(connection_->network_thread_); request->SetType(STUN_BINDING_REQUEST); std::string username; connection_->port()->CreateStunUsername( @@ -209,10 +207,10 @@ void ConnectionRequest::Prepare(StunMessage* request) { request->AddAttribute( std::make_unique(STUN_ATTR_USE_CANDIDATE)); } - if (connection_->nomination_ && - connection_->nomination_ != connection_->acked_nomination()) { + if (connection_->nomination() && + connection_->nomination() != connection_->acked_nomination()) { request->AddAttribute(std::make_unique( - STUN_ATTR_NOMINATION, connection_->nomination_)); + STUN_ATTR_NOMINATION, connection_->nomination())); } } else if (connection_->port()->GetIceRole() == ICEROLE_CONTROLLED) { request->AddAttribute(std::make_unique( @@ -259,22 +257,18 @@ void ConnectionRequest::Prepare(StunMessage* request) { } void ConnectionRequest::OnResponse(StunMessage* response) { - RTC_DCHECK_RUN_ON(connection_->network_thread_); connection_->OnConnectionRequestResponse(this, response); } void ConnectionRequest::OnErrorResponse(StunMessage* response) { - RTC_DCHECK_RUN_ON(connection_->network_thread_); connection_->OnConnectionRequestErrorResponse(this, response); } void ConnectionRequest::OnTimeout() { - RTC_DCHECK_RUN_ON(connection_->network_thread_); connection_->OnConnectionRequestTimeout(this); } void ConnectionRequest::OnSent() { - RTC_DCHECK_RUN_ON(connection_->network_thread_); connection_->OnConnectionRequestSent(this); // Each request is sent only once. After a single delay , the request will // time out. 
@@ -288,8 +282,7 @@ int ConnectionRequest::resend_delay() { Connection::Connection(Port* port, size_t index, const Candidate& remote_candidate) - : network_thread_(port->thread()), - id_(rtc::CreateRandomId()), + : id_(rtc::CreateRandomId()), port_(port), local_candidate_index_(index), remote_candidate_(remote_candidate), @@ -300,6 +293,7 @@ Connection::Connection(Port* port, connected_(true), pruned_(false), use_candidate_attr_(false), + remote_ice_mode_(ICEMODE_FULL), requests_(port->thread()), rtt_(DEFAULT_RTT), last_ping_sent_(0), @@ -311,7 +305,6 @@ Connection::Connection(Port* port, time_created_ms_(rtc::TimeMillis()), field_trials_(&kDefaultFieldTrials), rtt_estimate_(DEFAULT_RTT_ESTIMATE_HALF_TIME_MS) { - RTC_DCHECK_RUN_ON(network_thread_); // All of our connections start in WAITING state. // TODO(mallinath) - Start connections from STATE_FROZEN. // Wire up to send stun packets @@ -319,16 +312,9 @@ Connection::Connection(Port* port, RTC_LOG(LS_INFO) << ToString() << ": Connection created"; } -Connection::~Connection() { - RTC_DCHECK_RUN_ON(network_thread_); -} - -webrtc::TaskQueueBase* Connection::network_thread() const { - return network_thread_; -} +Connection::~Connection() {} const Candidate& Connection::local_candidate() const { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(local_candidate_index_ < port_->Candidates().size()); return port_->Candidates()[local_candidate_index_]; } @@ -371,7 +357,6 @@ uint64_t Connection::priority() const { } void Connection::set_write_state(WriteState value) { - RTC_DCHECK_RUN_ON(network_thread_); WriteState old_value = write_state_; write_state_ = value; if (value != old_value) { @@ -382,7 +367,6 @@ void Connection::set_write_state(WriteState value) { } void Connection::UpdateReceiving(int64_t now) { - RTC_DCHECK_RUN_ON(network_thread_); bool receiving; if (last_ping_sent() < last_ping_response_received()) { // We consider any candidate pair that has its last connectivity check @@ -408,7 +392,6 @@ void Connection::UpdateReceiving(int64_t now) { } void Connection::set_state(IceCandidatePairState state) { - RTC_DCHECK_RUN_ON(network_thread_); IceCandidatePairState old_state = state_; state_ = state; if (state != old_state) { @@ -417,7 +400,6 @@ void Connection::set_state(IceCandidatePairState state) { } void Connection::set_connected(bool value) { - RTC_DCHECK_RUN_ON(network_thread_); bool old_value = connected_; connected_ = value; if (value != old_value) { @@ -426,74 +408,27 @@ void Connection::set_connected(bool value) { } } -bool Connection::use_candidate_attr() const { - RTC_DCHECK_RUN_ON(network_thread_); - return use_candidate_attr_; -} - void Connection::set_use_candidate_attr(bool enable) { - RTC_DCHECK_RUN_ON(network_thread_); use_candidate_attr_ = enable; } -void Connection::set_nomination(uint32_t value) { - RTC_DCHECK_RUN_ON(network_thread_); - nomination_ = value; -} - -uint32_t Connection::remote_nomination() const { - RTC_DCHECK_RUN_ON(network_thread_); - return remote_nomination_; -} - -bool Connection::nominated() const { - RTC_DCHECK_RUN_ON(network_thread_); - return acked_nomination_ || remote_nomination_; -} - int Connection::unwritable_timeout() const { - RTC_DCHECK_RUN_ON(network_thread_); return unwritable_timeout_.value_or(CONNECTION_WRITE_CONNECT_TIMEOUT); } -void Connection::set_unwritable_timeout(const absl::optional& value_ms) { - RTC_DCHECK_RUN_ON(network_thread_); - unwritable_timeout_ = value_ms; -} - int Connection::unwritable_min_checks() const { - RTC_DCHECK_RUN_ON(network_thread_); return 
unwritable_min_checks_.value_or(CONNECTION_WRITE_CONNECT_FAILURES); } -void Connection::set_unwritable_min_checks(const absl::optional& value) { - RTC_DCHECK_RUN_ON(network_thread_); - unwritable_min_checks_ = value; -} - int Connection::inactive_timeout() const { - RTC_DCHECK_RUN_ON(network_thread_); return inactive_timeout_.value_or(CONNECTION_WRITE_TIMEOUT); } -void Connection::set_inactive_timeout(const absl::optional& value) { - RTC_DCHECK_RUN_ON(network_thread_); - inactive_timeout_ = value; -} - int Connection::receiving_timeout() const { - RTC_DCHECK_RUN_ON(network_thread_); return receiving_timeout_.value_or(WEAK_CONNECTION_RECEIVE_TIMEOUT); } -void Connection::set_receiving_timeout( - absl::optional receiving_timeout_ms) { - RTC_DCHECK_RUN_ON(network_thread_); - receiving_timeout_ = receiving_timeout_ms; -} - void Connection::SetIceFieldTrials(const IceFieldTrials* field_trials) { - RTC_DCHECK_RUN_ON(network_thread_); field_trials_ = field_trials; rtt_estimate_.SetHalfTime(field_trials->rtt_estimate_halftime_ms); } @@ -501,7 +436,6 @@ void Connection::SetIceFieldTrials(const IceFieldTrials* field_trials) { void Connection::OnSendStunPacket(const void* data, size_t size, StunRequest* req) { - RTC_DCHECK_RUN_ON(network_thread_); rtc::PacketOptions options(port_->StunDscpValue()); options.info_signaled_after_sent.packet_type = rtc::PacketType::kIceConnectivityCheck; @@ -518,7 +452,6 @@ void Connection::OnSendStunPacket(const void* data, void Connection::OnReadPacket(const char* data, size_t size, int64_t packet_time_us) { - RTC_DCHECK_RUN_ON(network_thread_); std::unique_ptr msg; std::string remote_ufrag; const rtc::SocketAddress& addr(remote_candidate_.address()); @@ -602,7 +535,6 @@ void Connection::OnReadPacket(const char* data, } void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { - RTC_DCHECK_RUN_ON(network_thread_); // This connection should now be receiving. ReceivedPing(msg->transaction_id()); if (webrtc::field_trial::IsEnabled("WebRTC-ExtraICEPing") && @@ -702,7 +634,6 @@ void Connection::HandleStunBindingOrGoogPingRequest(IceMessage* msg) { } void Connection::SendStunBindingResponse(const StunMessage* request) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(request->type() == STUN_BINDING_REQUEST); // Retrieve the username from the request. @@ -759,7 +690,6 @@ void Connection::SendStunBindingResponse(const StunMessage* request) { } void Connection::SendGoogPingResponse(const StunMessage* request) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(request->type() == GOOG_PING_REQUEST); // Fill in the response message. @@ -771,7 +701,6 @@ void Connection::SendGoogPingResponse(const StunMessage* request) { } void Connection::SendResponseMessage(const StunMessage& response) { - RTC_DCHECK_RUN_ON(network_thread_); // Where I send the response. 
const rtc::SocketAddress& addr = remote_candidate_.address(); @@ -802,28 +731,11 @@ void Connection::SendResponseMessage(const StunMessage& response) { } } -uint32_t Connection::acked_nomination() const { - RTC_DCHECK_RUN_ON(network_thread_); - return acked_nomination_; -} - -void Connection::set_remote_nomination(uint32_t remote_nomination) { - RTC_DCHECK_RUN_ON(network_thread_); - remote_nomination_ = remote_nomination; -} - void Connection::OnReadyToSend() { - RTC_DCHECK_RUN_ON(network_thread_); SignalReadyToSend(this); } -bool Connection::pruned() const { - RTC_DCHECK_RUN_ON(network_thread_); - return pruned_; -} - void Connection::Prune() { - RTC_DCHECK_RUN_ON(network_thread_); if (!pruned_ || active()) { RTC_LOG(LS_INFO) << ToString() << ": Connection pruned"; pruned_ = true; @@ -833,47 +745,27 @@ void Connection::Prune() { } void Connection::Destroy() { - RTC_DCHECK_RUN_ON(network_thread_); - if (pending_delete_) - return; - - pending_delete_ = true; - - RTC_DLOG(LS_VERBOSE) << ToString() << ": Connection destroyed"; - - // Fire the 'destroyed' event before deleting the object. This is done - // intentionally to avoid a situation whereby the signal might have dangling - // pointers to objects that have been deleted by the time the async task - // that deletes the connection object runs. - SignalDestroyed(this); - SignalDestroyed.disconnect_all(); - + // TODO(deadbeef, nisse): This may leak if an application closes a + // PeerConnection and then quickly destroys the PeerConnectionFactory (along + // with the networking thread on which this message is posted). Also affects + // tests, with a workaround in + // AutoSocketServerThread::~AutoSocketServerThread. + RTC_LOG(LS_VERBOSE) << ToString() << ": Connection destroyed"; + port_->thread()->Post(RTC_FROM_HERE, this, MSG_DELETE); LogCandidatePairConfig(webrtc::IceCandidatePairConfigType::kDestroyed); - - // Unwind the stack before deleting the object in case upstream callers - // need to refer to the Connection's state as part of teardown. - // NOTE: We move ownership of 'this' into the capture section of the lambda - // so that the object will always be deleted, including if PostTask fails. - // In such a case (only tests), deletion would happen inside of the call - // to `Destroy()`. 
- network_thread_->PostTask( - webrtc::ToQueuedTask([me = absl::WrapUnique(this)]() {})); } void Connection::FailAndDestroy() { - RTC_DCHECK_RUN_ON(network_thread_); set_state(IceCandidatePairState::FAILED); Destroy(); } void Connection::FailAndPrune() { - RTC_DCHECK_RUN_ON(network_thread_); set_state(IceCandidatePairState::FAILED); Prune(); } void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) { - RTC_DCHECK_RUN_ON(network_thread_); rtc::StringBuilder oss; if (pings_since_last_response_.size() > max) { for (size_t i = 0; i < max; i++) { @@ -889,28 +781,7 @@ void Connection::PrintPingsSinceLastResponse(std::string* s, size_t max) { *s = oss.str(); } -bool Connection::reported() const { - RTC_DCHECK_RUN_ON(network_thread_); - return reported_; -} - -void Connection::set_reported(bool reported) { - RTC_DCHECK_RUN_ON(network_thread_); - reported_ = reported; -} - -bool Connection::selected() const { - RTC_DCHECK_RUN_ON(network_thread_); - return selected_; -} - -void Connection::set_selected(bool selected) { - RTC_DCHECK_RUN_ON(network_thread_); - selected_ = selected; -} - void Connection::UpdateState(int64_t now) { - RTC_DCHECK_RUN_ON(network_thread_); int rtt = ConservativeRTTEstimate(rtt_); if (RTC_LOG_CHECK_LEVEL(LS_VERBOSE)) { @@ -967,13 +838,7 @@ void Connection::UpdateState(int64_t now) { } } -int64_t Connection::last_ping_sent() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_ping_sent_; -} - void Connection::Ping(int64_t now) { - RTC_DCHECK_RUN_ON(network_thread_); last_ping_sent_ = now; ConnectionRequest* req = new ConnectionRequest(this); // If not using renomination, we use "1" to mean "nominated" and "0" to mean @@ -992,38 +857,13 @@ void Connection::Ping(int64_t now) { num_pings_sent_++; } -int64_t Connection::last_ping_response_received() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_ping_response_received_; -} - -const absl::optional& Connection::last_ping_id_received() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_ping_id_received_; -} - -// Used to check if any STUN ping response has been received. -int Connection::rtt_samples() const { - RTC_DCHECK_RUN_ON(network_thread_); - return rtt_samples_; -} - -// Called whenever a valid ping is received on this connection. This is -// public because the connection intercepts the first ping for us. 
-int64_t Connection::last_ping_received() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_ping_received_; -} - void Connection::ReceivedPing(const absl::optional& request_id) { - RTC_DCHECK_RUN_ON(network_thread_); last_ping_received_ = rtc::TimeMillis(); last_ping_id_received_ = request_id; UpdateReceiving(last_ping_received_); } void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK(msg->type() == STUN_BINDING_REQUEST || msg->type() == GOOG_PING_REQUEST); const StunByteStringAttribute* last_ice_check_received_attr = @@ -1044,21 +884,10 @@ void Connection::HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg) { } } -int64_t Connection::last_send_data() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_send_data_; -} - -int64_t Connection::last_data_received() const { - RTC_DCHECK_RUN_ON(network_thread_); - return last_data_received_; -} - void Connection::ReceivedPingResponse( int rtt, const std::string& request_id, const absl::optional& nomination) { - RTC_DCHECK_RUN_ON(network_thread_); RTC_DCHECK_GE(rtt, 0); // We've already validated that this is a STUN binding response with // the correct local and remote username for this connection. @@ -1087,39 +916,7 @@ void Connection::ReceivedPingResponse( rtt_samples_++; } -Connection::WriteState Connection::write_state() const { - RTC_DCHECK_RUN_ON(network_thread_); - return write_state_; -} - -bool Connection::writable() const { - RTC_DCHECK_RUN_ON(network_thread_); - return write_state_ == STATE_WRITABLE; -} - -bool Connection::receiving() const { - RTC_DCHECK_RUN_ON(network_thread_); - return receiving_; -} - -// Determines whether the connection has finished connecting. This can only -// be false for TCP connections. -bool Connection::connected() const { - RTC_DCHECK_RUN_ON(network_thread_); - return connected_; -} - -bool Connection::weak() const { - return !(writable() && receiving() && connected()); -} - -bool Connection::active() const { - RTC_DCHECK_RUN_ON(network_thread_); - return write_state_ != STATE_WRITE_TIMEOUT; -} - bool Connection::dead(int64_t now) const { - RTC_DCHECK_RUN_ON(network_thread_); if (last_received() > 0) { // If it has ever received anything, we keep it alive // - if it has recevied last DEAD_CONNECTION_RECEIVE_TIMEOUT (30s) @@ -1163,11 +960,6 @@ bool Connection::dead(int64_t now) const { return now > (time_created_ms_ + MIN_CONNECTION_LIFETIME); } -int Connection::rtt() const { - RTC_DCHECK_RUN_ON(network_thread_); - return rtt_; -} - bool Connection::stable(int64_t now) const { // A connection is stable if it's RTT has converged and it isn't missing any // responses. 
We should send pings at a higher rate until the RTT converges @@ -1186,7 +978,6 @@ uint32_t Connection::ComputeNetworkCost() const { } std::string Connection::ToString() const { - RTC_DCHECK_RUN_ON(network_thread_); const absl::string_view CONNECT_STATE_ABBREV[2] = { "-", // not connected (false) "C", // connected (true) @@ -1223,8 +1014,8 @@ std::string Connection::ToString() const { << ":" << remote.address().ToSensitiveString() << "|" << CONNECT_STATE_ABBREV[connected()] << RECEIVE_STATE_ABBREV[receiving()] << WRITE_STATE_ABBREV[write_state()] << ICESTATE[static_cast(state())] - << "|" << SELECTED_STATE_ABBREV[selected_] << "|" << remote_nomination() - << "|" << nomination_ << "|" << priority() << "|"; + << "|" << SELECTED_STATE_ABBREV[selected()] << "|" << remote_nomination() + << "|" << nomination() << "|" << priority() << "|"; if (rtt_ < DEFAULT_RTT) { ss << rtt_ << "]"; } else { @@ -1238,7 +1029,6 @@ std::string Connection::ToSensitiveString() const { } const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { - RTC_DCHECK_RUN_ON(network_thread_); if (log_description_.has_value()) { return log_description_.value(); } @@ -1262,12 +1052,6 @@ const webrtc::IceCandidatePairDescription& Connection::ToLogDescription() { return log_description_.value(); } -void Connection::set_ice_event_log(webrtc::IceEventLog* ice_event_log) { - RTC_DCHECK_RUN_ON(network_thread_); - ice_event_log_ = ice_event_log; -} - -// RTC_RUN_ON(network_thread_) void Connection::LogCandidatePairConfig( webrtc::IceCandidatePairConfigType type) { if (ice_event_log_ == nullptr) { @@ -1276,7 +1060,6 @@ void Connection::LogCandidatePairConfig( ice_event_log_->LogCandidatePairConfig(type, id(), ToLogDescription()); } -// RTC_RUN_ON(network_thread_) void Connection::LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, uint32_t transaction_id) { if (ice_event_log_ == nullptr) { @@ -1287,7 +1070,6 @@ void Connection::LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, void Connection::OnConnectionRequestResponse(ConnectionRequest* request, StunMessage* response) { - RTC_DCHECK_RUN_ON(network_thread_); // Log at LS_INFO if we receive a ping response on an unwritable // connection. rtc::LoggingSeverity sev = !writable() ? rtc::LS_INFO : rtc::LS_VERBOSE; @@ -1377,7 +1159,6 @@ void Connection::OnConnectionRequestTimeout(ConnectionRequest* request) { << request->Elapsed() << " ms"; } -// RTC_RUN_ON(network_thread_). void Connection::OnConnectionRequestSent(ConnectionRequest* request) { // Log at LS_INFO if we send a ping on an unwritable connection. rtc::LoggingSeverity sev = !writable() ? 
rtc::LS_INFO : rtc::LS_VERBOSE; @@ -1385,7 +1166,7 @@ void Connection::OnConnectionRequestSent(ConnectionRequest* request) { << StunMethodToString(request->msg()->type()) << ", id=" << rtc::hex_encode(request->id()) << ", use_candidate=" << use_candidate_attr() - << ", nomination=" << nomination_; + << ", nomination=" << nomination(); stats_.sent_ping_requests_total++; LogCandidatePairEvent(webrtc::IceCandidatePairEventType::kCheckSent, request->reduced_transaction_id()); @@ -1398,16 +1179,6 @@ void Connection::HandleRoleConflictFromPeer() { port_->SignalRoleConflict(port_); } -IceCandidatePairState Connection::state() const { - RTC_DCHECK_RUN_ON(network_thread_); - return state_; -} - -int Connection::num_pings_sent() const { - RTC_DCHECK_RUN_ON(network_thread_); - return num_pings_sent_; -} - void Connection::MaybeSetRemoteIceParametersAndGeneration( const IceParameters& ice_params, int generation) { @@ -1438,19 +1209,20 @@ void Connection::MaybeUpdatePeerReflexiveCandidate( } } +void Connection::OnMessage(rtc::Message* pmsg) { + RTC_DCHECK(pmsg->message_id == MSG_DELETE); + RTC_LOG(LS_INFO) << "Connection deleted with number of pings sent: " + << num_pings_sent_; + SignalDestroyed(this); + delete this; +} + int64_t Connection::last_received() const { - RTC_DCHECK_RUN_ON(network_thread_); return std::max(last_data_received_, std::max(last_ping_received_, last_ping_response_received_)); } -int64_t Connection::receiving_unchanged_since() const { - RTC_DCHECK_RUN_ON(network_thread_); - return receiving_unchanged_since_; -} - ConnectionInfo Connection::stats() { - RTC_DCHECK_RUN_ON(network_thread_); stats_.recv_bytes_second = round(recv_rate_tracker_.ComputeRate()); stats_.recv_total_bytes = recv_rate_tracker_.TotalSampleCount(); stats_.sent_bytes_second = round(send_rate_tracker_.ComputeRate()); @@ -1537,12 +1309,10 @@ void Connection::MaybeUpdateLocalCandidate(ConnectionRequest* request, } bool Connection::rtt_converged() const { - RTC_DCHECK_RUN_ON(network_thread_); return rtt_samples_ > (RTT_RATIO + 1); } bool Connection::missing_responses(int64_t now) const { - RTC_DCHECK_RUN_ON(network_thread_); if (pings_since_last_response_.empty()) { return false; } @@ -1553,7 +1323,6 @@ bool Connection::missing_responses(int64_t now) const { bool Connection::TooManyOutstandingPings( const absl::optional& max_outstanding_pings) const { - RTC_DCHECK_RUN_ON(network_thread_); if (!max_outstanding_pings.has_value()) { return false; } @@ -1564,7 +1333,6 @@ bool Connection::TooManyOutstandingPings( return true; } -// RTC_RUN_ON(network_thread_). bool Connection::ShouldSendGoogPing(const StunMessage* message) { if (remote_support_goog_ping_ == true && cached_stun_binding_ && cached_stun_binding_->EqualAttributes(message, [](int type) { @@ -1582,7 +1350,6 @@ bool Connection::ShouldSendGoogPing(const StunMessage* message) { } void Connection::ForgetLearnedState() { - RTC_DCHECK_RUN_ON(network_thread_); RTC_LOG(LS_INFO) << ToString() << ": Connection forget learned state"; requests_.Clear(); receiving_ = false; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h index a7b349075..7efe7d65e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/connection.h @@ -70,7 +70,9 @@ class ConnectionRequest : public StunRequest { // Represents a communication link between a port on the local client and a // port on the remote client. 
-class Connection : public CandidatePairInterface, public sigslot::has_slots<> { +class Connection : public CandidatePairInterface, + public rtc::MessageHandlerAutoCleanup, + public sigslot::has_slots<> { public: struct SentPing { SentPing(const std::string id, int64_t sent_time, uint32_t nomination) @@ -86,8 +88,6 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // A unique ID assigned when the connection is created. uint32_t id() const { return id_; } - webrtc::TaskQueueBase* network_thread() const; - // Implementation of virtual methods in CandidatePairInterface. // Returns the description of the local port const Candidate& local_candidate() const override; @@ -109,28 +109,34 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { STATE_WRITE_TIMEOUT = 3, // we have had a large number of ping failures }; - WriteState write_state() const; - bool writable() const; - bool receiving() const; + WriteState write_state() const { return write_state_; } + bool writable() const { return write_state_ == STATE_WRITABLE; } + bool receiving() const { return receiving_; } // Determines whether the connection has finished connecting. This can only // be false for TCP connections. - bool connected() const; - bool weak() const; - bool active() const; + bool connected() const { return connected_; } + bool weak() const { return !(writable() && receiving() && connected()); } + bool active() const { return write_state_ != STATE_WRITE_TIMEOUT; } // A connection is dead if it can be safely deleted. bool dead(int64_t now) const; // Estimate of the round-trip time over this connection. - int rtt() const; + int rtt() const { return rtt_; } int unwritable_timeout() const; - void set_unwritable_timeout(const absl::optional& value_ms); + void set_unwritable_timeout(const absl::optional& value_ms) { + unwritable_timeout_ = value_ms; + } int unwritable_min_checks() const; - void set_unwritable_min_checks(const absl::optional& value); + void set_unwritable_min_checks(const absl::optional& value) { + unwritable_min_checks_ = value; + } int inactive_timeout() const; - void set_inactive_timeout(const absl::optional& value); + void set_inactive_timeout(const absl::optional& value) { + inactive_timeout_ = value; + } // Gets the `ConnectionInfo` stats, where `best_connection` has not been // populated (default value false). @@ -166,15 +172,15 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // still keep it around in case the other side wants to use it. But we can // safely stop pinging on it and we can allow it to time out if the other // side stops using it as well. - bool pruned() const; + bool pruned() const { return pruned_; } void Prune(); - bool use_candidate_attr() const; + bool use_candidate_attr() const { return use_candidate_attr_; } void set_use_candidate_attr(bool enable); - void set_nomination(uint32_t value); + void set_nomination(uint32_t value) { nomination_ = value; } - uint32_t remote_nomination() const; + uint32_t remote_nomination() const { return remote_nomination_; } // One or several pairs may be nominated based on if Regular or Aggressive // Nomination is used. https://tools.ietf.org/html/rfc5245#section-8 // `nominated` is defined both for the controlling or controlled agent based @@ -182,10 +188,13 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // gets its `remote_nomination_` set when pinged by the controlling agent with // a nomination value. 
The controlling agent gets its `acked_nomination_` set // when receiving a response to a nominating ping. - bool nominated() const; + bool nominated() const { return acked_nomination_ || remote_nomination_; } + void set_remote_ice_mode(IceMode mode) { remote_ice_mode_ = mode; } int receiving_timeout() const; - void set_receiving_timeout(absl::optional receiving_timeout_ms); + void set_receiving_timeout(absl::optional receiving_timeout_ms) { + receiving_timeout_ = receiving_timeout_ms; + } // Makes the connection go away. void Destroy(); @@ -202,23 +211,24 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { void UpdateState(int64_t now); // Called when this connection should try checking writability again. - int64_t last_ping_sent() const; + int64_t last_ping_sent() const { return last_ping_sent_; } void Ping(int64_t now); void ReceivedPingResponse( int rtt, const std::string& request_id, const absl::optional& nomination = absl::nullopt); - - int64_t last_ping_response_received() const; - const absl::optional& last_ping_id_received() const; - + int64_t last_ping_response_received() const { + return last_ping_response_received_; + } + const absl::optional& last_ping_id_received() const { + return last_ping_id_received_; + } // Used to check if any STUN ping response has been received. - int rtt_samples() const; + int rtt_samples() const { return rtt_samples_; } // Called whenever a valid ping is received on this connection. This is // public because the connection intercepts the first ping for us. - int64_t last_ping_received() const; - + int64_t last_ping_received() const { return last_ping_received_; } void ReceivedPing( const absl::optional& request_id = absl::nullopt); // Handles the binding request; sends a response if this is a valid request. @@ -228,8 +238,8 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // connectivity check from the peer. void HandlePiggybackCheckAcknowledgementIfAny(StunMessage* msg); // Timestamp when data was last sent (or attempted to be sent). - int64_t last_send_data() const; - int64_t last_data_received() const; + int64_t last_send_data() const { return last_send_data_; } + int64_t last_data_received() const { return last_data_received_; } // Debugging description of this connection std::string ToDebugId() const; @@ -237,19 +247,19 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { std::string ToSensitiveString() const; // Structured description of this candidate pair. const webrtc::IceCandidatePairDescription& ToLogDescription(); - void set_ice_event_log(webrtc::IceEventLog* ice_event_log); - + void set_ice_event_log(webrtc::IceEventLog* ice_event_log) { + ice_event_log_ = ice_event_log; + } // Prints pings_since_last_response_ into a string. void PrintPingsSinceLastResponse(std::string* pings, size_t max); - bool reported() const; - void set_reported(bool reported); - - // `set_selected` is only used for logging in ToString above. The flag is - // set true by P2PTransportChannel for its selected candidate pair. - // TODO(tommi): Remove `selected()` once not referenced downstream. - bool selected() const; - void set_selected(bool selected); + bool reported() const { return reported_; } + void set_reported(bool reported) { reported_ = reported; } + // The following two methods are only used for logging in ToString above, and + // this flag is set true by P2PTransportChannel for its selected candidate + // pair. 
+ bool selected() const { return selected_; } + void set_selected(bool selected) { selected_ = selected; } // This signal will be fired if this connection is nominated by the // controlling side. @@ -258,9 +268,11 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // Invoked when Connection receives STUN error response with 487 code. void HandleRoleConflictFromPeer(); - IceCandidatePairState state() const; + IceCandidatePairState state() const { return state_; } - int num_pings_sent() const; + int num_pings_sent() const { return num_pings_sent_; } + + IceMode remote_ice_mode() const { return remote_ice_mode_; } uint32_t ComputeNetworkCost() const; @@ -279,7 +291,9 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { // response in milliseconds int64_t last_received() const; // Returns the last time when the connection changed its receiving state. - int64_t receiving_unchanged_since() const; + int64_t receiving_unchanged_since() const { + return receiving_unchanged_since_; + } bool stable(int64_t now) const; @@ -314,10 +328,16 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { const Port* PortForTest() const { return port_; } // Public for unit tests. - uint32_t acked_nomination() const; - void set_remote_nomination(uint32_t remote_nomination); + uint32_t acked_nomination() const { return acked_nomination_; } + + // Public for unit tests. + void set_remote_nomination(uint32_t remote_nomination) { + remote_nomination_ = remote_nomination; + } protected: + enum { MSG_DELETE = 0, MSG_FIRST_AVAILABLE }; + // Constructs a new connection to the given remote port. Connection(Port* port, size_t index, const Candidate& candidate); @@ -328,12 +348,9 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { virtual void OnConnectionRequestResponse(ConnectionRequest* req, StunMessage* response); void OnConnectionRequestErrorResponse(ConnectionRequest* req, - StunMessage* response) - RTC_RUN_ON(network_thread_); - void OnConnectionRequestTimeout(ConnectionRequest* req) - RTC_RUN_ON(network_thread_); - void OnConnectionRequestSent(ConnectionRequest* req) - RTC_RUN_ON(network_thread_); + StunMessage* response); + void OnConnectionRequestTimeout(ConnectionRequest* req); + void OnConnectionRequestSent(ConnectionRequest* req); bool rtt_converged() const; @@ -347,120 +364,103 @@ class Connection : public CandidatePairInterface, public sigslot::has_slots<> { void set_state(IceCandidatePairState state); void set_connected(bool value); + uint32_t nomination() const { return nomination_; } + + void OnMessage(rtc::Message* pmsg) override; + // The local port where this connection sends and receives packets. Port* port() { return port_; } const Port* port() const { return port_; } - // NOTE: A pointer to the network thread is held by `port_` so in theory we - // shouldn't need to hold on to this pointer here, but rather defer to - // port_->thread(). However, some tests delete the classes in the wrong order - // so `port_` may be deleted before an instance of this class is deleted. - // TODO(tommi): This ^^^ should be fixed. 
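// Illustrative sketch, not part of this patch: the Connection hunks above move
// the class back onto the legacy rtc::MessageHandlerAutoCleanup machinery,
// reserving message IDs via `enum { MSG_DELETE = 0, MSG_FIRST_AVAILABLE }` and
// dispatching in OnMessage(rtc::Message*). The sketch below shows that ID
// convention with placeholder class names; only MessageHandlerAutoCleanup,
// rtc::Message, message_id and the MSG_FIRST_AVAILABLE idiom come from this
// diff, everything else is assumed.
#include "rtc_base/message_handler.h"  // rtc::MessageHandlerAutoCleanup
#include "rtc_base/thread.h"           // rtc::Message

class SketchBase : public rtc::MessageHandlerAutoCleanup {
 protected:
  // The base class owns the low IDs; subclasses start at MSG_FIRST_AVAILABLE.
  enum { MSG_BASE_CLEANUP = 0, MSG_FIRST_AVAILABLE };
  void OnMessage(rtc::Message* pmsg) override {
    if (pmsg->message_id == MSG_BASE_CLEANUP) {
      // Base-class bookkeeping (e.g. deferred teardown) would run here.
    }
  }
};

class SketchDerived : public SketchBase {
 protected:
  // Subclass IDs continue where the base stopped, mirroring how TCPConnection
  // numbers MSG_TCPCONNECTION_* from Connection::MSG_FIRST_AVAILABLE later in
  // this patch.
  enum { MSG_DERIVED_TIMEOUT = SketchBase::MSG_FIRST_AVAILABLE };
  void OnMessage(rtc::Message* pmsg) override {
    switch (pmsg->message_id) {
      case MSG_DERIVED_TIMEOUT:
        break;  // subclass-specific handling
      default:
        SketchBase::OnMessage(pmsg);  // unhandled IDs fall back to the base
    }
  }
};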
- webrtc::TaskQueueBase* const network_thread_; - const uint32_t id_; - Port* const port_; - size_t local_candidate_index_ RTC_GUARDED_BY(network_thread_); + uint32_t id_; + Port* port_; + size_t local_candidate_index_; Candidate remote_candidate_; ConnectionInfo stats_; rtc::RateTracker recv_rate_tracker_; rtc::RateTracker send_rate_tracker_; int64_t last_send_data_ = 0; - // Set to true when deletion has been scheduled and must not be done again. - // See `Destroy()` for more details. - bool pending_delete_ RTC_GUARDED_BY(network_thread_) = false; private: // Update the local candidate based on the mapped address attribute. // If the local candidate changed, fires SignalStateChange. void MaybeUpdateLocalCandidate(ConnectionRequest* request, - StunMessage* response) - RTC_RUN_ON(network_thread_); + StunMessage* response); - void LogCandidatePairConfig(webrtc::IceCandidatePairConfigType type) - RTC_RUN_ON(network_thread_); + void LogCandidatePairConfig(webrtc::IceCandidatePairConfigType type); void LogCandidatePairEvent(webrtc::IceCandidatePairEventType type, - uint32_t transaction_id) - RTC_RUN_ON(network_thread_); + uint32_t transaction_id); // Check if this IceMessage is identical // to last message ack:ed STUN_BINDING_REQUEST. - bool ShouldSendGoogPing(const StunMessage* message) - RTC_RUN_ON(network_thread_); + bool ShouldSendGoogPing(const StunMessage* message); - WriteState write_state_ RTC_GUARDED_BY(network_thread_); - bool receiving_ RTC_GUARDED_BY(network_thread_); - bool connected_ RTC_GUARDED_BY(network_thread_); - bool pruned_ RTC_GUARDED_BY(network_thread_); - bool selected_ RTC_GUARDED_BY(network_thread_) = false; + WriteState write_state_; + bool receiving_; + bool connected_; + bool pruned_; + bool selected_ = false; // By default `use_candidate_attr_` flag will be true, // as we will be using aggressive nomination. // But when peer is ice-lite, this flag "must" be initialized to false and // turn on when connection becomes "best connection". - bool use_candidate_attr_ RTC_GUARDED_BY(network_thread_); + bool use_candidate_attr_; // Used by the controlling side to indicate that this connection will be // selected for transmission if the peer supports ICE-renomination when this // value is positive. A larger-value indicates that a connection is nominated // later and should be selected by the controlled side with higher precedence. // A zero-value indicates not nominating this connection. - uint32_t nomination_ RTC_GUARDED_BY(network_thread_) = 0; + uint32_t nomination_ = 0; // The last nomination that has been acknowledged. - uint32_t acked_nomination_ RTC_GUARDED_BY(network_thread_) = 0; + uint32_t acked_nomination_ = 0; // Used by the controlled side to remember the nomination value received from // the controlling side. When the peer does not support ICE re-nomination, its // value will be 1 if the connection has been nominated. 
- uint32_t remote_nomination_ RTC_GUARDED_BY(network_thread_) = 0; + uint32_t remote_nomination_ = 0; - StunRequestManager requests_ RTC_GUARDED_BY(network_thread_); - int rtt_ RTC_GUARDED_BY(network_thread_); - int rtt_samples_ RTC_GUARDED_BY(network_thread_) = 0; + IceMode remote_ice_mode_; + StunRequestManager requests_; + int rtt_; + int rtt_samples_ = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-totalroundtriptime - uint64_t total_round_trip_time_ms_ RTC_GUARDED_BY(network_thread_) = 0; + uint64_t total_round_trip_time_ms_ = 0; // https://w3c.github.io/webrtc-stats/#dom-rtcicecandidatepairstats-currentroundtriptime - absl::optional current_round_trip_time_ms_ - RTC_GUARDED_BY(network_thread_); - int64_t last_ping_sent_ RTC_GUARDED_BY( - network_thread_); // last time we sent a ping to the other side - int64_t last_ping_received_ - RTC_GUARDED_BY(network_thread_); // last time we received a ping from the - // other side - int64_t last_data_received_ RTC_GUARDED_BY(network_thread_); - int64_t last_ping_response_received_ RTC_GUARDED_BY(network_thread_); - int64_t receiving_unchanged_since_ RTC_GUARDED_BY(network_thread_) = 0; - std::vector pings_since_last_response_ - RTC_GUARDED_BY(network_thread_); + absl::optional current_round_trip_time_ms_; + int64_t last_ping_sent_; // last time we sent a ping to the other side + int64_t last_ping_received_; // last time we received a ping from the other + // side + int64_t last_data_received_; + int64_t last_ping_response_received_; + int64_t receiving_unchanged_since_ = 0; + std::vector pings_since_last_response_; // Transaction ID of the last connectivity check received. Null if having not // received a ping yet. - absl::optional last_ping_id_received_ - RTC_GUARDED_BY(network_thread_); + absl::optional last_ping_id_received_; - absl::optional unwritable_timeout_ RTC_GUARDED_BY(network_thread_); - absl::optional unwritable_min_checks_ RTC_GUARDED_BY(network_thread_); - absl::optional inactive_timeout_ RTC_GUARDED_BY(network_thread_); + absl::optional unwritable_timeout_; + absl::optional unwritable_min_checks_; + absl::optional inactive_timeout_; - bool reported_ RTC_GUARDED_BY(network_thread_); - IceCandidatePairState state_ RTC_GUARDED_BY(network_thread_); + bool reported_; + IceCandidatePairState state_; // Time duration to switch from receiving to not receiving. - absl::optional receiving_timeout_ RTC_GUARDED_BY(network_thread_); - int64_t time_created_ms_ RTC_GUARDED_BY(network_thread_); - int num_pings_sent_ RTC_GUARDED_BY(network_thread_) = 0; + absl::optional receiving_timeout_; + int64_t time_created_ms_; + int num_pings_sent_ = 0; - absl::optional log_description_ - RTC_GUARDED_BY(network_thread_); - webrtc::IceEventLog* ice_event_log_ RTC_GUARDED_BY(network_thread_) = nullptr; + absl::optional log_description_; + webrtc::IceEventLog* ice_event_log_ = nullptr; // GOOG_PING_REQUEST is sent in place of STUN_BINDING_REQUEST // if configured via field trial, the remote peer supports it (signaled // in STUN_BINDING) and if the last STUN BINDING is identical to the one // that is about to be sent. 
- absl::optional remote_support_goog_ping_ - RTC_GUARDED_BY(network_thread_); - std::unique_ptr cached_stun_binding_ - RTC_GUARDED_BY(network_thread_); + absl::optional remote_support_goog_ping_; + std::unique_ptr cached_stun_binding_; const IceFieldTrials* field_trials_; - rtc::EventBasedExponentialMovingAverage rtt_estimate_ - RTC_GUARDED_BY(network_thread_); + rtc::EventBasedExponentialMovingAverage rtt_estimate_; friend class Port; friend class ConnectionRequest; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h index d503a928b..edfa8896c 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport.h @@ -22,6 +22,7 @@ #include "p2p/base/ice_transport_internal.h" #include "rtc_base/buffer.h" #include "rtc_base/buffer_queue.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/stream.h" #include "rtc_base/strings/string_builder.h" @@ -39,9 +40,6 @@ class StreamInterfaceChannel : public rtc::StreamInterface { public: explicit StreamInterfaceChannel(IceTransportInternal* ice_transport); - StreamInterfaceChannel(const StreamInterfaceChannel&) = delete; - StreamInterfaceChannel& operator=(const StreamInterfaceChannel&) = delete; - // Push in a packet; this gets pulled out from Read(). bool OnPacketReceived(const char* data, size_t size); @@ -62,6 +60,8 @@ class StreamInterfaceChannel : public rtc::StreamInterface { IceTransportInternal* const ice_transport_; // owned by DtlsTransport rtc::StreamState state_ RTC_GUARDED_BY(sequence_checker_); rtc::BufferQueue packets_ RTC_GUARDED_BY(sequence_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterfaceChannel); }; // This class provides a DTLS SSLStreamAdapter inside a TransportChannel-style @@ -110,9 +110,6 @@ class DtlsTransport : public DtlsTransportInternal { ~DtlsTransport() override; - DtlsTransport(const DtlsTransport&) = delete; - DtlsTransport& operator=(const DtlsTransport&) = delete; - webrtc::DtlsTransportState dtls_state() const override; const std::string& transport_name() const override; int component() const override; @@ -251,6 +248,8 @@ class DtlsTransport : public DtlsTransportInternal { bool writable_ = false; webrtc::RtcEventLog* const event_log_; + + RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransport); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h index 24c682ff0..0b26a7fd7 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/dtls_transport_internal.h @@ -25,6 +25,7 @@ #include "p2p/base/ice_transport_internal.h" #include "p2p/base/packet_transport_internal.h" #include "rtc_base/callback_list.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" @@ -47,9 +48,6 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { public: ~DtlsTransportInternal() override; - DtlsTransportInternal(const DtlsTransportInternal&) = delete; - DtlsTransportInternal& operator=(const DtlsTransportInternal&) = delete; - virtual webrtc::DtlsTransportState dtls_state() const = 0; virtual int component() const = 0; @@ -137,6 +135,7 @@ class DtlsTransportInternal : public rtc::PacketTransportInternal { DtlsTransportInternal(); private: + 
RTC_DISALLOW_COPY_AND_ASSIGN(DtlsTransportInternal); webrtc::CallbackList dtls_handshake_error_callback_list_; webrtc::CallbackList diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc index 5667bc516..f6a3858a6 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.cc @@ -70,15 +70,40 @@ uint32_t GetWeakPingIntervalInFieldTrial() { return cricket::WEAK_PING_INTERVAL; } +rtc::AdapterType GuessAdapterTypeFromNetworkCost(int network_cost) { + // The current network costs have been unchanged since they were added + // to webrtc. If they ever were to change we would need to reconsider + // this method. + switch (network_cost) { + case rtc::kNetworkCostMin: + return rtc::ADAPTER_TYPE_ETHERNET; + case rtc::kNetworkCostLow: + return rtc::ADAPTER_TYPE_WIFI; + case rtc::kNetworkCostCellular: + return rtc::ADAPTER_TYPE_CELLULAR; + case rtc::kNetworkCostCellular2G: + return rtc::ADAPTER_TYPE_CELLULAR_2G; + case rtc::kNetworkCostCellular3G: + return rtc::ADAPTER_TYPE_CELLULAR_3G; + case rtc::kNetworkCostCellular4G: + return rtc::ADAPTER_TYPE_CELLULAR_4G; + case rtc::kNetworkCostCellular5G: + return rtc::ADAPTER_TYPE_CELLULAR_5G; + case rtc::kNetworkCostUnknown: + return rtc::ADAPTER_TYPE_UNKNOWN; + case rtc::kNetworkCostMax: + return rtc::ADAPTER_TYPE_ANY; + } + return rtc::ADAPTER_TYPE_UNKNOWN; +} + rtc::RouteEndpoint CreateRouteEndpointFromCandidate( bool local, const cricket::Candidate& candidate, bool uses_turn) { auto adapter_type = candidate.network_type(); if (!local && adapter_type == rtc::ADAPTER_TYPE_UNKNOWN) { - bool vpn; - std::tie(adapter_type, vpn) = - rtc::Network::GuessAdapterFromNetworkCost(candidate.network_cost()); + adapter_type = GuessAdapterTypeFromNetworkCost(candidate.network_cost()); } // TODO(bugs.webrtc.org/9446) : Rewrite if information about remote network @@ -228,7 +253,6 @@ P2PTransportChannel::~P2PTransportChannel() { RTC_DCHECK_RUN_ON(network_thread_); std::vector copy(connections().begin(), connections().end()); for (Connection* con : copy) { - con->SignalDestroyed.disconnect(this); con->Destroy(); } resolvers_.clear(); @@ -265,6 +289,7 @@ void P2PTransportChannel::AddAllocatorSession( void P2PTransportChannel::AddConnection(Connection* connection) { RTC_DCHECK_RUN_ON(network_thread_); + connection->set_remote_ice_mode(remote_ice_mode_); connection->set_receiving_timeout(config_.receiving_timeout); connection->set_unwritable_timeout(config_.ice_unwritable_timeout); connection->set_unwritable_min_checks(config_.ice_unwritable_min_checks); @@ -776,26 +801,6 @@ void P2PTransportChannel::SetIceConfig(const IceConfig& config) { ice_controller_->SetIceConfig(config_); - // DSCP override, allow user to specify (any) int value - // that will be used for tagging all packets. 
- webrtc::StructParametersParser::Create("override_dscp", - &field_trials_.override_dscp) - ->Parse(webrtc::field_trial::FindFullName("WebRTC-DscpFieldTrial")); - - if (field_trials_.override_dscp) { - SetOption(rtc::Socket::OPT_DSCP, *field_trials_.override_dscp); - } - - std::string field_trial_string = - webrtc::field_trial::FindFullName("WebRTC-SetSocketReceiveBuffer"); - int receive_buffer_size_kb = 0; - sscanf(field_trial_string.c_str(), "Enabled-%d", &receive_buffer_size_kb); - if (receive_buffer_size_kb > 0) { - RTC_LOG(LS_INFO) << "Set WebRTC-SetSocketReceiveBuffer: Enabled and set to " - << receive_buffer_size_kb << "kb"; - SetOption(rtc::Socket::OPT_RCVBUF, receive_buffer_size_kb * 1024); - } - RTC_DCHECK(ValidateIceConfig(config_).ok()); } @@ -1550,10 +1555,6 @@ void P2PTransportChannel::RememberRemoteCandidate( // port objects. int P2PTransportChannel::SetOption(rtc::Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread_); - if (field_trials_.override_dscp && opt == rtc::Socket::OPT_DSCP) { - value = *field_trials_.override_dscp; - } - OptionMap::iterator it = options_.find(opt); if (it == options_.end()) { options_.insert(std::make_pair(opt, value)); @@ -1667,17 +1668,6 @@ rtc::ArrayView P2PTransportChannel::connections() const { res.size()); } -void P2PTransportChannel::RemoveConnectionForTest(Connection* connection) { - RTC_DCHECK_RUN_ON(network_thread_); - RTC_DCHECK(FindConnection(connection)); - connection->SignalDestroyed.disconnect(this); - ice_controller_->OnConnectionDestroyed(connection); - RTC_DCHECK(!FindConnection(connection)); - if (selected_connection_ == connection) - selected_connection_ = nullptr; - connection->Destroy(); -} - // Monitor connection states. void P2PTransportChannel::UpdateConnectionStates() { RTC_DCHECK_RUN_ON(network_thread_); @@ -1685,11 +1675,7 @@ void P2PTransportChannel::UpdateConnectionStates() { // We need to copy the list of connections since some may delete themselves // when we call UpdateState. - // NOTE: We copy the connections() vector in case `UpdateState` triggers the - // Connection to be destroyed (which will cause a callback that alters - // the connections() vector). - std::vector copy(connections().begin(), connections().end()); - for (Connection* c : copy) { + for (Connection* c : connections()) { c->UpdateState(now); } } @@ -2000,31 +1986,12 @@ void P2PTransportChannel::MaybeStopPortAllocatorSessions() { } } -// RTC_RUN_ON(network_thread_) -void P2PTransportChannel::OnSelectedConnectionDestroyed() { - RTC_LOG(LS_INFO) << "Selected connection destroyed. Will choose a new one."; - IceControllerEvent reason = IceControllerEvent::SELECTED_CONNECTION_DESTROYED; - SwitchSelectedConnection(nullptr, reason); - RequestSortAndStateUpdate(reason); -} - // If all connections timed out, delete them all. 
void P2PTransportChannel::HandleAllTimedOut() { RTC_DCHECK_RUN_ON(network_thread_); - bool update_selected_connection = false; - std::vector copy(connections().begin(), connections().end()); - for (Connection* connection : copy) { - if (selected_connection_ == connection) { - selected_connection_ = nullptr; - update_selected_connection = true; - } - connection->SignalDestroyed.disconnect(this); - ice_controller_->OnConnectionDestroyed(connection); + for (Connection* connection : connections()) { connection->Destroy(); } - - if (update_selected_connection) - OnSelectedConnectionDestroyed(); } bool P2PTransportChannel::ReadyToSend(Connection* connection) const { @@ -2158,7 +2125,11 @@ void P2PTransportChannel::OnConnectionDestroyed(Connection* connection) { // we can just set selected to nullptr and re-choose a best assuming that // there was no selected connection. if (selected_connection_ == connection) { - OnSelectedConnectionDestroyed(); + RTC_LOG(LS_INFO) << "Selected connection destroyed. Will choose a new one."; + IceControllerEvent reason = + IceControllerEvent::SELECTED_CONNECTION_DESTROYED; + SwitchSelectedConnection(nullptr, reason); + RequestSortAndStateUpdate(reason); } else { // If a non-selected connection was destroyed, we don't need to re-sort but // we do need to update state, because we could be switching to "failed" or diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h index d06234603..28248e794 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel.h @@ -56,6 +56,7 @@ #include "p2p/base/transport_description.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/dscp.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/network_route.h" @@ -123,9 +124,6 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { IceControllerFactoryInterface* ice_controller_factory = nullptr); ~P2PTransportChannel() override; - P2PTransportChannel(const P2PTransportChannel&) = delete; - P2PTransportChannel& operator=(const P2PTransportChannel&) = delete; - // From TransportChannelImpl: IceTransportState GetState() const override; webrtc::IceTransportState GetIceTransportState() const override; @@ -212,7 +210,6 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { // Public for unit tests. rtc::ArrayView connections() const; - void RemoveConnectionForTest(Connection* connection); // Public for unit tests. PortAllocatorSession* allocator_session() const { @@ -273,7 +270,6 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { void UpdateState(); void HandleAllTimedOut(); void MaybeStopPortAllocatorSessions(); - void OnSelectedConnectionDestroyed() RTC_RUN_ON(network_thread_); // ComputeIceTransportState computes the RTCIceTransportState as described in // https://w3c.github.io/webrtc-pc/#dom-rtcicetransportstate. 
ComputeState @@ -498,6 +494,8 @@ class RTC_EXPORT P2PTransportChannel : public IceTransportInternal { int64_t last_data_received_ms_ = 0; IceFieldTrials field_trials_; + + RTC_DISALLOW_COPY_AND_ASSIGN(P2PTransportChannel); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h index f05623dd3..4987f1cbc 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/p2p_transport_channel_ice_field_trials.h @@ -61,9 +61,6 @@ struct IceFieldTrials { // Stop gathering when having a strong connection. bool stop_gather_on_strongly_connected = true; - - // DSCP taging. - absl::optional override_dscp; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc index c616658fb..51297c46c 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.cc @@ -194,10 +194,8 @@ Port::~Port() { ++iter; } - for (uint32_t i = 0; i < list.size(); i++) { - list[i]->SignalDestroyed.disconnect(this); + for (uint32_t i = 0; i < list.size(); i++) delete list[i]; - } } const std::string& Port::Type() const { @@ -608,15 +606,6 @@ rtc::DiffServCodePoint Port::StunDscpValue() const { return rtc::DSCP_NO_CHANGE; } -void Port::DestroyAllConnections() { - RTC_DCHECK_RUN_ON(thread_); - for (auto kv : connections_) { - kv.second->SignalDestroyed.disconnect(this); - kv.second->Destroy(); - } - connections_.clear(); -} - void Port::set_timeout_delay(int delay) { RTC_DCHECK_RUN_ON(thread_); // Although this method is meant to only be used by tests, some downstream diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h index 1ec82f704..991872902 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/port.h @@ -435,8 +435,6 @@ class Port : public PortInterface, // Extra work to be done in subclasses when a connection is destroyed. 
virtual void HandleConnectionDestroyed(Connection* conn) {} - void DestroyAllConnections(); - void CopyPortInformationToPacketInfo(rtc::PacketInfo* info) const; MdnsNameRegistrationStatus mdns_name_registration_status() const { diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc index 1ba648604..c9fbefdc1 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/stun_port.cc @@ -99,7 +99,7 @@ class StunBindingRequest : public StunRequest { << port_->Network()->name() << ")"; port_->OnStunBindingOrResolveRequestFailed( server_addr_, SERVER_NOT_REACHABLE_ERROR, - "STUN binding request timed out."); + "STUN allocate request timed out."); } private: @@ -512,7 +512,7 @@ void UDPPort::OnStunBindingRequestSucceeded( } rtc::StringBuilder url; - url << "stun:" << stun_server_addr.hostname() << ":" + url << "stun:" << stun_server_addr.ipaddr().ToString() << ":" << stun_server_addr.port(); AddAddress(stun_reflected_addr, socket_->GetLocalAddress(), related_address, UDP_PROTOCOL_NAME, "", "", STUN_PORT_TYPE, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc index 445b0d03a..9d542074a 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.cc @@ -79,7 +79,6 @@ #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" #include "rtc_base/rate_tracker.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace cricket { @@ -367,9 +366,7 @@ TCPConnection::TCPConnection(TCPPort* port, } } -TCPConnection::~TCPConnection() { - RTC_DCHECK_RUN_ON(network_thread_); -} +TCPConnection::~TCPConnection() {} int TCPConnection::Send(const void* data, size_t size, @@ -496,20 +493,11 @@ void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) { // events. pretending_to_be_writable_ = true; - // If this connection can't become connected and writable again in 5 - // seconds, it's time to tear this down. This is the case for the original - // TCP connection on passive side during a reconnect. // We don't attempt reconnect right here. This is to avoid a case where the // shutdown is intentional and reconnect is not necessary. We only reconnect // when the connection is used to Send() or Ping(). - port()->thread()->PostDelayedTask( - webrtc::ToQueuedTask(network_safety_, - [this]() { - if (pretending_to_be_writable_) { - Destroy(); - } - }), - reconnection_timeout()); + port()->thread()->PostDelayed(RTC_FROM_HERE, reconnection_timeout(), this, + MSG_TCPCONNECTION_DELAYED_ONCLOSE); } else if (!pretending_to_be_writable_) { // OnClose could be called when the underneath socket times out during the // initial connect() (i.e. `pretending_to_be_writable_` is false) . We have @@ -519,6 +507,24 @@ void TCPConnection::OnClose(rtc::AsyncPacketSocket* socket, int error) { } } +void TCPConnection::OnMessage(rtc::Message* pmsg) { + switch (pmsg->message_id) { + case MSG_TCPCONNECTION_DELAYED_ONCLOSE: + // If this connection can't become connected and writable again in 5 + // seconds, it's time to tear this down. This is the case for the original + // TCP connection on passive side during a reconnect. 
+ if (pretending_to_be_writable_) { + Destroy(); + } + break; + case MSG_TCPCONNECTION_FAILED_CREATE_SOCKET: + FailAndPrune(); + break; + default: + Connection::OnMessage(pmsg); + } +} + void TCPConnection::MaybeReconnect() { // Only reconnect for an outgoing TCPConnection when OnClose was signaled and // no outstanding reconnect is pending. @@ -570,13 +576,13 @@ void TCPConnection::CreateOutgoingTcpSocket() { } else { RTC_LOG(LS_WARNING) << ToString() << ": Failed to create connection to " << remote_candidate().address().ToSensitiveString(); - set_state(IceCandidatePairState::FAILED); // We can't FailAndPrune directly here. FailAndPrune and deletes all // the StunRequests from the request_map_. And if this is in the stack // of Connection::Ping(), we are still using the request. // Unwind the stack and defer the FailAndPrune. - port()->thread()->PostTask( - webrtc::ToQueuedTask(network_safety_, [this]() { FailAndPrune(); })); + set_state(IceCandidatePairState::FAILED); + port()->thread()->Post(RTC_FROM_HERE, this, + MSG_TCPCONNECTION_FAILED_CREATE_SOCKET); } } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h index 07d483cc3..932af50aa 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/tcp_port.h @@ -20,7 +20,6 @@ #include "p2p/base/port.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/containers/flat_map.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" namespace cricket { @@ -136,6 +135,8 @@ class TCPConnection : public Connection { rtc::AsyncPacketSocket* socket() { return socket_.get(); } + void OnMessage(rtc::Message* pmsg) override; + // Allow test cases to overwrite the default timeout period. int reconnection_timeout() const { return reconnection_timeout_; } void set_reconnection_timeout(int timeout_in_ms) { @@ -143,6 +144,11 @@ class TCPConnection : public Connection { } protected: + enum { + MSG_TCPCONNECTION_DELAYED_ONCLOSE = Connection::MSG_FIRST_AVAILABLE, + MSG_TCPCONNECTION_FAILED_CREATE_SOCKET, + }; + // Set waiting_for_stun_binding_complete_ to false to allow data packets in // addition to what Port::OnConnectionRequestResponse does. void OnConnectionRequestResponse(ConnectionRequest* req, @@ -184,8 +190,6 @@ class TCPConnection : public Connection { // Allow test case to overwrite the default timeout period. int reconnection_timeout_; - webrtc::ScopedTaskSafety network_safety_; - friend class TCPPort; }; diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc index bde5bf603..07c106043 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.cc @@ -862,9 +862,10 @@ void TurnPort::OnAllocateSuccess(const rtc::SocketAddress& address, related_address, // Related address. UDP_PROTOCOL_NAME, ProtoToString(server_address_.proto), // The first hop protocol. - "", // TCP candidate type, empty for turn candidates. + "", // TCP canddiate type, empty for turn candidates. 
RELAY_PORT_TYPE, GetRelayPreference(server_address_.proto), - server_priority_, ReconstructedServerUrl(), true); + server_priority_, ReconstructedServerUrl(false /* use_hostname */), + true); } void TurnPort::OnAllocateError(int error_code, const std::string& reason) { @@ -880,8 +881,9 @@ void TurnPort::OnAllocateError(int error_code, const std::string& reason) { port = 0; } SignalCandidateError( - this, IceCandidateErrorEvent(address, port, ReconstructedServerUrl(), - error_code, reason)); + this, IceCandidateErrorEvent( + address, port, ReconstructedServerUrl(true /* use_hostname */), + error_code, reason)); } void TurnPort::OnRefreshError() { @@ -920,7 +922,9 @@ void TurnPort::Close() { // Stop the port from creating new connections. state_ = STATE_DISCONNECTED; // Delete all existing connections; stop sending data. - DestroyAllConnections(); + for (auto kv : connections()) { + kv.second->Destroy(); + } SignalTurnPortClosed(this); } @@ -1268,6 +1272,10 @@ void TurnPort::HandleConnectionDestroyed(Connection* conn) { const rtc::SocketAddress& remote_address = conn->remote_candidate().address(); TurnEntry* entry = FindEntry(remote_address); RTC_DCHECK(entry != NULL); + ScheduleEntryDestruction(entry); +} + +void TurnPort::ScheduleEntryDestruction(TurnEntry* entry) { RTC_DCHECK(!entry->destruction_timestamp().has_value()); int64_t timestamp = rtc::TimeMillis(); entry->set_destruction_timestamp(timestamp); @@ -1289,7 +1297,7 @@ bool TurnPort::SetEntryChannelId(const rtc::SocketAddress& address, return true; } -std::string TurnPort::ReconstructedServerUrl() { +std::string TurnPort::ReconstructedServerUrl(bool use_hostname) { // draft-petithuguenin-behave-turn-uris-01 // turnURI = scheme ":" turn-host [ ":" turn-port ] // [ "?transport=" transport ] @@ -1312,8 +1320,10 @@ std::string TurnPort::ReconstructedServerUrl() { break; } rtc::StringBuilder url; - url << scheme << ":" << server_address_.address.hostname() << ":" - << server_address_.address.port() << "?transport=" << transport; + url << scheme << ":" + << (use_hostname ? server_address_.address.hostname() + : server_address_.address.ipaddr().ToString()) + << ":" << server_address_.address.port() << "?transport=" << transport; return url.Release(); } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h index 172dcef5a..7b8e3b9af 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/base/turn_port.h @@ -361,13 +361,14 @@ class TurnPort : public Port { // Destroys the entry only if `timestamp` matches the destruction timestamp // in `entry`. void DestroyEntryIfNotCancelled(TurnEntry* entry, int64_t timestamp); + void ScheduleEntryDestruction(TurnEntry* entry); // Marks the connection with remote address `address` failed and // pruned (a.k.a. write-timed-out). Returns true if a connection is found. bool FailAndPruneConnection(const rtc::SocketAddress& address); // Reconstruct the URL of the server which the candidate is gathered from. 
- std::string ReconstructedServerUrl(); + std::string ReconstructedServerUrl(bool use_hostname); void MaybeAddTurnLoggingId(StunMessage* message); diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc index 06eabb83d..9967d9a44 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.cc @@ -179,7 +179,6 @@ BasicPortAllocator::BasicPortAllocator(rtc::NetworkManager* network_manager, : network_manager_(network_manager), socket_factory_(socket_factory) { InitRelayPortFactory(nullptr); RTC_DCHECK(relay_port_factory_ != nullptr); - RTC_DCHECK(network_manager_ != nullptr); SetConfiguration(stun_servers, std::vector(), 0, webrtc::NO_PRUNE, nullptr); } diff --git a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h index b1dc7b12a..c043cae6e 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/client/basic_port_allocator.h @@ -30,12 +30,8 @@ namespace cricket { class RTC_EXPORT BasicPortAllocator : public PortAllocator { public: - // The NetworkManager is a mandatory argument. The other arguments are - // optional. All these objects are owned by caller and must have a life time - // that exceeds that of BasicPortAllocator. - // TODO(bugs.webrtc.org/13145): The SocketFactory should be mandatory, but - // currenly isn't. When not specified, one is created internally, based on the - // socket server associated with the thread calling CreateSession. + // note: The (optional) relay_port_factory is owned by caller + // and must have a life time that exceeds that of BasicPortAllocator. BasicPortAllocator(rtc::NetworkManager* network_manager, rtc::PacketSocketFactory* socket_factory, webrtc::TurnCustomizer* customizer = nullptr, diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc index efe0fbdea..419523031 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.cc @@ -21,6 +21,7 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_resolver_interface.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" #include "rtc_base/task_utils/to_queued_task.h" @@ -68,9 +69,6 @@ class StunProber::Requester : public sigslot::has_slots<> { const std::vector& server_ips); ~Requester() override; - Requester(const Requester&) = delete; - Requester& operator=(const Requester&) = delete; - // There is no callback for SendStunRequest as the underneath socket send is // expected to be completed immediately. Otherwise, it'll skip this request // and move to the next one. 
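// Illustrative sketch, not part of this patch: several files in this diff
// (dtls_transport.h, dtls_transport_internal.h, p2p_transport_channel.h,
// stun_prober.h/.cc) swap explicitly deleted copy operations back to the
// RTC_DISALLOW_COPY_AND_ASSIGN macro from rtc_base/constructor_magic.h. The
// two placeholder classes below are intended to be equivalent, assuming the
// macro's usual definition (deleted copy constructor plus deleted copy
// assignment).
#include "rtc_base/constructor_magic.h"

class UsesMacro {
 public:
  UsesMacro() = default;

 private:
  RTC_DISALLOW_COPY_AND_ASSIGN(UsesMacro);  // deletes copy ctor and copy assign
};

class UsesDeletedMembers {
 public:
  UsesDeletedMembers() = default;
  UsesDeletedMembers(const UsesDeletedMembers&) = delete;
  UsesDeletedMembers& operator=(const UsesDeletedMembers&) = delete;
};

// Either way, copying fails to compile:
//   UsesMacro a, b;
//   b = a;  // error: the copy assignment operator is deleted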
@@ -107,6 +105,8 @@ class StunProber::Requester : public sigslot::has_slots<> { int16_t num_response_received_ = 0; webrtc::SequenceChecker& thread_checker_; + + RTC_DISALLOW_COPY_AND_ASSIGN(Requester); }; StunProber::Requester::Requester( diff --git a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h index b1acd7704..fe2f14ca4 100644 --- a/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h +++ b/TMessagesProj/jni/voip/webrtc/p2p/stunprober/stun_prober.h @@ -17,6 +17,7 @@ #include "api/sequence_checker.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ip_address.h" #include "rtc_base/network.h" #include "rtc_base/socket_address.h" @@ -100,9 +101,6 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { const rtc::NetworkManager::NetworkList& networks); ~StunProber() override; - StunProber(const StunProber&) = delete; - StunProber& operator=(const StunProber&) = delete; - // Begin performing the probe test against the `servers`. If // `shared_socket_mode` is false, each request will be done with a new socket. // Otherwise, a unique socket will be used for a single round of requests @@ -121,7 +119,7 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { int stun_ta_interval_ms, int requests_per_ip, int timeout_ms, - AsyncCallback finish_callback); + const AsyncCallback finish_callback); // TODO(guoweis): The combination of Prepare() and Run() are equivalent to the // Start() above. Remove Start() once everything is migrated. @@ -243,6 +241,8 @@ class RTC_EXPORT StunProber : public sigslot::has_slots<> { rtc::NetworkManager::NetworkList networks_; webrtc::ScopedTaskSafety task_safety_; + + RTC_DISALLOW_COPY_AND_ASSIGN(StunProber); }; } // namespace stunprober diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc index 3a306720c..7890d9b1e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.cc @@ -12,7 +12,6 @@ #include -#include #include #include @@ -21,29 +20,25 @@ #include "pc/media_stream_track_proxy.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" -#include "rtc_base/ref_counted_object.h" +#include "rtc_base/logging.h" #include "rtc_base/task_utils/to_queued_task.h" namespace webrtc { -AudioRtpReceiver::AudioRtpReceiver( - rtc::Thread* worker_thread, - std::string receiver_id, - std::vector stream_ids, - bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) +AudioRtpReceiver::AudioRtpReceiver(rtc::Thread* worker_thread, + std::string receiver_id, + std::vector stream_ids, + bool is_unified_plan) : AudioRtpReceiver(worker_thread, receiver_id, CreateStreamsFromIds(std::move(stream_ids)), - is_unified_plan, - voice_channel) {} + is_unified_plan) {} AudioRtpReceiver::AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, - bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel /*= nullptr*/) + bool is_unified_plan) : worker_thread_(worker_thread), id_(receiver_id), source_(rtc::make_ref_counted( @@ -54,8 +49,7 @@ AudioRtpReceiver::AudioRtpReceiver( track_(AudioTrackProxyWithInternal::Create( rtc::Thread::Current(), AudioTrack::Create(receiver_id, source_))), - media_channel_(voice_channel), - cached_track_enabled_(track_->internal()->enabled()), + cached_track_enabled_(track_->enabled()), 
attachment_id_(GenerateUniqueId()), worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); @@ -67,6 +61,7 @@ AudioRtpReceiver::AudioRtpReceiver( AudioRtpReceiver::~AudioRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK(stopped_); RTC_DCHECK(!media_channel_); track_->GetSource()->UnregisterAudioObserver(this); @@ -75,25 +70,21 @@ AudioRtpReceiver::~AudioRtpReceiver() { void AudioRtpReceiver::OnChanged() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - const bool enabled = track_->internal()->enabled(); - if (cached_track_enabled_ == enabled) - return; - cached_track_enabled_ = enabled; - worker_thread_->PostTask( - ToQueuedTask(worker_thread_safety_, [this, enabled]() { - RTC_DCHECK_RUN_ON(worker_thread_); - Reconfigure(enabled); - })); + if (cached_track_enabled_ != track_->enabled()) { + cached_track_enabled_ = track_->enabled(); + worker_thread_->PostTask(ToQueuedTask( + worker_thread_safety_, + [this, enabled = cached_track_enabled_, volume = cached_volume_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + Reconfigure(enabled, volume); + })); + } } // RTC_RUN_ON(worker_thread_) void AudioRtpReceiver::SetOutputVolume_w(double volume) { RTC_DCHECK_GE(volume, 0.0); RTC_DCHECK_LE(volume, 10.0); - - if (!media_channel_) - return; - ssrc_ ? media_channel_->SetOutputVolume(*ssrc_, volume) : media_channel_->SetDefaultOutputVolume(volume); } @@ -103,18 +94,23 @@ void AudioRtpReceiver::OnSetVolume(double volume) { RTC_DCHECK_GE(volume, 0); RTC_DCHECK_LE(volume, 10); - bool track_enabled = track_->internal()->enabled(); - worker_thread_->Invoke(RTC_FROM_HERE, [&]() { - RTC_DCHECK_RUN_ON(worker_thread_); - // Update the cached_volume_ even when stopped, to allow clients to set - // the volume before starting/restarting, eg see crbug.com/1272566. - cached_volume_ = volume; - // When the track is disabled, the volume of the source, which is the - // corresponding WebRtc Voice Engine channel will be 0. So we do not - // allow setting the volume to the source when the track is disabled. - if (track_enabled) - SetOutputVolume_w(volume); - }); + // Update the cached_volume_ even when stopped_, to allow clients to set the + // volume before starting/restarting, eg see crbug.com/1272566. + cached_volume_ = volume; + + if (stopped_) + return; + + // When the track is disabled, the volume of the source, which is the + // corresponding WebRtc Voice Engine channel will be 0. So we do not allow + // setting the volume to the source when the track is disabled. + if (track_->enabled()) { + worker_thread_->PostTask( + ToQueuedTask(worker_thread_safety_, [this, volume = cached_volume_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + SetOutputVolume_w(volume); + })); + } } rtc::scoped_refptr AudioRtpReceiver::dtls_transport() @@ -163,47 +159,63 @@ AudioRtpReceiver::GetFrameDecryptor() const { void AudioRtpReceiver::Stop() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - source_->SetState(MediaSourceInterface::kEnded); + // TODO(deadbeef): Need to do more here to fully stop receiving packets. 
+ if (!stopped_) { + source_->SetState(MediaSourceInterface::kEnded); + stopped_ = true; + } + + worker_thread_->Invoke(RTC_FROM_HERE, [&]() { + RTC_DCHECK_RUN_ON(worker_thread_); + + if (media_channel_) + SetOutputVolume_w(0.0); + + SetMediaChannel_w(nullptr); + }); +} + +void AudioRtpReceiver::StopAndEndTrack() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + Stop(); track_->internal()->set_ended(); } -void AudioRtpReceiver::SetSourceEnded() { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - source_->SetState(MediaSourceInterface::kEnded); -} - -// RTC_RUN_ON(&signaling_thread_checker_) void AudioRtpReceiver::RestartMediaChannel(absl::optional ssrc) { - bool enabled = track_->internal()->enabled(); - MediaSourceInterface::SourceState state = source_->state(); - worker_thread_->Invoke(RTC_FROM_HERE, [&]() { - RTC_DCHECK_RUN_ON(worker_thread_); - RestartMediaChannel_w(std::move(ssrc), enabled, state); - }); - source_->SetState(MediaSourceInterface::kLive); -} + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + bool ok = worker_thread_->Invoke( + RTC_FROM_HERE, [&, enabled = cached_track_enabled_, + volume = cached_volume_, was_stopped = stopped_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + RTC_DCHECK(was_stopped); + return false; // Can't restart. + } -// RTC_RUN_ON(worker_thread_) -void AudioRtpReceiver::RestartMediaChannel_w( - absl::optional ssrc, - bool track_enabled, - MediaSourceInterface::SourceState state) { - if (!media_channel_) - return; // Can't restart. + if (!was_stopped && ssrc_ == ssrc) { + // Already running with that ssrc. + RTC_DCHECK(worker_thread_safety_->alive()); + return true; + } - if (state != MediaSourceInterface::kInitializing) { - if (ssrc_ == ssrc) - return; - source_->Stop(media_channel_, ssrc_); - } + if (!was_stopped) { + source_->Stop(media_channel_, ssrc_); + } - ssrc_ = std::move(ssrc); - source_->Start(media_channel_, ssrc_); - if (ssrc_) { - media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); - } + ssrc_ = std::move(ssrc); + source_->Start(media_channel_, ssrc_); + if (ssrc_) { + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } - Reconfigure(track_enabled); + Reconfigure(enabled, volume); + return true; + }); + + if (!ok) + return; + + stopped_ = false; } void AudioRtpReceiver::SetupMediaChannel(uint32_t ssrc) { @@ -285,10 +297,10 @@ void AudioRtpReceiver::SetDepacketizerToDecoderFrameTransformer( } // RTC_RUN_ON(worker_thread_) -void AudioRtpReceiver::Reconfigure(bool track_enabled) { +void AudioRtpReceiver::Reconfigure(bool track_enabled, double volume) { RTC_DCHECK(media_channel_); - SetOutputVolume_w(track_enabled ? cached_volume_ : 0); + SetOutputVolume_w(track_enabled ? volume : 0); if (ssrc_ && frame_decryptor_) { // Reattach the frame decryptor if we were reconfigured. @@ -319,12 +331,21 @@ void AudioRtpReceiver::SetJitterBufferMinimumDelay( } void AudioRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); - if (!media_channel && media_channel_) - SetOutputVolume_w(0.0); + if (stopped_ && !media_channel) + return; + + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + SetMediaChannel_w(media_channel); + }); +} + +// RTC_RUN_ON(worker_thread_) +void AudioRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { media_channel ? 
worker_thread_safety_->SetAlive() : worker_thread_safety_->SetNotAlive(); media_channel_ = static_cast(media_channel); diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h index 6f70243c0..aef497db7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_rtp_receiver.h @@ -45,24 +45,16 @@ class AudioRtpReceiver : public ObserverInterface, public AudioSourceInterface::AudioObserver, public RtpReceiverInternal { public: - // The constructor supports optionally passing the voice channel to the - // instance at construction time without having to call `SetMediaChannel()` - // on the worker thread straight after construction. - // However, when using that, the assumption is that right after construction, - // a call to either `SetupUnsignaledMediaChannel` or `SetupMediaChannel` - // will be made, which will internally start the source on the worker thread. AudioRtpReceiver(rtc::Thread* worker_thread, std::string receiver_id, std::vector stream_ids, - bool is_unified_plan, - cricket::VoiceMediaChannel* voice_channel = nullptr); + bool is_unified_plan); // TODO(https://crbug.com/webrtc/9480): Remove this when streams() is removed. AudioRtpReceiver( rtc::Thread* worker_thread, const std::string& receiver_id, const std::vector>& streams, - bool is_unified_plan, - cricket::VoiceMediaChannel* media_channel = nullptr); + bool is_unified_plan); virtual ~AudioRtpReceiver(); // ObserverInterface implementation @@ -98,7 +90,7 @@ class AudioRtpReceiver : public ObserverInterface, // RtpReceiverInternal implementation. void Stop() override; - void SetSourceEnded() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override; @@ -122,14 +114,12 @@ class AudioRtpReceiver : public ObserverInterface, override; private: - void RestartMediaChannel(absl::optional ssrc) - RTC_RUN_ON(&signaling_thread_checker_); - void RestartMediaChannel_w(absl::optional ssrc, - bool track_enabled, - MediaSourceInterface::SourceState state) + void RestartMediaChannel(absl::optional ssrc); + void Reconfigure(bool track_enabled, double volume) RTC_RUN_ON(worker_thread_); - void Reconfigure(bool track_enabled) RTC_RUN_ON(worker_thread_); void SetOutputVolume_w(double volume) RTC_RUN_ON(worker_thread_); + void SetMediaChannel_w(cricket::MediaChannel* media_channel) + RTC_RUN_ON(worker_thread_); RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; rtc::Thread* const worker_thread_; @@ -142,7 +132,8 @@ class AudioRtpReceiver : public ObserverInterface, std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); bool cached_track_enabled_ RTC_GUARDED_BY(&signaling_thread_checker_); - double cached_volume_ RTC_GUARDED_BY(worker_thread_) = 1.0; + double cached_volume_ RTC_GUARDED_BY(&signaling_thread_checker_) = 1.0; + bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; RtpReceiverObserverInterface* observer_ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc index ae8914d63..be087f693 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.cc @@ -32,7 +32,7 @@ AudioTrack::AudioTrack(const std::string& label, } AudioTrack::~AudioTrack() { - 
RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); set_state(MediaStreamTrackInterface::kEnded); if (audio_source_) audio_source_->UnregisterObserver(this); @@ -48,19 +48,19 @@ AudioSourceInterface* AudioTrack::GetSource() const { } void AudioTrack::AddSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); if (audio_source_) audio_source_->AddSink(sink); } void AudioTrack::RemoveSink(AudioTrackSinkInterface* sink) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); if (audio_source_) audio_source_->RemoveSink(sink); } void AudioTrack::OnChanged() { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK_RUN_ON(&thread_checker_); if (audio_source_->state() == MediaSourceInterface::kEnded) { set_state(kEnded); } else { diff --git a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h index 920bb948b..8a705cf8f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/audio_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/audio_track.h @@ -17,14 +17,9 @@ #include "api/media_stream_track.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" -#include "rtc_base/system/no_unique_address.h" namespace webrtc { -// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the -// properties directly in this class. `MediaStreamTrack` doesn't guard against -// conflicting access, so we'd need to override those methods anyway in this -// class in order to make sure things are correctly checked. class AudioTrack : public MediaStreamTrack, public ObserverInterface { protected: @@ -58,7 +53,7 @@ class AudioTrack : public MediaStreamTrack, private: const rtc::scoped_refptr audio_source_; - RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_checker_; + SequenceChecker thread_checker_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.cc b/TMessagesProj/jni/voip/webrtc/pc/channel.cc index f08b36b96..c2e3250a7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.cc @@ -12,26 +12,26 @@ #include #include -#include -#include +#include +#include #include +#include "absl/algorithm/container.h" #include "absl/strings/string_view.h" #include "api/rtp_parameters.h" #include "api/sequence_checker.h" -#include "api/units/timestamp.h" +#include "api/task_queue/queued_task.h" #include "media/base/codec.h" #include "media/base/rid_description.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" -#include "p2p/base/dtls_transport_internal.h" #include "pc/rtp_media_utils.h" #include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/network_route.h" -#include "rtc_base/strings/string_format.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/synchronization/mutex.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/trace_event.h" @@ -39,7 +39,6 @@ namespace cricket { namespace { -using ::rtc::StringFormat; using ::rtc::UniqueRandomIdGenerator; using ::webrtc::PendingTaskSafetyFlag; using ::webrtc::SdpType; @@ -80,6 +79,12 @@ struct StreamFinder { } // namespace +static void SafeSetError(const std::string& message, std::string* error_desc) { + if (error_desc) { + *error_desc = message; + } +} + template void 
RtpParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, @@ -100,13 +105,9 @@ void RtpParametersFromMediaDescription( template void RtpSendParametersFromMediaDescription( const MediaContentDescriptionImpl* desc, - webrtc::RtpExtension::Filter extensions_filter, + const RtpHeaderExtensions& extensions, + bool is_stream_active, RtpSendParameters* send_params) { - RtpHeaderExtensions extensions = - webrtc::RtpExtension::DeduplicateHeaderExtensions( - desc->rtp_header_extensions(), extensions_filter); - const bool is_stream_active = - webrtc::RtpTransceiverDirectionHasRecv(desc->direction()); RtpParametersFromMediaDescription(desc, extensions, is_stream_active, send_params); send_params->max_bandwidth_bps = desc->bandwidth(); @@ -117,7 +118,7 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -125,18 +126,15 @@ BaseChannel::BaseChannel(rtc::Thread* worker_thread, network_thread_(network_thread), signaling_thread_(signaling_thread), alive_(PendingTaskSafetyFlag::Create()), + content_name_(content_name), srtp_required_(srtp_required), - extensions_filter_( - crypto_options.srtp.enable_encrypted_rtp_header_extensions - ? webrtc::RtpExtension::kPreferEncryptedExtension - : webrtc::RtpExtension::kDiscardEncryptedExtension), + crypto_options_(crypto_options), media_channel_(std::move(media_channel)), - demuxer_criteria_(mid), ssrc_generator_(ssrc_generator) { RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(media_channel_); RTC_DCHECK(ssrc_generator_); - RTC_DLOG(LS_INFO) << "Created channel: " << ToString(); + demuxer_criteria_.mid = content_name; + RTC_LOG(LS_INFO) << "Created channel: " << ToString(); } BaseChannel::~BaseChannel() { @@ -151,17 +149,27 @@ BaseChannel::~BaseChannel() { } std::string BaseChannel::ToString() const { - return StringFormat("{mid: %s, media_type: %s}", mid().c_str(), - MediaTypeToString(media_channel_->media_type()).c_str()); + rtc::StringBuilder sb; + sb << "{mid: " << content_name_; + if (media_channel_) { + sb << ", media_type: " << MediaTypeToString(media_channel_->media_type()); + } + sb << "}"; + return sb.Release(); } -bool BaseChannel::ConnectToRtpTransport_n() { +bool BaseChannel::ConnectToRtpTransport() { RTC_DCHECK(rtp_transport_); RTC_DCHECK(media_channel()); // We don't need to call OnDemuxerCriteriaUpdatePending/Complete because // there's no previous criteria to worry about. 
- if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) { + bool result = rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this); + if (result) { + previous_demuxer_criteria_ = demuxer_criteria_; + } else { + previous_demuxer_criteria_ = {}; + RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); return false; } rtp_transport_->SignalReadyToSend.connect( @@ -175,7 +183,7 @@ bool BaseChannel::ConnectToRtpTransport_n() { return true; } -void BaseChannel::DisconnectFromRtpTransport_n() { +void BaseChannel::DisconnectFromRtpTransport() { RTC_DCHECK(rtp_transport_); RTC_DCHECK(media_channel()); rtp_transport_->UnregisterRtpDemuxerSink(this); @@ -183,8 +191,32 @@ void BaseChannel::DisconnectFromRtpTransport_n() { rtp_transport_->SignalNetworkRouteChanged.disconnect(this); rtp_transport_->SignalWritableState.disconnect(this); rtp_transport_->SignalSentPacket.disconnect(this); - rtp_transport_ = nullptr; - media_channel_->SetInterface(nullptr); +} + +void BaseChannel::Init_w(webrtc::RtpTransportInternal* rtp_transport) { + RTC_DCHECK_RUN_ON(worker_thread()); + + network_thread_->Invoke(RTC_FROM_HERE, [this, rtp_transport] { + SetRtpTransport(rtp_transport); + // Both RTP and RTCP channels should be set, we can call SetInterface on + // the media channel and it can set network options. + media_channel_->SetInterface(this); + }); +} + +void BaseChannel::Deinit() { + RTC_DCHECK_RUN_ON(worker_thread()); + // Packets arrive on the network thread, processing packets calls virtual + // functions, so need to stop this process in Deinit that is called in + // derived classes destructor. + network_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(network_thread()); + media_channel_->SetInterface(/*iface=*/nullptr); + + if (rtp_transport_) { + DisconnectFromRtpTransport(); + } + }); } bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { @@ -195,25 +227,16 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { } if (rtp_transport_) { - DisconnectFromRtpTransport_n(); - // Clear the cached header extensions on the worker. - worker_thread_->PostTask(ToQueuedTask(alive_, [this] { - RTC_DCHECK_RUN_ON(worker_thread()); - rtp_header_extensions_.clear(); - })); + DisconnectFromRtpTransport(); } rtp_transport_ = rtp_transport; if (rtp_transport_) { transport_name_ = rtp_transport_->transport_name(); - if (!ConnectToRtpTransport_n()) { + if (!ConnectToRtpTransport()) { return false; } - - RTC_DCHECK(!media_channel_->HasNetworkInterface()); - media_channel_->SetInterface(this); - - media_channel_->OnReadyToSend(rtp_transport_->IsReadyToSend()); + OnTransportReadyToSend(rtp_transport_->IsReadyToSend()); UpdateWritableState_n(); // Set the cached socket options. 
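// Illustrative sketch, not part of this patch: the BaseChannel hunks above
// restore Init_w()/Deinit(), where the worker thread blocks on the network
// thread while the RTP transport and the media channel's network interface are
// attached or torn down. The placeholder class below only shows that
// Invoke-based shape; rtc::Thread::Invoke and RTC_FROM_HERE follow the usage
// in the code above (the <void> template argument and everything else here is
// assumed).
#include "rtc_base/location.h"  // RTC_FROM_HERE
#include "rtc_base/thread.h"    // rtc::Thread

class SketchChannel {
 public:
  explicit SketchChannel(rtc::Thread* network_thread)
      : network_thread_(network_thread) {}

  // Called on the worker thread; returns only after the network thread has
  // finished wiring things up, so callers may treat the channel as connected.
  void InitOnWorker() {
    network_thread_->Invoke<void>(RTC_FROM_HERE, [this] {
      // BaseChannel::Init_w() attaches the RTP transport here and then calls
      // media_channel_->SetInterface(this); the sketch just flips a flag.
      wired_up_ = true;
    });
  }

  // Called on the worker thread; tears down in the reverse order so packet
  // delivery stops before the transport is disconnected.
  void DeinitOnWorker() {
    network_thread_->Invoke<void>(RTC_FROM_HERE, [this] {
      wired_up_ = false;
    });
  }

 private:
  rtc::Thread* const network_thread_;
  bool wired_up_ = false;  // stands in for transport/interface state
};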
@@ -226,7 +249,6 @@ bool BaseChannel::SetRtpTransport(webrtc::RtpTransportInternal* rtp_transport) { } } } - return true; } @@ -253,7 +275,7 @@ void BaseChannel::Enable(bool enable) { bool BaseChannel::SetLocalContent(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetLocalContent"); return SetLocalContent_w(content, type, error_desc); @@ -261,31 +283,31 @@ bool BaseChannel::SetLocalContent(const MediaContentDescription* content, bool BaseChannel::SetRemoteContent(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetRemoteContent"); return SetRemoteContent_w(content, type, error_desc); } bool BaseChannel::SetPayloadTypeDemuxingEnabled(bool enabled) { - // TODO(bugs.webrtc.org/11993): The demuxer state needs to be managed on the - // network thread. At the moment there's a workaround for inconsistent state - // between the worker and network thread because of this (see - // OnDemuxerCriteriaUpdatePending elsewhere in this file) and - // SetPayloadTypeDemuxingEnabled_w has an Invoke over to the network thread - // to apply state updates. RTC_DCHECK_RUN_ON(worker_thread()); TRACE_EVENT0("webrtc", "BaseChannel::SetPayloadTypeDemuxingEnabled"); return SetPayloadTypeDemuxingEnabled_w(enabled); } +bool BaseChannel::IsReadyToReceiveMedia_w() const { + // Receive data if we are enabled and have local content, + return enabled_ && + webrtc::RtpTransceiverDirectionHasRecv(local_content_direction_); +} + bool BaseChannel::IsReadyToSendMedia_w() const { // Send outgoing data if we are enabled, have local and remote content, // and we have had some form of connectivity. return enabled_ && webrtc::RtpTransceiverDirectionHasRecv(remote_content_direction_) && webrtc::RtpTransceiverDirectionHasSend(local_content_direction_) && - was_ever_writable_; + was_ever_writable(); } bool BaseChannel::SendPacket(rtc::CopyOnWriteBuffer* packet, @@ -302,7 +324,6 @@ int BaseChannel::SetOption(SocketType type, rtc::Socket::Option opt, int value) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); RTC_DCHECK(rtp_transport_); switch (type) { case ST_RTP: @@ -319,7 +340,6 @@ int BaseChannel::SetOption(SocketType type, void BaseChannel::OnWritableState(bool writable) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); if (writable) { ChannelWritable_n(); } else { @@ -329,11 +349,9 @@ void BaseChannel::OnWritableState(bool writable) { void BaseChannel::OnNetworkRouteChanged( absl::optional network_route) { - RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); - RTC_LOG(LS_INFO) << "Network route changed for " << ToString(); + RTC_DCHECK_RUN_ON(network_thread()); rtc::NetworkRoute new_route; if (network_route) { new_route = *(network_route); @@ -349,19 +367,11 @@ void BaseChannel::SetFirstPacketReceivedCallback( std::function callback) { RTC_DCHECK_RUN_ON(network_thread()); RTC_DCHECK(!on_first_packet_received_ || !callback); - - // TODO(bugs.webrtc.org/11992): Rename SetFirstPacketReceivedCallback to - // something that indicates network thread initialization/uninitialization and - // call Init_n() / Deinit_n() respectively. 
- // if (!callback) - // Deinit_n(); - on_first_packet_received_ = std::move(callback); } void BaseChannel::OnTransportReadyToSend(bool ready) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); media_channel_->OnReadyToSend(ready); } @@ -369,15 +379,22 @@ bool BaseChannel::SendPacket(bool rtcp, rtc::CopyOnWriteBuffer* packet, const rtc::PacketOptions& options) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); - TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); - // Until all the code is migrated to use RtpPacketType instead of bool. RtpPacketType packet_type = rtcp ? RtpPacketType::kRtcp : RtpPacketType::kRtp; + // SendPacket gets called from MediaEngine, on a pacer or an encoder thread. + // If the thread is not our network thread, we will post to our network + // so that the real work happens on our network. This avoids us having to + // synchronize access to all the pieces of the send path, including + // SRTP and the inner workings of the transport channels. + // The only downside is that we can't return a proper failure code if + // needed. Since UDP is unreliable anyway, this should be a non-issue. - // Ensure we have a place to send this packet before doing anything. We might - // get RTCP packets that we don't intend to send. If we've negotiated RTCP - // mux, send RTCP over the RTP transport. + TRACE_EVENT0("webrtc", "BaseChannel::SendPacket"); + + // Now that we are on the correct thread, ensure we have a place to send this + // packet before doing anything. (We might get RTCP packets that we don't + // intend to send.) If we've negotiated RTCP mux, send RTCP over the RTP + // transport. if (!rtp_transport_ || !rtp_transport_->IsWritable(rtcp)) { return false; } @@ -395,25 +412,30 @@ bool BaseChannel::SendPacket(bool rtcp, // The audio/video engines may attempt to send RTCP packets as soon as the // streams are created, so don't treat this as an error for RTCP. // See: https://bugs.chromium.org/p/webrtc/issues/detail?id=6809 - // However, there shouldn't be any RTP packets sent before SRTP is set - // up (and SetSend(true) is called). - RTC_DCHECK(rtcp) << "Can't send outgoing RTP packet for " << ToString() - << " when SRTP is inactive and crypto is required"; + if (rtcp) { + return false; + } + // However, there shouldn't be any RTP packets sent before SRTP is set up + // (and SetSend(true) is called). + RTC_LOG(LS_ERROR) << "Can't send outgoing RTP packet for " << ToString() + << " when SRTP is inactive and crypto is required"; + RTC_DCHECK_NOTREACHED(); return false; } - RTC_DLOG(LS_WARNING) << "Sending an " << (rtcp ? "RTCP" : "RTP") + std::string packet_type = rtcp ? "RTCP" : "RTP"; + RTC_DLOG(LS_WARNING) << "Sending an " << packet_type << " packet without encryption for " << ToString() << "."; } + // Bon voyage. return rtcp ? 
rtp_transport_->SendRtcpPacket(packet, options, PF_SRTP_BYPASS) : rtp_transport_->SendRtpPacket(packet, options, PF_SRTP_BYPASS); } void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); if (on_first_packet_received_) { on_first_packet_received_(); @@ -446,74 +468,41 @@ void BaseChannel::OnRtpPacket(const webrtc::RtpPacketReceived& parsed_packet) { } } -bool BaseChannel::MaybeUpdateDemuxerAndRtpExtensions_w( - bool update_demuxer, - absl::optional extensions, - std::string& error_desc) { - if (extensions) { - if (rtp_header_extensions_ == extensions) { - extensions.reset(); // No need to update header extensions. - } else { - rtp_header_extensions_ = *extensions; - } - } - - if (!update_demuxer && !extensions) - return true; // No update needed. - - // TODO(bugs.webrtc.org/13536): See if we can do this asynchronously. - - if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdatePending(); - - bool success = network_thread()->Invoke(RTC_FROM_HERE, [&]() mutable { +void BaseChannel::UpdateRtpHeaderExtensionMap( + const RtpHeaderExtensions& header_extensions) { + // Update the header extension map on network thread in case there is data + // race. + // + // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header + // extension maps are not merged when BUNDLE is enabled. This is fine because + // the ID for MID should be consistent among all the RTP transports. + network_thread_->Invoke(RTC_FROM_HERE, [this, &header_extensions] { RTC_DCHECK_RUN_ON(network_thread()); - // NOTE: This doesn't take the BUNDLE case in account meaning the RTP header - // extension maps are not merged when BUNDLE is enabled. This is fine - // because the ID for MID should be consistent among all the RTP transports. - if (extensions) - rtp_transport_->UpdateRtpHeaderExtensionMap(*extensions); - - if (!update_demuxer) - return true; - - if (!rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria_, this)) { - error_desc = - StringFormat("Failed to apply demuxer criteria for '%s': '%s'.", - mid().c_str(), demuxer_criteria_.ToString().c_str()); - return false; - } - return true; + rtp_transport_->UpdateRtpHeaderExtensionMap(header_extensions); }); - - if (update_demuxer) - media_channel()->OnDemuxerCriteriaUpdateComplete(); - - return success; } bool BaseChannel::RegisterRtpDemuxerSink_w() { + if (demuxer_criteria_ == previous_demuxer_criteria_) { + return true; + } media_channel_->OnDemuxerCriteriaUpdatePending(); // Copy demuxer criteria, since they're a worker-thread variable // and we want to pass them to the network thread - bool ret = network_thread_->Invoke( + return network_thread_->Invoke( RTC_FROM_HERE, [this, demuxer_criteria = demuxer_criteria_] { RTC_DCHECK_RUN_ON(network_thread()); - if (!rtp_transport_) { - // Transport was disconnected before attempting to update the - // criteria. This can happen while setting the remote description. - // See chromium:1295469 for an example. - return false; + RTC_DCHECK(rtp_transport_); + bool result = + rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this); + if (result) { + previous_demuxer_criteria_ = demuxer_criteria; + } else { + previous_demuxer_criteria_ = {}; } - // Note that RegisterRtpDemuxerSink first unregisters the sink if - // already registered. So this will change the state of the class - // whether the call succeeds or not. 
- return rtp_transport_->RegisterRtpDemuxerSink(demuxer_criteria, this); + media_channel_->OnDemuxerCriteriaUpdateComplete(); + return result; }); - - media_channel_->OnDemuxerCriteriaUpdateComplete(); - - return ret; } void BaseChannel::EnableMedia_w() { @@ -573,17 +562,23 @@ void BaseChannel::ChannelNotWritable_n() { RTC_LOG(LS_INFO) << "Channel not writable (" << ToString() << ")"; } -bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { - RTC_LOG_THREAD_BLOCK_COUNT(); +bool BaseChannel::AddRecvStream_w(const StreamParams& sp) { + return media_channel()->AddRecvStream(sp); +} +bool BaseChannel::RemoveRecvStream_w(uint32_t ssrc) { + return media_channel()->RemoveRecvStream(ssrc); +} + +void BaseChannel::ResetUnsignaledRecvStream_w() { + media_channel()->ResetUnsignaledRecvStream(); +} + +bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { if (enabled == payload_type_demuxing_enabled_) { return true; } - payload_type_demuxing_enabled_ = enabled; - - bool config_changed = false; - if (!enabled) { // TODO(crbug.com/11477): This will remove *all* unsignaled streams (those // without an explicitly signaled SSRC), which may include streams that @@ -591,32 +586,27 @@ bool BaseChannel::SetPayloadTypeDemuxingEnabled_w(bool enabled) { // streams that were matched based on payload type alone, but currently // there is no straightforward way to identify those streams. media_channel()->ResetUnsignaledRecvStream(); - if (!demuxer_criteria_.payload_types().empty()) { - config_changed = true; - demuxer_criteria_.payload_types().clear(); + demuxer_criteria_.payload_types.clear(); + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to disable payload type demuxing for " + << ToString(); + return false; } } else if (!payload_types_.empty()) { - for (const auto& type : payload_types_) { - if (demuxer_criteria_.payload_types().insert(type).second) { - config_changed = true; - } + demuxer_criteria_.payload_types.insert(payload_types_.begin(), + payload_types_.end()); + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to enable payload type demuxing for " + << ToString(); + return false; } - } else { - RTC_DCHECK(demuxer_criteria_.payload_types().empty()); } - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); - - if (!config_changed) - return true; - - // Note: This synchronously hops to the network thread. - return RegisterRtpDemuxerSink_w(); + return true; } bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, SdpType type, - std::string& error_desc) { + std::string* error_desc) { // In the case of RIDs (where SSRCs are not negotiated), this method will // generate an SSRC for each layer in StreamParams. That representation will // be stored internally in `local_streams_`. 
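The RegisterRtpDemuxerSink_w hunk above copies the worker-thread demuxer criteria into the lambda before blocking on the network thread, so the network thread only ever sees its own copy. A sketch of that copy-then-hop pattern; BlockingInvoke here is a hypothetical stand-in for the blocking rtc::Thread::Invoke used in the patch:

#include <cstdint>
#include <future>
#include <set>
#include <utility>

// Hypothetical stand-in for a blocking cross-thread call; the real code posts
// onto a long-lived network thread instead of spawning a throwaway one.
template <typename F>
auto BlockingInvoke(F&& f) -> decltype(f()) {
  return std::async(std::launch::async, std::forward<F>(f)).get();
}

class WorkerSide {
 public:
  bool RegisterDemuxerSink() {
    // Init-capture copies the worker-thread state, mirroring
    // [this, demuxer_criteria = demuxer_criteria_] in the patch.
    return BlockingInvoke([this, criteria = payload_types_] {
      return ApplyOnNetworkThread(criteria);
    });
  }

 private:
  bool ApplyOnNetworkThread(const std::set<uint8_t>& criteria) {
    // Placeholder for rtp_transport_->RegisterRtpDemuxerSink(criteria, this).
    return !criteria.empty();
  }

  std::set<uint8_t> payload_types_{96, 111};  // Owned by the worker thread.
};

Passing a copy means the worker can keep mutating its own criteria afterwards without racing the network thread.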
@@ -636,10 +626,11 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, continue; } if (!media_channel()->RemoveSendStream(old_stream.first_ssrc())) { - error_desc = StringFormat( - "Failed to remove send stream with ssrc %u from m-section with " - "mid='%s'.", - old_stream.first_ssrc(), mid().c_str()); + rtc::StringBuilder desc; + desc << "Failed to remove send stream with ssrc " + << old_stream.first_ssrc() << " from m-section with mid='" + << content_name() << "'."; + SafeSetError(desc.str(), error_desc); ret = false; } } @@ -662,10 +653,11 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_DCHECK(new_stream.has_ssrcs() || new_stream.has_rids()); if (new_stream.has_ssrcs() && new_stream.has_rids()) { - error_desc = StringFormat( - "Failed to add send stream: %u into m-section with mid='%s'. Stream " - "has both SSRCs and RIDs.", - new_stream.first_ssrc(), mid().c_str()); + rtc::StringBuilder desc; + desc << "Failed to add send stream: " << new_stream.first_ssrc() + << " into m-section with mid='" << content_name() + << "'. Stream has both SSRCs and RIDs."; + SafeSetError(desc.str(), error_desc); ret = false; continue; } @@ -682,9 +674,10 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, RTC_LOG(LS_INFO) << "Add send stream ssrc: " << new_stream.ssrcs[0] << " into " << ToString(); } else { - error_desc = StringFormat( - "Failed to add send stream ssrc: %u into m-section with mid='%s'", - new_stream.first_ssrc(), mid().c_str()); + rtc::StringBuilder desc; + desc << "Failed to add send stream ssrc: " << new_stream.first_ssrc() + << " into m-section with mid='" << content_name() << "'"; + SafeSetError(desc.str(), error_desc); ret = false; } } @@ -692,128 +685,96 @@ bool BaseChannel::UpdateLocalStreams_w(const std::vector& streams, return ret; } -bool BaseChannel::UpdateRemoteStreams_w(const MediaContentDescription* content, - SdpType type, - std::string& error_desc) { - RTC_LOG_THREAD_BLOCK_COUNT(); - bool needs_re_registration = false; - if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { - RTC_DLOG(LS_VERBOSE) << "UpdateRemoteStreams_w: remote side will not send " - "- disable payload type demuxing for " - << ToString(); - if (ClearHandledPayloadTypes()) { - needs_re_registration = payload_type_demuxing_enabled_; - } - } - - const std::vector& streams = content->streams(); - const bool new_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(streams); - const bool old_has_unsignaled_ssrcs = HasStreamWithNoSsrcs(remote_streams_); - +bool BaseChannel::UpdateRemoteStreams_w( + const std::vector& streams, + SdpType type, + std::string* error_desc) { // Check for streams that have been removed. + bool ret = true; for (const StreamParams& old_stream : remote_streams_) { // If we no longer have an unsignaled stream, we would like to remove // the unsignaled stream params that are cached. 
- if (!old_stream.has_ssrcs() && !new_has_unsignaled_ssrcs) { - media_channel()->ResetUnsignaledRecvStream(); + if (!old_stream.has_ssrcs() && !HasStreamWithNoSsrcs(streams)) { + ResetUnsignaledRecvStream_w(); RTC_LOG(LS_INFO) << "Reset unsignaled remote stream for " << ToString() << "."; } else if (old_stream.has_ssrcs() && !GetStreamBySsrc(streams, old_stream.first_ssrc())) { - if (media_channel()->RemoveRecvStream(old_stream.first_ssrc())) { + if (RemoveRecvStream_w(old_stream.first_ssrc())) { RTC_LOG(LS_INFO) << "Remove remote ssrc: " << old_stream.first_ssrc() << " from " << ToString() << "."; } else { - error_desc = StringFormat( - "Failed to remove remote stream with ssrc %u from m-section with " - "mid='%s'.", - old_stream.first_ssrc(), mid().c_str()); - return false; + rtc::StringBuilder desc; + desc << "Failed to remove remote stream with ssrc " + << old_stream.first_ssrc() << " from m-section with mid='" + << content_name() << "'."; + SafeSetError(desc.str(), error_desc); + ret = false; } } } - + demuxer_criteria_.ssrcs.clear(); // Check for new streams. - webrtc::flat_set ssrcs; for (const StreamParams& new_stream : streams) { // We allow a StreamParams with an empty list of SSRCs, in which case the // MediaChannel will cache the parameters and use them for any unsignaled // stream received later. - if ((!new_stream.has_ssrcs() && !old_has_unsignaled_ssrcs) || + if ((!new_stream.has_ssrcs() && !HasStreamWithNoSsrcs(remote_streams_)) || !GetStreamBySsrc(remote_streams_, new_stream.first_ssrc())) { - if (media_channel()->AddRecvStream(new_stream)) { + if (AddRecvStream_w(new_stream)) { RTC_LOG(LS_INFO) << "Add remote ssrc: " << (new_stream.has_ssrcs() ? std::to_string(new_stream.first_ssrc()) : "unsignaled") << " to " << ToString(); } else { - error_desc = - StringFormat("Failed to add remote stream ssrc: %s to %s", - new_stream.has_ssrcs() - ? std::to_string(new_stream.first_ssrc()).c_str() - : "unsignaled", - ToString().c_str()); - return false; + rtc::StringBuilder desc; + desc << "Failed to add remote stream ssrc: " + << (new_stream.has_ssrcs() + ? std::to_string(new_stream.first_ssrc()) + : "unsignaled") + << " to " << ToString(); + SafeSetError(desc.str(), error_desc); + ret = false; } } // Update the receiving SSRCs. - ssrcs.insert(new_stream.ssrcs.begin(), new_stream.ssrcs.end()); + demuxer_criteria_.ssrcs.insert(new_stream.ssrcs.begin(), + new_stream.ssrcs.end()); } - - if (demuxer_criteria_.ssrcs() != ssrcs) { - demuxer_criteria_.ssrcs() = std::move(ssrcs); - needs_re_registration = true; + // Re-register the sink to update the receiving ssrcs. + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to set up demuxing for " << ToString(); + ret = false; } - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); - - // Re-register the sink to update after changing the demuxer criteria. 
- if (needs_re_registration && !RegisterRtpDemuxerSink_w()) { - error_desc = StringFormat("Failed to set up audio demuxing for mid='%s'.", - mid().c_str()); - return false; - } - remote_streams_ = streams; - - set_remote_content_direction(content->direction()); - UpdateMediaSendRecvState_w(); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); - - return true; + return ret; } RtpHeaderExtensions BaseChannel::GetDeduplicatedRtpHeaderExtensions( const RtpHeaderExtensions& extensions) { - return webrtc::RtpExtension::DeduplicateHeaderExtensions(extensions, - extensions_filter_); + return webrtc::RtpExtension::DeduplicateHeaderExtensions( + extensions, crypto_options_.srtp.enable_encrypted_rtp_header_extensions + ? webrtc::RtpExtension::kPreferEncryptedExtension + : webrtc::RtpExtension::kDiscardEncryptedExtension); } -bool BaseChannel::MaybeAddHandledPayloadType(int payload_type) { - bool demuxer_criteria_modified = false; +void BaseChannel::MaybeAddHandledPayloadType(int payload_type) { if (payload_type_demuxing_enabled_) { - demuxer_criteria_modified = demuxer_criteria_.payload_types() - .insert(static_cast(payload_type)) - .second; + demuxer_criteria_.payload_types.insert(static_cast(payload_type)); } // Even if payload type demuxing is currently disabled, we need to remember // the payload types in case it's re-enabled later. payload_types_.insert(static_cast(payload_type)); - return demuxer_criteria_modified; } -bool BaseChannel::ClearHandledPayloadTypes() { - const bool was_empty = demuxer_criteria_.payload_types().empty(); - demuxer_criteria_.payload_types().clear(); +void BaseChannel::ClearHandledPayloadTypes() { + demuxer_criteria_.payload_types.clear(); payload_types_.clear(); - return !was_empty; } void BaseChannel::SignalSentPacket_n(const rtc::SentPacket& sent_packet) { RTC_DCHECK_RUN_ON(network_thread()); - RTC_DCHECK(network_initialized()); media_channel()->OnPacketSent(sent_packet); } @@ -821,7 +782,7 @@ VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -829,7 +790,7 @@ VoiceChannel::VoiceChannel(rtc::Thread* worker_thread, network_thread, signaling_thread, std::move(media_channel), - mid, + content_name, srtp_required, crypto_options, ssrc_generator) {} @@ -838,113 +799,148 @@ VoiceChannel::~VoiceChannel() { TRACE_EVENT0("webrtc", "VoiceChannel::~VoiceChannel"); // this can't be done in the base class, since it calls a virtual DisableMedia_w(); + Deinit(); } void VoiceChannel::UpdateMediaSendRecvState_w() { // Render incoming data if we're the active call, and we have the local // content. We receive data on the default channel and multiplexed streams. - bool ready_to_receive = enabled() && webrtc::RtpTransceiverDirectionHasRecv( - local_content_direction()); - media_channel()->SetPlayout(ready_to_receive); + RTC_DCHECK_RUN_ON(worker_thread()); + bool recv = IsReadyToReceiveMedia_w(); + media_channel()->SetPlayout(recv); // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. 
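GetDeduplicatedRtpHeaderExtensions above now derives the filter from crypto_options_ at call time: encrypted header extensions are preferred when enable_encrypted_rtp_header_extensions is set and discarded otherwise. A rough illustration of what deduplicate-by-URI with those two policies means; this is a simplified stand-in, not the actual webrtc::RtpExtension::DeduplicateHeaderExtensions implementation, whose details (ordering, id handling) may differ:

#include <map>
#include <string>
#include <vector>

// Simplified stand-in for webrtc::RtpExtension with only the fields the
// sketch needs.
struct Extension {
  std::string uri;
  int id = 0;
  bool encrypt = false;
};

std::vector<Extension> Deduplicate(const std::vector<Extension>& in,
                                   bool allow_encrypted) {
  std::map<std::string, Extension> by_uri;
  for (const Extension& ext : in) {
    if (ext.encrypt && !allow_encrypted)
      continue;  // kDiscardEncryptedExtension: drop encrypted variants.
    auto it = by_uri.find(ext.uri);
    if (it == by_uri.end()) {
      by_uri[ext.uri] = ext;
    } else if (allow_encrypted && ext.encrypt && !it->second.encrypt) {
      it->second = ext;  // kPreferEncryptedExtension: keep the encrypted one.
    }
  }
  std::vector<Extension> out;
  for (const auto& kv : by_uri)
    out.push_back(kv.second);
  return out;
}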
bool send = IsReadyToSendMedia_w(); media_channel()->SetSend(send); - RTC_LOG(LS_INFO) << "Changing voice state, recv=" << ready_to_receive - << " send=" << send << " for " << ToString(); + RTC_LOG(LS_INFO) << "Changing voice state, recv=" << recv << " send=" << send + << " for " << ToString(); } bool VoiceChannel::SetLocalContent_w(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetLocalContent_w"); - RTC_DLOG(LS_INFO) << "Setting local voice description for " << ToString(); + RTC_DCHECK_RUN_ON(worker_thread()); + RTC_LOG(LS_INFO) << "Setting local voice description for " << ToString(); - RTC_LOG_THREAD_BLOCK_COUNT(); - - RtpHeaderExtensions header_extensions = + RtpHeaderExtensions rtp_header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); - bool update_header_extensions = true; + // TODO(tommi): There's a hop to the network thread here. + // some of the below is also network thread related. + UpdateRtpHeaderExtensionMap(rtp_header_extensions); media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); AudioRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( - content->as_audio(), header_extensions, + content->as_audio(), rtp_header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); if (!media_channel()->SetRecvParameters(recv_params)) { - error_desc = StringFormat( + SafeSetError( "Failed to set local audio description recv parameters for m-section " - "with mid='%s'.", - mid().c_str()); + "with mid='" + + content_name() + "'.", + error_desc); return false; } - bool criteria_modified = false; if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { for (const AudioCodec& codec : content->as_audio()->codecs()) { - if (MaybeAddHandledPayloadType(codec.id)) { - criteria_modified = true; - } + MaybeAddHandledPayloadType(codec.id); + } + // Need to re-register the sink to update the handled payload. + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to set up audio demuxing for " << ToString(); + return false; } } last_recv_params_ = recv_params; + // TODO(pthatcher): Move local streams into AudioSendParameters, and + // only give it to the media channel once we have a remote + // description too (without a remote description, we won't be able + // to send them anyway). if (!UpdateLocalStreams_w(content->as_audio()->streams(), type, error_desc)) { - RTC_DCHECK(!error_desc.empty()); + SafeSetError( + "Failed to set local audio description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } set_local_content_direction(content->direction()); UpdateMediaSendRecvState_w(); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); - - bool success = MaybeUpdateDemuxerAndRtpExtensions_w( - criteria_modified, - update_header_extensions - ? 
absl::optional(std::move(header_extensions)) - : absl::nullopt, - error_desc); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); - - return success; + return true; } bool VoiceChannel::SetRemoteContent_w(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { TRACE_EVENT0("webrtc", "VoiceChannel::SetRemoteContent_w"); + RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote voice description for " << ToString(); + const AudioContentDescription* audio = content->as_audio(); + + RtpHeaderExtensions rtp_header_extensions = + GetDeduplicatedRtpHeaderExtensions(audio->rtp_header_extensions()); + AudioSendParameters send_params = last_send_params_; - RtpSendParametersFromMediaDescription(content->as_audio(), - extensions_filter(), &send_params); - send_params.mid = mid(); + RtpSendParametersFromMediaDescription( + audio, rtp_header_extensions, + webrtc::RtpTransceiverDirectionHasRecv(audio->direction()), &send_params); + send_params.mid = content_name(); bool parameters_applied = media_channel()->SetSendParameters(send_params); if (!parameters_applied) { - error_desc = StringFormat( + SafeSetError( "Failed to set remote audio description send parameters for m-section " - "with mid='%s'.", - mid().c_str()); + "with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; - return UpdateRemoteStreams_w(content, type, error_desc); + if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { + RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " + "disable payload type demuxing for " + << ToString(); + ClearHandledPayloadTypes(); + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to update audio demuxing for " << ToString(); + return false; + } + } + + // TODO(pthatcher): Move remote streams into AudioRecvParameters, + // and only give it to the media channel once we have a local + // description too (without a local description, we won't be able to + // recv them anyway). + if (!UpdateRemoteStreams_w(audio->streams(), type, error_desc)) { + SafeSetError( + "Failed to set remote audio description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); + return false; + } + + set_remote_content_direction(content->direction()); + UpdateMediaSendRecvState_w(); + return true; } VideoChannel::VideoChannel(rtc::Thread* worker_thread, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, UniqueRandomIdGenerator* ssrc_generator) @@ -952,7 +948,7 @@ VideoChannel::VideoChannel(rtc::Thread* worker_thread, network_thread, signaling_thread, std::move(media_channel), - mid, + content_name, srtp_required, crypto_options, ssrc_generator) {} @@ -961,13 +957,19 @@ VideoChannel::~VideoChannel() { TRACE_EVENT0("webrtc", "VideoChannel::~VideoChannel"); // this can't be done in the base class, since it calls a virtual DisableMedia_w(); + Deinit(); } void VideoChannel::UpdateMediaSendRecvState_w() { // Send outgoing data if we're the active call, we have the remote content, // and we have had some form of connectivity. + RTC_DCHECK_RUN_ON(worker_thread()); bool send = IsReadyToSendMedia_w(); - media_channel()->SetSend(send); + if (!media_channel()->SetSend(send)) { + RTC_LOG(LS_ERROR) << "Failed to SetSend on video channel: " + ToString(); + // TODO(gangji): Report error back to server. 
+ } + RTC_LOG(LS_INFO) << "Changing video state, send=" << send << " for " << ToString(); } @@ -980,21 +982,20 @@ void VideoChannel::FillBitrateInfo(BandwidthEstimationInfo* bwe_info) { bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetLocalContent_w"); - RTC_DLOG(LS_INFO) << "Setting local video description for " << ToString(); + RTC_DCHECK_RUN_ON(worker_thread()); + RTC_LOG(LS_INFO) << "Setting local video description for " << ToString(); - RTC_LOG_THREAD_BLOCK_COUNT(); - - RtpHeaderExtensions header_extensions = + RtpHeaderExtensions rtp_header_extensions = GetDeduplicatedRtpHeaderExtensions(content->rtp_header_extensions()); - bool update_header_extensions = true; + UpdateRtpHeaderExtensionMap(rtp_header_extensions); media_channel()->SetExtmapAllowMixed(content->extmap_allow_mixed()); VideoRecvParameters recv_params = last_recv_params_; RtpParametersFromMediaDescription( - content->as_video(), header_extensions, + content->as_video(), rtp_header_extensions, webrtc::RtpTransceiverDirectionHasRecv(content->direction()), &recv_params); @@ -1009,10 +1010,11 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, send_codec.packetization.reset(); needs_send_params_update = true; } else if (recv_codec->packetization != send_codec.packetization) { - error_desc = StringFormat( + SafeSetError( "Failed to set local answer due to invalid codec packetization " - "specified in m-section with mid='%s'.", - mid().c_str()); + "specified in m-section with mid='" + + content_name() + "'.", + error_desc); return false; } } @@ -1020,18 +1022,22 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, } if (!media_channel()->SetRecvParameters(recv_params)) { - error_desc = StringFormat( + SafeSetError( "Failed to set local video description recv parameters for m-section " - "with mid='%s'.", - mid().c_str()); + "with mid='" + + content_name() + "'.", + error_desc); return false; } - bool criteria_modified = false; if (webrtc::RtpTransceiverDirectionHasRecv(content->direction())) { for (const VideoCodec& codec : content->as_video()->codecs()) { - if (MaybeAddHandledPayloadType(codec.id)) - criteria_modified = true; + MaybeAddHandledPayloadType(codec.id); + } + // Need to re-register the sink to update the handled payload. + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to set up video demuxing for " << ToString(); + return false; } } @@ -1039,49 +1045,52 @@ bool VideoChannel::SetLocalContent_w(const MediaContentDescription* content, if (needs_send_params_update) { if (!media_channel()->SetSendParameters(send_params)) { - error_desc = StringFormat( - "Failed to set send parameters for m-section with mid='%s'.", - mid().c_str()); + SafeSetError("Failed to set send parameters for m-section with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; } + // TODO(pthatcher): Move local streams into VideoSendParameters, and + // only give it to the media channel once we have a remote + // description too (without a remote description, we won't be able + // to send them anyway). 
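Failures in these hunks are reported through SafeSetError(message, error_desc) rather than by assigning to a std::string&. The helper itself is outside this diff; judging only from the call sites, it plausibly looks like the reconstruction below (hypothetical; the real helper may also log the message):

#include <string>

// Hypothetical reconstruction of SafeSetError, consistent with how it is
// called in the hunks above: copy the message into the optional out-parameter
// and tolerate callers that pass nullptr when they do not need the text.
static void SafeSetError(const std::string& message, std::string* error_desc) {
  if (error_desc) {
    *error_desc = message;
  }
}

Taking a pointer instead of a reference is what lets call sites opt out of error reporting entirely.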
if (!UpdateLocalStreams_w(content->as_video()->streams(), type, error_desc)) { - RTC_DCHECK(!error_desc.empty()); + SafeSetError( + "Failed to set local video description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); return false; } set_local_content_direction(content->direction()); UpdateMediaSendRecvState_w(); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); - - bool success = MaybeUpdateDemuxerAndRtpExtensions_w( - criteria_modified, - update_header_extensions - ? absl::optional(std::move(header_extensions)) - : absl::nullopt, - error_desc); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); - - return success; + return true; } bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, SdpType type, - std::string& error_desc) { + std::string* error_desc) { TRACE_EVENT0("webrtc", "VideoChannel::SetRemoteContent_w"); + RTC_DCHECK_RUN_ON(worker_thread()); RTC_LOG(LS_INFO) << "Setting remote video description for " << ToString(); const VideoContentDescription* video = content->as_video(); + RtpHeaderExtensions rtp_header_extensions = + GetDeduplicatedRtpHeaderExtensions(video->rtp_header_extensions()); + VideoSendParameters send_params = last_send_params_; - RtpSendParametersFromMediaDescription(video, extensions_filter(), - &send_params); - send_params.mid = mid(); - send_params.conference_mode = video->conference_mode(); + RtpSendParametersFromMediaDescription( + video, rtp_header_extensions, + webrtc::RtpTransceiverDirectionHasRecv(video->direction()), &send_params); + if (video->conference_mode()) { + send_params.conference_mode = true; + } + send_params.mid = content_name(); VideoRecvParameters recv_params = last_recv_params_; @@ -1094,10 +1103,11 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, recv_codec.packetization.reset(); needs_recv_params_update = true; } else if (send_codec->packetization != recv_codec.packetization) { - error_desc = StringFormat( + SafeSetError( "Failed to set remote answer due to invalid codec packetization " - "specifid in m-section with mid='%s'.", - mid().c_str()); + "specifid in m-section with mid='" + + content_name() + "'.", + error_desc); return false; } } @@ -1105,25 +1115,51 @@ bool VideoChannel::SetRemoteContent_w(const MediaContentDescription* content, } if (!media_channel()->SetSendParameters(send_params)) { - error_desc = StringFormat( + SafeSetError( "Failed to set remote video description send parameters for m-section " - "with mid='%s'.", - mid().c_str()); + "with mid='" + + content_name() + "'.", + error_desc); return false; } last_send_params_ = send_params; if (needs_recv_params_update) { if (!media_channel()->SetRecvParameters(recv_params)) { - error_desc = StringFormat( - "Failed to set recv parameters for m-section with mid='%s'.", - mid().c_str()); + SafeSetError("Failed to set recv parameters for m-section with mid='" + + content_name() + "'.", + error_desc); return false; } last_recv_params_ = recv_params; } - return UpdateRemoteStreams_w(content, type, error_desc); + if (!webrtc::RtpTransceiverDirectionHasSend(content->direction())) { + RTC_DLOG(LS_VERBOSE) << "SetRemoteContent_w: remote side will not send - " + "disable payload type demuxing for " + << ToString(); + ClearHandledPayloadTypes(); + if (!RegisterRtpDemuxerSink_w()) { + RTC_LOG(LS_ERROR) << "Failed to update video demuxing for " << ToString(); + return false; + } + } + + // TODO(pthatcher): Move remote streams into VideoRecvParameters, + // and only give it to the media channel once we have a 
local + // description too (without a local description, we won't be able to + // recv them anyway). + if (!UpdateRemoteStreams_w(video->streams(), type, error_desc)) { + SafeSetError( + "Failed to set remote video description streams for m-section with " + "mid='" + + content_name() + "'.", + error_desc); + return false; + } + set_remote_content_direction(content->direction()); + UpdateMediaSendRecvState_w(); + return true; } } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel.h b/TMessagesProj/jni/voip/webrtc/pc/channel.h index 90fc529cb..4628c86bd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel.h @@ -14,7 +14,6 @@ #include #include -#include #include #include #include @@ -22,14 +21,12 @@ #include #include -#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/call/audio_sink.h" #include "api/crypto/crypto_options.h" #include "api/function_view.h" #include "api/jsep.h" #include "api/media_types.h" -#include "api/rtp_parameters.h" #include "api/rtp_receiver_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/scoped_refptr.h" @@ -55,7 +52,6 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_udp_socket.h" #include "rtc_base/checks.h" -#include "rtc_base/containers/flat_set.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/location.h" #include "rtc_base/network.h" @@ -90,6 +86,10 @@ struct CryptoParams; // and methods with _s suffix on signaling thread. // Network and worker threads may be the same thread. // +// WARNING! SUBCLASSES MUST CALL Deinit() IN THEIR DESTRUCTORS! +// This is required to avoid a data race between the destructor modifying the +// vtable, and the media channel's thread using BaseChannel as the +// NetworkInterface. class BaseChannel : public ChannelInterface, // TODO(tommi): Remove has_slots inheritance. @@ -109,21 +109,27 @@ class BaseChannel : public ChannelInterface, rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); virtual ~BaseChannel(); + virtual void Init_w(webrtc::RtpTransportInternal* rtp_transport); + + // Deinit may be called multiple times and is simply ignored if it's already + // done. + void Deinit(); rtc::Thread* worker_thread() const { return worker_thread_; } rtc::Thread* network_thread() const { return network_thread_; } - const std::string& mid() const override { return demuxer_criteria_.mid(); } + const std::string& content_name() const override { return content_name_; } // TODO(deadbeef): This is redundant; remove this. - absl::string_view transport_name() const override { + const std::string& transport_name() const override { RTC_DCHECK_RUN_ON(network_thread()); if (rtp_transport_) return rtp_transport_->transport_name(); - return ""; + // TODO(tommi): Delete this variable. + return transport_name_; } // This function returns true if using SRTP (DTLS-based keying or SDES). 
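The WARNING comment added above exists because packets keep arriving on the network thread and are delivered through virtual NetworkInterface methods on the channel. If the derived destructor finishes before that delivery is stopped, the remaining calls dispatch through an object whose derived part is already gone. A minimal sketch of the required shape, with FakeNetwork standing in for the real network thread and transport wiring:

#include <functional>
#include <iostream>

// Toy stand-in for "the network thread": just a stored callback that a real
// implementation would invoke from another thread.
class FakeNetwork {
 public:
  void SetSink(std::function<void(int)> sink) { sink_ = std::move(sink); }
  void DeliverPacket(int payload) { if (sink_) sink_(payload); }

 private:
  std::function<void(int)> sink_;
};

class BaseChannelLike {
 public:
  explicit BaseChannelLike(FakeNetwork* net) : net_(net) {
    net_->SetSink([this](int p) { OnPacket(p); });
  }
  virtual ~BaseChannelLike() = default;

  // Must run before the derived destructor finishes: afterwards OnPacket()
  // would dispatch through a partially destroyed object.
  void Deinit() { net_->SetSink(nullptr); }

 protected:
  virtual void OnPacket(int payload) = 0;

 private:
  FakeNetwork* const net_;
};

class VoiceChannelLike : public BaseChannelLike {
 public:
  using BaseChannelLike::BaseChannelLike;
  ~VoiceChannelLike() override { Deinit(); }  // as the WARNING comment demands

 private:
  void OnPacket(int payload) override { std::cout << "audio " << payload << "\n"; }
};

Calling Deinit() in ~VoiceChannelLike, rather than only in the base destructor, guarantees the unhook happens while the full object, including its vtable, is still valid.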
@@ -146,10 +152,10 @@ class BaseChannel : public ChannelInterface, // Channel control bool SetLocalContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) override; + std::string* error_desc) override; bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) override; + std::string* error_desc) override; // Controls whether this channel will receive packets on the basis of // matching payload type alone. This is needed for legacy endpoints that // don't signal SSRCs or use MID/RID, but doesn't make sense if there is @@ -186,47 +192,31 @@ class BaseChannel : public ChannelInterface, } protected: - void set_local_content_direction(webrtc::RtpTransceiverDirection direction) - RTC_RUN_ON(worker_thread()) { + bool was_ever_writable() const { + RTC_DCHECK_RUN_ON(worker_thread()); + return was_ever_writable_; + } + void set_local_content_direction(webrtc::RtpTransceiverDirection direction) { + RTC_DCHECK_RUN_ON(worker_thread()); local_content_direction_ = direction; } - - webrtc::RtpTransceiverDirection local_content_direction() const - RTC_RUN_ON(worker_thread()) { - return local_content_direction_; - } - - void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) - RTC_RUN_ON(worker_thread()) { + void set_remote_content_direction(webrtc::RtpTransceiverDirection direction) { + RTC_DCHECK_RUN_ON(worker_thread()); remote_content_direction_ = direction; } - - webrtc::RtpTransceiverDirection remote_content_direction() const - RTC_RUN_ON(worker_thread()) { - return remote_content_direction_; - } - - webrtc::RtpExtension::Filter extensions_filter() const { - return extensions_filter_; - } - - bool network_initialized() RTC_RUN_ON(network_thread()) { - return media_channel_->HasNetworkInterface(); - } - - bool enabled() const RTC_RUN_ON(worker_thread()) { return enabled_; } - rtc::Thread* signaling_thread() const { return signaling_thread_; } - - // Call to verify that: + // These methods verify that: // * The required content description directions have been set. // * The channel is enabled. - // * The SRTP filter is active if it's needed. - // * The transport has been writable before, meaning it should be at least - // possible to succeed in sending a packet. + // * And for sending: + // - The SRTP filter is active if it's needed. + // - The transport has been writable before, meaning it should be at least + // possible to succeed in sending a packet. // // When any of these properties change, UpdateMediaSendRecvState_w should be // called. 
+ bool IsReadyToReceiveMedia_w() const RTC_RUN_ON(worker_thread()); bool IsReadyToSendMedia_w() const RTC_RUN_ON(worker_thread()); + rtc::Thread* signaling_thread() const { return signaling_thread_; } // NetworkInterface implementation, called by MediaEngine bool SendPacket(rtc::CopyOnWriteBuffer* packet, @@ -253,8 +243,13 @@ class BaseChannel : public ChannelInterface, void ChannelWritable_n() RTC_RUN_ON(network_thread()); void ChannelNotWritable_n() RTC_RUN_ON(network_thread()); + bool AddRecvStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); + bool RemoveRecvStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); + void ResetUnsignaledRecvStream_w() RTC_RUN_ON(worker_thread()); bool SetPayloadTypeDemuxingEnabled_w(bool enabled) RTC_RUN_ON(worker_thread()); + bool AddSendStream_w(const StreamParams& sp) RTC_RUN_ON(worker_thread()); + bool RemoveSendStream_w(uint32_t ssrc) RTC_RUN_ON(worker_thread()); // Should be called whenever the conditions for // IsReadyToReceiveMedia/IsReadyToSendMedia are satisfied (or unsatisfied). @@ -263,19 +258,19 @@ class BaseChannel : public ChannelInterface, bool UpdateLocalStreams_w(const std::vector& streams, webrtc::SdpType type, - std::string& error_desc) + std::string* error_desc) RTC_RUN_ON(worker_thread()); - bool UpdateRemoteStreams_w(const MediaContentDescription* content, + bool UpdateRemoteStreams_w(const std::vector& streams, webrtc::SdpType type, - std::string& error_desc) + std::string* error_desc) RTC_RUN_ON(worker_thread()); virtual bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) + std::string* error_desc) RTC_RUN_ON(worker_thread()) = 0; virtual bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) + std::string* error_desc) RTC_RUN_ON(worker_thread()) = 0; // Returns a list of RTP header extensions where any extension URI is unique. @@ -286,26 +281,12 @@ class BaseChannel : public ChannelInterface, // Add `payload_type` to `demuxer_criteria_` if payload type demuxing is // enabled. - // Returns true if the demuxer payload type changed and a re-registration - // is needed. - bool MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); + void MaybeAddHandledPayloadType(int payload_type) RTC_RUN_ON(worker_thread()); - // Returns true if the demuxer payload type criteria was non-empty before - // clearing. - bool ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); + void ClearHandledPayloadTypes() RTC_RUN_ON(worker_thread()); - // Hops to the network thread to update the transport if an update is - // requested. If `update_demuxer` is false and `extensions` is not set, the - // function simply returns. If either of these is set, the function updates - // the transport with either or both of the demuxer criteria and the supplied - // rtp header extensions. - // Returns `true` if either an update wasn't needed or one was successfully - // applied. If the return value is `false`, then updating the demuxer criteria - // failed, which needs to be treated as an error. 
- bool MaybeUpdateDemuxerAndRtpExtensions_w( - bool update_demuxer, - absl::optional extensions, - std::string& error_desc) RTC_RUN_ON(worker_thread()); + void UpdateRtpHeaderExtensionMap( + const RtpHeaderExtensions& header_extensions); bool RegisterRtpDemuxerSink_w() RTC_RUN_ON(worker_thread()); @@ -313,8 +294,8 @@ class BaseChannel : public ChannelInterface, std::string ToString() const; private: - bool ConnectToRtpTransport_n() RTC_RUN_ON(network_thread()); - void DisconnectFromRtpTransport_n() RTC_RUN_ON(network_thread()); + bool ConnectToRtpTransport() RTC_RUN_ON(network_thread()); + void DisconnectFromRtpTransport() RTC_RUN_ON(network_thread()); void SignalSentPacket_n(const rtc::SentPacket& sent_packet); rtc::Thread* const worker_thread_; @@ -322,6 +303,8 @@ class BaseChannel : public ChannelInterface, rtc::Thread* const signaling_thread_; rtc::scoped_refptr alive_; + const std::string content_name_; + std::function on_first_packet_received_ RTC_GUARDED_BY(network_thread()); @@ -345,9 +328,24 @@ class BaseChannel : public ChannelInterface, bool was_ever_writable_ RTC_GUARDED_BY(worker_thread()) = false; const bool srtp_required_ = true; - // Set to either kPreferEncryptedExtension or kDiscardEncryptedExtension - // based on the supplied CryptoOptions. - const webrtc::RtpExtension::Filter extensions_filter_; + // TODO(tommi): This field shouldn't be necessary. It's a copy of + // PeerConnection::GetCryptoOptions(), which is const state. It's also only + // used to filter header extensions when calling + // `rtp_transport_->UpdateRtpHeaderExtensionMap()` when the local/remote + // content description is updated. Since the transport is actually owned + // by the transport controller that also gets updated whenever the content + // description changes, it seems we have two paths into the transports, along + // with several thread hops via various classes (such as the Channel classes) + // that only serve as additional layers and store duplicate state. The Jsep* + // family of classes already apply session description updates on the network + // thread every time it changes. + // For the Channel classes, we should be able to get rid of: + // * crypto_options (and fewer construction parameters)_ + // * UpdateRtpHeaderExtensionMap + // * GetFilteredRtpHeaderExtensions + // * Blocking thread hop to the network thread for every call to set + // local/remote content is updated. + const webrtc::CryptoOptions crypto_options_; // MediaChannel related members that should be accessed from the worker // thread. @@ -360,18 +358,22 @@ class BaseChannel : public ChannelInterface, bool payload_type_demuxing_enabled_ RTC_GUARDED_BY(worker_thread()) = true; std::vector local_streams_ RTC_GUARDED_BY(worker_thread()); std::vector remote_streams_ RTC_GUARDED_BY(worker_thread()); - webrtc::RtpTransceiverDirection local_content_direction_ RTC_GUARDED_BY( - worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; - webrtc::RtpTransceiverDirection remote_content_direction_ RTC_GUARDED_BY( - worker_thread()) = webrtc::RtpTransceiverDirection::kInactive; + // TODO(bugs.webrtc.org/12230): local_content_direction and + // remote_content_direction are set on the worker thread, but accessed on the + // network thread. + webrtc::RtpTransceiverDirection local_content_direction_ = + webrtc::RtpTransceiverDirection::kInactive; + webrtc::RtpTransceiverDirection remote_content_direction_ = + webrtc::RtpTransceiverDirection::kInactive; // Cached list of payload types, used if payload type demuxing is re-enabled. 
- webrtc::flat_set payload_types_ RTC_GUARDED_BY(worker_thread()); - // A stored copy of the rtp header extensions as applied to the transport. - RtpHeaderExtensions rtp_header_extensions_ RTC_GUARDED_BY(worker_thread()); + std::set payload_types_ RTC_GUARDED_BY(worker_thread()); // TODO(bugs.webrtc.org/12239): Modified on worker thread, accessed // on network thread in RegisterRtpDemuxerSink_n (called from Init_w) webrtc::RtpDemuxerCriteria demuxer_criteria_; + // Accessed on the worker thread, modified on the network thread from + // RegisterRtpDemuxerSink_w's Invoke. + webrtc::RtpDemuxerCriteria previous_demuxer_criteria_; // This generator is used to generate SSRCs for local streams. // This is needed in cases where SSRCs are not negotiated or set explicitly // like in Simulcast. @@ -387,7 +389,7 @@ class VoiceChannel : public BaseChannel { rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); @@ -404,22 +406,20 @@ class VoiceChannel : public BaseChannel { private: // overrides from BaseChannel - void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; + void UpdateMediaSendRecvState_w() override; bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) - RTC_RUN_ON(worker_thread()) override; + std::string* error_desc) override; bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) - RTC_RUN_ON(worker_thread()) override; + std::string* error_desc) override; // Last AudioSendParameters sent down to the media_channel() via // SetSendParameters. - AudioSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); + AudioSendParameters last_send_params_; // Last AudioRecvParameters sent down to the media_channel() via // SetRecvParameters. - AudioRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); + AudioRecvParameters last_recv_params_; }; // VideoChannel is a specialization for video. @@ -429,7 +429,7 @@ class VideoChannel : public BaseChannel { rtc::Thread* network_thread, rtc::Thread* signaling_thread, std::unique_ptr media_channel, - const std::string& mid, + const std::string& content_name, bool srtp_required, webrtc::CryptoOptions crypto_options, rtc::UniqueRandomIdGenerator* ssrc_generator); @@ -440,30 +440,28 @@ class VideoChannel : public BaseChannel { return static_cast(BaseChannel::media_channel()); } + void FillBitrateInfo(BandwidthEstimationInfo* bwe_info); + cricket::MediaType media_type() const override { return cricket::MEDIA_TYPE_VIDEO; } - void FillBitrateInfo(BandwidthEstimationInfo* bwe_info); - private: // overrides from BaseChannel - void UpdateMediaSendRecvState_w() RTC_RUN_ON(worker_thread()) override; + void UpdateMediaSendRecvState_w() override; bool SetLocalContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) - RTC_RUN_ON(worker_thread()) override; + std::string* error_desc) override; bool SetRemoteContent_w(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) - RTC_RUN_ON(worker_thread()) override; + std::string* error_desc) override; // Last VideoSendParameters sent down to the media_channel() via // SetSendParameters. 
- VideoSendParameters last_send_params_ RTC_GUARDED_BY(worker_thread()); + VideoSendParameters last_send_params_; // Last VideoRecvParameters sent down to the media_channel() via // SetRecvParameters. - VideoRecvParameters last_recv_params_ RTC_GUARDED_BY(worker_thread()); + VideoRecvParameters last_recv_params_; }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h index a16a9b753..3b71f0f8b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_interface.h @@ -14,32 +14,16 @@ #include #include -#include "absl/strings/string_view.h" #include "api/jsep.h" #include "api/media_types.h" #include "media/base/media_channel.h" #include "pc/rtp_transport_internal.h" -namespace webrtc { -class Call; -class VideoBitrateAllocatorFactory; -} // namespace webrtc - namespace cricket { class MediaContentDescription; -class VideoChannel; -class VoiceChannel; -struct MediaConfig; -// A Channel is a construct that groups media streams of the same type -// (audio or video), both outgoing and incoming. -// When the PeerConnection API is used, a Channel corresponds one to one -// to an RtpTransceiver. -// When Unified Plan is used, there can only be at most one outgoing and -// one incoming stream. With Plan B, there can be more than one. - -// ChannelInterface contains methods common to voice and video channels. +// ChannelInterface contains methods common to voice, video and data channels. // As more methods are added to BaseChannel, they should be included in the // interface as well. class ChannelInterface { @@ -48,14 +32,10 @@ class ChannelInterface { virtual MediaChannel* media_channel() const = 0; - // Returns a string view for the transport name. Fetching the transport name - // must be done on the network thread only and note that the lifetime of - // the returned object should be assumed to only be the calling scope. // TODO(deadbeef): This is redundant; remove this. - virtual absl::string_view transport_name() const = 0; + virtual const std::string& transport_name() const = 0; - // TODO(tommi): Change return type to string_view. - virtual const std::string& mid() const = 0; + virtual const std::string& content_name() const = 0; // Enables or disables this channel virtual void Enable(bool enable) = 0; @@ -67,10 +47,10 @@ class ChannelInterface { // Channel control virtual bool SetLocalContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) = 0; + std::string* error_desc) = 0; virtual bool SetRemoteContent(const MediaContentDescription* content, webrtc::SdpType type, - std::string& error_desc) = 0; + std::string* error_desc) = 0; virtual bool SetPayloadTypeDemuxingEnabled(bool enabled) = 0; // Access to the local and remote streams that were set on the channel. 
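On the interface side, transport_name() goes back to returning const std::string&; the removed comment explained why the string_view variant had to be treated as valid only within the calling scope. A small sketch of that lifetime hazard, using std::string_view as a stand-in for absl::string_view:

#include <string>
#include <string_view>

struct TransportSketch {
  std::string name = "audio_transport";
};

class ChannelSketch {
 public:
  // The view aliases storage owned by the transport; it stays valid only as
  // long as the transport object does.
  std::string_view transport_name() const {
    return transport_ ? std::string_view(transport_->name) : std::string_view();
  }
  void SetTransport(TransportSketch* t) { transport_ = t; }

 private:
  TransportSketch* transport_ = nullptr;
};

// The hazard the removed comment warns about, in miniature:
//
//   std::string_view v;
//   {
//     TransportSketch t;
//     ChannelSketch ch;
//     ch.SetTransport(&t);
//     v = ch.transport_name();
//   }                       // t is destroyed here
//   // `v` now dangles; copy it into a std::string inside the calling scope
//   // if the name has to outlive the transport.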
@@ -88,32 +68,6 @@ class ChannelInterface { virtual ~ChannelInterface() = default; }; -class ChannelFactoryInterface { - public: - virtual VideoChannel* CreateVideoChannel( - webrtc::Call* call, - const MediaConfig& media_config, - const std::string& mid, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - const VideoOptions& options, - webrtc::VideoBitrateAllocatorFactory* - video_bitrate_allocator_factory) = 0; - - virtual VoiceChannel* CreateVoiceChannel( - webrtc::Call* call, - const MediaConfig& media_config, - const std::string& mid, - bool srtp_required, - const webrtc::CryptoOptions& crypto_options, - const AudioOptions& options) = 0; - - virtual void DestroyChannel(ChannelInterface* channel) = 0; - - protected: - virtual ~ChannelFactoryInterface() = default; -}; - } // namespace cricket #endif // PC_CHANNEL_INTERFACE_H_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc index 1482d7f86..b58830b21 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.cc @@ -10,16 +10,17 @@ #include "pc/channel_manager.h" +#include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "absl/strings/match.h" -#include "api/media_types.h" #include "api/sequence_checker.h" #include "media/base/media_constants.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" +#include "rtc_base/logging.h" #include "rtc_base/trace_event.h" namespace cricket { @@ -30,9 +31,13 @@ std::unique_ptr ChannelManager::Create( bool enable_rtx, rtc::Thread* worker_thread, rtc::Thread* network_thread) { + RTC_DCHECK_RUN_ON(worker_thread); RTC_DCHECK(network_thread); RTC_DCHECK(worker_thread); + if (media_engine) + media_engine->Init(); + return absl::WrapUnique(new ChannelManager( std::move(media_engine), enable_rtx, worker_thread, network_thread)); } @@ -43,33 +48,16 @@ ChannelManager::ChannelManager( rtc::Thread* worker_thread, rtc::Thread* network_thread) : media_engine_(std::move(media_engine)), - signaling_thread_(rtc::Thread::Current()), worker_thread_(worker_thread), network_thread_(network_thread), enable_rtx_(enable_rtx) { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(worker_thread_); RTC_DCHECK(network_thread_); - - if (media_engine_) { - // TODO(tommi): Change VoiceEngine to do ctor time initialization so that - // this isn't necessary. - worker_thread_->Invoke(RTC_FROM_HERE, [&] { media_engine_->Init(); }); - } + RTC_DCHECK_RUN_ON(worker_thread_); } ChannelManager::~ChannelManager() { - RTC_DCHECK_RUN_ON(signaling_thread_); - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - RTC_DCHECK(voice_channels_.empty()); - RTC_DCHECK(video_channels_.empty()); - // While `media_engine_` is const throughout the ChannelManager's lifetime, - // it requires destruction to happen on the worker thread. Instead of - // marking the pointer as non-const, we live with this const_cast<> in the - // destructor. 
- const_cast&>(media_engine_).reset(); - }); + RTC_DCHECK_RUN_ON(worker_thread_); } void ChannelManager::GetSupportedAudioSendCodecs( @@ -153,9 +141,12 @@ ChannelManager::GetSupportedVideoRtpHeaderExtensions() const { VoiceChannel* ChannelManager::CreateVoiceChannel( webrtc::Call* call, const MediaConfig& media_config, - const std::string& mid, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, bool srtp_required, const webrtc::CryptoOptions& crypto_options, + rtc::UniqueRandomIdGenerator* ssrc_generator, const AudioOptions& options) { RTC_DCHECK(call); RTC_DCHECK(media_engine_); @@ -164,8 +155,9 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( // thread. if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateVoiceChannel(call, media_config, mid, srtp_required, - crypto_options, options); + return CreateVoiceChannel(call, media_config, rtp_transport, + signaling_thread, content_name, srtp_required, + crypto_options, ssrc_generator, options); }); } @@ -178,28 +170,44 @@ VoiceChannel* ChannelManager::CreateVoiceChannel( } auto voice_channel = std::make_unique( - worker_thread_, network_thread_, signaling_thread_, - absl::WrapUnique(media_channel), mid, srtp_required, crypto_options, - &ssrc_generator_); + worker_thread_, network_thread_, signaling_thread, + absl::WrapUnique(media_channel), content_name, srtp_required, + crypto_options, ssrc_generator); + + voice_channel->Init_w(rtp_transport); VoiceChannel* voice_channel_ptr = voice_channel.get(); voice_channels_.push_back(std::move(voice_channel)); return voice_channel_ptr; } -void ChannelManager::DestroyVoiceChannel(VoiceChannel* channel) { +void ChannelManager::DestroyVoiceChannel(VoiceChannel* voice_channel) { TRACE_EVENT0("webrtc", "ChannelManager::DestroyVoiceChannel"); + RTC_DCHECK(voice_channel); + + if (!worker_thread_->IsCurrent()) { + worker_thread_->Invoke(RTC_FROM_HERE, + [&] { DestroyVoiceChannel(voice_channel); }); + return; + } + RTC_DCHECK_RUN_ON(worker_thread_); + voice_channels_.erase(absl::c_find_if( - voice_channels_, [&](const auto& p) { return p.get() == channel; })); + voice_channels_, [&](const std::unique_ptr& p) { + return p.get() == voice_channel; + })); } VideoChannel* ChannelManager::CreateVideoChannel( webrtc::Call* call, const MediaConfig& media_config, - const std::string& mid, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, bool srtp_required, const webrtc::CryptoOptions& crypto_options, + rtc::UniqueRandomIdGenerator* ssrc_generator, const VideoOptions& options, webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) { RTC_DCHECK(call); @@ -209,8 +217,9 @@ VideoChannel* ChannelManager::CreateVideoChannel( // thread. 
if (!worker_thread_->IsCurrent()) { return worker_thread_->Invoke(RTC_FROM_HERE, [&] { - return CreateVideoChannel(call, media_config, mid, srtp_required, - crypto_options, options, + return CreateVideoChannel(call, media_config, rtp_transport, + signaling_thread, content_name, srtp_required, + crypto_options, ssrc_generator, options, video_bitrate_allocator_factory); }); } @@ -225,41 +234,32 @@ VideoChannel* ChannelManager::CreateVideoChannel( } auto video_channel = std::make_unique( - worker_thread_, network_thread_, signaling_thread_, - absl::WrapUnique(media_channel), mid, srtp_required, crypto_options, - &ssrc_generator_); + worker_thread_, network_thread_, signaling_thread, + absl::WrapUnique(media_channel), content_name, srtp_required, + crypto_options, ssrc_generator); + + video_channel->Init_w(rtp_transport); VideoChannel* video_channel_ptr = video_channel.get(); video_channels_.push_back(std::move(video_channel)); return video_channel_ptr; } -void ChannelManager::DestroyVideoChannel(VideoChannel* channel) { +void ChannelManager::DestroyVideoChannel(VideoChannel* video_channel) { TRACE_EVENT0("webrtc", "ChannelManager::DestroyVideoChannel"); + RTC_DCHECK(video_channel); + + if (!worker_thread_->IsCurrent()) { + worker_thread_->Invoke(RTC_FROM_HERE, + [&] { DestroyVideoChannel(video_channel); }); + return; + } RTC_DCHECK_RUN_ON(worker_thread_); video_channels_.erase(absl::c_find_if( - video_channels_, [&](const auto& p) { return p.get() == channel; })); -} - -void ChannelManager::DestroyChannel(ChannelInterface* channel) { - RTC_DCHECK(channel); - - if (!worker_thread_->IsCurrent()) { - // TODO(tommi): Do this asynchronously when we have a way to make sure that - // the call to DestroyChannel runs before ~Call() runs, which today happens - // inside an Invoke from the signaling thread in PeerConnectin::Close(). - worker_thread_->Invoke(RTC_FROM_HERE, - [&] { DestroyChannel(channel); }); - return; - } - - if (channel->media_type() == MEDIA_TYPE_AUDIO) { - DestroyVoiceChannel(static_cast(channel)); - } else { - RTC_DCHECK_EQ(channel->media_type(), MEDIA_TYPE_VIDEO); - DestroyVideoChannel(static_cast(channel)); - } + video_channels_, [&](const std::unique_ptr& p) { + return p.get() == video_channel; + })); } bool ChannelManager::StartAecDump(webrtc::FileWrapper file, diff --git a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h index 278e7ed3b..363f459d3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/channel_manager.h @@ -27,12 +27,10 @@ #include "media/base/media_config.h" #include "media/base/media_engine.h" #include "pc/channel.h" -#include "pc/channel_interface.h" #include "pc/rtp_transport_internal.h" #include "pc/session_description.h" #include "rtc_base/system/file_wrapper.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" #include "rtc_base/unique_id_generator.h" namespace cricket { @@ -45,7 +43,7 @@ namespace cricket { // voice or just video channels. // ChannelManager also allows the application to discover what devices it has // using device manager. -class ChannelManager : public ChannelFactoryInterface { +class ChannelManager final { public: // Returns an initialized instance of ChannelManager. 
// If media_engine is non-nullptr, then the returned ChannelManager instance @@ -57,12 +55,11 @@ class ChannelManager : public ChannelFactoryInterface { rtc::Thread* network_thread); ChannelManager() = delete; - ~ChannelManager() override; + ~ChannelManager(); rtc::Thread* worker_thread() const { return worker_thread_; } rtc::Thread* network_thread() const { return network_thread_; } MediaEngineInterface* media_engine() { return media_engine_.get(); } - rtc::UniqueRandomIdGenerator& ssrc_generator() { return ssrc_generator_; } // Retrieves the list of supported audio & video codec types. // Can be called before starting the media engine. @@ -84,10 +81,15 @@ class ChannelManager : public ChannelFactoryInterface { // Creates a voice channel, to be associated with the specified session. VoiceChannel* CreateVoiceChannel(webrtc::Call* call, const MediaConfig& media_config, - const std::string& mid, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, bool srtp_required, const webrtc::CryptoOptions& crypto_options, - const AudioOptions& options) override; + rtc::UniqueRandomIdGenerator* ssrc_generator, + const AudioOptions& options); + // Destroys a voice channel created by CreateVoiceChannel. + void DestroyVoiceChannel(VoiceChannel* voice_channel); // Creates a video channel, synced with the specified voice channel, and // associated with the specified session. @@ -95,14 +97,16 @@ class ChannelManager : public ChannelFactoryInterface { VideoChannel* CreateVideoChannel( webrtc::Call* call, const MediaConfig& media_config, - const std::string& mid, + webrtc::RtpTransportInternal* rtp_transport, + rtc::Thread* signaling_thread, + const std::string& content_name, bool srtp_required, const webrtc::CryptoOptions& crypto_options, + rtc::UniqueRandomIdGenerator* ssrc_generator, const VideoOptions& options, - webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory) - override; - - void DestroyChannel(ChannelInterface* channel) override; + webrtc::VideoBitrateAllocatorFactory* video_bitrate_allocator_factory); + // Destroys a video channel created by CreateVideoChannel. + void DestroyVideoChannel(VideoChannel* video_channel); // Starts AEC dump using existing file, with a specified maximum file size in // bytes. When the limit is reached, logging will stop and the file will be @@ -112,30 +116,17 @@ class ChannelManager : public ChannelFactoryInterface { // Stops recording AEC dump. void StopAecDump(); - // Destroys a voice channel created by CreateVoiceChannel. - void DestroyVoiceChannel(VoiceChannel* voice_channel); - - // Destroys a video channel created by CreateVideoChannel. - void DestroyVideoChannel(VideoChannel* video_channel); - - protected: ChannelManager(std::unique_ptr media_engine, bool enable_rtx, rtc::Thread* worker_thread, rtc::Thread* network_thread); + private: const std::unique_ptr media_engine_; // Nullable. - rtc::Thread* const signaling_thread_; rtc::Thread* const worker_thread_; rtc::Thread* const network_thread_; - // This object should be used to generate any SSRC that is not explicitly - // specified by the user (or by the remote party). - // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling - // and worker threads. See if we can't restrict usage to a single thread. - rtc::UniqueRandomIdGenerator ssrc_generator_; - // Vector contents are non-null. 
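// Note, not part of this patch: the generic DestroyChannel(ChannelInterface*)
// removed from channel_manager.cc above has no replacement here, so callers use
// DestroyVoiceChannel()/DestroyVideoChannel() directly. Code that only holds a
// cricket::ChannelInterface* would have to dispatch on media type itself, roughly
// (hypothetical `channel` and `channel_manager`):
//
//   if (channel->media_type() == cricket::MEDIA_TYPE_AUDIO) {
//     channel_manager->DestroyVoiceChannel(
//         static_cast<cricket::VoiceChannel*>(channel));
//   } else {
//     channel_manager->DestroyVideoChannel(
//         static_cast<cricket::VideoChannel*>(channel));
//   }
//
// which is the same dispatch the removed helper performed.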
std::vector> voice_channels_ RTC_GUARDED_BY(worker_thread_); diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc index d093ee3cf..d058c9d56 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.cc @@ -10,15 +10,13 @@ #include "pc/connection_context.h" +#include #include #include #include "api/transport/field_trial_based_config.h" -#include "media/base/media_engine.h" #include "media/sctp/sctp_transport_factory.h" #include "rtc_base/helpers.h" -#include "rtc_base/internal/default_socket_server.h" -#include "rtc_base/socket_server.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/time_utils.h" @@ -26,31 +24,19 @@ namespace webrtc { namespace { -rtc::Thread* MaybeStartNetworkThread( - rtc::Thread* old_thread, - std::unique_ptr& socket_factory_holder, - std::unique_ptr& thread_holder) { +rtc::Thread* MaybeStartThread(rtc::Thread* old_thread, + const std::string& thread_name, + bool with_socket_server, + std::unique_ptr& thread_holder) { if (old_thread) { return old_thread; } - std::unique_ptr socket_server = - rtc::CreateDefaultSocketServer(); - thread_holder = std::make_unique(socket_server.get()); - socket_factory_holder = std::move(socket_server); - - thread_holder->SetName("pc_network_thread", nullptr); - thread_holder->Start(); - return thread_holder.get(); -} - -rtc::Thread* MaybeStartWorkerThread( - rtc::Thread* old_thread, - std::unique_ptr& thread_holder) { - if (old_thread) { - return old_thread; + if (with_socket_server) { + thread_holder = rtc::Thread::CreateWithSocketServer(); + } else { + thread_holder = rtc::Thread::Create(); } - thread_holder = rtc::Thread::Create(); - thread_holder->SetName("pc_worker_thread", nullptr); + thread_holder->SetName(thread_name, nullptr); thread_holder->Start(); return thread_holder.get(); } @@ -89,17 +75,19 @@ std::unique_ptr MaybeCreateSctpFactory( // Static rtc::scoped_refptr ConnectionContext::Create( PeerConnectionFactoryDependencies* dependencies) { - return rtc::scoped_refptr( - new ConnectionContext(dependencies)); + return new ConnectionContext(dependencies); } ConnectionContext::ConnectionContext( PeerConnectionFactoryDependencies* dependencies) - : network_thread_(MaybeStartNetworkThread(dependencies->network_thread, - owned_socket_factory_, - owned_network_thread_)), - worker_thread_(MaybeStartWorkerThread(dependencies->worker_thread, - owned_worker_thread_)), + : network_thread_(MaybeStartThread(dependencies->network_thread, + "pc_network_thread", + true, + owned_network_thread_)), + worker_thread_(MaybeStartThread(dependencies->worker_thread, + "pc_worker_thread", + false, + owned_worker_thread_)), signaling_thread_(MaybeWrapThread(dependencies->signaling_thread, wraps_current_thread_)), network_monitor_factory_( @@ -128,30 +116,23 @@ ConnectionContext::ConnectionContext( RTC_DCHECK_RUN_ON(signaling_thread_); rtc::InitRandom(rtc::Time32()); - rtc::SocketFactory* socket_factory = dependencies->socket_factory; - if (socket_factory == nullptr) { - if (owned_socket_factory_) { - socket_factory = owned_socket_factory_.get(); - } else { - // TODO(bugs.webrtc.org/13145): This case should be deleted. Either - // require that a PacketSocketFactory and NetworkManager always are - // injected (with no need to construct these default objects), or require - // that if a network_thread is injected, an approprite rtc::SocketServer - // should be injected too. 
- socket_factory = network_thread()->socketserver(); - } - } // If network_monitor_factory_ is non-null, it will be used to create a // network monitor while on the network thread. default_network_manager_ = std::make_unique( - network_monitor_factory_.get(), socket_factory); + network_monitor_factory_.get(), network_thread()->socketserver()); - default_socket_factory_ = - std::make_unique(socket_factory); + // TODO(bugs.webrtc.org/13145): Either require that a PacketSocketFactory + // always is injected (with no need to construct this default factory), or get + // the appropriate underlying SocketFactory without going through the + // rtc::Thread::socketserver() accessor. + default_socket_factory_ = std::make_unique( + network_thread()->socketserver()); - channel_manager_ = cricket::ChannelManager::Create( - std::move(dependencies->media_engine), - /*enable_rtx=*/true, worker_thread(), network_thread()); + worker_thread_->Invoke(RTC_FROM_HERE, [&]() { + channel_manager_ = cricket::ChannelManager::Create( + std::move(dependencies->media_engine), + /*enable_rtx=*/true, worker_thread(), network_thread()); + }); // Set warning levels on the threads, to give warnings when response // may be slower than is expected of the thread. @@ -165,7 +146,8 @@ ConnectionContext::ConnectionContext( ConnectionContext::~ConnectionContext() { RTC_DCHECK_RUN_ON(signaling_thread_); - channel_manager_.reset(nullptr); + worker_thread_->Invoke(RTC_FROM_HERE, + [&]() { channel_manager_.reset(nullptr); }); // Make sure `worker_thread()` and `signaling_thread()` outlive // `default_socket_factory_` and `default_network_manager_`. diff --git a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h index 2aaa840df..8fad13c10 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/connection_context.h +++ b/TMessagesProj/jni/voip/webrtc/pc/connection_context.h @@ -29,7 +29,6 @@ #include "rtc_base/network.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/rtc_certificate_generator.h" -#include "rtc_base/socket_factory.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" @@ -104,7 +103,6 @@ class ConnectionContext final // Note: Since owned_network_thread_ and owned_worker_thread_ are used // in the initialization of network_thread_ and worker_thread_, they // must be declared before them, so that they are initialized first. 
- std::unique_ptr owned_socket_factory_; std::unique_ptr owned_network_thread_ RTC_GUARDED_BY(signaling_thread_); std::unique_ptr owned_worker_thread_ diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc index 832eb03f7..e11647f2c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.cc @@ -10,11 +10,14 @@ #include "pc/data_channel_controller.h" +#include #include +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" #include "api/peer_connection_interface.h" #include "api/rtc_error.h" -#include "pc/peer_connection_internal.h" +#include "pc/peer_connection.h" #include "pc/sctp_utils.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" @@ -295,8 +298,7 @@ DataChannelController::InternalCreateSctpDataChannel( return nullptr; } sctp_data_channels_.push_back(channel); - channel->SignalClosed.connect( - pc_, &PeerConnectionInternal::OnSctpDataChannelClosed); + channel->SignalClosed.connect(pc_, &PeerConnection::OnSctpDataChannelClosed); SignalSctpDataChannelCreated_(channel.get()); return channel; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h index fa10b745c..af0e06353 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/data_channel_controller.h @@ -19,7 +19,6 @@ #include #include "api/data_channel_interface.h" -#include "api/rtc_error.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "api/transport/data_channel_transport_interface.h" @@ -39,12 +38,12 @@ namespace webrtc { -class PeerConnectionInternal; +class PeerConnection; class DataChannelController : public SctpDataChannelProviderInterface, public DataChannelSink { public: - explicit DataChannelController(PeerConnectionInternal* pc) : pc_(pc) {} + explicit DataChannelController(PeerConnection* pc) : pc_(pc) {} // Not copyable or movable. DataChannelController(DataChannelController&) = delete; @@ -181,7 +180,7 @@ class DataChannelController : public SctpDataChannelProviderInterface, RTC_GUARDED_BY(signaling_thread()); // Owning PeerConnection. - PeerConnectionInternal* const pc_; + PeerConnection* const pc_; // The weak pointers must be dereferenced and invalidated on the signalling // thread only. 
rtc::WeakPtrFactory weak_factory_{this}; diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h index c2c51c22f..da068c9b8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_srtp_transport.h @@ -11,7 +11,6 @@ #ifndef PC_DTLS_SRTP_TRANSPORT_H_ #define PC_DTLS_SRTP_TRANSPORT_H_ -#include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc index c9f3279fb..074f44e22 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtls_transport.cc @@ -19,7 +19,7 @@ #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" -#include "rtc_base/ssl_stream_adapter.h" +#include "rtc_base/ssl_certificate.h" namespace webrtc { @@ -105,35 +105,22 @@ void DtlsTransport::UpdateInformation() { if (internal_dtls_transport_->dtls_state() == DtlsTransportState::kConnected) { bool success = true; - rtc::SSLRole internal_role; - absl::optional role; int ssl_cipher_suite; int tls_version; int srtp_cipher; - success &= internal_dtls_transport_->GetDtlsRole(&internal_role); - if (success) { - switch (internal_role) { - case rtc::SSL_CLIENT: - role = DtlsTransportTlsRole::kClient; - break; - case rtc::SSL_SERVER: - role = DtlsTransportTlsRole::kServer; - break; - } - } success &= internal_dtls_transport_->GetSslVersionBytes(&tls_version); success &= internal_dtls_transport_->GetSslCipherSuite(&ssl_cipher_suite); success &= internal_dtls_transport_->GetSrtpCryptoSuite(&srtp_cipher); if (success) { info_ = DtlsTransportInformation( - internal_dtls_transport_->dtls_state(), role, tls_version, + internal_dtls_transport_->dtls_state(), tls_version, ssl_cipher_suite, srtp_cipher, internal_dtls_transport_->GetRemoteSSLCertChain()); } else { RTC_LOG(LS_ERROR) << "DtlsTransport in connected state has incomplete " "TLS information"; info_ = DtlsTransportInformation( - internal_dtls_transport_->dtls_state(), role, absl::nullopt, + internal_dtls_transport_->dtls_state(), absl::nullopt, absl::nullopt, absl::nullopt, internal_dtls_transport_->GetRemoteSSLCertChain()); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc index 8b82c31aa..46811c881 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.cc @@ -13,6 +13,8 @@ #include #include +#include + #include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ref_counted_object.h" @@ -165,7 +167,7 @@ int DtmfSender::comma_delay() const { void DtmfSender::QueueInsertDtmf(const rtc::Location& posted_from, uint32_t delay_ms) { - signaling_thread_->PostDelayedHighPrecisionTask( + signaling_thread_->PostDelayedTask( ToQueuedTask(safety_flag_, [this] { RTC_DCHECK_RUN_ON(signaling_thread_); diff --git a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h index ae213b3bf..a208b100d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/dtmf_sender.h @@ -17,14 +17,13 @@ #include "api/dtmf_sender_interface.h" #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" #include "pc/proxy.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/location.h" #include "rtc_base/ref_count.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include 
"rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" // DtmfSender is the native implementation of the RTCDTMFSender defined by // the WebRTC W3C Editor's Draft. @@ -73,9 +72,6 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { DtmfSender(rtc::Thread* signaling_thread, DtmfProviderInterface* provider); virtual ~DtmfSender(); - DtmfSender(const DtmfSender&) = delete; - DtmfSender& operator=(const DtmfSender&) = delete; - private: DtmfSender(); @@ -100,11 +96,12 @@ class DtmfSender : public DtmfSenderInterface, public sigslot::has_slots<> { // For cancelling the tasks which feed the DTMF provider one tone at a time. rtc::scoped_refptr safety_flag_ RTC_GUARDED_BY( signaling_thread_) RTC_PT_GUARDED_BY(signaling_thread_) = nullptr; + + RTC_DISALLOW_COPY_AND_ASSIGN(DtmfSender); }; // Define proxy for DtmfSenderInterface. BEGIN_PRIMARY_PROXY_MAP(DtmfSender) - PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_METHOD1(void, RegisterObserver, DtmfSenderObserverInterface*) PROXY_METHOD0(void, UnregisterObserver) diff --git a/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h b/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h index 3c2936c68..3319beaed 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h +++ b/TMessagesProj/jni/voip/webrtc/pc/external_hmac.h @@ -33,7 +33,6 @@ #include "third_party/libsrtp/crypto/include/auth.h" #include "third_party/libsrtp/crypto/include/crypto_types.h" #include "third_party/libsrtp/include/srtp.h" -#include "third_party/libsrtp/include/srtp_priv.h" #define EXTERNAL_HMAC_SHA1 SRTP_HMAC_SHA1 + 1 #define HMAC_KEY_LENGTH 20 diff --git a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc index cb4145be1..88f77bf0a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/ice_server_parsing.cc @@ -12,7 +12,9 @@ #include +#include #include // For std::isdigit. 
+#include #include #include "p2p/base/port_interface.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h index a6bec01ce..dc10e3d2b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jitter_buffer_delay.h @@ -16,7 +16,6 @@ #include "absl/types/optional.h" #include "api/sequence_checker.h" #include "rtc_base/system/no_unique_address.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc index 1e97ad42d..6dacde629 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_ice_candidate.cc @@ -10,6 +10,8 @@ #include "api/jsep_ice_candidate.h" +#include + #include "pc/webrtc_sdp.h" // This file contains JsepIceCandidate-related functions that are not diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc index 4c57396f0..57ccf7ca6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_session_description.cc @@ -11,20 +11,11 @@ #include "api/jsep_session_description.h" #include -#include -#include "absl/types/optional.h" -#include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" -#include "p2p/base/transport_description.h" -#include "p2p/base/transport_info.h" -#include "pc/media_session.h" // IWYU pragma: keep +#include "pc/media_session.h" #include "pc/webrtc_sdp.h" -#include "rtc_base/checks.h" -#include "rtc_base/ip_address.h" -#include "rtc_base/logging.h" -#include "rtc_base/net_helper.h" -#include "rtc_base/socket_address.h" +#include "rtc_base/arraysize.h" using cricket::SessionDescription; diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc index 00447b088..5c7f01ae6 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.cc @@ -15,7 +15,6 @@ #include #include -#include #include #include "api/array_view.h" @@ -208,15 +207,17 @@ webrtc::RTCError JsepTransport::SetLocalJsepTransportDescription( return error; } } - RTC_DCHECK(rtp_dtls_transport_->internal()); - rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters( - ice_parameters); - - if (rtcp_dtls_transport_) { - RTC_DCHECK(rtcp_dtls_transport_->internal()); - rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( + RTC_DCHECK(rtp_dtls_transport_->internal()); + rtp_dtls_transport_->internal()->ice_transport()->SetIceParameters( ice_parameters); - } + + { + if (rtcp_dtls_transport_) { + RTC_DCHECK(rtcp_dtls_transport_->internal()); + rtcp_dtls_transport_->internal()->ice_transport()->SetIceParameters( + ice_parameters); + } + } // If PRANSWER/ANSWER is set, we should decide transport protocol type. 
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { error = NegotiateAndSetDtlsParameters(type); diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h index 93604a179..e3e929bfd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport.h @@ -44,6 +44,7 @@ #include "pc/srtp_transport.h" #include "pc/transport_stats.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/ssl_fingerprint.h" #include "rtc_base/ssl_stream_adapter.h" @@ -105,9 +106,6 @@ class JsepTransport { ~JsepTransport(); - JsepTransport(const JsepTransport&) = delete; - JsepTransport& operator=(const JsepTransport&) = delete; - // Returns the MID of this transport. This is only used for logging. const std::string& mid() const { return mid_; } @@ -328,6 +326,8 @@ class JsepTransport { // `rtcp_dtls_transport_` is destroyed. The JsepTransportController will // receive the callback and update the aggregate transport states. std::function rtcp_mux_active_callback_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransport); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h index 099e24a45..aa5293475 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_collection.h @@ -18,7 +18,6 @@ #include #include -#include "api/jsep.h" #include "api/peer_connection_interface.h" #include "api/sequence_checker.h" #include "pc/jsep_transport.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc index e63742aae..b7e9f361b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.cc @@ -12,9 +12,9 @@ #include +#include #include #include -#include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h index ed4d20ba8..fb420090d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h +++ b/TMessagesProj/jni/voip/webrtc/pc/jsep_transport_controller.h @@ -17,7 +17,6 @@ #include #include #include -#include #include #include @@ -59,6 +58,7 @@ #include "pc/transport_stats.h" #include "rtc_base/callback_list.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/helpers.h" #include "rtc_base/ref_counted_object.h" @@ -136,7 +136,7 @@ class JsepTransportController : public sigslot::has_slots<> { // Factory for SCTP transports. SctpTransportFactoryInterface* sctp_factory = nullptr; - std::function on_dtls_handshake_error_; + std::function on_dtls_handshake_error_; }; // The ICE related events are fired on the `network_thread`. @@ -150,9 +150,6 @@ class JsepTransportController : public sigslot::has_slots<> { Config config); virtual ~JsepTransportController(); - JsepTransportController(const JsepTransportController&) = delete; - JsepTransportController& operator=(const JsepTransportController&) = delete; - // The main method to be called; applies a description at the transport // level, creating/destroying transport objects as needed and updating their // properties. This includes RTP, DTLS, and ICE (but not SCTP). 
At least not @@ -481,6 +478,8 @@ class JsepTransportController : public sigslot::has_slots<> { rtc::scoped_refptr certificate_; BundleManager bundles_; + + RTC_DISALLOW_COPY_AND_ASSIGN(JsepTransportController); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc index 2a3a69887..45cedfb05 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.cc @@ -14,7 +14,8 @@ #include #include -#include +#include +#include #include #include @@ -23,6 +24,7 @@ #include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/crypto_params.h" +#include "api/video_codecs/h264_profile_level_id.h" #include "media/base/codec.h" #include "media/base/media_constants.h" #include "media/base/sdp_video_format_utils.h" @@ -1109,25 +1111,6 @@ static Codecs MatchCodecPreference( return filtered_codecs; } -// Compute the union of `codecs1` and `codecs2`. -template -std::vector ComputeCodecsUnion(const std::vector& codecs1, - const std::vector& codecs2) { - std::vector all_codecs; - UsedPayloadTypes used_payload_types; - for (const C& codec : codecs1) { - C codec_mutable = codec; - used_payload_types.FindAndSetIdUsed(&codec_mutable); - all_codecs.push_back(codec_mutable); - } - - // Use MergeCodecs to merge the second half of our list as it already checks - // and fixes problems with duplicate payload types. - MergeCodecs(codecs2, &all_codecs, &used_payload_types); - - return all_codecs; -} - // Adds all extensions from `reference_extensions` to `offered_extensions` that // don't already exist in `offered_extensions` and ensure the IDs don't // collide. If an extension is added, it's also added to `regular_extensions` or @@ -1551,9 +1534,9 @@ MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( MediaSessionDescriptionFactory::MediaSessionDescriptionFactory( ChannelManager* channel_manager, - const TransportDescriptionFactory* transport_desc_factory) - : MediaSessionDescriptionFactory(transport_desc_factory, - &channel_manager->ssrc_generator()) { + const TransportDescriptionFactory* transport_desc_factory, + rtc::UniqueRandomIdGenerator* ssrc_generator) + : MediaSessionDescriptionFactory(transport_desc_factory, ssrc_generator) { channel_manager->GetSupportedAudioSendCodecs(&audio_send_codecs_); channel_manager->GetSupportedAudioReceiveCodecs(&audio_recv_codecs_); channel_manager->GetSupportedVideoSendCodecs(&video_send_codecs_); @@ -2713,9 +2696,7 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( } } } - // Add other supported video codecs. - VideoCodecs other_video_codecs; for (const VideoCodec& codec : supported_video_codecs) { if (FindMatchingCodec(supported_video_codecs, video_codecs, codec, nullptr) && @@ -2723,13 +2704,9 @@ bool MediaSessionDescriptionFactory::AddVideoContentForAnswer( filtered_codecs, codec, nullptr)) { // We should use the local codec with local parameters and the codec id // would be correctly mapped in `NegotiateCodecs`. 
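// Note: with ComputeCodecsUnion() removed above, the matching supported codecs are
// pushed straight into filtered_codecs (the `filtered_codecs.push_back(codec)` line
// that follows) instead of being merged via UsedPayloadTypes/MergeCodecs, so this
// path no longer re-maps duplicate payload IDs between the two lists.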
- other_video_codecs.push_back(codec); + filtered_codecs.push_back(codec); } } - - // Use ComputeCodecsUnion to avoid having duplicate payload IDs - filtered_codecs = - ComputeCodecsUnion(filtered_codecs, other_video_codecs); } if (session_options.raw_packetization_for_video) { @@ -2923,11 +2900,27 @@ void MediaSessionDescriptionFactory::ComputeAudioCodecsIntersectionAndUnion() { void MediaSessionDescriptionFactory::ComputeVideoCodecsIntersectionAndUnion() { video_sendrecv_codecs_.clear(); + all_video_codecs_.clear(); + // Compute the video codecs union. + for (const VideoCodec& send : video_send_codecs_) { + all_video_codecs_.push_back(send); + if (!FindMatchingCodec(video_send_codecs_, video_recv_codecs_, + send, nullptr)) { + // TODO(kron): This check is violated by the unit test: + // MediaSessionDescriptionFactoryTest.RtxWithoutApt + // Remove either the test or the check. - // Use ComputeCodecsUnion to avoid having duplicate payload IDs - all_video_codecs_ = - ComputeCodecsUnion(video_recv_codecs_, video_send_codecs_); - + // It doesn't make sense to have an RTX codec we support sending but not + // receiving. + // RTC_DCHECK(!IsRtxCodec(send)); + } + } + for (const VideoCodec& recv : video_recv_codecs_) { + if (!FindMatchingCodec(video_recv_codecs_, video_send_codecs_, + recv, nullptr)) { + all_video_codecs_.push_back(recv); + } + } // Use NegotiateCodecs to merge our codec lists, since the operation is // essentially the same. Put send_codecs as the offered_codecs, which is the // order we'd like to follow. The reasoning is that encoding is usually more diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_session.h b/TMessagesProj/jni/voip/webrtc/pc/media_session.h index aa24f015d..bb97f42b2 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_session.h @@ -141,7 +141,8 @@ class MediaSessionDescriptionFactory { // This helper automatically sets up the factory to get its configuration // from the specified ChannelManager. 
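// Illustrative sketch, not part of this patch: the constructor declared below now
// takes the SSRC generator explicitly, since ChannelManager no longer owns one.
// Assuming a `channel_manager`, a `transport_desc_factory` and a caller-owned
// generator:
//
//   rtc::UniqueRandomIdGenerator ssrc_generator;
//   cricket::MediaSessionDescriptionFactory factory(
//       channel_manager, &transport_desc_factory, &ssrc_generator);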
MediaSessionDescriptionFactory(ChannelManager* cmanager, - const TransportDescriptionFactory* factory); + const TransportDescriptionFactory* factory, + rtc::UniqueRandomIdGenerator* ssrc_generator); const AudioCodecs& audio_sendrecv_codecs() const; const AudioCodecs& audio_send_codecs() const; diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc index 011d8abf5..08a2a723d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.cc @@ -12,7 +12,7 @@ #include -#include +#include #include "rtc_base/checks.h" #include "rtc_base/ref_counted_object.h" @@ -37,13 +37,11 @@ rtc::scoped_refptr MediaStream::Create(const std::string& id) { MediaStream::MediaStream(const std::string& id) : id_(id) {} bool MediaStream::AddTrack(AudioTrackInterface* track) { - return AddTrack( - &audio_tracks_, rtc::scoped_refptr(track)); + return AddTrack(&audio_tracks_, track); } bool MediaStream::AddTrack(VideoTrackInterface* track) { - return AddTrack( - &video_tracks_, rtc::scoped_refptr(track)); + return AddTrack(&video_tracks_, track); } bool MediaStream::RemoveTrack(AudioTrackInterface* track) { @@ -58,7 +56,7 @@ rtc::scoped_refptr MediaStream::FindAudioTrack( const std::string& track_id) { AudioTrackVector::iterator it = FindTrack(&audio_tracks_, track_id); if (it == audio_tracks_.end()) - return nullptr; + return NULL; return *it; } @@ -66,17 +64,16 @@ rtc::scoped_refptr MediaStream::FindVideoTrack( const std::string& track_id) { VideoTrackVector::iterator it = FindTrack(&video_tracks_, track_id); if (it == video_tracks_.end()) - return nullptr; + return NULL; return *it; } template -bool MediaStream::AddTrack(TrackVector* tracks, - rtc::scoped_refptr track) { +bool MediaStream::AddTrack(TrackVector* tracks, Track* track) { typename TrackVector::iterator it = FindTrack(tracks, track->id()); if (it != tracks->end()) return false; - tracks->emplace_back(std::move((track))); + tracks->push_back(track); FireOnChanged(); return true; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h index 70e58f976..6f16bea1d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream.h @@ -44,7 +44,7 @@ class MediaStream : public Notifier { private: template - bool AddTrack(TrackVector* Tracks, rtc::scoped_refptr track); + bool AddTrack(TrackVector* Tracks, Track* track); template bool RemoveTrack(TrackVector* Tracks, MediaStreamTrackInterface* track); diff --git a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h index 2af3aedb2..f563137c7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/media_stream_track_proxy.h @@ -44,16 +44,15 @@ PROXY_PRIMARY_THREAD_DESTRUCTOR() BYPASS_PROXY_CONSTMETHOD0(std::string, kind) BYPASS_PROXY_CONSTMETHOD0(std::string, id) PROXY_SECONDARY_CONSTMETHOD0(TrackState, state) -PROXY_CONSTMETHOD0(bool, enabled) -PROXY_METHOD1(bool, set_enabled, bool) -PROXY_CONSTMETHOD0(ContentHint, content_hint) -PROXY_METHOD1(void, set_content_hint, ContentHint) +PROXY_SECONDARY_CONSTMETHOD0(bool, enabled) +PROXY_SECONDARY_METHOD1(bool, set_enabled, bool) +PROXY_SECONDARY_CONSTMETHOD0(ContentHint, content_hint) +PROXY_SECONDARY_METHOD1(void, set_content_hint, ContentHint) PROXY_SECONDARY_METHOD2(void, AddOrUpdateSink, rtc::VideoSinkInterface*, const 
rtc::VideoSinkWants&) PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) -PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc index a315ac510..885f5573d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.cc @@ -16,16 +16,14 @@ #include #include #include -#include -#include #include #include "absl/algorithm/container.h" #include "absl/strings/match.h" -#include "absl/strings/string_view.h" #include "api/jsep_ice_candidate.h" #include "api/rtp_parameters.h" #include "api/rtp_transceiver_direction.h" +#include "api/task_queue/queued_task.h" #include "api/transport/webrtc_key_value_config.h" #include "api/uma_metrics.h" #include "api/video/video_codec_constants.h" @@ -33,7 +31,6 @@ #include "call/packet_receiver.h" #include "media/base/media_channel.h" #include "media/base/media_config.h" -#include "media/base/media_engine.h" #include "media/base/rid_description.h" #include "media/base/stream_params.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" @@ -47,9 +44,7 @@ #include "pc/channel.h" #include "pc/ice_server_parsing.h" #include "pc/rtp_receiver.h" -#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" -#include "pc/rtp_sender_proxy.h" #include "pc/sctp_transport.h" #include "pc/simulcast_description.h" #include "pc/webrtc_session_description_factory.h" @@ -58,7 +53,6 @@ #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/net_helper.h" -#include "rtc_base/network.h" #include "rtc_base/network_constants.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/socket_address.h" @@ -303,6 +297,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( bool disable_ipv6_on_wifi; int max_ipv6_networks; bool disable_link_local_networks; + bool enable_rtp_data_channel; absl::optional screencast_min_bitrate; absl::optional combined_audio_video_bwe; absl::optional enable_dtls_srtp; @@ -424,12 +419,6 @@ RTCErrorOr> PeerConnection::Create( std::unique_ptr call, const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies) { - // TODO(https://crbug.com/webrtc/13528): Remove support for kPlanB. - if (configuration.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED) { - RTC_LOG(LS_WARNING) - << "PeerConnection constructed with legacy SDP semantics!"; - } - RTCError config_error = cricket::P2PTransportChannel::ValidateIceConfig( ParseIceConfig(configuration)); if (!config_error.ok()) { @@ -567,7 +556,6 @@ PeerConnection::~PeerConnection() { // port_allocator_ and transport_controller_ live on the network thread and // should be destroyed there. - transport_controller_copy_ = nullptr; network_thread()->Invoke(RTC_FROM_HERE, [this] { RTC_DCHECK_RUN_ON(network_thread()); TeardownDataChannelTransport_n(); @@ -616,28 +604,28 @@ RTCError PeerConnection::Initialize( } // Network thread initialization. - transport_controller_copy_ = - network_thread()->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(network_thread()); - network_thread_safety_ = PendingTaskSafetyFlag::Create(); - InitializePortAllocatorResult pa_result = InitializePortAllocator_n( - stun_servers, turn_servers, configuration); - // Send information about IPv4/IPv6 status. 
- PeerConnectionAddressFamilyCounter address_family = - pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4; - RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", - address_family, - kPeerConnectionAddressFamilyCounter_Max); - return InitializeTransportController_n(configuration, dependencies); - }); + network_thread()->Invoke(RTC_FROM_HERE, [this, &stun_servers, + &turn_servers, &configuration, + &dependencies] { + RTC_DCHECK_RUN_ON(network_thread()); + network_thread_safety_ = PendingTaskSafetyFlag::Create(); + InitializePortAllocatorResult pa_result = + InitializePortAllocator_n(stun_servers, turn_servers, configuration); + // Send information about IPv4/IPv6 status. + PeerConnectionAddressFamilyCounter address_family = + pa_result.enable_ipv6 ? kPeerConnection_IPv6 : kPeerConnection_IPv4; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.IPMetrics", address_family, + kPeerConnectionAddressFamilyCounter_Max); + InitializeTransportController_n(configuration, dependencies); + }); configuration_ = configuration; stats_ = std::make_unique(this); stats_collector_ = RTCStatsCollector::Create(this); - sdp_handler_ = SdpOfferAnswerHandler::Create(this, configuration, - dependencies, context_); + sdp_handler_ = + SdpOfferAnswerHandler::Create(this, configuration, dependencies); rtp_manager_ = std::make_unique( IsUnifiedPlan(), signaling_thread(), worker_thread(), channel_manager(), @@ -675,7 +663,7 @@ RTCError PeerConnection::Initialize( return RTCError::OK(); } -JsepTransportController* PeerConnection::InitializeTransportController_n( +void PeerConnection::InitializeTransportController_n( const RTCConfiguration& configuration, const PeerConnectionDependencies& dependencies) { JsepTransportController::Config config; @@ -794,7 +782,6 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( }); transport_controller_->SetIceConfig(ParseIceConfig(configuration)); - return transport_controller_.get(); } rtc::scoped_refptr PeerConnection::local_streams() { @@ -860,7 +847,12 @@ RTCErrorOr> PeerConnection::AddTrack( return sender_or_error; } -RTCError PeerConnection::RemoveTrackOrError( +bool PeerConnection::RemoveTrack(RtpSenderInterface* sender) { + TRACE_EVENT0("webrtc", "PeerConnection::RemoveTrack"); + return RemoveTrackNew(sender).ok(); +} + +RTCError PeerConnection::RemoveTrackNew( rtc::scoped_refptr sender) { RTC_DCHECK_RUN_ON(signaling_thread()); if (!sender) { @@ -916,12 +908,9 @@ PeerConnection::AddTransceiver( } RtpTransportInternal* PeerConnection::GetRtpTransport(const std::string& mid) { - // TODO(bugs.webrtc.org/9987): Avoid the thread jump. - // This might be done by caching the value on the signaling thread. RTC_DCHECK_RUN_ON(signaling_thread()); return network_thread()->Invoke( RTC_FROM_HERE, [this, &mid] { - RTC_DCHECK_RUN_ON(network_thread()); auto rtp_transport = transport_controller_->GetRtpTransport(mid); RTC_DCHECK(rtp_transport); return rtp_transport; @@ -1174,9 +1163,6 @@ bool PeerConnection::GetStats(StatsObserver* observer, RTC_LOG_THREAD_BLOCK_COUNT(); stats_->UpdateStats(level); - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(4); - // The StatsCollector is used to tell if a track is valid because it may // remember tracks that the PeerConnection previously removed. 
if (track && !stats_->IsValidTrack(track->id())) { @@ -1195,8 +1181,7 @@ void PeerConnection::GetStats(RTCStatsCollectorCallback* callback) { RTC_DCHECK(stats_collector_); RTC_DCHECK(callback); RTC_LOG_THREAD_BLOCK_COUNT(); - stats_collector_->GetStatsReport( - rtc::scoped_refptr(callback)); + stats_collector_->GetStatsReport(callback); } void PeerConnection::GetStats( @@ -1511,7 +1496,6 @@ RTCError PeerConnection::SetConfiguration( RTC_FROM_HERE, [this, needs_ice_restart, &ice_config, &stun_servers, &turn_servers, &modified_config, has_local_description] { - RTC_DCHECK_RUN_ON(network_thread()); // As described in JSEP, calling setConfiguration with new ICE // servers or candidate policy must set a "needs-ice-restart" bit so // that the next offer triggers an ICE restart which will pick up @@ -1534,12 +1518,9 @@ RTCError PeerConnection::SetConfiguration( if (configuration_.active_reset_srtp_params != modified_config.active_reset_srtp_params) { - // TODO(tommi): merge invokes - network_thread()->Invoke(RTC_FROM_HERE, [this, &modified_config] { - RTC_DCHECK_RUN_ON(network_thread()); - transport_controller_->SetActiveResetSrtpParams( - modified_config.active_reset_srtp_params); - }); + // TODO(tommi): move to the network thread - this hides an invoke. + transport_controller_->SetActiveResetSrtpParams( + modified_config.active_reset_srtp_params); } if (modified_config.allow_codec_switching.has_value()) { @@ -1666,10 +1647,6 @@ void PeerConnection::AddAdaptationResource( call_->AddAdaptationResource(resource); } -cricket::ChannelManager* PeerConnection::channel_manager() { - return context_->channel_manager(); -} - bool PeerConnection::StartRtcEventLog(std::unique_ptr output, int64_t output_period_ms) { return worker_thread()->Invoke( @@ -1702,13 +1679,7 @@ PeerConnection::LookupDtlsTransportByMid(const std::string& mid) { rtc::scoped_refptr PeerConnection::LookupDtlsTransportByMidInternal(const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - // TODO(bugs.webrtc.org/9987): Avoid the thread jump. - // This might be done by caching the value on the signaling thread. - return network_thread()->Invoke>( - RTC_FROM_HERE, [this, mid]() { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_->LookupDtlsTransportByMid(mid); - }); + return transport_controller_->LookupDtlsTransportByMid(mid); } rtc::scoped_refptr PeerConnection::GetSctpTransport() @@ -1808,7 +1779,6 @@ void PeerConnection::Close() { // TODO(tommi): ^^ That's not exactly optimal since this is yet another // blocking hop to the network thread during Close(). Further still, the // voice/video/data channels will be cleared on the worker thread. - RTC_DCHECK_RUN_ON(network_thread()); transport_controller_.reset(); port_allocator_->DiscardCandidatePool(); if (network_thread_safety_) { @@ -1968,6 +1938,8 @@ void PeerConnection::OnIceCandidateError(const std::string& address, return; } Observer()->OnIceCandidateError(address, port, url, error_code, error_text); + // Leftover not to break wpt test during migration to the new API. 
+ Observer()->OnIceCandidateError(address + ":", url, error_code, error_text); } void PeerConnection::OnIceCandidatesRemoved( @@ -2119,6 +2091,10 @@ bool PeerConnection::ReconfigurePortAllocator_n( stun_candidate_keepalive_interval); } +cricket::ChannelManager* PeerConnection::channel_manager() const { + return context_->channel_manager(); +} + bool PeerConnection::StartRtcEventLog_w( std::unique_ptr output, int64_t output_period_ms) { @@ -2136,10 +2112,11 @@ void PeerConnection::StopRtcEventLog_w() { } } -cricket::ChannelInterface* PeerConnection::GetChannel(const std::string& mid) { +cricket::ChannelInterface* PeerConnection::GetChannel( + const std::string& content_name) { for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); - if (channel && channel->mid() == mid) { + if (channel && channel->content_name() == content_name) { return channel; } } @@ -2162,26 +2139,13 @@ bool PeerConnection::GetSctpSslRole(rtc::SSLRole* role) { absl::optional dtls_role; if (sctp_mid_s_) { - dtls_role = network_thread()->Invoke>( - RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_->GetDtlsRole(*sctp_mid_n_); - }); + dtls_role = transport_controller_->GetDtlsRole(*sctp_mid_s_); if (!dtls_role && sdp_handler_->is_caller().has_value()) { - // This works fine if we are the offerer, but can be a mistake if - // we are the answerer and the remote offer is ACTIVE. In that - // case, we will guess the role wrong. - // TODO(bugs.webrtc.org/13668): Check if this actually happens. - RTC_LOG(LS_ERROR) - << "Possible risk: DTLS role guesser is active, is_caller is " - << *sdp_handler_->is_caller(); dtls_role = *sdp_handler_->is_caller() ? rtc::SSL_SERVER : rtc::SSL_CLIENT; } - if (dtls_role) { - *role = *dtls_role; - return true; - } + *role = *dtls_role; + return true; } return false; } @@ -2196,11 +2160,7 @@ bool PeerConnection::GetSslRole(const std::string& content_name, return false; } - auto dtls_role = network_thread()->Invoke>( - RTC_FROM_HERE, [this, content_name]() { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_->GetDtlsRole(content_name); - }); + auto dtls_role = transport_controller_->GetDtlsRole(content_name); if (dtls_role) { *role = *dtls_role; return true; @@ -2231,7 +2191,7 @@ std::vector PeerConnection::GetDataChannelStats() const { absl::optional PeerConnection::sctp_transport_name() const { RTC_DCHECK_RUN_ON(signaling_thread()); - if (sctp_mid_s_ && transport_controller_copy_) + if (sctp_mid_s_ && transport_controller_) return sctp_transport_name_s_; return absl::optional(); } @@ -2708,8 +2668,8 @@ void PeerConnection::ReportTransportStats() { media_types_by_transport_name; for (const auto& transceiver : rtp_manager()->transceivers()->UnsafeList()) { if (transceiver->internal()->channel()) { - std::string transport_name( - transceiver->internal()->channel()->transport_name()); + const std::string& transport_name = + transceiver->internal()->channel()->transport_name(); media_types_by_transport_name[transport_name].insert( transceiver->media_type()); } @@ -2869,8 +2829,7 @@ bool PeerConnection::OnTransportChanged( if (dtls_transport) { signaling_thread()->PostTask(ToQueuedTask( signaling_thread_safety_.flag(), - [this, - name = std::string(dtls_transport->internal()->transport_name())] { + [this, name = dtls_transport->internal()->transport_name()] { RTC_DCHECK_RUN_ON(signaling_thread()); sctp_transport_name_s_ = std::move(name); })); @@ 
-2897,7 +2856,7 @@ void PeerConnection::StartSctpTransport(int local_port, network_thread_safety_, [this, mid = *sctp_mid_s_, local_port, remote_port, max_message_size] { rtc::scoped_refptr sctp_transport = - transport_controller_n()->GetSctpTransport(mid); + transport_controller()->GetSctpTransport(mid); if (sctp_transport) sctp_transport->Start(local_port, remote_port, max_message_size); })); diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h index cd4af9e42..7326bccd1 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection.h @@ -116,7 +116,8 @@ namespace webrtc { // - The ICE state machine. // - Generating stats. class PeerConnection : public PeerConnectionInternal, - public JsepTransportController::Observer { + public JsepTransportController::Observer, + public sigslot::has_slots<> { public: // Creates a PeerConnection and initializes it with the given values. // If the initialization fails, the function releases the PeerConnection @@ -140,7 +141,8 @@ class PeerConnection : public PeerConnectionInternal, RTCErrorOr> AddTrack( rtc::scoped_refptr track, const std::vector& stream_ids) override; - RTCError RemoveTrackOrError( + bool RemoveTrack(RtpSenderInterface* sender) override; + RTCError RemoveTrackNew( rtc::scoped_refptr sender) override; RTCErrorOr> AddTransceiver( @@ -185,9 +187,6 @@ class PeerConnection : public PeerConnectionInternal, SignalingState signaling_state() override; IceConnectionState ice_connection_state() override; - IceConnectionState ice_connection_state_internal() override { - return ice_connection_state(); - } IceConnectionState standardized_ice_connection_state() override; PeerConnectionState peer_connection_state() override; IceGatheringState ice_gathering_state() override; @@ -266,6 +265,7 @@ class PeerConnection : public PeerConnectionInternal, return context_->signaling_thread(); } + // PeerConnectionInternal implementation. rtc::Thread* network_thread() const final { return context_->network_thread(); } @@ -277,7 +277,7 @@ class PeerConnection : public PeerConnectionInternal, bool initial_offerer() const override { RTC_DCHECK_RUN_ON(signaling_thread()); - return sdp_handler_->initial_offerer(); + return transport_controller_ && transport_controller_->initial_offerer(); } std::vector< @@ -312,81 +312,73 @@ class PeerConnection : public PeerConnectionInternal, bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) override; // Functions needed by DataChannelController - void NoteDataAddedEvent() override { NoteUsageEvent(UsageEvent::DATA_ADDED); } + void NoteDataAddedEvent() { NoteUsageEvent(UsageEvent::DATA_ADDED); } // Returns the observer. Will crash on CHECK if the observer is removed. - PeerConnectionObserver* Observer() const override; - bool IsClosed() const override { + PeerConnectionObserver* Observer() const; + bool IsClosed() const { RTC_DCHECK_RUN_ON(signaling_thread()); return !sdp_handler_ || sdp_handler_->signaling_state() == PeerConnectionInterface::kClosed; } // Get current SSL role used by SCTP's underlying transport. 
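// Note: with the network-thread Invoke removed from GetSctpSslRole() in
// peer_connection.cc above, the role is read via
// transport_controller_->GetDtlsRole(*sctp_mid_s_) on the signaling thread, with an
// is_caller() fallback; unlike the removed version, dtls_role is then dereferenced
// unconditionally, so this path assumes one of those two sources yielded a role.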
- bool GetSctpSslRole(rtc::SSLRole* role) override; + bool GetSctpSslRole(rtc::SSLRole* role); // Handler for the "channel closed" signal - void OnSctpDataChannelClosed(DataChannelInterface* channel) override; + void OnSctpDataChannelClosed(DataChannelInterface* channel); bool ShouldFireNegotiationNeededEvent(uint32_t event_id) override; // Functions needed by SdpOfferAnswerHandler - StatsCollector* stats() override { + StatsCollector* stats() { RTC_DCHECK_RUN_ON(signaling_thread()); return stats_.get(); } - DataChannelController* data_channel_controller() override { + DataChannelController* data_channel_controller() { RTC_DCHECK_RUN_ON(signaling_thread()); return &data_channel_controller_; } - bool dtls_enabled() const override { + bool dtls_enabled() const { RTC_DCHECK_RUN_ON(signaling_thread()); return dtls_enabled_; } - const PeerConnectionInterface::RTCConfiguration* configuration() - const override { + const PeerConnectionInterface::RTCConfiguration* configuration() const { RTC_DCHECK_RUN_ON(signaling_thread()); return &configuration_; } - PeerConnectionMessageHandler* message_handler() override { + PeerConnectionMessageHandler* message_handler() { RTC_DCHECK_RUN_ON(signaling_thread()); return &message_handler_; } - RtpTransmissionManager* rtp_manager() override { return rtp_manager_.get(); } - const RtpTransmissionManager* rtp_manager() const override { + RtpTransmissionManager* rtp_manager() { return rtp_manager_.get(); } + const RtpTransmissionManager* rtp_manager() const { return rtp_manager_.get(); } - cricket::ChannelManager* channel_manager(); + cricket::ChannelManager* channel_manager() const; - JsepTransportController* transport_controller_s() override { - RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_copy_; - } - JsepTransportController* transport_controller_n() override { - RTC_DCHECK_RUN_ON(network_thread()); + JsepTransportController* transport_controller() { return transport_controller_.get(); } - cricket::PortAllocator* port_allocator() override { - return port_allocator_.get(); - } - Call* call_ptr() override { return call_ptr_; } + cricket::PortAllocator* port_allocator() { return port_allocator_.get(); } + Call* call_ptr() { return call_ptr_; } ConnectionContext* context() { return context_.get(); } - const PeerConnectionFactoryInterface::Options* options() const override { + const PeerConnectionFactoryInterface::Options* options() const { return &options_; } - void SetIceConnectionState(IceConnectionState new_state) override; - void NoteUsageEvent(UsageEvent event) override; + void SetIceConnectionState(IceConnectionState new_state); + void NoteUsageEvent(UsageEvent event); // Asynchronously adds a remote candidate on the network thread. void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) override; + const cricket::Candidate& candidate); // Report the UMA metric SdpFormatReceived for the given remote description. void ReportSdpFormatReceived( - const SessionDescriptionInterface& remote_description) override; + const SessionDescriptionInterface& remote_description); // Report the UMA metric BundleUsage for the given remote description. 
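// Note: the `override` specifiers dropped throughout this header reflect that these
// helpers are no longer declared as virtuals on an internal interface; with this
// revert they are plain PeerConnection methods, matching DataChannelController's
// switch from PeerConnectionInternal to the concrete PeerConnection above.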
void ReportSdpBundleUsage( - const SessionDescriptionInterface& remote_description) override; + const SessionDescriptionInterface& remote_description); // Returns true if the PeerConnection is configured to use Unified Plan // semantics for creating offers/answers and setting local/remote @@ -394,34 +386,34 @@ class PeerConnection : public PeerConnectionInternal, // to the user. If this is false, Plan B semantics are assumed. // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once // sufficient time has passed. - bool IsUnifiedPlan() const override { + bool IsUnifiedPlan() const { RTC_DCHECK_RUN_ON(signaling_thread()); return is_unified_plan_; } bool ValidateBundleSettings( const cricket::SessionDescription* desc, const std::map& - bundle_groups_by_mid) override; + bundle_groups_by_mid); // Returns the MID for the data section associated with the // SCTP data channel, if it has been set. If no data // channels are configured this will return nullopt. - absl::optional GetDataMid() const override; + absl::optional GetDataMid() const; - void SetSctpDataMid(const std::string& mid) override; + void SetSctpDataMid(const std::string& mid); - void ResetSctpDataMid() override; + void ResetSctpDataMid(); // Asynchronously calls SctpTransport::Start() on the network thread for // `sctp_mid()` if set. Called as part of setting the local description. void StartSctpTransport(int local_port, int remote_port, - int max_message_size) override; + int max_message_size); // Returns the CryptoOptions for this PeerConnection. This will always // return the RTCConfiguration.crypto_options if set and will only default // back to the PeerConnectionFactory settings if nothing was set. - CryptoOptions GetCryptoOptions() override; + CryptoOptions GetCryptoOptions(); // Internal implementation for AddTransceiver family of methods. If // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. @@ -429,19 +421,19 @@ class PeerConnection : public PeerConnectionInternal, cricket::MediaType media_type, rtc::scoped_refptr track, const RtpTransceiverInit& init, - bool fire_callback = true) override; + bool fire_callback = true); // Returns rtp transport, result can not be nullptr. RtpTransportInternal* GetRtpTransport(const std::string& mid); // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by // this session. - bool SrtpRequired() const override; + bool SrtpRequired() const; - bool SetupDataChannelTransport_n(const std::string& mid) override + bool SetupDataChannelTransport_n(const std::string& mid) RTC_RUN_ON(network_thread()); - void TeardownDataChannelTransport_n() override RTC_RUN_ON(network_thread()); - cricket::ChannelInterface* GetChannel(const std::string& mid) + void TeardownDataChannelTransport_n() RTC_RUN_ON(network_thread()); + cricket::ChannelInterface* GetChannel(const std::string& content_name) RTC_RUN_ON(network_thread()); // Functions made public for testing. @@ -467,7 +459,7 @@ class PeerConnection : public PeerConnectionInternal, RTCError Initialize( const PeerConnectionInterface::RTCConfiguration& configuration, PeerConnectionDependencies dependencies); - JsepTransportController* InitializeTransportController_n( + void InitializeTransportController_n( const RTCConfiguration& configuration, const PeerConnectionDependencies& dependencies) RTC_RUN_ON(network_thread()); @@ -668,14 +660,9 @@ class PeerConnection : public PeerConnectionInternal, const std::string session_id_; - // The transport controller is set and used on the network thread. 
- // Some functions pass the value of the transport_controller_ pointer - // around as arguments while running on the signaling thread; these - // use the transport_controller_copy. - std::unique_ptr transport_controller_ - RTC_GUARDED_BY(network_thread()); - JsepTransportController* transport_controller_copy_ - RTC_GUARDED_BY(signaling_thread()) = nullptr; + std::unique_ptr + transport_controller_; // TODO(bugs.webrtc.org/9987): Accessed on both + // signaling and network thread. // `sctp_mid_` is the content name (MID) in SDP. // Note: this is used as the data channel MID by both SCTP and data channel diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc deleted file mode 100644 index b5a5f5231..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_adaptation_integrationtest.cc +++ /dev/null @@ -1,172 +0,0 @@ -/* - * Copyright 2020 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include - -#include -#include - -#include "absl/types/optional.h" -#include "api/adaptation/resource.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/media_stream_interface.h" -#include "api/peer_connection_interface.h" -#include "api/rtc_error.h" -#include "api/rtp_parameters.h" -#include "api/rtp_sender_interface.h" -#include "api/scoped_refptr.h" -#include "api/video/video_source_interface.h" -#include "call/adaptation/test/fake_resource.h" -#include "pc/test/fake_periodic_video_source.h" -#include "pc/test/fake_periodic_video_track_source.h" -#include "pc/test/peer_connection_test_wrapper.h" -#include "rtc_base/checks.h" -#include "rtc_base/gunit.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/thread.h" -#include "rtc_base/time_utils.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gtest.h" - -namespace webrtc { - -const int64_t kDefaultTimeoutMs = 5000; - -struct TrackWithPeriodicSource { - rtc::scoped_refptr track; - rtc::scoped_refptr periodic_track_source; -}; - -// Performs an O/A exchange and waits until the signaling state is stable again. -void Negotiate(rtc::scoped_refptr caller, - rtc::scoped_refptr callee) { - // Wire up callbacks and listeners such that a full O/A is performed in - // response to CreateOffer(). 
- PeerConnectionTestWrapper::Connect(caller.get(), callee.get()); - caller->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions()); - caller->WaitForNegotiation(); -} - -TrackWithPeriodicSource CreateTrackWithPeriodicSource( - rtc::scoped_refptr factory) { - FakePeriodicVideoSource::Config periodic_track_source_config; - periodic_track_source_config.frame_interval_ms = 100; - periodic_track_source_config.timestamp_offset_ms = rtc::TimeMillis(); - rtc::scoped_refptr periodic_track_source = - rtc::make_ref_counted( - periodic_track_source_config, /* remote */ false); - TrackWithPeriodicSource track_with_source; - track_with_source.track = - factory->CreateVideoTrack("PeriodicTrack", periodic_track_source); - track_with_source.periodic_track_source = periodic_track_source; - return track_with_source; -} - -// Triggers overuse and obtains VideoSinkWants. Adaptation processing happens in -// parallel and this function makes no guarantee that the returnd VideoSinkWants -// have yet to reflect the overuse signal. Used together with EXPECT_TRUE_WAIT -// to "spam overuse until a change is observed". -rtc::VideoSinkWants TriggerOveruseAndGetSinkWants( - rtc::scoped_refptr fake_resource, - const FakePeriodicVideoSource& source) { - fake_resource->SetUsageState(ResourceUsageState::kOveruse); - return source.wants(); -} - -class PeerConnectionAdaptationIntegrationTest : public ::testing::Test { - public: - PeerConnectionAdaptationIntegrationTest() - : virtual_socket_server_(), - network_thread_(new rtc::Thread(&virtual_socket_server_)), - worker_thread_(rtc::Thread::Create()) { - RTC_CHECK(network_thread_->Start()); - RTC_CHECK(worker_thread_->Start()); - } - - rtc::scoped_refptr CreatePcWrapper( - const char* name) { - rtc::scoped_refptr pc_wrapper = - rtc::make_ref_counted( - name, network_thread_.get(), worker_thread_.get()); - PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = SdpSemantics::kUnifiedPlan; - EXPECT_TRUE(pc_wrapper->CreatePc(config, CreateBuiltinAudioEncoderFactory(), - CreateBuiltinAudioDecoderFactory())); - return pc_wrapper; - } - - protected: - rtc::VirtualSocketServer virtual_socket_server_; - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; -}; - -TEST_F(PeerConnectionAdaptationIntegrationTest, - ResouceInjectedAfterNegotiationCausesReductionInResolution) { - auto caller_wrapper = CreatePcWrapper("caller"); - auto caller = caller_wrapper->pc(); - auto callee_wrapper = CreatePcWrapper("callee"); - - // Adding a track and negotiating ensures that a VideoSendStream exists. - TrackWithPeriodicSource track_with_source = - CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); - auto sender = caller->AddTrack(track_with_source.track, {}).value(); - Negotiate(caller_wrapper, callee_wrapper); - // Prefer degrading resolution. - auto parameters = sender->GetParameters(); - parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; - sender->SetParameters(parameters); - - const auto& source = - track_with_source.periodic_track_source->fake_periodic_source(); - int pixel_count_before_overuse = source.wants().max_pixel_count; - - // Inject a fake resource and spam kOveruse until resolution becomes limited. 
- auto fake_resource = FakeResource::Create("FakeResource"); - caller->AddAdaptationResource(fake_resource); - EXPECT_TRUE_WAIT( - TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < - pixel_count_before_overuse, - kDefaultTimeoutMs); -} - -TEST_F(PeerConnectionAdaptationIntegrationTest, - ResouceInjectedBeforeNegotiationCausesReductionInResolution) { - auto caller_wrapper = CreatePcWrapper("caller"); - auto caller = caller_wrapper->pc(); - auto callee_wrapper = CreatePcWrapper("callee"); - - // Inject a fake resource before adding any tracks or negotiating. - auto fake_resource = FakeResource::Create("FakeResource"); - caller->AddAdaptationResource(fake_resource); - - // Adding a track and negotiating ensures that a VideoSendStream exists. - TrackWithPeriodicSource track_with_source = - CreateTrackWithPeriodicSource(caller_wrapper->pc_factory()); - auto sender = caller->AddTrack(track_with_source.track, {}).value(); - Negotiate(caller_wrapper, callee_wrapper); - // Prefer degrading resolution. - auto parameters = sender->GetParameters(); - parameters.degradation_preference = DegradationPreference::MAINTAIN_FRAMERATE; - sender->SetParameters(parameters); - - const auto& source = - track_with_source.periodic_track_source->fake_periodic_source(); - int pixel_count_before_overuse = source.wants().max_pixel_count; - - // Spam kOveruse until resolution becomes limited. - EXPECT_TRUE_WAIT( - TriggerOveruseAndGetSinkWants(fake_resource, source).max_pixel_count < - pixel_count_before_overuse, - kDefaultTimeoutMs); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc index 262ca90cd..3ab969dc3 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.cc @@ -10,7 +10,7 @@ #include "pc/peer_connection_factory.h" -#include +#include #include #include "absl/strings/match.h" @@ -105,8 +105,7 @@ PeerConnectionFactory::PeerConnectionFactory( transport_controller_send_factory_( (dependencies->transport_controller_send_factory) ? 
std::move(dependencies->transport_controller_send_factory) - : std::make_unique()), - metronome_(std::move(dependencies->metronome)) {} + : std::make_unique()) {} PeerConnectionFactory::PeerConnectionFactory( PeerConnectionFactoryDependencies dependencies) @@ -285,8 +284,7 @@ rtc::scoped_refptr PeerConnectionFactory::CreateAudioTrack( const std::string& id, AudioSourceInterface* source) { RTC_DCHECK(signaling_thread()->IsCurrent()); - rtc::scoped_refptr track( - AudioTrack::Create(id, rtc::scoped_refptr(source))); + rtc::scoped_refptr track(AudioTrack::Create(id, source)); return AudioTrackProxy::Create(signaling_thread(), track); } @@ -349,7 +347,6 @@ std::unique_ptr PeerConnectionFactory::CreateCall_w( call_config.trials = &trials(); call_config.rtp_transport_controller_send_factory = transport_controller_send_factory_.get(); - call_config.metronome = metronome_.get(); return std::unique_ptr( context_->call_factory()->CreateCall(call_config)); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h index f09ca66e6..4946ec6ea 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_factory.h @@ -23,7 +23,6 @@ #include "api/fec_controller.h" #include "api/media_stream_interface.h" #include "api/media_types.h" -#include "api/metronome/metronome.h" #include "api/neteq/neteq_factory.h" #include "api/network_state_predictor.h" #include "api/peer_connection_interface.h" @@ -153,7 +152,6 @@ class PeerConnectionFactory : public PeerConnectionFactoryInterface { std::unique_ptr neteq_factory_; const std::unique_ptr transport_controller_send_factory_; - std::unique_ptr metronome_; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h index 16caade6c..6f9761291 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_internal.h @@ -19,121 +19,20 @@ #include "api/peer_connection_interface.h" #include "call/call.h" -#include "pc/jsep_transport_controller.h" -#include "pc/peer_connection_message_handler.h" #include "pc/rtp_transceiver.h" -#include "pc/rtp_transmission_manager.h" #include "pc/sctp_data_channel.h" namespace webrtc { -class DataChannelController; -class StatsCollector; - -// This interface defines the functions that are needed for -// SdpOfferAnswerHandler to access PeerConnection internal state. -class PeerConnectionSdpMethods { - public: - virtual ~PeerConnectionSdpMethods() = default; - - // The SDP session ID as defined by RFC 3264. - virtual std::string session_id() const = 0; - - // Returns true if the ICE restart flag above was set, and no ICE restart has - // occurred yet for this transport (by applying a local description with - // changed ufrag/password). If the transport has been deleted as a result of - // bundling, returns false. - virtual bool NeedsIceRestart(const std::string& content_name) const = 0; - - virtual absl::optional sctp_mid() const = 0; - - // Functions below this comment are known to only be accessed - // from SdpOfferAnswerHandler. - // Return a pointer to the active configuration. - virtual const PeerConnectionInterface::RTCConfiguration* configuration() - const = 0; - - // Report the UMA metric SdpFormatReceived for the given remote description. 
- virtual void ReportSdpFormatReceived( - const SessionDescriptionInterface& remote_description) = 0; - - // Report the UMA metric BundleUsage for the given remote description. - virtual void ReportSdpBundleUsage( - const SessionDescriptionInterface& remote_description) = 0; - - virtual PeerConnectionMessageHandler* message_handler() = 0; - virtual RtpTransmissionManager* rtp_manager() = 0; - virtual const RtpTransmissionManager* rtp_manager() const = 0; - virtual bool dtls_enabled() const = 0; - virtual const PeerConnectionFactoryInterface::Options* options() const = 0; - - // Returns the CryptoOptions for this PeerConnection. This will always - // return the RTCConfiguration.crypto_options if set and will only default - // back to the PeerConnectionFactory settings if nothing was set. - virtual CryptoOptions GetCryptoOptions() = 0; - virtual JsepTransportController* transport_controller_s() = 0; - virtual JsepTransportController* transport_controller_n() = 0; - virtual DataChannelController* data_channel_controller() = 0; - virtual cricket::PortAllocator* port_allocator() = 0; - virtual StatsCollector* stats() = 0; - // Returns the observer. Will crash on CHECK if the observer is removed. - virtual PeerConnectionObserver* Observer() const = 0; - virtual bool GetSctpSslRole(rtc::SSLRole* role) = 0; - virtual PeerConnectionInterface::IceConnectionState - ice_connection_state_internal() = 0; - virtual void SetIceConnectionState( - PeerConnectionInterface::IceConnectionState new_state) = 0; - virtual void NoteUsageEvent(UsageEvent event) = 0; - virtual bool IsClosed() const = 0; - // Returns true if the PeerConnection is configured to use Unified Plan - // semantics for creating offers/answers and setting local/remote - // descriptions. If this is true the RtpTransceiver API will also be available - // to the user. If this is false, Plan B semantics are assumed. - // TODO(bugs.webrtc.org/8530): Flip the default to be Unified Plan once - // sufficient time has passed. - virtual bool IsUnifiedPlan() const = 0; - virtual bool ValidateBundleSettings( - const cricket::SessionDescription* desc, - const std::map& - bundle_groups_by_mid) = 0; - - virtual absl::optional GetDataMid() const = 0; - // Internal implementation for AddTransceiver family of methods. If - // `fire_callback` is set, fires OnRenegotiationNeeded callback if successful. - virtual RTCErrorOr> - AddTransceiver(cricket::MediaType media_type, - rtc::scoped_refptr track, - const RtpTransceiverInit& init, - bool fire_callback = true) = 0; - // Asynchronously calls SctpTransport::Start() on the network thread for - // `sctp_mid()` if set. Called as part of setting the local description. - virtual void StartSctpTransport(int local_port, - int remote_port, - int max_message_size) = 0; - - // Asynchronously adds a remote candidate on the network thread. - virtual void AddRemoteCandidate(const std::string& mid, - const cricket::Candidate& candidate) = 0; - - virtual Call* call_ptr() = 0; - // Returns true if SRTP (either using DTLS-SRTP or SDES) is required by - // this session. - virtual bool SrtpRequired() const = 0; - virtual bool SetupDataChannelTransport_n(const std::string& mid) = 0; - virtual void TeardownDataChannelTransport_n() = 0; - virtual void SetSctpDataMid(const std::string& mid) = 0; - virtual void ResetSctpDataMid() = 0; -}; - -// Functions defined in this class are called by other objects, -// but not by SdpOfferAnswerHandler. 
-class PeerConnectionInternal : public PeerConnectionInterface, - public PeerConnectionSdpMethods, - public sigslot::has_slots<> { +// Internal interface for extra PeerConnection methods. +class PeerConnectionInternal : public PeerConnectionInterface { public: virtual rtc::Thread* network_thread() const = 0; virtual rtc::Thread* worker_thread() const = 0; + // The SDP session ID as defined by RFC 3264. + virtual std::string session_id() const = 0; + // Returns true if we were the initial offerer. virtual bool initial_offerer() const = 0; @@ -151,6 +50,7 @@ class PeerConnectionInternal : public PeerConnectionInterface, } virtual absl::optional sctp_transport_name() const = 0; + virtual absl::optional sctp_mid() const = 0; virtual cricket::CandidateStatsList GetPooledCandidateStats() const = 0; @@ -171,13 +71,15 @@ class PeerConnectionInternal : public PeerConnectionInterface, // Returns true if there was an ICE restart initiated by the remote offer. virtual bool IceRestartPending(const std::string& content_name) const = 0; + // Returns true if the ICE restart flag above was set, and no ICE restart has + // occurred yet for this transport (by applying a local description with + // changed ufrag/password). If the transport has been deleted as a result of + // bundling, returns false. + virtual bool NeedsIceRestart(const std::string& content_name) const = 0; + // Get SSL role for an arbitrary m= section (handles bundling correctly). virtual bool GetSslRole(const std::string& content_name, rtc::SSLRole* role) = 0; - // Functions needed by DataChannelController - virtual void NoteDataAddedEvent() {} - // Handler for the "channel closed" signal - virtual void OnSctpDataChannelClosed(DataChannelInterface* channel) {} }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc index 77db3e45e..54f75f00a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_message_handler.cc @@ -10,7 +10,6 @@ #include "pc/peer_connection_message_handler.h" -#include #include #include "api/jsep.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h index 643602971..7601c9d05 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_proxy.h @@ -35,9 +35,8 @@ PROXY_METHOD2(RTCErrorOr>, AddTrack, rtc::scoped_refptr, const std::vector&) -PROXY_METHOD1(RTCError, - RemoveTrackOrError, - rtc::scoped_refptr) +PROXY_METHOD1(bool, RemoveTrack, RtpSenderInterface*) +PROXY_METHOD1(RTCError, RemoveTrackNew, rtc::scoped_refptr) PROXY_METHOD1(RTCErrorOr>, AddTransceiver, rtc::scoped_refptr) diff --git a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc index 6a7a30edc..3b4d28f0d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/peer_connection_wrapper.cc @@ -12,6 +12,8 @@ #include +#include +#include #include #include @@ -166,7 +168,8 @@ bool PeerConnectionWrapper::SetRemoteDescription( bool PeerConnectionWrapper::SetRemoteDescription( std::unique_ptr desc, RTCError* error_out) { - auto observer = rtc::make_ref_counted(); + rtc::scoped_refptr observer = + new FakeSetRemoteDescriptionObserver(); pc()->SetRemoteDescription(std::move(desc), observer); 
EXPECT_EQ_WAIT(true, observer->called(), kDefaultTimeout); bool ok = observer->error().ok(); diff --git a/TMessagesProj/jni/voip/webrtc/pc/proxy.h b/TMessagesProj/jni/voip/webrtc/pc/proxy.h index e48f47918..565ae8017 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/proxy.h @@ -59,8 +59,6 @@ #ifndef PC_PROXY_H_ #define PC_PROXY_H_ -#include - #include #include #include @@ -71,7 +69,6 @@ #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/event.h" -#include "rtc_base/location.h" #include "rtc_base/message_handler.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/string_utils.h" @@ -205,66 +202,61 @@ class ConstMethodCall : public QueuedTask { #define PROXY_STRINGIZE(x) PROXY_STRINGIZE_IMPL(x) // Helper macros to reduce code duplication. -#define PROXY_MAP_BOILERPLATE(class_name) \ - template \ - class class_name##ProxyWithInternal; \ - typedef class_name##ProxyWithInternal \ - class_name##Proxy; \ - template \ - class class_name##ProxyWithInternal : public class_name##Interface { \ - protected: \ - static constexpr char proxy_name_[] = #class_name "Proxy"; \ - typedef class_name##Interface C; \ - \ - public: \ - const INTERNAL_CLASS* internal() const { return c(); } \ - INTERNAL_CLASS* internal() { return c(); } +#define PROXY_MAP_BOILERPLATE(c) \ + template \ + class c##ProxyWithInternal; \ + typedef c##ProxyWithInternal c##Proxy; \ + template \ + class c##ProxyWithInternal : public c##Interface { \ + protected: \ + static constexpr char proxy_name_[] = #c "Proxy"; \ + typedef c##Interface C; \ + \ + public: \ + const INTERNAL_CLASS* internal() const { return c_; } \ + INTERNAL_CLASS* internal() { return c_; } // clang-format off // clang-format would put the semicolon alone, // leading to a presubmit error (cpplint.py) -#define END_PROXY_MAP(class_name) \ - }; \ - template \ - constexpr char class_name##ProxyWithInternal::proxy_name_[]; +#define END_PROXY_MAP(c) \ + }; \ + template \ + constexpr char c##ProxyWithInternal::proxy_name_[]; // clang-format on -#define PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ - protected: \ - class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ - INTERNAL_CLASS* c) \ - : primary_thread_(primary_thread), c_(c) {} \ - \ - private: \ +#define PRIMARY_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* primary_thread, INTERNAL_CLASS* c) \ + : primary_thread_(primary_thread), c_(c) {} \ + \ + private: \ mutable rtc::Thread* primary_thread_; -#define SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ - protected: \ - class_name##ProxyWithInternal(rtc::Thread* primary_thread, \ - rtc::Thread* secondary_thread, \ - INTERNAL_CLASS* c) \ - : primary_thread_(primary_thread), \ - secondary_thread_(secondary_thread), \ - c_(c) {} \ - \ - private: \ - mutable rtc::Thread* primary_thread_; \ +#define SECONDARY_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + c##ProxyWithInternal(rtc::Thread* primary_thread, \ + rtc::Thread* secondary_thread, INTERNAL_CLASS* c) \ + : primary_thread_(primary_thread), \ + secondary_thread_(secondary_thread), \ + c_(c) {} \ + \ + private: \ + mutable rtc::Thread* primary_thread_; \ mutable rtc::Thread* secondary_thread_; // Note that the destructor is protected so that the proxy can only be // destroyed via RefCountInterface. 
-#define REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ - protected: \ - ~class_name##ProxyWithInternal() { \ - MethodCall call( \ - this, &class_name##ProxyWithInternal::DestroyInternal); \ - call.Marshal(RTC_FROM_HERE, destructor_thread()); \ - } \ - \ - private: \ - const INTERNAL_CLASS* c() const { return c_.get(); } \ - INTERNAL_CLASS* c() { return c_.get(); } \ - void DestroyInternal() { c_ = nullptr; } \ +#define REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + protected: \ + ~c##ProxyWithInternal() { \ + MethodCall call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { c_ = nullptr; } \ rtc::scoped_refptr c_; // Note: This doesn't use a unique_ptr, because it intends to handle a corner @@ -272,54 +264,50 @@ class ConstMethodCall : public QueuedTask { // this proxy object. If relying on a unique_ptr to delete the object, its // inner pointer would be set to null before this reentrant callback would have // a chance to run, resulting in a segfault. -#define OWNED_PROXY_MAP_BOILERPLATE(class_name) \ - public: \ - ~class_name##ProxyWithInternal() { \ - MethodCall call( \ - this, &class_name##ProxyWithInternal::DestroyInternal); \ - call.Marshal(RTC_FROM_HERE, destructor_thread()); \ - } \ - \ - private: \ - const INTERNAL_CLASS* c() const { return c_; } \ - INTERNAL_CLASS* c() { return c_; } \ - void DestroyInternal() { delete c_; } \ +#define OWNED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + ~c##ProxyWithInternal() { \ + MethodCall call( \ + this, &c##ProxyWithInternal::DestroyInternal); \ + call.Marshal(RTC_FROM_HERE, destructor_thread()); \ + } \ + \ + private: \ + void DestroyInternal() { delete c_; } \ INTERNAL_CLASS* c_; -#define BEGIN_PRIMARY_PROXY_MAP(class_name) \ - PROXY_MAP_BOILERPLATE(class_name) \ - PRIMARY_PROXY_MAP_BOILERPLATE(class_name) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ - public: \ - static rtc::scoped_refptr Create( \ - rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \ - return rtc::make_ref_counted( \ - primary_thread, c); \ +#define BEGIN_PRIMARY_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + PRIMARY_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* primary_thread, INTERNAL_CLASS* c) { \ + return rtc::make_ref_counted(primary_thread, c); \ } -#define BEGIN_PROXY_MAP(class_name) \ - PROXY_MAP_BOILERPLATE(class_name) \ - SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ - REFCOUNTED_PROXY_MAP_BOILERPLATE(class_name) \ +#define BEGIN_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + SECONDARY_PROXY_MAP_BOILERPLATE(c) \ + REFCOUNTED_PROXY_MAP_BOILERPLATE(c) \ + public: \ + static rtc::scoped_refptr Create( \ + rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ + INTERNAL_CLASS* c) { \ + return rtc::make_ref_counted(primary_thread, \ + secondary_thread, c); \ + } + +#define BEGIN_OWNED_PROXY_MAP(c) \ + PROXY_MAP_BOILERPLATE(c) \ + SECONDARY_PROXY_MAP_BOILERPLATE(c) \ + OWNED_PROXY_MAP_BOILERPLATE(c) \ public: \ - static rtc::scoped_refptr Create( \ + static std::unique_ptr Create( \ rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ - INTERNAL_CLASS* c) { \ - return rtc::make_ref_counted( \ - primary_thread, secondary_thread, c); \ - } - -#define BEGIN_OWNED_PROXY_MAP(class_name) \ - PROXY_MAP_BOILERPLATE(class_name) \ - SECONDARY_PROXY_MAP_BOILERPLATE(class_name) \ - OWNED_PROXY_MAP_BOILERPLATE(class_name) \ - public: \ - static std::unique_ptr Create( \ - 
rtc::Thread* primary_thread, rtc::Thread* secondary_thread, \ - std::unique_ptr c) { \ - return std::unique_ptr( \ - new class_name##ProxyWithInternal(primary_thread, secondary_thread, \ - c.release())); \ + std::unique_ptr c) { \ + return std::unique_ptr(new c##ProxyWithInternal( \ + primary_thread, secondary_thread, c.release())); \ } #define PROXY_PRIMARY_THREAD_DESTRUCTOR() \ @@ -351,124 +339,124 @@ class ConstMethodCall : public QueuedTask { #define PROXY_METHOD0(r, method) \ r method() override { \ TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method); \ + MethodCall call(c_, &C::method); \ return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } #define PROXY_CONSTMETHOD0(r, method) \ r method() const override { \ TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method); \ + ConstMethodCall call(c_, &C::method); \ return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_METHOD1(r, method, t1) \ - r method(t1 a1) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_CONSTMETHOD1(r, method, t1) \ - r method(t1 a1) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_METHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_METHOD3(r, method, t1, t2, t3) \ - r method(t1 a1, t2 a2, t3 a3) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ - r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2), std::move(a3), \ - std::move(a4)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD4(r, method, t1, t2, t3, t4) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } -#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ - r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ - 
TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2), std::move(a3), \ - std::move(a4), std::move(a5)); \ - return call.Marshal(RTC_FROM_HERE, primary_thread_); \ +#define PROXY_METHOD5(r, method, t1, t2, t3, t4, t5) \ + r method(t1 a1, t2 a2, t3 a3, t4 a4, t5 a5) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3), \ + std::move(a4), std::move(a5)); \ + return call.Marshal(RTC_FROM_HERE, primary_thread_); \ } // Define methods which should be invoked on the secondary thread. #define PROXY_SECONDARY_METHOD0(r, method) \ r method() override { \ TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method); \ + MethodCall call(c_, &C::method); \ return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } #define PROXY_SECONDARY_CONSTMETHOD0(r, method) \ r method() const override { \ TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method); \ + ConstMethodCall call(c_, &C::method); \ return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_SECONDARY_METHOD1(r, method, t1) \ - r method(t1 a1) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD1(r, method, t1) \ + r method(t1 a1) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \ - r method(t1 a1) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method, std::move(a1)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_CONSTMETHOD1(r, method, t1) \ + r method(t1 a1) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c_, &C::method, std::move(a1)); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) override { \ - TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ +#define PROXY_SECONDARY_METHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \ - r method(t1 a1, t2 a2) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ - } - -#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \ - r method(t1 a1, t2 a2, t3 a3) override { \ +#define PROXY_SECONDARY_CONSTMETHOD2(r, method, t1, t2) \ + r method(t1 a1, t2 a2) const override { \ TRACE_BOILERPLATE(method); \ - MethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ + ConstMethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2)); \ return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } -#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \ - r method(t1 a1, t2 a2, t3 a3) const override { \ - TRACE_BOILERPLATE(method); \ - ConstMethodCall call(c(), &C::method, std::move(a1), \ - std::move(a2), std::move(a3)); \ - return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ 
+#define PROXY_SECONDARY_METHOD3(r, method, t1, t2, t3) \ + r method(t1 a1, t2 a2, t3 a3) override { \ + TRACE_BOILERPLATE(method); \ + MethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ + } + +#define PROXY_SECONDARY_CONSTMETHOD3(r, method, t1, t2) \ + r method(t1 a1, t2 a2, t3 a3) const override { \ + TRACE_BOILERPLATE(method); \ + ConstMethodCall call(c_, &C::method, std::move(a1), \ + std::move(a2), std::move(a3)); \ + return call.Marshal(RTC_FROM_HERE, secondary_thread_); \ } // For use when returning purely const state (set during construction). diff --git a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc index 781e4512b..dc890e737 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/remote_audio_source.cc @@ -13,7 +13,6 @@ #include #include -#include #include "absl/algorithm/container.h" #include "api/scoped_refptr.h" @@ -56,7 +55,7 @@ RemoteAudioSource::RemoteAudioSource( : main_thread_(rtc::Thread::Current()), worker_thread_(worker_thread), on_audio_channel_gone_action_(on_audio_channel_gone_action), - state_(MediaSourceInterface::kInitializing) { + state_(MediaSourceInterface::kLive) { RTC_DCHECK(main_thread_); RTC_DCHECK(worker_thread_); } @@ -135,6 +134,11 @@ void RemoteAudioSource::AddSink(AudioTrackSinkInterface* sink) { RTC_DCHECK_RUN_ON(main_thread_); RTC_DCHECK(sink); + if (state_ != MediaSourceInterface::kLive) { + RTC_LOG(LS_ERROR) << "Can't register sink as the source isn't live."; + return; + } + MutexLock lock(&sink_lock_); RTC_DCHECK(!absl::c_linear_search(sinks_, sink)); sinks_.push_back(sink); diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc index 13c62ec97..025feb929 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.cc @@ -10,28 +10,26 @@ #include "pc/rtc_stats_collector.h" -#include #include +#include #include #include #include #include -#include #include #include -#include "absl/strings/string_view.h" #include "api/array_view.h" #include "api/candidate.h" -#include "api/dtls_transport_interface.h" #include "api/media_stream_interface.h" #include "api/rtp_parameters.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" #include "api/sequence_checker.h" #include "api/stats/rtc_stats.h" #include "api/stats/rtcstats_objects.h" #include "api/task_queue/queued_task.h" -#include "api/units/time_delta.h" #include "api/video/video_content_type.h" #include "common_video/include/quality_limitation_reason.h" #include "media/base/media_channel.h" @@ -39,6 +37,7 @@ #include "modules/rtp_rtcp/include/report_block_data.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" #include "p2p/base/connection_info.h" +#include "p2p/base/dtls_transport_internal.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" #include "p2p/base/port.h" @@ -46,8 +45,6 @@ #include "pc/channel_interface.h" #include "pc/data_channel_utils.h" #include "pc/rtc_stats_traversal.h" -#include "pc/rtp_receiver_proxy.h" -#include "pc/rtp_sender_proxy.h" #include "pc/webrtc_sdp.h" #include "rtc_base/checks.h" #include "rtc_base/ip_address.h" @@ -272,12 +269,9 @@ std::map QualityLimitationDurationToRTCQualityLimitationDuration( std::map durations_ms) { std::map result; - // The internal 
duration is defined in milliseconds while the spec defines - // the value in seconds: - // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationdurations for (const auto& elem : durations_ms) { result[QualityLimitationReasonToRTCQualityLimitationReason(elem.first)] = - elem.second / static_cast(rtc::kNumMillisecsPerSec); + elem.second; } return result; } @@ -736,7 +730,6 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, NetworkAdapterTypeToStatsType(candidate.network_type()); const std::string& candidate_type = candidate.type(); const std::string& relay_protocol = candidate.relay_protocol(); - const std::string& url = candidate.url(); if (candidate_type == cricket::RELAY_PORT_TYPE || (candidate_type == cricket::PRFLX_PORT_TYPE && !relay_protocol.empty())) { @@ -744,13 +737,6 @@ const std::string& ProduceIceCandidateStats(int64_t timestamp_us, relay_protocol.compare("tcp") == 0 || relay_protocol.compare("tls") == 0); candidate_stats->relay_protocol = relay_protocol; - if (!url.empty()) { - candidate_stats->url = url; - } - } else if (candidate_type == cricket::STUN_PORT_TYPE) { - if (!url.empty()) { - candidate_stats->url = url; - } } } else { // We don't expect to know the adapter type of remote candidates. @@ -985,7 +971,7 @@ void ProduceSenderMediaTrackStats( if (sender_info) { voice_sender_info = sender_info; } else { - RTC_DLOG(LS_INFO) + RTC_LOG(LS_INFO) << "RTCStatsCollector: No voice sender info for sender with ssrc " << sender->ssrc(); } @@ -1013,8 +999,8 @@ void ProduceSenderMediaTrackStats( if (sender_info) { video_sender_info = sender_info; } else { - RTC_DLOG(LS_INFO) - << "No video sender info for sender with ssrc " << sender->ssrc(); + RTC_LOG(LS_INFO) << "No video sender info for sender with ssrc " + << sender->ssrc(); } } std::unique_ptr video_track_stats = @@ -1238,8 +1224,7 @@ void RTCStatsCollector::GetStatsReportInternal( std::vector requests_; }; signaling_thread_->PostTask(std::make_unique( - rtc::scoped_refptr(this), cached_report_, - std::move(requests))); + this, cached_report_, std::move(requests))); } else if (!num_pending_partial_reports_) { // Only start gathering stats if we're not already gathering stats. 
In the // case of already gathering stats, `callback_` will be invoked when there @@ -1263,6 +1248,7 @@ void RTCStatsCollector::GetStatsReportInternal( network_report_event_.Reset(); rtc::scoped_refptr collector(this); network_thread_->PostTask( + RTC_FROM_HERE, [collector, sctp_transport_name = pc_->sctp_transport_name(), timestamp_us]() mutable { collector->ProducePartialResultsOnNetworkThread( @@ -1351,7 +1337,7 @@ void RTCStatsCollector::ProducePartialResultsOnNetworkThread( network_report_event_.Set(); rtc::scoped_refptr collector(this); signaling_thread_->PostTask( - [collector] { collector->MergeNetworkReport_s(); }); + RTC_FROM_HERE, [collector] { collector->MergeNetworkReport_s(); }); } void RTCStatsCollector::ProducePartialResultsOnNetworkThreadImpl( @@ -1674,16 +1660,14 @@ void RTCStatsCollector::ProduceMediaStreamTrackStats_s( for (const RtpTransceiverStatsInfo& stats : transceiver_stats_infos_) { std::vector> senders; for (const auto& sender : stats.transceiver->senders()) { - senders.push_back( - rtc::scoped_refptr(sender->internal())); + senders.push_back(sender->internal()); } ProduceSenderMediaTrackStats(timestamp_us, *stats.track_media_info_map, senders, report); std::vector> receivers; for (const auto& receiver : stats.transceiver->receivers()) { - receivers.push_back( - rtc::scoped_refptr(receiver->internal())); + receivers.push_back(receiver->internal()); } ProduceReceiverMediaTrackStats(timestamp_us, *stats.track_media_info_map, receivers, report); @@ -2163,8 +2147,8 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { continue; } - stats.mid = channel->mid(); - stats.transport_name = std::string(channel->transport_name()); + stats.mid = channel->content_name(); + stats.transport_name = channel->transport_name(); if (media_type == cricket::MEDIA_TYPE_AUDIO) { auto* voice_channel = static_cast(channel); @@ -2226,13 +2210,11 @@ void RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n() { } std::vector> senders; for (const auto& sender : transceiver->senders()) { - senders.push_back( - rtc::scoped_refptr(sender->internal())); + senders.push_back(sender->internal()); } std::vector> receivers; for (const auto& receiver : transceiver->receivers()) { - receivers.push_back( - rtc::scoped_refptr(receiver->internal())); + receivers.push_back(receiver->internal()); } stats.track_media_info_map = std::make_unique( std::move(voice_media_info), std::move(video_media_info), senders, diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h index e6d9d184f..c84e6d3fe 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_collector.h @@ -12,8 +12,6 @@ #define PC_RTC_STATS_COLLECTOR_H_ #include - -#include #include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc b/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc deleted file mode 100644 index be9cd6fbc..000000000 --- a/TMessagesProj/jni/voip/webrtc/pc/rtc_stats_integrationtest.cc +++ /dev/null @@ -1,1325 +0,0 @@ -/* - * Copyright 2016 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include -#include - -#include -#include -#include -#include - -#include "absl/algorithm/container.h" -#include "absl/strings/match.h" -#include "api/audio_codecs/builtin_audio_decoder_factory.h" -#include "api/audio_codecs/builtin_audio_encoder_factory.h" -#include "api/audio_options.h" -#include "api/data_channel_interface.h" -#include "api/peer_connection_interface.h" -#include "api/rtp_receiver_interface.h" -#include "api/rtp_sender_interface.h" -#include "api/scoped_refptr.h" -#include "api/stats/rtc_stats.h" -#include "api/stats/rtc_stats_report.h" -#include "api/stats/rtcstats_objects.h" -#include "pc/rtc_stats_traversal.h" -#include "pc/test/peer_connection_test_wrapper.h" -#include "pc/test/rtc_stats_obtainer.h" -#include "rtc_base/checks.h" -#include "rtc_base/event_tracer.h" -#include "rtc_base/gunit.h" -#include "rtc_base/ref_counted_object.h" -#include "rtc_base/thread.h" -#include "rtc_base/trace_event.h" -#include "rtc_base/virtual_socket_server.h" -#include "test/gmock.h" -#include "test/gtest.h" - -using ::testing::Contains; - -namespace webrtc { - -namespace { - -const int64_t kGetStatsTimeoutMs = 10000; - -const unsigned char* GetCategoryEnabledHandler(const char* name) { - if (strcmp("webrtc_stats", name) != 0) { - return reinterpret_cast(""); - } - return reinterpret_cast(name); -} - -class RTCStatsReportTraceListener { - public: - static void SetUp() { - if (!traced_report_) - traced_report_ = new RTCStatsReportTraceListener(); - traced_report_->last_trace_ = ""; - SetupEventTracer(&GetCategoryEnabledHandler, - &RTCStatsReportTraceListener::AddTraceEventHandler); - } - - static const std::string& last_trace() { - RTC_DCHECK(traced_report_); - return traced_report_->last_trace_; - } - - private: - static void AddTraceEventHandler( - char phase, - const unsigned char* category_enabled, - const char* name, - unsigned long long id, // NOLINT(runtime/int) - int num_args, - const char** arg_names, - const unsigned char* arg_types, - const unsigned long long* arg_values, // NOLINT(runtime/int) - unsigned char flags) { - RTC_DCHECK(traced_report_); - EXPECT_STREQ("webrtc_stats", - reinterpret_cast(category_enabled)); - EXPECT_STREQ("webrtc_stats", name); - EXPECT_EQ(1, num_args); - EXPECT_STREQ("report", arg_names[0]); - EXPECT_EQ(TRACE_VALUE_TYPE_COPY_STRING, arg_types[0]); - - traced_report_->last_trace_ = reinterpret_cast(arg_values[0]); - } - - static RTCStatsReportTraceListener* traced_report_; - std::string last_trace_; -}; - -RTCStatsReportTraceListener* RTCStatsReportTraceListener::traced_report_ = - nullptr; - -class RTCStatsIntegrationTest : public ::testing::Test { - public: - RTCStatsIntegrationTest() - : network_thread_(new rtc::Thread(&virtual_socket_server_)), - worker_thread_(rtc::Thread::Create()) { - RTCStatsReportTraceListener::SetUp(); - - RTC_CHECK(network_thread_->Start()); - RTC_CHECK(worker_thread_->Start()); - - caller_ = rtc::make_ref_counted( - "caller", network_thread_.get(), worker_thread_.get()); - callee_ = rtc::make_ref_counted( - "callee", network_thread_.get(), worker_thread_.get()); - } - - void StartCall() { - // Create PeerConnections and "connect" sigslots - PeerConnectionInterface::RTCConfiguration config; - config.sdp_semantics = SdpSemantics::kUnifiedPlan; - PeerConnectionInterface::IceServer ice_server; - ice_server.uri = "stun:1.1.1.1:3478"; - config.servers.push_back(ice_server); - EXPECT_TRUE(caller_->CreatePc(config, CreateBuiltinAudioEncoderFactory(), - CreateBuiltinAudioDecoderFactory())); - 
EXPECT_TRUE(callee_->CreatePc(config, CreateBuiltinAudioEncoderFactory(), - CreateBuiltinAudioDecoderFactory())); - PeerConnectionTestWrapper::Connect(caller_.get(), callee_.get()); - - // Get user media for audio and video - caller_->GetAndAddUserMedia(true, cricket::AudioOptions(), true); - callee_->GetAndAddUserMedia(true, cricket::AudioOptions(), true); - - // Create data channels - DataChannelInit init; - caller_->CreateDataChannel("data", init); - callee_->CreateDataChannel("data", init); - - // Negotiate and wait for call to establish - caller_->CreateOffer(PeerConnectionInterface::RTCOfferAnswerOptions()); - caller_->WaitForCallEstablished(); - callee_->WaitForCallEstablished(); - } - - rtc::scoped_refptr GetStatsFromCaller() { - return GetStats(caller_->pc()); - } - rtc::scoped_refptr GetStatsFromCaller( - rtc::scoped_refptr selector) { - return GetStats(caller_->pc(), selector); - } - rtc::scoped_refptr GetStatsFromCaller( - rtc::scoped_refptr selector) { - return GetStats(caller_->pc(), selector); - } - - rtc::scoped_refptr GetStatsFromCallee() { - return GetStats(callee_->pc()); - } - rtc::scoped_refptr GetStatsFromCallee( - rtc::scoped_refptr selector) { - return GetStats(callee_->pc(), selector); - } - rtc::scoped_refptr GetStatsFromCallee( - rtc::scoped_refptr selector) { - return GetStats(callee_->pc(), selector); - } - - protected: - static rtc::scoped_refptr GetStats( - PeerConnectionInterface* pc) { - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); - pc->GetStats(stats_obtainer); - EXPECT_TRUE_WAIT(stats_obtainer->report(), kGetStatsTimeoutMs); - return stats_obtainer->report(); - } - - template - static rtc::scoped_refptr GetStats( - PeerConnectionInterface* pc, - rtc::scoped_refptr selector) { - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); - pc->GetStats(selector, stats_obtainer); - EXPECT_TRUE_WAIT(stats_obtainer->report(), kGetStatsTimeoutMs); - return stats_obtainer->report(); - } - - // `network_thread_` uses `virtual_socket_server_` so they must be - // constructed/destructed in the correct order. - rtc::VirtualSocketServer virtual_socket_server_; - std::unique_ptr network_thread_; - std::unique_ptr worker_thread_; - rtc::scoped_refptr caller_; - rtc::scoped_refptr callee_; -}; - -class RTCStatsVerifier { - public: - RTCStatsVerifier(const RTCStatsReport* report, const RTCStats* stats) - : report_(report), stats_(stats), all_tests_successful_(true) { - RTC_CHECK(report_); - RTC_CHECK(stats_); - for (const RTCStatsMemberInterface* member : stats_->Members()) { - untested_members_.insert(member); - } - } - - void MarkMemberTested(const RTCStatsMemberInterface& member, - bool test_successful) { - untested_members_.erase(&member); - all_tests_successful_ &= test_successful; - } - - void TestMemberIsDefined(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was undefined."; - MarkMemberTested(member, member.is_defined()); - } - - void TestMemberIsUndefined(const RTCStatsMemberInterface& member) { - EXPECT_FALSE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was defined (" << member.ValueToString() << ")."; - MarkMemberTested(member, !member.is_defined()); - } - - template - void TestMemberIsPositive(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." 
<< member.name() << "[" << stats_->id() - << "] was undefined."; - if (!member.is_defined()) { - MarkMemberTested(member, false); - return; - } - bool is_positive = *member.cast_to>() > T(0); - EXPECT_TRUE(is_positive) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was not positive (" << member.ValueToString() << ")."; - MarkMemberTested(member, is_positive); - } - - template - void TestMemberIsNonNegative(const RTCStatsMemberInterface& member) { - EXPECT_TRUE(member.is_defined()) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was undefined."; - if (!member.is_defined()) { - MarkMemberTested(member, false); - return; - } - bool is_non_negative = *member.cast_to>() >= T(0); - EXPECT_TRUE(is_non_negative) - << stats_->type() << "." << member.name() << "[" << stats_->id() - << "] was not non-negative (" << member.ValueToString() << ")."; - MarkMemberTested(member, is_non_negative); - } - - void TestMemberIsIDReference(const RTCStatsMemberInterface& member, - const char* expected_type) { - TestMemberIsIDReference(member, expected_type, false); - } - - void TestMemberIsOptionalIDReference(const RTCStatsMemberInterface& member, - const char* expected_type) { - TestMemberIsIDReference(member, expected_type, true); - } - - bool ExpectAllMembersSuccessfullyTested() { - if (untested_members_.empty()) - return all_tests_successful_; - for (const RTCStatsMemberInterface* member : untested_members_) { - EXPECT_TRUE(false) << stats_->type() << "." << member->name() << "[" - << stats_->id() << "] was not tested."; - } - return false; - } - - private: - void TestMemberIsIDReference(const RTCStatsMemberInterface& member, - const char* expected_type, - bool optional) { - if (optional && !member.is_defined()) { - MarkMemberTested(member, true); - return; - } - bool valid_reference = false; - if (member.is_defined()) { - if (member.type() == RTCStatsMemberInterface::kString) { - // A single ID. - const RTCStatsMember& id = - member.cast_to>(); - const RTCStats* referenced_stats = report_->Get(*id); - valid_reference = - referenced_stats && referenced_stats->type() == expected_type; - } else if (member.type() == RTCStatsMemberInterface::kSequenceString) { - // A vector of IDs. - valid_reference = true; - const RTCStatsMember>& ids = - member.cast_to>>(); - for (const std::string& id : *ids) { - const RTCStats* referenced_stats = report_->Get(id); - if (!referenced_stats || referenced_stats->type() != expected_type) { - valid_reference = false; - break; - } - } - } - } - EXPECT_TRUE(valid_reference) - << stats_->type() << "." << member.name() - << " is not a reference to an " - "existing dictionary of type " - << expected_type << " (value: " - << (member.is_defined() ? 
member.ValueToString() : "null") << ")."; - MarkMemberTested(member, valid_reference); - } - - rtc::scoped_refptr report_; - const RTCStats* stats_; - std::set untested_members_; - bool all_tests_successful_; -}; - -class RTCStatsReportVerifier { - public: - static std::set StatsTypes() { - std::set stats_types; - stats_types.insert(RTCCertificateStats::kType); - stats_types.insert(RTCCodecStats::kType); - stats_types.insert(RTCDataChannelStats::kType); - stats_types.insert(RTCIceCandidatePairStats::kType); - stats_types.insert(RTCLocalIceCandidateStats::kType); - stats_types.insert(RTCRemoteIceCandidateStats::kType); - stats_types.insert(RTCMediaStreamStats::kType); - stats_types.insert(RTCMediaStreamTrackStats::kType); - stats_types.insert(RTCPeerConnectionStats::kType); - stats_types.insert(RTCInboundRTPStreamStats::kType); - stats_types.insert(RTCOutboundRTPStreamStats::kType); - stats_types.insert(RTCTransportStats::kType); - return stats_types; - } - - explicit RTCStatsReportVerifier(const RTCStatsReport* report) - : report_(report) {} - - void VerifyReport(std::vector allowed_missing_stats) { - std::set missing_stats = StatsTypes(); - bool verify_successful = true; - std::vector transport_stats = - report_->GetStatsOfType(); - EXPECT_EQ(transport_stats.size(), 1U); - std::string selected_candidate_pair_id = - *transport_stats[0]->selected_candidate_pair_id; - for (const RTCStats& stats : *report_) { - missing_stats.erase(stats.type()); - if (stats.type() == RTCCertificateStats::kType) { - verify_successful &= - VerifyRTCCertificateStats(stats.cast_to()); - } else if (stats.type() == RTCCodecStats::kType) { - verify_successful &= - VerifyRTCCodecStats(stats.cast_to()); - } else if (stats.type() == RTCDataChannelStats::kType) { - verify_successful &= - VerifyRTCDataChannelStats(stats.cast_to()); - } else if (stats.type() == RTCIceCandidatePairStats::kType) { - verify_successful &= VerifyRTCIceCandidatePairStats( - stats.cast_to(), - stats.id() == selected_candidate_pair_id); - } else if (stats.type() == RTCLocalIceCandidateStats::kType) { - verify_successful &= VerifyRTCLocalIceCandidateStats( - stats.cast_to()); - } else if (stats.type() == RTCRemoteIceCandidateStats::kType) { - verify_successful &= VerifyRTCRemoteIceCandidateStats( - stats.cast_to()); - } else if (stats.type() == RTCMediaStreamStats::kType) { - verify_successful &= - VerifyRTCMediaStreamStats(stats.cast_to()); - } else if (stats.type() == RTCMediaStreamTrackStats::kType) { - verify_successful &= VerifyRTCMediaStreamTrackStats( - stats.cast_to()); - } else if (stats.type() == RTCPeerConnectionStats::kType) { - verify_successful &= VerifyRTCPeerConnectionStats( - stats.cast_to()); - } else if (stats.type() == RTCInboundRTPStreamStats::kType) { - verify_successful &= VerifyRTCInboundRTPStreamStats( - stats.cast_to()); - } else if (stats.type() == RTCOutboundRTPStreamStats::kType) { - verify_successful &= VerifyRTCOutboundRTPStreamStats( - stats.cast_to()); - } else if (stats.type() == RTCRemoteInboundRtpStreamStats::kType) { - verify_successful &= VerifyRTCRemoteInboundRtpStreamStats( - stats.cast_to()); - } else if (stats.type() == RTCRemoteOutboundRtpStreamStats::kType) { - verify_successful &= VerifyRTCRemoteOutboundRTPStreamStats( - stats.cast_to()); - } else if (stats.type() == RTCAudioSourceStats::kType) { - // RTCAudioSourceStats::kType and RTCVideoSourceStats::kType both have - // the value "media-source", but they are distinguishable with pointer - // equality (==). 
In JavaScript they would be distinguished with `kind`. - verify_successful &= - VerifyRTCAudioSourceStats(stats.cast_to()); - } else if (stats.type() == RTCVideoSourceStats::kType) { - // RTCAudioSourceStats::kType and RTCVideoSourceStats::kType both have - // the value "media-source", but they are distinguishable with pointer - // equality (==). In JavaScript they would be distinguished with `kind`. - verify_successful &= - VerifyRTCVideoSourceStats(stats.cast_to()); - } else if (stats.type() == RTCTransportStats::kType) { - verify_successful &= - VerifyRTCTransportStats(stats.cast_to()); - } else { - EXPECT_TRUE(false) << "Unrecognized stats type: " << stats.type(); - verify_successful = false; - } - } - for (const char* missing : missing_stats) { - if (!absl::c_linear_search(allowed_missing_stats, missing)) { - verify_successful = false; - EXPECT_TRUE(false) << "Missing expected stats type: " << missing; - } - } - EXPECT_TRUE(verify_successful) - << "One or more problems with the stats. This is the report:\n" - << report_->ToJson(); - } - - bool VerifyRTCCertificateStats(const RTCCertificateStats& certificate) { - RTCStatsVerifier verifier(report_, &certificate); - verifier.TestMemberIsDefined(certificate.fingerprint); - verifier.TestMemberIsDefined(certificate.fingerprint_algorithm); - verifier.TestMemberIsDefined(certificate.base64_certificate); - verifier.TestMemberIsOptionalIDReference(certificate.issuer_certificate_id, - RTCCertificateStats::kType); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCCodecStats(const RTCCodecStats& codec) { - RTCStatsVerifier verifier(report_, &codec); - verifier.TestMemberIsIDReference(codec.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(codec.payload_type); - verifier.TestMemberIsDefined(codec.mime_type); - verifier.TestMemberIsPositive(codec.clock_rate); - - if (codec.mime_type->rfind("audio", 0) == 0) - verifier.TestMemberIsPositive(codec.channels); - else - verifier.TestMemberIsUndefined(codec.channels); - - // sdp_fmtp_line is an optional field. 
- verifier.MarkMemberTested(codec.sdp_fmtp_line, true); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCDataChannelStats(const RTCDataChannelStats& data_channel) { - RTCStatsVerifier verifier(report_, &data_channel); - verifier.TestMemberIsDefined(data_channel.label); - verifier.TestMemberIsDefined(data_channel.protocol); - verifier.TestMemberIsDefined(data_channel.data_channel_identifier); - verifier.TestMemberIsDefined(data_channel.state); - verifier.TestMemberIsNonNegative(data_channel.messages_sent); - verifier.TestMemberIsNonNegative(data_channel.bytes_sent); - verifier.TestMemberIsNonNegative(data_channel.messages_received); - verifier.TestMemberIsNonNegative(data_channel.bytes_received); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCIceCandidatePairStats( - const RTCIceCandidatePairStats& candidate_pair, - bool is_selected_pair) { - RTCStatsVerifier verifier(report_, &candidate_pair); - verifier.TestMemberIsIDReference(candidate_pair.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsIDReference(candidate_pair.local_candidate_id, - RTCLocalIceCandidateStats::kType); - verifier.TestMemberIsIDReference(candidate_pair.remote_candidate_id, - RTCRemoteIceCandidateStats::kType); - verifier.TestMemberIsDefined(candidate_pair.state); - verifier.TestMemberIsNonNegative(candidate_pair.priority); - verifier.TestMemberIsDefined(candidate_pair.nominated); - verifier.TestMemberIsDefined(candidate_pair.writable); - verifier.TestMemberIsUndefined(candidate_pair.readable); - verifier.TestMemberIsNonNegative(candidate_pair.packets_sent); - verifier.TestMemberIsNonNegative( - candidate_pair.packets_discarded_on_send); - verifier.TestMemberIsNonNegative(candidate_pair.packets_received); - verifier.TestMemberIsNonNegative(candidate_pair.bytes_sent); - verifier.TestMemberIsNonNegative( - candidate_pair.bytes_discarded_on_send); - verifier.TestMemberIsNonNegative(candidate_pair.bytes_received); - verifier.TestMemberIsNonNegative( - candidate_pair.total_round_trip_time); - verifier.TestMemberIsNonNegative( - candidate_pair.current_round_trip_time); - if (is_selected_pair) { - verifier.TestMemberIsNonNegative( - candidate_pair.available_outgoing_bitrate); - // A pair should be nominated in order to be selected. 
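// ---------------------------------------------------------------------------
// Illustrative sketch, not the WebRTC RTCStatsVerifier API. In the upstream
// test the checks above are templated (e.g.
// verifier.TestMemberIsNonNegative<uint64_t>(candidate_pair.packets_sent));
// the angle-bracketed template arguments appear to have been lost when this
// diff was flattened into text. A minimal checker of the same shape, using
// std::optional as a stand-in for the stats-member wrapper:
#include <cassert>
#include <optional>

template <typename T>
bool TestMemberIsNonNegative(const std::optional<T>& member) {
  // The member must be defined and must not be negative.
  return member.has_value() && !(*member < T(0));
}

int main() {
  assert(TestMemberIsNonNegative<double>(std::optional<double>(0.25)));
  assert(!TestMemberIsNonNegative<double>(std::optional<double>(-1.0)));
  assert(!TestMemberIsNonNegative<int>(std::nullopt));
  return 0;
}
// ---------------------------------------------------------------------------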
- EXPECT_TRUE(*candidate_pair.nominated); - } else { - verifier.TestMemberIsUndefined(candidate_pair.available_outgoing_bitrate); - } - verifier.TestMemberIsUndefined(candidate_pair.available_incoming_bitrate); - verifier.TestMemberIsNonNegative( - candidate_pair.requests_received); - verifier.TestMemberIsNonNegative(candidate_pair.requests_sent); - verifier.TestMemberIsNonNegative( - candidate_pair.responses_received); - verifier.TestMemberIsNonNegative(candidate_pair.responses_sent); - verifier.TestMemberIsUndefined(candidate_pair.retransmissions_received); - verifier.TestMemberIsUndefined(candidate_pair.retransmissions_sent); - verifier.TestMemberIsUndefined(candidate_pair.consent_requests_received); - verifier.TestMemberIsNonNegative( - candidate_pair.consent_requests_sent); - verifier.TestMemberIsUndefined(candidate_pair.consent_responses_received); - verifier.TestMemberIsUndefined(candidate_pair.consent_responses_sent); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCIceCandidateStats(const RTCIceCandidateStats& candidate) { - RTCStatsVerifier verifier(report_, &candidate); - verifier.TestMemberIsIDReference(candidate.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(candidate.is_remote); - if (*candidate.is_remote) { - verifier.TestMemberIsUndefined(candidate.network_type); - } else { - verifier.TestMemberIsDefined(candidate.network_type); - } - verifier.TestMemberIsDefined(candidate.ip); - verifier.TestMemberIsDefined(candidate.address); - verifier.TestMemberIsNonNegative(candidate.port); - verifier.TestMemberIsDefined(candidate.protocol); - verifier.TestMemberIsDefined(candidate.candidate_type); - verifier.TestMemberIsNonNegative(candidate.priority); - verifier.TestMemberIsUndefined(candidate.url); - verifier.TestMemberIsUndefined(candidate.relay_protocol); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCLocalIceCandidateStats( - const RTCLocalIceCandidateStats& local_candidate) { - return VerifyRTCIceCandidateStats(local_candidate); - } - - bool VerifyRTCRemoteIceCandidateStats( - const RTCRemoteIceCandidateStats& remote_candidate) { - return VerifyRTCIceCandidateStats(remote_candidate); - } - - bool VerifyRTCMediaStreamStats(const RTCMediaStreamStats& media_stream) { - RTCStatsVerifier verifier(report_, &media_stream); - verifier.TestMemberIsDefined(media_stream.stream_identifier); - verifier.TestMemberIsIDReference(media_stream.track_ids, - RTCMediaStreamTrackStats::kType); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCMediaStreamTrackStats( - const RTCMediaStreamTrackStats& media_stream_track) { - RTCStatsVerifier verifier(report_, &media_stream_track); - verifier.TestMemberIsDefined(media_stream_track.track_identifier); - verifier.TestMemberIsDefined(media_stream_track.remote_source); - verifier.TestMemberIsDefined(media_stream_track.ended); - verifier.TestMemberIsDefined(media_stream_track.detached); - verifier.TestMemberIsDefined(media_stream_track.kind); - RTC_DCHECK(media_stream_track.remote_source.is_defined()); - // Video or audio media stream track? - if (*media_stream_track.kind == RTCMediaStreamTrackKind::kVideo) { - // The type of the referenced media source depends on kind. 
- if (*media_stream_track.remote_source) { - verifier.TestMemberIsUndefined(media_stream_track.media_source_id); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_delay); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_emitted_count); - verifier.TestMemberIsUndefined(media_stream_track.frames_sent); - verifier.TestMemberIsUndefined(media_stream_track.huge_frames_sent); - verifier.TestMemberIsNonNegative( - media_stream_track.frames_received); - verifier.TestMemberIsNonNegative( - media_stream_track.frames_decoded); - verifier.TestMemberIsNonNegative( - media_stream_track.frames_dropped); - verifier.TestMemberIsNonNegative( - media_stream_track.freeze_count); - verifier.TestMemberIsNonNegative( - media_stream_track.pause_count); - verifier.TestMemberIsNonNegative( - media_stream_track.total_freezes_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.total_pauses_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.total_frames_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.sum_squared_frame_durations); - } else { - verifier.TestMemberIsIDReference(media_stream_track.media_source_id, - RTCVideoSourceStats::kType); - // Local tracks have no jitter buffer. - verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_delay); - verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_emitted_count); - verifier.TestMemberIsNonNegative( - media_stream_track.frames_sent); - verifier.TestMemberIsNonNegative( - media_stream_track.huge_frames_sent); - verifier.TestMemberIsUndefined(media_stream_track.frames_received); - verifier.TestMemberIsUndefined(media_stream_track.frames_decoded); - verifier.TestMemberIsUndefined(media_stream_track.frames_dropped); - verifier.TestMemberIsUndefined(media_stream_track.freeze_count); - verifier.TestMemberIsUndefined(media_stream_track.pause_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_freezes_duration); - verifier.TestMemberIsUndefined( - media_stream_track.total_pauses_duration); - verifier.TestMemberIsUndefined( - media_stream_track.total_frames_duration); - verifier.TestMemberIsUndefined( - media_stream_track.sum_squared_frame_durations); - } - // Video-only members - verifier.TestMemberIsNonNegative( - media_stream_track.frame_width); - verifier.TestMemberIsNonNegative( - media_stream_track.frame_height); - verifier.TestMemberIsUndefined(media_stream_track.frames_per_second); - verifier.TestMemberIsUndefined(media_stream_track.frames_corrupted); - verifier.TestMemberIsUndefined(media_stream_track.partial_frames_lost); - verifier.TestMemberIsUndefined(media_stream_track.full_frames_lost); - // Audio-only members should be undefined - verifier.TestMemberIsUndefined(media_stream_track.audio_level); - verifier.TestMemberIsUndefined(media_stream_track.echo_return_loss); - verifier.TestMemberIsUndefined( - media_stream_track.echo_return_loss_enhancement); - verifier.TestMemberIsUndefined(media_stream_track.total_audio_energy); - verifier.TestMemberIsUndefined(media_stream_track.total_samples_duration); - verifier.TestMemberIsUndefined(media_stream_track.total_samples_received); - verifier.TestMemberIsUndefined(media_stream_track.concealed_samples); - verifier.TestMemberIsUndefined( - media_stream_track.silent_concealed_samples); - verifier.TestMemberIsUndefined(media_stream_track.concealment_events); - verifier.TestMemberIsUndefined( - media_stream_track.inserted_samples_for_deceleration); - verifier.TestMemberIsUndefined( - 
media_stream_track.removed_samples_for_acceleration); - verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsUndefined( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsUndefined( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsUndefined(media_stream_track.interruption_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_interruption_duration); - verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_target_delay); - } else { - RTC_DCHECK_EQ(*media_stream_track.kind, RTCMediaStreamTrackKind::kAudio); - // The type of the referenced media source depends on kind. - if (*media_stream_track.remote_source) { - // Remote tracks don't have media source stats. - verifier.TestMemberIsUndefined(media_stream_track.media_source_id); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_delay); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_emitted_count); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_target_delay); - verifier.TestMemberIsPositive(media_stream_track.audio_level); - verifier.TestMemberIsPositive( - media_stream_track.total_audio_energy); - verifier.TestMemberIsPositive( - media_stream_track.total_samples_received); - verifier.TestMemberIsPositive( - media_stream_track.total_samples_duration); - verifier.TestMemberIsNonNegative( - media_stream_track.concealed_samples); - verifier.TestMemberIsNonNegative( - media_stream_track.silent_concealed_samples); - verifier.TestMemberIsNonNegative( - media_stream_track.concealment_events); - verifier.TestMemberIsNonNegative( - media_stream_track.inserted_samples_for_deceleration); - verifier.TestMemberIsNonNegative( - media_stream_track.removed_samples_for_acceleration); - verifier.TestMemberIsNonNegative( - media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsNonNegative( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsNonNegative( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsNonNegative( - media_stream_track.interruption_count); - verifier.TestMemberIsNonNegative( - media_stream_track.total_interruption_duration); - } else { - verifier.TestMemberIsIDReference(media_stream_track.media_source_id, - RTCAudioSourceStats::kType); - // Local audio tracks have no jitter buffer. 
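// ---------------------------------------------------------------------------
// Illustrative sketch with stand-in types, not the WebRTC API. The branches
// above encode one rule from two directions: a remote (received) track is fed
// by the jitter buffer and has no local media source, while a local (sent)
// track references a media source and has no jitter buffer. Expressed as a
// single predicate:
#include <cassert>
#include <optional>
#include <string>

struct TrackStats {
  bool remote_source = false;
  std::optional<std::string> media_source_id;
  std::optional<double> jitter_buffer_delay;
};

bool ExpectationsHold(const TrackStats& t) {
  if (t.remote_source) {
    return !t.media_source_id.has_value() && t.jitter_buffer_delay.has_value();
  }
  return t.media_source_id.has_value() && !t.jitter_buffer_delay.has_value();
}

int main() {
  assert(ExpectationsHold({true, std::nullopt, 0.02}));
  assert(ExpectationsHold({false, std::string("RTCAudioSource_1"), std::nullopt}));
  return 0;
}
// ---------------------------------------------------------------------------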
- verifier.TestMemberIsUndefined(media_stream_track.jitter_buffer_delay); - verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_emitted_count); - verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_target_delay); - verifier.TestMemberIsUndefined(media_stream_track.audio_level); - verifier.TestMemberIsUndefined(media_stream_track.total_audio_energy); - verifier.TestMemberIsUndefined( - media_stream_track.total_samples_received); - verifier.TestMemberIsUndefined( - media_stream_track.total_samples_duration); - verifier.TestMemberIsUndefined(media_stream_track.concealed_samples); - verifier.TestMemberIsUndefined( - media_stream_track.silent_concealed_samples); - verifier.TestMemberIsUndefined(media_stream_track.concealment_events); - verifier.TestMemberIsUndefined( - media_stream_track.inserted_samples_for_deceleration); - verifier.TestMemberIsUndefined( - media_stream_track.removed_samples_for_acceleration); - verifier.TestMemberIsUndefined( - media_stream_track.jitter_buffer_flushes); - verifier.TestMemberIsUndefined( - media_stream_track.delayed_packet_outage_samples); - verifier.TestMemberIsUndefined( - media_stream_track.relative_packet_arrival_delay); - verifier.TestMemberIsUndefined(media_stream_track.interruption_count); - verifier.TestMemberIsUndefined( - media_stream_track.total_interruption_duration); - } - // Video-only members should be undefined - verifier.TestMemberIsUndefined(media_stream_track.frame_width); - verifier.TestMemberIsUndefined(media_stream_track.frame_height); - verifier.TestMemberIsUndefined(media_stream_track.frames_per_second); - verifier.TestMemberIsUndefined(media_stream_track.frames_sent); - verifier.TestMemberIsUndefined(media_stream_track.huge_frames_sent); - verifier.TestMemberIsUndefined(media_stream_track.frames_received); - verifier.TestMemberIsUndefined(media_stream_track.frames_decoded); - verifier.TestMemberIsUndefined(media_stream_track.frames_dropped); - verifier.TestMemberIsUndefined(media_stream_track.frames_corrupted); - verifier.TestMemberIsUndefined(media_stream_track.partial_frames_lost); - verifier.TestMemberIsUndefined(media_stream_track.full_frames_lost); - verifier.TestMemberIsUndefined(media_stream_track.freeze_count); - verifier.TestMemberIsUndefined(media_stream_track.pause_count); - verifier.TestMemberIsUndefined(media_stream_track.total_freezes_duration); - verifier.TestMemberIsUndefined(media_stream_track.total_pauses_duration); - verifier.TestMemberIsUndefined(media_stream_track.total_frames_duration); - verifier.TestMemberIsUndefined( - media_stream_track.sum_squared_frame_durations); - // Audio-only members - // TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are - // flaky on msan bot (sometimes defined, sometimes undefined). Should the - // test run until available or is there a way to have it always be - // defined? 
crbug.com/627816 - verifier.MarkMemberTested(media_stream_track.echo_return_loss, true); - verifier.MarkMemberTested(media_stream_track.echo_return_loss_enhancement, - true); - } - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCPeerConnectionStats( - const RTCPeerConnectionStats& peer_connection) { - RTCStatsVerifier verifier(report_, &peer_connection); - verifier.TestMemberIsNonNegative( - peer_connection.data_channels_opened); - verifier.TestMemberIsNonNegative( - peer_connection.data_channels_closed); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - void VerifyRTCRTPStreamStats(const RTCRTPStreamStats& stream, - RTCStatsVerifier& verifier) { - verifier.TestMemberIsDefined(stream.ssrc); - verifier.TestMemberIsDefined(stream.kind); - // Some legacy metrics are only defined for some of the RTP types in the - // hierarcy. - if (stream.type() == RTCInboundRTPStreamStats::kType || - stream.type() == RTCOutboundRTPStreamStats::kType) { - verifier.TestMemberIsDefined(stream.media_type); - verifier.TestMemberIsIDReference(stream.track_id, - RTCMediaStreamTrackStats::kType); - } else { - verifier.TestMemberIsUndefined(stream.media_type); - verifier.TestMemberIsUndefined(stream.track_id); - } - verifier.TestMemberIsIDReference(stream.transport_id, - RTCTransportStats::kType); - verifier.TestMemberIsIDReference(stream.codec_id, RTCCodecStats::kType); - } - - void VerifyRTCSentRTPStreamStats(const RTCSentRtpStreamStats& sent_stream, - RTCStatsVerifier& verifier) { - VerifyRTCRTPStreamStats(sent_stream, verifier); - verifier.TestMemberIsDefined(sent_stream.packets_sent); - verifier.TestMemberIsDefined(sent_stream.bytes_sent); - } - - bool VerifyRTCInboundRTPStreamStats( - const RTCInboundRTPStreamStats& inbound_stream) { - RTCStatsVerifier verifier(report_, &inbound_stream); - VerifyRTCReceivedRtpStreamStats(inbound_stream, verifier, - inbound_stream.media_type.is_defined() && - *inbound_stream.media_type == "audio"); - verifier.TestMemberIsOptionalIDReference( - inbound_stream.remote_id, RTCRemoteOutboundRtpStreamStats::kType); - if (inbound_stream.media_type.is_defined() && - *inbound_stream.media_type == "video") { - verifier.TestMemberIsNonNegative(inbound_stream.qp_sum); - verifier.TestMemberIsDefined(inbound_stream.decoder_implementation); - } else { - verifier.TestMemberIsUndefined(inbound_stream.qp_sum); - verifier.TestMemberIsUndefined(inbound_stream.decoder_implementation); - } - verifier.TestMemberIsNonNegative(inbound_stream.packets_received); - if (inbound_stream.media_type.is_defined() && - *inbound_stream.media_type == "audio") { - verifier.TestMemberIsNonNegative( - inbound_stream.fec_packets_received); - verifier.TestMemberIsNonNegative( - inbound_stream.fec_packets_discarded); - } else { - verifier.TestMemberIsUndefined(inbound_stream.fec_packets_received); - verifier.TestMemberIsUndefined(inbound_stream.fec_packets_discarded); - } - verifier.TestMemberIsNonNegative(inbound_stream.bytes_received); - verifier.TestMemberIsNonNegative( - inbound_stream.header_bytes_received); - verifier.TestMemberIsDefined(inbound_stream.last_packet_received_timestamp); - if (inbound_stream.frames_received.ValueOrDefault(0) > 0) { - verifier.TestMemberIsNonNegative(inbound_stream.frame_width); - verifier.TestMemberIsNonNegative(inbound_stream.frame_height); - } else { - verifier.TestMemberIsUndefined(inbound_stream.frame_width); - verifier.TestMemberIsUndefined(inbound_stream.frame_height); - } - if (inbound_stream.frames_per_second.is_defined()) { - 
verifier.TestMemberIsNonNegative( - inbound_stream.frames_per_second); - } else { - verifier.TestMemberIsUndefined(inbound_stream.frames_per_second); - } - verifier.TestMemberIsUndefined(inbound_stream.frame_bit_depth); - verifier.TestMemberIsNonNegative( - inbound_stream.jitter_buffer_delay); - verifier.TestMemberIsNonNegative( - inbound_stream.jitter_buffer_emitted_count); - if (inbound_stream.media_type.is_defined() && - *inbound_stream.media_type == "video") { - verifier.TestMemberIsUndefined(inbound_stream.total_samples_received); - verifier.TestMemberIsUndefined(inbound_stream.concealed_samples); - verifier.TestMemberIsUndefined(inbound_stream.silent_concealed_samples); - verifier.TestMemberIsUndefined(inbound_stream.concealment_events); - verifier.TestMemberIsUndefined( - inbound_stream.inserted_samples_for_deceleration); - verifier.TestMemberIsUndefined( - inbound_stream.removed_samples_for_acceleration); - verifier.TestMemberIsUndefined(inbound_stream.audio_level); - verifier.TestMemberIsUndefined(inbound_stream.total_audio_energy); - verifier.TestMemberIsUndefined(inbound_stream.total_samples_duration); - verifier.TestMemberIsNonNegative(inbound_stream.frames_received); - verifier.TestMemberIsNonNegative(inbound_stream.fir_count); - verifier.TestMemberIsNonNegative(inbound_stream.pli_count); - verifier.TestMemberIsNonNegative(inbound_stream.nack_count); - } else { - verifier.TestMemberIsUndefined(inbound_stream.fir_count); - verifier.TestMemberIsUndefined(inbound_stream.pli_count); - verifier.TestMemberIsUndefined(inbound_stream.nack_count); - verifier.TestMemberIsPositive( - inbound_stream.total_samples_received); - verifier.TestMemberIsNonNegative( - inbound_stream.concealed_samples); - verifier.TestMemberIsNonNegative( - inbound_stream.silent_concealed_samples); - verifier.TestMemberIsNonNegative( - inbound_stream.concealment_events); - verifier.TestMemberIsNonNegative( - inbound_stream.inserted_samples_for_deceleration); - verifier.TestMemberIsNonNegative( - inbound_stream.removed_samples_for_acceleration); - verifier.TestMemberIsPositive(inbound_stream.audio_level); - verifier.TestMemberIsPositive(inbound_stream.total_audio_energy); - verifier.TestMemberIsPositive( - inbound_stream.total_samples_duration); - verifier.TestMemberIsUndefined(inbound_stream.frames_received); - } - verifier.TestMemberIsUndefined(inbound_stream.round_trip_time); - verifier.TestMemberIsUndefined(inbound_stream.packets_repaired); - verifier.TestMemberIsUndefined(inbound_stream.burst_packets_lost); - verifier.TestMemberIsUndefined(inbound_stream.burst_packets_discarded); - verifier.TestMemberIsUndefined(inbound_stream.burst_loss_count); - verifier.TestMemberIsUndefined(inbound_stream.burst_discard_count); - verifier.TestMemberIsUndefined(inbound_stream.burst_loss_rate); - verifier.TestMemberIsUndefined(inbound_stream.burst_discard_rate); - verifier.TestMemberIsUndefined(inbound_stream.gap_loss_rate); - verifier.TestMemberIsUndefined(inbound_stream.gap_discard_rate); - // Test runtime too short to get an estimate (at least two RTCP sender - // reports need to be received). 
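// ---------------------------------------------------------------------------
// Illustrative sketch, not from the WebRTC sources. MarkMemberTested(member,
// true), used just below for estimated_playout_timestamp (which needs at
// least two RTCP sender reports), is how the verifier handles members that
// may legitimately be either defined or undefined in a short test run: the
// member is counted as covered without asserting on its value, so the final
// "all members tested" check can still pass. A miniature coverage tracker:
#include <cassert>
#include <set>
#include <string>

class CoverageTracker {
 public:
  explicit CoverageTracker(std::set<std::string> members)
      : untested_(std::move(members)) {}
  // Record that `name` was examined, whether or not a value check ran.
  void MarkMemberTested(const std::string& name) { untested_.erase(name); }
  bool AllMembersTested() const { return untested_.empty(); }

 private:
  std::set<std::string> untested_;
};

int main() {
  CoverageTracker tracker({"packetsReceived", "estimatedPlayoutTimestamp"});
  tracker.MarkMemberTested("packetsReceived");            // value was asserted
  tracker.MarkMemberTested("estimatedPlayoutTimestamp");  // optional, no assert
  assert(tracker.AllMembersTested());
  return 0;
}
// ---------------------------------------------------------------------------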
- verifier.MarkMemberTested(inbound_stream.estimated_playout_timestamp, true); - if (inbound_stream.media_type.is_defined() && - *inbound_stream.media_type == "video") { - verifier.TestMemberIsDefined(inbound_stream.frames_decoded); - verifier.TestMemberIsDefined(inbound_stream.key_frames_decoded); - verifier.TestMemberIsNonNegative(inbound_stream.frames_dropped); - verifier.TestMemberIsNonNegative( - inbound_stream.total_decode_time); - verifier.TestMemberIsNonNegative( - inbound_stream.total_inter_frame_delay); - verifier.TestMemberIsNonNegative( - inbound_stream.total_squared_inter_frame_delay); - // The integration test is not set up to test screen share; don't require - // this to be present. - verifier.MarkMemberTested(inbound_stream.content_type, true); - } else { - verifier.TestMemberIsUndefined(inbound_stream.frames_decoded); - verifier.TestMemberIsUndefined(inbound_stream.key_frames_decoded); - verifier.TestMemberIsUndefined(inbound_stream.frames_dropped); - verifier.TestMemberIsUndefined(inbound_stream.total_decode_time); - verifier.TestMemberIsUndefined(inbound_stream.total_inter_frame_delay); - verifier.TestMemberIsUndefined( - inbound_stream.total_squared_inter_frame_delay); - verifier.TestMemberIsUndefined(inbound_stream.content_type); - } - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCOutboundRTPStreamStats( - const RTCOutboundRTPStreamStats& outbound_stream) { - RTCStatsVerifier verifier(report_, &outbound_stream); - VerifyRTCRTPStreamStats(outbound_stream, verifier); - if (outbound_stream.media_type.is_defined() && - *outbound_stream.media_type == "video") { - verifier.TestMemberIsIDReference(outbound_stream.media_source_id, - RTCVideoSourceStats::kType); - verifier.TestMemberIsNonNegative(outbound_stream.fir_count); - verifier.TestMemberIsNonNegative(outbound_stream.pli_count); - if (*outbound_stream.frames_encoded > 0) { - verifier.TestMemberIsNonNegative(outbound_stream.qp_sum); - } else { - verifier.TestMemberIsUndefined(outbound_stream.qp_sum); - } - } else { - verifier.TestMemberIsUndefined(outbound_stream.fir_count); - verifier.TestMemberIsUndefined(outbound_stream.pli_count); - verifier.TestMemberIsIDReference(outbound_stream.media_source_id, - RTCAudioSourceStats::kType); - verifier.TestMemberIsUndefined(outbound_stream.qp_sum); - } - verifier.TestMemberIsNonNegative(outbound_stream.nack_count); - verifier.TestMemberIsOptionalIDReference( - outbound_stream.remote_id, RTCRemoteInboundRtpStreamStats::kType); - verifier.TestMemberIsNonNegative(outbound_stream.packets_sent); - verifier.TestMemberIsNonNegative( - outbound_stream.retransmitted_packets_sent); - verifier.TestMemberIsNonNegative(outbound_stream.bytes_sent); - verifier.TestMemberIsNonNegative( - outbound_stream.header_bytes_sent); - verifier.TestMemberIsNonNegative( - outbound_stream.retransmitted_bytes_sent); - if (outbound_stream.media_type.is_defined() && - *outbound_stream.media_type == "video") { - verifier.TestMemberIsDefined(outbound_stream.frames_encoded); - verifier.TestMemberIsDefined(outbound_stream.key_frames_encoded); - verifier.TestMemberIsNonNegative( - outbound_stream.total_encode_time); - verifier.TestMemberIsNonNegative( - outbound_stream.total_encoded_bytes_target); - verifier.TestMemberIsNonNegative( - outbound_stream.total_packet_send_delay); - verifier.TestMemberIsDefined(outbound_stream.quality_limitation_reason); - verifier.TestMemberIsDefined( - outbound_stream.quality_limitation_durations); - verifier.TestMemberIsNonNegative( - 
outbound_stream.quality_limitation_resolution_changes); - // The integration test is not set up to test screen share; don't require - // this to be present. - verifier.MarkMemberTested(outbound_stream.content_type, true); - verifier.TestMemberIsDefined(outbound_stream.encoder_implementation); - // Unless an implementation-specific amount of time has passed and at - // least one frame has been encoded, undefined is reported. Because it - // is hard to tell what is the case here, we treat FPS as optional. - // TODO(hbos): Update the tests to run until all implemented metrics - // should be populated. - if (outbound_stream.frames_per_second.is_defined()) { - verifier.TestMemberIsNonNegative( - outbound_stream.frames_per_second); - } else { - verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); - } - verifier.TestMemberIsNonNegative(outbound_stream.frame_height); - verifier.TestMemberIsNonNegative(outbound_stream.frame_width); - verifier.TestMemberIsNonNegative(outbound_stream.frames_sent); - verifier.TestMemberIsNonNegative( - outbound_stream.huge_frames_sent); - verifier.TestMemberIsUndefined(outbound_stream.target_bitrate); - verifier.MarkMemberTested(outbound_stream.rid, true); - } else { - verifier.TestMemberIsUndefined(outbound_stream.frames_encoded); - verifier.TestMemberIsUndefined(outbound_stream.key_frames_encoded); - verifier.TestMemberIsUndefined(outbound_stream.total_encode_time); - verifier.TestMemberIsUndefined( - outbound_stream.total_encoded_bytes_target); - // TODO(https://crbug.com/webrtc/10635): Implement for audio as well. - verifier.TestMemberIsUndefined(outbound_stream.total_packet_send_delay); - verifier.TestMemberIsUndefined(outbound_stream.quality_limitation_reason); - verifier.TestMemberIsUndefined( - outbound_stream.quality_limitation_durations); - verifier.TestMemberIsUndefined( - outbound_stream.quality_limitation_resolution_changes); - verifier.TestMemberIsUndefined(outbound_stream.content_type); - // TODO(hbos): Implement for audio as well. 
- verifier.TestMemberIsUndefined(outbound_stream.encoder_implementation); - verifier.TestMemberIsUndefined(outbound_stream.rid); - verifier.TestMemberIsUndefined(outbound_stream.frames_per_second); - verifier.TestMemberIsUndefined(outbound_stream.frame_height); - verifier.TestMemberIsUndefined(outbound_stream.frame_width); - verifier.TestMemberIsUndefined(outbound_stream.frames_sent); - verifier.TestMemberIsUndefined(outbound_stream.huge_frames_sent); - verifier.TestMemberIsNonNegative(outbound_stream.target_bitrate); - } - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - void VerifyRTCReceivedRtpStreamStats( - const RTCReceivedRtpStreamStats& received_rtp, - RTCStatsVerifier& verifier, - bool packets_discarded_defined) { - VerifyRTCRTPStreamStats(received_rtp, verifier); - verifier.TestMemberIsNonNegative(received_rtp.jitter); - verifier.TestMemberIsDefined(received_rtp.packets_lost); - if (packets_discarded_defined) { - verifier.TestMemberIsNonNegative( - received_rtp.packets_discarded); - } else { - verifier.TestMemberIsUndefined(received_rtp.packets_discarded); - } - } - - bool VerifyRTCRemoteInboundRtpStreamStats( - const RTCRemoteInboundRtpStreamStats& remote_inbound_stream) { - RTCStatsVerifier verifier(report_, &remote_inbound_stream); - VerifyRTCReceivedRtpStreamStats(remote_inbound_stream, verifier, false); - verifier.TestMemberIsDefined(remote_inbound_stream.fraction_lost); - verifier.TestMemberIsIDReference(remote_inbound_stream.local_id, - RTCOutboundRTPStreamStats::kType); - verifier.TestMemberIsNonNegative( - remote_inbound_stream.round_trip_time); - verifier.TestMemberIsNonNegative( - remote_inbound_stream.total_round_trip_time); - verifier.TestMemberIsNonNegative( - remote_inbound_stream.round_trip_time_measurements); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCRemoteOutboundRTPStreamStats( - const RTCRemoteOutboundRtpStreamStats& remote_outbound_stream) { - RTCStatsVerifier verifier(report_, &remote_outbound_stream); - VerifyRTCRTPStreamStats(remote_outbound_stream, verifier); - VerifyRTCSentRTPStreamStats(remote_outbound_stream, verifier); - verifier.TestMemberIsIDReference(remote_outbound_stream.local_id, - RTCOutboundRTPStreamStats::kType); - verifier.TestMemberIsNonNegative( - remote_outbound_stream.remote_timestamp); - verifier.TestMemberIsDefined(remote_outbound_stream.reports_sent); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - void VerifyRTCMediaSourceStats(const RTCMediaSourceStats& media_source, - RTCStatsVerifier* verifier) { - verifier->TestMemberIsDefined(media_source.track_identifier); - verifier->TestMemberIsDefined(media_source.kind); - if (media_source.kind.is_defined()) { - EXPECT_TRUE((*media_source.kind == "audio" && - media_source.type() == RTCAudioSourceStats::kType) || - (*media_source.kind == "video" && - media_source.type() == RTCVideoSourceStats::kType)); - } - } - - bool VerifyRTCAudioSourceStats(const RTCAudioSourceStats& audio_source) { - RTCStatsVerifier verifier(report_, &audio_source); - VerifyRTCMediaSourceStats(audio_source, &verifier); - // Audio level, unlike audio energy, only gets updated at a certain - // frequency, so we don't require that one to be positive to avoid a race - // (https://crbug.com/webrtc/10962). 
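// ---------------------------------------------------------------------------
// Illustrative sketch with stand-in string constants, not the WebRTC API.
// VerifyRTCMediaSourceStats() above cross-checks two redundant pieces of
// information: audio and video sources both report the type "media-source",
// so `kind` must agree with the concrete stats class. The same consistency
// check in isolation:
#include <cassert>
#include <string>

bool KindMatchesConcreteType(const std::string& kind,
                             const std::string& concrete_type) {
  return (kind == "audio" && concrete_type == "RTCAudioSourceStats") ||
         (kind == "video" && concrete_type == "RTCVideoSourceStats");
}

int main() {
  assert(KindMatchesConcreteType("audio", "RTCAudioSourceStats"));
  assert(!KindMatchesConcreteType("video", "RTCAudioSourceStats"));
  return 0;
}
// ---------------------------------------------------------------------------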
- verifier.TestMemberIsNonNegative(audio_source.audio_level); - verifier.TestMemberIsPositive(audio_source.total_audio_energy); - verifier.TestMemberIsPositive(audio_source.total_samples_duration); - // TODO(hbos): `echo_return_loss` and `echo_return_loss_enhancement` are - // flaky on msan bot (sometimes defined, sometimes undefined). Should the - // test run until available or is there a way to have it always be - // defined? crbug.com/627816 - verifier.MarkMemberTested(audio_source.echo_return_loss, true); - verifier.MarkMemberTested(audio_source.echo_return_loss_enhancement, true); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCVideoSourceStats(const RTCVideoSourceStats& video_source) { - RTCStatsVerifier verifier(report_, &video_source); - VerifyRTCMediaSourceStats(video_source, &verifier); - // TODO(hbos): This integration test uses fakes that doesn't support - // VideoTrackSourceInterface::Stats. When this is fixed we should - // TestMemberIsNonNegative() for `width` and `height` instead to - // reflect real code. - verifier.TestMemberIsUndefined(video_source.width); - verifier.TestMemberIsUndefined(video_source.height); - verifier.TestMemberIsNonNegative(video_source.frames); - verifier.TestMemberIsNonNegative(video_source.frames_per_second); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - bool VerifyRTCTransportStats(const RTCTransportStats& transport) { - RTCStatsVerifier verifier(report_, &transport); - verifier.TestMemberIsNonNegative(transport.bytes_sent); - verifier.TestMemberIsNonNegative(transport.packets_sent); - verifier.TestMemberIsNonNegative(transport.bytes_received); - verifier.TestMemberIsNonNegative(transport.packets_received); - verifier.TestMemberIsOptionalIDReference(transport.rtcp_transport_stats_id, - RTCTransportStats::kType); - verifier.TestMemberIsDefined(transport.dtls_state); - verifier.TestMemberIsIDReference(transport.selected_candidate_pair_id, - RTCIceCandidatePairStats::kType); - verifier.TestMemberIsIDReference(transport.local_certificate_id, - RTCCertificateStats::kType); - verifier.TestMemberIsIDReference(transport.remote_certificate_id, - RTCCertificateStats::kType); - verifier.TestMemberIsDefined(transport.tls_version); - verifier.TestMemberIsDefined(transport.dtls_cipher); - verifier.TestMemberIsDefined(transport.srtp_cipher); - verifier.TestMemberIsPositive( - transport.selected_candidate_pair_changes); - return verifier.ExpectAllMembersSuccessfullyTested(); - } - - private: - rtc::scoped_refptr report_; -}; - -#ifdef WEBRTC_HAVE_SCTP -TEST_F(RTCStatsIntegrationTest, GetStatsFromCaller) { - StartCall(); - - rtc::scoped_refptr report = GetStatsFromCaller(); - RTCStatsReportVerifier(report.get()).VerifyReport({}); - -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); - #endif -} - -TEST_F(RTCStatsIntegrationTest, GetStatsFromCallee) { - StartCall(); - - rtc::scoped_refptr report = GetStatsFromCallee(); - RTCStatsReportVerifier(report.get()).VerifyReport({}); - -#if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(report->ToJson(), RTCStatsReportTraceListener::last_trace()); - #endif -} - -// These tests exercise the integration of the stats selection algorithm inside -// of PeerConnection. See rtcstatstraveral_unittest.cc for more detailed stats -// traversal tests on particular stats graphs. 
-TEST_F(RTCStatsIntegrationTest, GetStatsWithSenderSelector) { - StartCall(); - ASSERT_FALSE(caller_->pc()->GetSenders().empty()); - rtc::scoped_refptr report = - GetStatsFromCaller(caller_->pc()->GetSenders()[0]); - std::vector allowed_missing_stats = { - // TODO(hbos): Include RTC[Audio/Video]ReceiverStats when implemented. - // TODO(hbos): Include RTCRemoteOutboundRtpStreamStats when implemented. - // TODO(hbos): Include RTCRtpContributingSourceStats when implemented. - RTCInboundRTPStreamStats::kType, - RTCPeerConnectionStats::kType, - RTCMediaStreamStats::kType, - RTCDataChannelStats::kType, - }; - RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); - EXPECT_TRUE(report->size()); -} - -TEST_F(RTCStatsIntegrationTest, GetStatsWithReceiverSelector) { - StartCall(); - - ASSERT_FALSE(caller_->pc()->GetReceivers().empty()); - rtc::scoped_refptr report = - GetStatsFromCaller(caller_->pc()->GetReceivers()[0]); - std::vector allowed_missing_stats = { - // TODO(hbos): Include RTC[Audio/Video]SenderStats when implemented. - // TODO(hbos): Include RTCRemoteInboundRtpStreamStats when implemented. - // TODO(hbos): Include RTCRtpContributingSourceStats when implemented. - RTCOutboundRTPStreamStats::kType, - RTCPeerConnectionStats::kType, - RTCMediaStreamStats::kType, - RTCDataChannelStats::kType, - }; - RTCStatsReportVerifier(report.get()).VerifyReport(allowed_missing_stats); - EXPECT_TRUE(report->size()); -} - -TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidSenderSelector) { - StartCall(); - - ASSERT_FALSE(callee_->pc()->GetSenders().empty()); - // The selector is invalid for the caller because it belongs to the callee. - auto invalid_selector = callee_->pc()->GetSenders()[0]; - rtc::scoped_refptr report = - GetStatsFromCaller(invalid_selector); - EXPECT_FALSE(report->size()); -} - -TEST_F(RTCStatsIntegrationTest, GetStatsWithInvalidReceiverSelector) { - StartCall(); - - ASSERT_FALSE(callee_->pc()->GetReceivers().empty()); - // The selector is invalid for the caller because it belongs to the callee. - auto invalid_selector = callee_->pc()->GetReceivers()[0]; - rtc::scoped_refptr report = - GetStatsFromCaller(invalid_selector); - EXPECT_FALSE(report->size()); -} - -// TODO(bugs.webrtc.org/10041) For now this is equivalent to the following -// test GetsStatsWhileClosingPeerConnection, because pc() is closed by -// PeerConnectionTestWrapper. See: bugs.webrtc.org/9847 -TEST_F(RTCStatsIntegrationTest, - DISABLED_GetStatsWhileDestroyingPeerConnection) { - StartCall(); - - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); - caller_->pc()->GetStats(stats_obtainer); - // This will destroy the peer connection. - caller_ = nullptr; - // Any pending stats requests should have completed in the act of destroying - // the peer connection. 
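// ---------------------------------------------------------------------------
// Illustrative sketch, standard C++ only, not from the WebRTC sources. The
// selector tests above pass an allowed_missing_stats list to VerifyReport():
// a stats type may be absent from a filtered report only if it is explicitly
// allowlisted, which is what the absl::c_linear_search() call in the deleted
// code checks. Equivalent logic with std::find:
#include <algorithm>
#include <cassert>
#include <string>
#include <vector>

bool MissingTypesAreAllowed(const std::vector<std::string>& missing,
                            const std::vector<std::string>& allowed_missing) {
  for (const std::string& type : missing) {
    if (std::find(allowed_missing.begin(), allowed_missing.end(), type) ==
        allowed_missing.end()) {
      return false;  // A required type is missing from the report.
    }
  }
  return true;
}

int main() {
  const std::vector<std::string> allowed = {"inbound-rtp", "data-channel"};
  assert(MissingTypesAreAllowed({"data-channel"}, allowed));
  assert(!MissingTypesAreAllowed({"transport"}, allowed));
  return 0;
}
// ---------------------------------------------------------------------------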
- ASSERT_TRUE(stats_obtainer->report()); - #if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(stats_obtainer->report()->ToJson(), - RTCStatsReportTraceListener::last_trace()); - #endif -} - -TEST_F(RTCStatsIntegrationTest, GetsStatsWhileClosingPeerConnection) { - StartCall(); - - rtc::scoped_refptr stats_obtainer = - RTCStatsObtainer::Create(); - caller_->pc()->GetStats(stats_obtainer); - caller_->pc()->Close(); - - ASSERT_TRUE(stats_obtainer->report()); - #if RTC_TRACE_EVENTS_ENABLED - EXPECT_EQ(stats_obtainer->report()->ToJson(), - RTCStatsReportTraceListener::last_trace()); - #endif -} - -// GetStatsReferencedIds() is optimized to recognize what is or isn't a -// referenced ID based on dictionary type information and knowing what members -// are used as references, as opposed to iterating all members to find the ones -// with the "Id" or "Ids" suffix. As such, GetStatsReferencedIds() is tested as -// an integration test instead of a unit test in order to guard against adding -// new references and forgetting to update GetStatsReferencedIds(). -TEST_F(RTCStatsIntegrationTest, GetStatsReferencedIds) { - StartCall(); - - rtc::scoped_refptr report = GetStatsFromCallee(); - for (const RTCStats& stats : *report) { - // Find all references by looking at all string members with the "Id" or - // "Ids" suffix. - std::set expected_ids; - for (const auto* member : stats.Members()) { - if (!member->is_defined()) - continue; - if (member->type() == RTCStatsMemberInterface::kString) { - if (absl::EndsWith(member->name(), "Id")) { - const auto& id = member->cast_to>(); - expected_ids.insert(&(*id)); - } - } else if (member->type() == RTCStatsMemberInterface::kSequenceString) { - if (absl::EndsWith(member->name(), "Ids")) { - const auto& ids = - member->cast_to>>(); - for (const std::string& id : *ids) - expected_ids.insert(&id); - } - } - } - - std::vector neighbor_ids = GetStatsReferencedIds(stats); - EXPECT_EQ(neighbor_ids.size(), expected_ids.size()); - for (const std::string* neighbor_id : neighbor_ids) { - EXPECT_THAT(expected_ids, Contains(neighbor_id)); - } - for (const std::string* expected_id : expected_ids) { - EXPECT_THAT(neighbor_ids, Contains(expected_id)); - } - } -} - -TEST_F(RTCStatsIntegrationTest, GetStatsContainsNoDuplicateMembers) { - StartCall(); - - rtc::scoped_refptr report = GetStatsFromCallee(); - for (const RTCStats& stats : *report) { - std::set member_names; - for (const auto* member : stats.Members()) { - EXPECT_TRUE(member_names.find(member->name()) == member_names.end()) - << member->name() << " is a duplicate!"; - member_names.insert(member->name()); - } - } -} -#endif // WEBRTC_HAVE_SCTP - -} // namespace - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h index 5c61f5b1d..6f7986f09 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_media_utils.h @@ -11,8 +11,6 @@ #ifndef PC_RTP_MEDIA_UTILS_H_ #define PC_RTP_MEDIA_UTILS_H_ -#include // no-presubmit-check TODO(webrtc:8982) - #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc index afba4bc94..8d3064ed9 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.cc @@ -10,10 +10,10 @@ #include "pc/rtp_parameters_conversion.h" +#include 
#include #include #include -#include #include #include "api/array_view.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h index 959f3fde4..62e468572 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_parameters_conversion.h @@ -11,6 +11,7 @@ #ifndef PC_RTP_PARAMETERS_CONVERSION_H_ #define PC_RTP_PARAMETERS_CONVERSION_H_ +#include #include #include "absl/types/optional.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc index a2b3353c0..2444c9b60 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.cc @@ -17,7 +17,7 @@ #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" -#include "rtc_base/thread.h" +#include "rtc_base/location.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h index 7d124dfd6..73fc5b985 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_receiver.h @@ -42,27 +42,16 @@ namespace webrtc { // Internal class used by PeerConnection. class RtpReceiverInternal : public RtpReceiverInterface { public: - // Call on the signaling thread, to let the receiver know that the the - // embedded source object should enter a stopped/ended state and the track's - // state set to `kEnded`, a final state that cannot be reversed. + // Stops receiving. The track may be reactivated. virtual void Stop() = 0; - - // Call on the signaling thread to set the source's state to `ended` before - // clearing the media channel (`SetMediaChannel(nullptr)`) on the worker - // thread. - // The difference between `Stop()` and `SetSourceEnded()` is that the latter - // does not change the state of the associated track. - // NOTE: Calling this function should be followed with a call to - // `SetMediaChannel(nullptr)` on the worker thread, to complete the operation. - virtual void SetSourceEnded() = 0; + // Stops the receiver permanently. + // Causes the associated track to enter kEnded state. Cannot be reversed. + virtual void StopAndEndTrack() = 0; // Sets the underlying MediaEngine channel associated with this RtpSender. // A VoiceMediaChannel should be used for audio RtpSenders and // a VideoMediaChannel should be used for video RtpSenders. - // NOTE: - // * SetMediaChannel(nullptr) must be called before the media channel is - // destroyed. - // * This method must be invoked on the worker thread. + // Must call SetMediaChannel(nullptr) before the media channel is destroyed. 
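// ---------------------------------------------------------------------------
// Illustrative sketch with toy classes, not the WebRTC implementation. The
// rtp_receiver.h hunk above replaces Stop()/SetSourceEnded() with
// Stop()/StopAndEndTrack(): per the restored comments, Stop() stops receiving
// but the track may be reactivated, while StopAndEndTrack() is permanent and
// moves the associated track to kEnded. One way to picture the difference:
#include <cassert>

enum class TrackState { kLive, kEnded };

class ToyReceiver {
 public:
  void Stop() { receiving_ = false; }  // Reversible.
  void Restart() {
    if (track_ == TrackState::kLive) receiving_ = true;
  }
  void StopAndEndTrack() {  // Permanent: the track can never leave kEnded.
    receiving_ = false;
    track_ = TrackState::kEnded;
  }
  bool receiving() const { return receiving_; }
  TrackState track() const { return track_; }

 private:
  bool receiving_ = true;
  TrackState track_ = TrackState::kLive;
};

int main() {
  ToyReceiver r;
  r.Stop();
  r.Restart();
  assert(r.receiving() && r.track() == TrackState::kLive);
  r.StopAndEndTrack();
  r.Restart();  // Has no effect once the track has ended.
  assert(!r.receiving() && r.track() == TrackState::kEnded);
  return 0;
}
// ---------------------------------------------------------------------------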
virtual void SetMediaChannel(cricket::MediaChannel* media_channel) = 0; // Configures the RtpReceiver with the underlying media channel, with the diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc index dc53105b6..d4286371b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.cc @@ -12,7 +12,6 @@ #include #include -#include #include #include @@ -112,8 +111,7 @@ bool UnimplementedRtpParameterHasValue(const RtpParameters& parameters) { RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, const std::string& id, SetStreamsObserver* set_streams_observer) - : signaling_thread_(rtc::Thread::Current()), - worker_thread_(worker_thread), + : worker_thread_(worker_thread), id_(id), set_streams_observer_(set_streams_observer) { RTC_DCHECK(worker_thread); @@ -122,7 +120,6 @@ RtpSenderBase::RtpSenderBase(rtc::Thread* worker_thread, void RtpSenderBase::SetFrameEncryptor( rtc::scoped_refptr frame_encryptor) { - RTC_DCHECK_RUN_ON(signaling_thread_); frame_encryptor_ = std::move(frame_encryptor); // Special Case: Set the frame encryptor to any value on any existing channel. if (media_channel_ && ssrc_ && !stopped_) { @@ -139,7 +136,6 @@ void RtpSenderBase::SetMediaChannel(cricket::MediaChannel* media_channel) { } RtpParameters RtpSenderBase::GetParametersInternal() const { - RTC_DCHECK_RUN_ON(signaling_thread_); if (stopped_) { return RtpParameters(); } @@ -154,7 +150,6 @@ RtpParameters RtpSenderBase::GetParametersInternal() const { } RtpParameters RtpSenderBase::GetParameters() const { - RTC_DCHECK_RUN_ON(signaling_thread_); RtpParameters result = GetParametersInternal(); last_transaction_id_ = rtc::CreateRandomUuid(); result.transaction_id = last_transaction_id_.value(); @@ -162,7 +157,6 @@ RtpParameters RtpSenderBase::GetParameters() const { } RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); if (UnimplementedRtpParameterHasValue(parameters)) { @@ -192,7 +186,6 @@ RTCError RtpSenderBase::SetParametersInternal(const RtpParameters& parameters) { } RTCError RtpSenderBase::SetParameters(const RtpParameters& parameters) { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetParameters"); if (is_transceiver_stopped_) { LOG_AND_RETURN_ERROR( @@ -232,7 +225,6 @@ void RtpSenderBase::SetStreams(const std::vector& stream_ids) { } bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetTrack"); if (stopped_) { RTC_LOG(LS_ERROR) << "SetTrack can't be called on a stopped RtpSender."; @@ -274,7 +266,6 @@ bool RtpSenderBase::SetTrack(MediaStreamTrackInterface* track) { } void RtpSenderBase::SetSsrc(uint32_t ssrc) { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::SetSsrc"); if (stopped_ || ssrc == ssrc_) { return; @@ -324,7 +315,6 @@ void RtpSenderBase::SetSsrc(uint32_t ssrc) { } void RtpSenderBase::Stop() { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "RtpSenderBase::Stop"); // TODO(deadbeef): Need to do more here to fully stop sending packets. 
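// ---------------------------------------------------------------------------
// Illustrative sketch, standard C++ only, not the rtc_base implementation.
// The rtp_sender.cc hunks above remove RTC_DCHECK_RUN_ON(signaling_thread_)
// assertions along with the signaling_thread_ member. Conceptually such an
// assertion checks that the current thread is the one the object was bound
// to; a minimal analogue using std::thread::id:
#include <cassert>
#include <thread>

class SignalingThreadBound {
 public:
  SignalingThreadBound() : owner_(std::this_thread::get_id()) {}
  void SetParameters(int value) {
    // Analogue of RTC_DCHECK_RUN_ON(signaling_thread_).
    assert(std::this_thread::get_id() == owner_);
    value_ = value;
  }
  int value() const { return value_; }

 private:
  const std::thread::id owner_;
  int value_ = 0;
};

int main() {
  SignalingThreadBound sender;
  sender.SetParameters(42);  // OK: same thread that constructed the object.
  assert(sender.value() == 42);
  // Calling sender.SetParameters() from another std::thread would trip the
  // assert in a debug build, which is the behaviour the removed checks gave.
  return 0;
}
// ---------------------------------------------------------------------------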
if (stopped_) { @@ -345,7 +335,6 @@ void RtpSenderBase::Stop() { RTCError RtpSenderBase::DisableEncodingLayers( const std::vector& rids) { - RTC_DCHECK_RUN_ON(signaling_thread_); if (stopped_) { LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_STATE, "Cannot disable encodings on a stopped sender."); @@ -392,7 +381,6 @@ RTCError RtpSenderBase::DisableEncodingLayers( void RtpSenderBase::SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer) { - RTC_DCHECK_RUN_ON(signaling_thread_); frame_transformer_ = std::move(frame_transformer); if (media_channel_ && ssrc_ && !stopped_) { worker_thread_->Invoke(RTC_FROM_HERE, [&] { @@ -495,7 +483,6 @@ sigslot::signal0<>* AudioRtpSender::GetOnDestroyedSignal() { } void AudioRtpSender::OnChanged() { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "AudioRtpSender::OnChanged"); RTC_DCHECK(!stopped_); if (cached_track_enabled_ != track_->enabled()) { @@ -530,12 +517,10 @@ void AudioRtpSender::RemoveTrackFromStats() { } rtc::scoped_refptr AudioRtpSender::GetDtmfSender() const { - RTC_DCHECK_RUN_ON(signaling_thread_); return dtmf_sender_proxy_; } void AudioRtpSender::SetSend() { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(can_send_track()); if (!media_channel_) { @@ -566,7 +551,6 @@ void AudioRtpSender::SetSend() { } void AudioRtpSender::ClearSend() { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(ssrc_ != 0); RTC_DCHECK(!stopped_); if (!media_channel_) { @@ -600,13 +584,10 @@ VideoRtpSender::~VideoRtpSender() { } void VideoRtpSender::OnChanged() { - RTC_DCHECK_RUN_ON(signaling_thread_); TRACE_EVENT0("webrtc", "VideoRtpSender::OnChanged"); RTC_DCHECK(!stopped_); - - auto content_hint = video_track()->content_hint(); - if (cached_track_content_hint_ != content_hint) { - cached_track_content_hint_ = content_hint; + if (cached_track_content_hint_ != video_track()->content_hint()) { + cached_track_content_hint_ = video_track()->content_hint(); if (can_send_track()) { SetSend(); } @@ -619,13 +600,11 @@ void VideoRtpSender::AttachTrack() { } rtc::scoped_refptr VideoRtpSender::GetDtmfSender() const { - RTC_DCHECK_RUN_ON(signaling_thread_); - RTC_DLOG(LS_ERROR) << "Tried to get DTMF sender from video sender."; + RTC_LOG(LS_ERROR) << "Tried to get DTMF sender from video sender."; return nullptr; } void VideoRtpSender::SetSend() { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(!stopped_); RTC_DCHECK(can_send_track()); if (!media_channel_) { @@ -657,7 +636,6 @@ void VideoRtpSender::SetSend() { } void VideoRtpSender::ClearSend() { - RTC_DCHECK_RUN_ON(signaling_thread_); RTC_DCHECK(ssrc_ != 0); RTC_DCHECK(!stopped_); if (!media_channel_) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h index 569a6007d..4bc16c796 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_sender.h @@ -17,7 +17,6 @@ #include #include - #include #include #include @@ -33,16 +32,13 @@ #include "api/rtp_parameters.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" -#include "api/sequence_checker.h" #include "media/base/audio_source.h" #include "media/base/media_channel.h" #include "pc/dtmf_sender.h" #include "pc/stats_collector_interface.h" -#include "rtc_base/checks.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -108,9 +104,6 @@ class RtpSenderBase : public 
RtpSenderInternal, public ObserverInterface { bool SetTrack(MediaStreamTrackInterface* track) override; rtc::scoped_refptr track() const override { - // This method is currently called from the worker thread by - // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n. - // RTC_DCHECK_RUN_ON(signaling_thread_); return track_; } @@ -127,17 +120,9 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { // underlying transport (this occurs if the sender isn't seen in a local // description). void SetSsrc(uint32_t ssrc) override; - uint32_t ssrc() const override { - // This method is currently called from the worker thread by - // RTCStatsCollector::PrepareTransceiverStatsInfosAndCallStats_s_w_n. - // RTC_DCHECK_RUN_ON(signaling_thread_); - return ssrc_; - } + uint32_t ssrc() const override { return ssrc_; } - std::vector stream_ids() const override { - RTC_DCHECK_RUN_ON(signaling_thread_); - return stream_ids_; - } + std::vector stream_ids() const override { return stream_ids_; } void set_stream_ids(const std::vector& stream_ids) override { stream_ids_ = stream_ids; } @@ -150,7 +135,6 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { init_parameters_.encodings = init_send_encodings; } std::vector init_send_encodings() const override { - RTC_DCHECK_RUN_ON(signaling_thread_); return init_parameters_.encodings; } @@ -159,7 +143,6 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { dtls_transport_ = dtls_transport; } rtc::scoped_refptr dtls_transport() const override { - RTC_DCHECK_RUN_ON(signaling_thread_); return dtls_transport_; } @@ -185,10 +168,7 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { void SetEncoderToPacketizerFrameTransformer( rtc::scoped_refptr frame_transformer) override; - void SetTransceiverAsStopped() override { - RTC_DCHECK_RUN_ON(signaling_thread_); - is_transceiver_stopped_ = true; - } + void SetTransceiverAsStopped() override { is_transceiver_stopped_ = true; } protected: // If `set_streams_observer` is not null, it is invoked when SetStreams() @@ -215,22 +195,16 @@ class RtpSenderBase : public RtpSenderInternal, public ObserverInterface { virtual void AddTrackToStats() {} virtual void RemoveTrackFromStats() {} - rtc::Thread* const signaling_thread_; - rtc::Thread* const worker_thread_; + rtc::Thread* worker_thread_; uint32_t ssrc_ = 0; - bool stopped_ RTC_GUARDED_BY(signaling_thread_) = false; - bool is_transceiver_stopped_ RTC_GUARDED_BY(signaling_thread_) = false; + bool stopped_ = false; + bool is_transceiver_stopped_ = false; int attachment_id_ = 0; const std::string id_; std::vector stream_ids_; RtpParameters init_parameters_; - // TODO(tommi): `media_channel_` and several other member variables in this - // class (ssrc_, stopped_, etc) are accessed from more than one thread without - // a guard or lock. Internally there are also several Invoke()s that we could - // remove since the upstream code may already be performing several operations - // on the worker thread. 
cricket::MediaChannel* media_channel_ = nullptr; rtc::scoped_refptr track_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc index 5e3a084d2..a78b9d6be 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.cc @@ -10,7 +10,6 @@ #include "pc/rtp_transceiver.h" -#include #include #include #include @@ -25,7 +24,6 @@ #include "pc/rtp_media_utils.h" #include "pc/session_description.h" #include "rtc_base/checks.h" -#include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" @@ -156,21 +154,16 @@ RtpTransceiver::~RtpTransceiver() { RTC_DCHECK_RUN_ON(thread_); StopInternal(); } - - RTC_CHECK(!channel_) << "Missing call to SetChannel(nullptr)?"; } -void RtpTransceiver::SetChannel( - cricket::ChannelInterface* channel, - std::function transport_lookup) { +void RtpTransceiver::SetChannel(cricket::ChannelInterface* channel) { RTC_DCHECK_RUN_ON(thread_); // Cannot set a non-null channel on a stopped transceiver. - if ((stopped_ && channel) || channel == channel_) { + if (stopped_ && channel) { return; } RTC_DCHECK(channel || channel_); - RTC_DCHECK(!channel || transport_lookup) << "lookup function not supplied"; RTC_LOG_THREAD_BLOCK_COUNT(); @@ -184,8 +177,6 @@ void RtpTransceiver::SetChannel( signaling_thread_safety_ = PendingTaskSafetyFlag::Create(); } - cricket::ChannelInterface* channel_to_delete = nullptr; - // An alternative to this, could be to require SetChannel to be called // on the network thread. The channel object operates for the most part // on the network thread, as part of its initialization being on the network @@ -198,14 +189,11 @@ void RtpTransceiver::SetChannel( channel_manager_->network_thread()->Invoke(RTC_FROM_HERE, [&]() { if (channel_) { channel_->SetFirstPacketReceivedCallback(nullptr); - channel_->SetRtpTransport(nullptr); - channel_to_delete = channel_; } channel_ = channel; if (channel_) { - channel_->SetRtpTransport(transport_lookup(channel_->mid())); channel_->SetFirstPacketReceivedCallback( [thread = thread_, flag = signaling_thread_safety_, this]() mutable { thread->PostTask(ToQueuedTask( @@ -214,34 +202,20 @@ void RtpTransceiver::SetChannel( } }); + for (const auto& sender : senders_) { + sender->internal()->SetMediaChannel(channel_ ? channel_->media_channel() + : nullptr); + } + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); - if (!channel_) { - for (const auto& receiver : receivers_) - receiver->internal()->SetSourceEnded(); - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); // There should not be an invoke. + for (const auto& receiver : receivers_) { + if (!channel_) { + receiver->internal()->Stop(); + } else { + receiver->internal()->SetMediaChannel(channel_->media_channel()); + } } - - if (channel_to_delete || !senders_.empty() || !receivers_.empty()) { - channel_manager_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { - auto* media_channel = channel_ ? channel_->media_channel() : nullptr; - for (const auto& sender : senders_) { - sender->internal()->SetMediaChannel(media_channel); - } - - for (const auto& receiver : receivers_) { - receiver->internal()->SetMediaChannel(media_channel); - } - - // Destroy the channel, if we had one, now _after_ updating the receivers - // who might have had references to the previous channel. 
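// ---------------------------------------------------------------------------
// Illustrative sketch with toy types, not the WebRTC implementation (the real
// SetChannel() hops between the network and worker threads with Invoke()).
// The removed SetChannel() body above follows one ordering rule: every sender
// and receiver is repointed away from the old channel before that channel is
// destroyed, so nothing is left holding a dangling media-channel pointer. The
// same rule without the threading:
#include <cassert>
#include <memory>
#include <vector>

struct MediaChannel {};
struct Channel {
  MediaChannel media;
};

struct Endpoint {
  MediaChannel* media_channel = nullptr;
  void SetMediaChannel(MediaChannel* mc) { media_channel = mc; }
};

void SetChannel(std::unique_ptr<Channel>& current,
                std::unique_ptr<Channel> next,
                std::vector<Endpoint*>& endpoints) {
  std::unique_ptr<Channel> to_delete = std::move(current);
  current = std::move(next);
  MediaChannel* mc = current ? &current->media : nullptr;
  for (Endpoint* e : endpoints) {
    e->SetMediaChannel(mc);  // Detach every consumer from the old channel...
  }
  to_delete.reset();  // ...then it is safe to destroy it.
}

int main() {
  auto current = std::make_unique<Channel>();
  Endpoint sender;
  sender.SetMediaChannel(&current->media);
  std::vector<Endpoint*> endpoints = {&sender};
  SetChannel(current, nullptr, endpoints);  // Tear-down path.
  assert(sender.media_channel == nullptr);
  return 0;
}
// ---------------------------------------------------------------------------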
- if (channel_to_delete) { - channel_manager_->DestroyChannel(channel_to_delete); - } - }); - } - - RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(2); } void RtpTransceiver::AddSender( @@ -282,7 +256,6 @@ void RtpTransceiver::AddReceiver( } bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { - RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(!unified_plan_); if (receiver) { RTC_DCHECK_EQ(media_type(), receiver->media_type()); @@ -291,13 +264,8 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { if (it == receivers_.end()) { return false; } - + // `Stop()` will clear the internally cached pointer to the media channel. (*it)->internal()->Stop(); - channel_manager_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { - // `Stop()` will clear the receiver's pointer to the media channel. - (*it)->internal()->SetMediaChannel(nullptr); - }); - receivers_.erase(it); return true; } @@ -305,14 +273,14 @@ bool RtpTransceiver::RemoveReceiver(RtpReceiverInterface* receiver) { rtc::scoped_refptr RtpTransceiver::sender_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, senders_.size()); - return rtc::scoped_refptr(senders_[0]->internal()); + return senders_[0]->internal(); } rtc::scoped_refptr RtpTransceiver::receiver_internal() const { RTC_DCHECK(unified_plan_); RTC_CHECK_EQ(1u, receivers_.size()); - return rtc::scoped_refptr(receivers_[0]->internal()); + return receivers_[0]->internal(); } cricket::MediaType RtpTransceiver::media_type() const { @@ -415,22 +383,15 @@ void RtpTransceiver::StopSendingAndReceiving() { // // 3. Stop sending media with sender. // - RTC_DCHECK_RUN_ON(thread_); - // 4. Send an RTCP BYE for each RTP stream that was being sent by sender, as // specified in [RFC3550]. + RTC_DCHECK_RUN_ON(thread_); for (const auto& sender : senders_) sender->internal()->Stop(); - // Signal to receiver sources that we're stopping. + // 5. Stop receiving media with receiver. for (const auto& receiver : receivers_) - receiver->internal()->Stop(); - - channel_manager_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { - // 5 Stop receiving media with receiver. - for (const auto& receiver : receivers_) - receiver->internal()->SetMediaChannel(nullptr); - }); + receiver->internal()->StopAndEndTrack(); stopping_ = true; direction_ = webrtc::RtpTransceiverDirection::kInactive; diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h index e7e3fb9be..c99532927 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transceiver.h @@ -20,12 +20,9 @@ #include "absl/types/optional.h" #include "api/array_view.h" -#include "api/jsep.h" #include "api/media_types.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" -#include "api/rtp_receiver_interface.h" -#include "api/rtp_sender_interface.h" #include "api/rtp_transceiver_direction.h" #include "api/rtp_transceiver_interface.h" #include "api/scoped_refptr.h" @@ -37,8 +34,6 @@ #include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" #include "pc/rtp_sender_proxy.h" -#include "pc/rtp_transport_internal.h" -#include "pc/session_description.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -105,36 +100,8 @@ class RtpTransceiver final cricket::ChannelInterface* channel() const { return channel_; } // Sets the Voice/VideoChannel. 
The caller must pass in the correct channel - // implementation based on the type of the transceiver. The call must - // furthermore be made on the signaling thread. - // - // `channel`: The channel instance to be associated with the transceiver. - // When a valid pointer is passed for `channel`, the state of the object - // is expected to be newly constructed and not initalized for network - // activity (see next parameter for more). - // - // NOTE: For all practical purposes, the ownership of the channel - // object should be considered to lie with the transceiver until - // `SetChannel()` is called again with nullptr set as the new channel. - // Moving forward, this parameter will change to either be a - // std::unique_ptr<> or the full construction of the channel object will - // be moved to happen within the context of the transceiver class. - // - // `transport_lookup`: When `channel` points to a valid channel object, this - // callback function will be used to look up the `RtpTransport` object - // to associate with the channel via `BaseChannel::SetRtpTransport`. - // The lookup function will be called on the network thread, synchronously - // during the call to `SetChannel`. This means that the caller of - // `SetChannel()` may provide a callback function that references state - // that exists within the calling scope of SetChannel (e.g. a variable - // on the stack). - // The reason for this design is to limit the number of times we jump - // synchronously to the network thread from the signaling thread. - // The callback allows us to combine the transport lookup with network - // state initialization of the channel object. - void SetChannel(cricket::ChannelInterface* channel, - std::function - transport_lookup); + // implementation based on the type of the transceiver. + void SetChannel(cricket::ChannelInterface* channel); // Adds an RtpSender of the appropriate type to be owned by this transceiver. // Must not be null. diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc index 5dbb76509..e91715496 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.cc @@ -10,7 +10,7 @@ #include "pc/rtp_transmission_manager.h" -#include +#include #include #include "absl/types/optional.h" @@ -23,7 +23,6 @@ #include "rtc_base/checks.h" #include "rtc_base/helpers.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -379,8 +378,7 @@ void RtpTransmissionManager::AddAudioTrack(AudioTrackInterface* track, } // Normal case; we've never seen this track before. - auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), - rtc::scoped_refptr(track), + auto new_sender = CreateSender(cricket::MEDIA_TYPE_AUDIO, track->id(), track, {stream->id()}, {}); new_sender->internal()->SetMediaChannel(voice_media_channel()); GetAudioTransceiver()->internal()->AddSender(new_sender); @@ -426,8 +424,7 @@ void RtpTransmissionManager::AddVideoTrack(VideoTrackInterface* track, } // Normal case; we've never seen this track before. 
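[editor's note] For context on the AddAudioTrack()/AddVideoTrack() hunks in rtp_transmission_manager.cc: after the revert, the track pointer is passed straight to CreateSender() (without the explicit scoped_refptr wrapping), the new sender is immediately given the current voice/video media channel, and only then is it registered with the transceiver. The following is a rough, self-contained sketch of that "create sender, attach media channel, add to transceiver" flow; the types and the AddTrack() helper are invented for illustration.

    #include <memory>
    #include <string>
    #include <vector>

    struct MediaChannel {};

    struct Sender {
      explicit Sender(std::string id) : track_id(std::move(id)) {}
      void SetMediaChannel(MediaChannel* mc) { media_channel = mc; }
      std::string track_id;
      MediaChannel* media_channel = nullptr;
    };

    struct Transceiver {
      void AddSender(std::shared_ptr<Sender> s) { senders.push_back(std::move(s)); }
      std::vector<std::shared_ptr<Sender>> senders;
    };

    // Mirrors the shape of the reverted AddAudioTrack(): create the sender for
    // the new track, point it at the current media channel, then hand it to the
    // audio transceiver.
    void AddTrack(const std::string& track_id, MediaChannel* voice_media_channel,
                  Transceiver& audio_transceiver) {
      auto new_sender = std::make_shared<Sender>(track_id);
      new_sender->SetMediaChannel(voice_media_channel);
      audio_transceiver.AddSender(std::move(new_sender));
    }

    int main() {
      MediaChannel voice;
      Transceiver audio_transceiver;
      AddTrack("audio-track-1", &voice, audio_transceiver);
    }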
- auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), - rtc::scoped_refptr(track), + auto new_sender = CreateSender(cricket::MEDIA_TYPE_VIDEO, track->id(), track, {stream->id()}, {}); new_sender->internal()->SetMediaChannel(video_media_channel()); GetVideoTransceiver()->internal()->AddSender(new_sender); @@ -460,14 +457,13 @@ void RtpTransmissionManager::CreateAudioReceiver( // TODO(https://crbug.com/webrtc/9480): When we remove remote_streams(), use // the constructor taking stream IDs instead. auto audio_receiver = rtc::make_ref_counted( - worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan(), - voice_media_channel()); + worker_thread(), remote_sender_info.sender_id, streams, IsUnifiedPlan()); + audio_receiver->SetMediaChannel(voice_media_channel()); if (remote_sender_info.sender_id == kDefaultAudioSenderId) { audio_receiver->SetupUnsignaledMediaChannel(); } else { audio_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); } - auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(audio_receiver)); GetAudioTransceiver()->internal()->AddReceiver(receiver); @@ -485,13 +481,12 @@ void RtpTransmissionManager::CreateVideoReceiver( // the constructor taking stream IDs instead. auto video_receiver = rtc::make_ref_counted( worker_thread(), remote_sender_info.sender_id, streams); - - video_receiver->SetupMediaChannel( - remote_sender_info.sender_id == kDefaultVideoSenderId - ? absl::nullopt - : absl::optional(remote_sender_info.first_ssrc), - video_media_channel()); - + video_receiver->SetMediaChannel(video_media_channel()); + if (remote_sender_info.sender_id == kDefaultVideoSenderId) { + video_receiver->SetupUnsignaledMediaChannel(); + } else { + video_receiver->SetupMediaChannel(remote_sender_info.first_ssrc); + } auto receiver = RtpReceiverProxyWithInternal::Create( signaling_thread(), worker_thread(), std::move(video_receiver)); GetVideoTransceiver()->internal()->AddReceiver(receiver); @@ -641,7 +636,7 @@ std::vector* RtpTransmissionManager::GetLocalSenderInfos( const RtpSenderInfo* RtpTransmissionManager::FindSenderInfo( const std::vector& infos, const std::string& stream_id, - const std::string& sender_id) const { + const std::string sender_id) const { for (const RtpSenderInfo& sender_info : infos) { if (sender_info.stream_id == stream_id && sender_info.sender_id == sender_id) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h index 3496bfa54..f616d9d0f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transmission_manager.h @@ -29,9 +29,7 @@ #include "media/base/media_channel.h" #include "pc/channel_manager.h" #include "pc/rtp_receiver.h" -#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" -#include "pc/rtp_sender_proxy.h" #include "pc/rtp_transceiver.h" #include "pc/stats_collector_interface.h" #include "pc/transceiver_list.h" @@ -52,7 +50,7 @@ namespace webrtc { struct RtpSenderInfo { RtpSenderInfo() : first_ssrc(0) {} RtpSenderInfo(const std::string& stream_id, - const std::string& sender_id, + const std::string sender_id, uint32_t ssrc) : stream_id(stream_id), sender_id(sender_id), first_ssrc(ssrc) {} bool operator==(const RtpSenderInfo& other) { @@ -186,7 +184,7 @@ class RtpTransmissionManager : public RtpSenderBase::SetStreamsObserver { cricket::MediaType media_type); const RtpSenderInfo* FindSenderInfo(const std::vector& infos, const 
std::string& stream_id, - const std::string& sender_id) const; + const std::string sender_id) const; // Return the RtpSender with the given track attached. rtc::scoped_refptr> diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc index 54276650c..347b2440f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.cc @@ -11,13 +11,11 @@ #include "pc/rtp_transport.h" #include - -#include +#include #include #include "absl/strings/string_view.h" #include "api/array_view.h" -#include "api/units/timestamp.h" #include "media/base/rtp_utils.h" #include "modules/rtp_rtcp/source/rtp_packet_received.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h index 39d4ad5b5..893d91e73 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/rtp_transport.h @@ -18,7 +18,6 @@ #include "absl/types/optional.h" #include "call/rtp_demuxer.h" -#include "call/video_receive_stream.h" #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" #include "p2p/base/packet_transport_internal.h" #include "pc/rtp_transport_internal.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc index 626d1757b..f01f86ebd 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_data_channel_transport.cc @@ -10,6 +10,9 @@ #include "pc/sctp_data_channel_transport.h" +#include "absl/types/optional.h" +#include "pc/sctp_utils.h" + namespace webrtc { SctpDataChannelTransport::SctpDataChannelTransport( diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h index 4981db4ed..16b98407b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_transport.h @@ -16,11 +16,9 @@ #include "api/dtls_transport_interface.h" #include "api/scoped_refptr.h" #include "api/sctp_transport_interface.h" -#include "api/sequence_checker.h" #include "media/sctp/sctp_transport_internal.h" #include "p2p/base/dtls_transport_internal.h" #include "pc/dtls_transport.h" -#include "rtc_base/checks.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc index c60e339b0..f7458405e 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sctp_utils.cc @@ -11,12 +11,12 @@ #include "pc/sctp_utils.h" #include - -#include +#include #include "absl/types/optional.h" #include "api/priority.h" #include "rtc_base/byte_buffer.h" +#include "rtc_base/checks.h" #include "rtc_base/copy_on_write_buffer.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc index 2f4a88c68..4332cd6df 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.cc @@ -11,18 +11,15 @@ #include "pc/sdp_offer_answer.h" #include -#include #include #include #include #include -#include #include #include #include "absl/algorithm/container.h" #include "absl/memory/memory.h" -#include "absl/strings/match.h" #include 
"absl/strings/string_view.h" #include "api/array_view.h" #include "api/crypto/crypto_options.h" @@ -32,6 +29,7 @@ #include "api/rtp_sender_interface.h" #include "api/video/builtin_video_bitrate_allocator_factory.h" #include "media/base/codec.h" +#include "media/base/media_engine.h" #include "media/base/rid_description.h" #include "p2p/base/ice_transport_internal.h" #include "p2p/base/p2p_constants.h" @@ -40,16 +38,15 @@ #include "p2p/base/transport_description.h" #include "p2p/base/transport_description_factory.h" #include "p2p/base/transport_info.h" -#include "pc/channel_interface.h" +#include "pc/data_channel_utils.h" #include "pc/dtls_transport.h" #include "pc/media_stream.h" #include "pc/media_stream_proxy.h" -#include "pc/peer_connection_internal.h" +#include "pc/peer_connection.h" #include "pc/peer_connection_message_handler.h" #include "pc/rtp_media_utils.h" -#include "pc/rtp_receiver_proxy.h" #include "pc/rtp_sender.h" -#include "pc/rtp_sender_proxy.h" +#include "pc/rtp_transport_internal.h" #include "pc/simulcast_description.h" #include "pc/stats_collector.h" #include "pc/usage_pattern.h" @@ -62,6 +59,7 @@ #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" #include "rtc_base/strings/string_builder.h" +#include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" @@ -139,9 +137,40 @@ void NoteAddIceCandidateResult(int result) { kAddIceCandidateMax); } -void NoteKeyProtocol(KeyExchangeProtocolType protocol_type) { +void NoteKeyProtocolAndMedia(KeyExchangeProtocolType protocol_type, + cricket::MediaType media_type) { + // Array of structs needed to map {KeyExchangeProtocolType, + // cricket::MediaType} to KeyExchangeProtocolMedia without using std::map in + // order to avoid -Wglobal-constructors and -Wexit-time-destructors. + static constexpr struct { + KeyExchangeProtocolType protocol_type; + cricket::MediaType media_type; + KeyExchangeProtocolMedia protocol_media; + } kEnumCounterKeyProtocolMediaMap[] = { + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeDtlsAudio}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeDtlsVideo}, + {kEnumCounterKeyProtocolDtls, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeDtlsData}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_AUDIO, + kEnumCounterKeyProtocolMediaTypeSdesAudio}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_VIDEO, + kEnumCounterKeyProtocolMediaTypeSdesVideo}, + {kEnumCounterKeyProtocolSdes, cricket::MEDIA_TYPE_DATA, + kEnumCounterKeyProtocolMediaTypeSdesData}, + }; + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocol", protocol_type, kEnumCounterKeyProtocolMax); + + for (const auto& i : kEnumCounterKeyProtocolMediaMap) { + if (i.protocol_type == protocol_type && i.media_type == media_type) { + RTC_HISTOGRAM_ENUMERATION("WebRTC.PeerConnection.KeyProtocolByMedia", + i.protocol_media, + kEnumCounterKeyProtocolMediaTypeMax); + } + } } std::map GetBundleGroupsByMid( @@ -196,9 +225,7 @@ std::string GetSetDescriptionErrorMessage(cricket::ContentSource source, const RTCError& error) { rtc::StringBuilder oss; oss << "Failed to set " << (source == cricket::CS_LOCAL ? 
"local" : "remote") - << " " << SdpTypeToString(type) << " sdp: "; - RTC_DCHECK(!absl::StartsWith(error.message(), oss.str())) << error.message(); - oss << error.message(); + << " " << SdpTypeToString(type) << " sdp: " << error.message(); return oss.Release(); } @@ -338,8 +365,9 @@ RTCError VerifyCrypto(const SessionDescription* desc, continue; } // Note what media is used with each crypto protocol, for all sections. - NoteKeyProtocol(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls - : webrtc::kEnumCounterKeyProtocolSdes); + NoteKeyProtocolAndMedia(dtls_enabled ? webrtc::kEnumCounterKeyProtocolDtls + : webrtc::kEnumCounterKeyProtocolSdes, + content_info.media_description()->type()); const std::string& mid = content_info.name; auto it = bundle_groups_by_mid.find(mid); const cricket::ContentGroup* bundle = @@ -719,250 +747,6 @@ bool ContentHasHeaderExtension(const cricket::ContentInfo& content_info, } // namespace -// This class stores state related to a SetRemoteDescription operation, captures -// and reports potential errors that migth occur and makes sure to notify the -// observer of the operation and the operations chain of completion. -class SdpOfferAnswerHandler::RemoteDescriptionOperation { - public: - RemoteDescriptionOperation( - SdpOfferAnswerHandler* handler, - std::unique_ptr desc, - rtc::scoped_refptr observer, - std::function operations_chain_callback) - : handler_(handler), - desc_(std::move(desc)), - observer_(std::move(observer)), - operations_chain_callback_(std::move(operations_chain_callback)), - unified_plan_(handler_->IsUnifiedPlan()) { - if (!desc_) { - type_ = static_cast(-1); - InvalidParam("SessionDescription is NULL."); - } else { - type_ = desc_->GetType(); - } - } - - ~RemoteDescriptionOperation() { - RTC_DCHECK_RUN_ON(handler_->signaling_thread()); - SignalCompletion(); - operations_chain_callback_(); - } - - bool ok() const { return error_.ok(); } - - // Notifies the observer that the operation is complete and releases the - // reference to the observer. - void SignalCompletion() { - if (!observer_) - return; - - if (!error_.ok() && type_ != static_cast(-1)) { - std::string error_message = - GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type_, error_); - RTC_LOG(LS_ERROR) << error_message; - error_.set_message(std::move(error_message)); - } - - observer_->OnSetRemoteDescriptionComplete(error_); - observer_ = nullptr; // Only fire the notification once. - } - - // If a session error has occurred the PeerConnection is in a possibly - // inconsistent state so fail right away. - bool HaveSessionError() { - RTC_DCHECK(ok()); - if (handler_->session_error() != SessionError::kNone) - InternalError(handler_->GetSessionErrorMsg()); - return !ok(); - } - - // Returns true if the operation was a rollback operation. If this function - // returns true, the caller should consider the operation complete. Otherwise - // proceed to the next step. - bool MaybeRollback() { - RTC_DCHECK_RUN_ON(handler_->signaling_thread()); - RTC_DCHECK(ok()); - if (type_ != SdpType::kRollback) { - // Check if we can do an implicit rollback. 
- if (type_ == SdpType::kOffer && unified_plan_ && - handler_->pc_->configuration()->enable_implicit_rollback && - handler_->signaling_state() == - PeerConnectionInterface::kHaveLocalOffer) { - handler_->Rollback(type_); - } - return false; - } - - if (unified_plan_) { - error_ = handler_->Rollback(type_); - } else if (type_ == SdpType::kRollback) { - Unsupported("Rollback not supported in Plan B"); - } - - return true; - } - - // Report to UMA the format of the received offer or answer. - void ReportOfferAnswerUma() { - RTC_DCHECK(ok()); - if (type_ == SdpType::kOffer || type_ == SdpType::kAnswer) { - handler_->pc_->ReportSdpFormatReceived(*desc_.get()); - handler_->pc_->ReportSdpBundleUsage(*desc_.get()); - } - } - - // Checks if the session description for the operation is valid. If not, the - // function captures error information and returns false. Note that if the - // return value is false, the operation should be considered done. - bool IsDescriptionValid() { - RTC_DCHECK_RUN_ON(handler_->signaling_thread()); - RTC_DCHECK(ok()); - RTC_DCHECK(bundle_groups_by_mid_.empty()) << "Already called?"; - bundle_groups_by_mid_ = GetBundleGroupsByMid(description()); - error_ = handler_->ValidateSessionDescription( - desc_.get(), cricket::CS_REMOTE, bundle_groups_by_mid_); - return ok(); - } - - // Transfers ownership of the session description object over to `handler_`. - bool ReplaceRemoteDescriptionAndCheckEror() { - RTC_DCHECK_RUN_ON(handler_->signaling_thread()); - RTC_DCHECK(ok()); - RTC_DCHECK(desc_); - RTC_DCHECK(!replaced_remote_description_); -#if RTC_DCHECK_IS_ON - const auto* existing_remote_description = handler_->remote_description(); -#endif - - error_ = handler_->ReplaceRemoteDescription(std::move(desc_), type_, - &replaced_remote_description_); - - if (ok()) { -#if RTC_DCHECK_IS_ON - // Sanity check that our `old_remote_description()` method always returns - // the same value as `remote_description()` did before the call to - // ReplaceRemoteDescription. - RTC_DCHECK_EQ(existing_remote_description, old_remote_description()); -#endif - } else { - SetAsSessionError(); - } - - return ok(); - } - - bool UpdateChannels() { - RTC_DCHECK(ok()); - RTC_DCHECK(!desc_) << "ReplaceRemoteDescription hasn't been called"; - - const auto* remote_description = handler_->remote_description(); - - const cricket::SessionDescription* session_desc = - remote_description->description(); - - // Transport and Media channels will be created only when offer is set. - if (unified_plan_) { - error_ = handler_->UpdateTransceiversAndDataChannels( - cricket::CS_REMOTE, *remote_description, - handler_->local_description(), old_remote_description(), - bundle_groups_by_mid_); - } else { - // Media channels will be created only when offer is set. These may use - // new transports just created by PushdownTransportDescription. - if (type_ == SdpType::kOffer) { - // TODO(mallinath) - Handle CreateChannel failure, as new local - // description is applied. Restore back to old description. - error_ = handler_->CreateChannels(*session_desc); - } - // Remove unused channels if MediaContentDescription is rejected. 
- handler_->RemoveUnusedChannels(session_desc); - } - - return ok(); - } - - bool UpdateSessionState() { - RTC_DCHECK(ok()); - error_ = handler_->UpdateSessionState( - type_, cricket::CS_REMOTE, - handler_->remote_description()->description(), bundle_groups_by_mid_); - if (!ok()) - SetAsSessionError(); - return ok(); - } - - bool UseCandidatesInRemoteDescription() { - RTC_DCHECK(ok()); - if (handler_->local_description() && - !handler_->UseCandidatesInRemoteDescription()) { - InvalidParam(kInvalidCandidates); - } - return ok(); - } - - // Convenience getter for desc_->GetType(). - SdpType type() const { return type_; } - bool unified_plan() const { return unified_plan_; } - cricket::SessionDescription* description() { return desc_->description(); } - - const SessionDescriptionInterface* old_remote_description() const { - RTC_DCHECK(!desc_) << "Called before replacing the remote description"; - if (type_ == SdpType::kAnswer) - return replaced_remote_description_.get(); - return replaced_remote_description_ - ? replaced_remote_description_.get() - : handler_->current_remote_description(); - } - - // Returns a reference to a cached map of bundle groups ordered by mid. - // Note that this will only be valid after a successful call to - // `IsDescriptionValid`. - const std::map& - bundle_groups_by_mid() const { - RTC_DCHECK(ok()); - return bundle_groups_by_mid_; - } - - private: - // Convenience methods for populating the embedded `error_` object. - void Unsupported(std::string message) { - SetError(RTCErrorType::UNSUPPORTED_OPERATION, std::move(message)); - } - - void InvalidParam(std::string message) { - SetError(RTCErrorType::INVALID_PARAMETER, std::move(message)); - } - - void InternalError(std::string message) { - SetError(RTCErrorType::INTERNAL_ERROR, std::move(message)); - } - - void SetError(RTCErrorType type, std::string message) { - RTC_DCHECK(ok()) << "Overwriting an existing error?"; - error_ = RTCError(type, std::move(message)); - } - - // Called when the PeerConnection could be in an inconsistent state and we set - // the session error so that future calls to - // SetLocalDescription/SetRemoteDescription fail. - void SetAsSessionError() { - RTC_DCHECK(!ok()); - handler_->SetSessionError(SessionError::kContent, error_.message()); - } - - SdpOfferAnswerHandler* const handler_; - std::unique_ptr desc_; - // Keeps the replaced session description object alive while the operation - // is taking place since methods that depend on `old_remote_description()` - // for updating the state, need it. - std::unique_ptr replaced_remote_description_; - rtc::scoped_refptr observer_; - std::function operations_chain_callback_; - RTCError error_ = RTCError::OK(); - std::map bundle_groups_by_mid_; - SdpType type_; - const bool unified_plan_; -}; // Used by parameterless SetLocalDescription() to create an offer or answer. // Upon completion of creating the session description, SetLocalDescription() is // invoked with the result. 
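[editor's note] The block removed above is the RemoteDescriptionOperation helper, whose destructor guaranteed that the observer and the operations chain were always notified (SignalCompletion() followed by the chain callback) no matter which early-return path was taken; the revert goes back to notifying explicitly at each exit point of DoSetRemoteDescription(). As a rough illustration of the RAII "signal completion in the destructor" idea being dropped, here is a minimal stand-alone sketch; ScopedOperation and its callbacks are invented names, not WebRTC API.

    #include <functional>
    #include <iostream>
    #include <string>
    #include <utility>

    // However the operation ends, the observer callback and the chain callback
    // both fire exactly once, from the destructor.
    class ScopedOperation {
     public:
      ScopedOperation(std::function<void(const std::string&)> on_complete,
                      std::function<void()> chain_callback)
          : on_complete_(std::move(on_complete)),
            chain_callback_(std::move(chain_callback)) {}

      ~ScopedOperation() {
        if (on_complete_) on_complete_(error_);  // notify the observer once
        if (chain_callback_) chain_callback_();  // unblock the operations chain
      }

      bool ok() const { return error_.empty(); }
      void Fail(std::string message) { error_ = std::move(message); }

     private:
      std::string error_;
      std::function<void(const std::string&)> on_complete_;
      std::function<void()> chain_callback_;
    };

    void RunOperation(bool valid_description) {
      ScopedOperation op(
          [](const std::string& e) {
            std::cout << (e.empty() ? "completed" : e) << "\n";
          },
          [] { std::cout << "chain advanced\n"; });
      if (!valid_description) {
        op.Fail("SessionDescription is NULL.");
        return;  // early return still signals completion via the destructor
      }
      // ... apply the description ...
    }

    int main() {
      RunOperation(true);
      RunOperation(false);
    }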
@@ -1162,10 +946,8 @@ class SdpOfferAnswerHandler::LocalIceCredentialsToReplace { std::set> ice_credentials_; }; -SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc, - ConnectionContext* context) +SdpOfferAnswerHandler::SdpOfferAnswerHandler(PeerConnection* pc) : pc_(pc), - context_(context), local_streams_(StreamCollection::Create()), remote_streams_(StreamCollection::Create()), operations_chain_(rtc::OperationsChain::Create()), @@ -1184,11 +966,10 @@ SdpOfferAnswerHandler::~SdpOfferAnswerHandler() {} // Static std::unique_ptr SdpOfferAnswerHandler::Create( - PeerConnectionSdpMethods* pc, + PeerConnection* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context) { - auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc, context)); + PeerConnectionDependencies& dependencies) { + auto handler = absl::WrapUnique(new SdpOfferAnswerHandler(pc)); handler->Initialize(configuration, dependencies); return handler; } @@ -1227,10 +1008,9 @@ void SdpOfferAnswerHandler::Initialize( std::make_unique( signaling_thread(), channel_manager(), this, pc_->session_id(), pc_->dtls_enabled(), std::move(dependencies.cert_generator), - certificate, + certificate, &ssrc_generator_, [this](const rtc::scoped_refptr& certificate) { - RTC_DCHECK_RUN_ON(signaling_thread()); - transport_controller_s()->SetLocalCertificate(certificate); + transport_controller()->SetLocalCertificate(certificate); }); if (pc_->options()->disable_encryption) { @@ -1253,7 +1033,7 @@ void SdpOfferAnswerHandler::Initialize( // ================================================================== // Access to pc_ variables cricket::ChannelManager* SdpOfferAnswerHandler::channel_manager() const { - return context_->channel_manager(); + return pc_->channel_manager(); } TransceiverList* SdpOfferAnswerHandler::transceivers() { if (!pc_->rtp_manager()) { @@ -1267,19 +1047,12 @@ const TransceiverList* SdpOfferAnswerHandler::transceivers() const { } return pc_->rtp_manager()->transceivers(); } -JsepTransportController* SdpOfferAnswerHandler::transport_controller_s() { - return pc_->transport_controller_s(); +JsepTransportController* SdpOfferAnswerHandler::transport_controller() { + return pc_->transport_controller(); } -JsepTransportController* SdpOfferAnswerHandler::transport_controller_n() { - return pc_->transport_controller_n(); -} -const JsepTransportController* SdpOfferAnswerHandler::transport_controller_s() +const JsepTransportController* SdpOfferAnswerHandler::transport_controller() const { - return pc_->transport_controller_s(); -} -const JsepTransportController* SdpOfferAnswerHandler::transport_controller_n() - const { - return pc_->transport_controller_n(); + return pc_->transport_controller(); } DataChannelController* SdpOfferAnswerHandler::data_channel_controller() { return pc_->data_channel_controller(); @@ -1320,11 +1093,7 @@ void SdpOfferAnswerHandler::RestartIce() { } rtc::Thread* SdpOfferAnswerHandler::signaling_thread() const { - return context_->signaling_thread(); -} - -rtc::Thread* SdpOfferAnswerHandler::network_thread() const { - return context_->network_thread(); + return pc_->signaling_thread(); } void SdpOfferAnswerHandler::CreateOffer( @@ -1423,8 +1192,7 @@ void SdpOfferAnswerHandler::SetLocalDescription( RTC_DCHECK_RUN_ON(signaling_thread()); SetLocalDescription( rtc::make_ref_counted( - weak_ptr_factory_.GetWeakPtr(), - rtc::scoped_refptr(observer))); + weak_ptr_factory_.GetWeakPtr(), observer)); } void 
SdpOfferAnswerHandler::SetLocalDescription( @@ -1490,10 +1258,9 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(desc); - // Invalidate the [legacy] stats cache to make sure that it gets updated next - // time getStats() gets called, as updating the session description affects - // the stats. - pc_->stats()->InvalidateCache(); + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); // Take a reference to the old local description since it's used below to // compare against the new local description. When setting the new local @@ -1515,9 +1282,6 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( replaced_local_description = std::move(pending_local_description_); pending_local_description_ = std::move(desc); } - if (!initial_offerer_) { - initial_offerer_.emplace(type == SdpType::kOffer); - } // The session description to apply now must be accessed by // `local_description()`. RTC_DCHECK(local_description()); @@ -1542,12 +1306,10 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( } if (IsUnifiedPlan()) { - error = UpdateTransceiversAndDataChannels( + RTCError error = UpdateTransceiversAndDataChannels( cricket::CS_LOCAL, *local_description(), old_local_description, remote_description(), bundle_groups_by_mid); if (!error.ok()) { - RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) - << ")"; return error; } std::vector> remove_list; @@ -1563,8 +1325,7 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // information about DTLS transports. if (transceiver->mid()) { auto dtls_transport = LookupDtlsTransportByMid( - context_->network_thread(), transport_controller_s(), - *transceiver->mid()); + pc_->network_thread(), transport_controller(), *transceiver->mid()); transceiver->sender_internal()->set_transport(dtls_transport); transceiver->receiver_internal()->set_transport(dtls_transport); } @@ -1609,8 +1370,6 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( // description is applied. Restore back to old description. RTCError error = CreateChannels(*local_description()->description()); if (!error.ok()) { - RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) - << ")"; return error; } } @@ -1622,13 +1381,13 @@ RTCError SdpOfferAnswerHandler::ApplyLocalDescription( local_description()->description(), bundle_groups_by_mid); if (!error.ok()) { - RTC_LOG(LS_ERROR) << error.message() << " (" << SdpTypeToString(type) - << ")"; return error; } - // Now that we have a local description, we can push down remote candidates. - UseCandidatesInRemoteDescription(); + if (remote_description()) { + // Now that we have a local description, we can push down remote candidates. + UseCandidatesInSessionDescription(remote_description()); + } pending_ice_restarts_.clear(); if (session_error() != SessionError::kNone) { @@ -1740,11 +1499,15 @@ void SdpOfferAnswerHandler::SetRemoteDescription( // SetSessionDescriptionObserverAdapter takes care of making sure the // `observer_refptr` is invoked in a posted message. 
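[editor's note] The SetRemoteDescription() hunks just below chain the work on the operations chain but, as the restored comment explains, invoke operations_chain_callback() synchronously once DoSetRemoteDescription() returns, so an observer that immediately issues another offer/answer call sees an empty chain. The toy model below only illustrates that "complete the chained operation synchronously" behaviour; the queue-of-lambdas chain here is an invented simplification, not rtc::OperationsChain.

    #include <deque>
    #include <functional>
    #include <iostream>

    // Toy operations chain: each operation receives a callback it must call to
    // mark itself finished; only then does the next queued operation run.
    class OperationsChain {
     public:
      void ChainOperation(std::function<void(std::function<void()>)> op) {
        queue_.push_back(std::move(op));
        if (!running_) RunNext();
      }

     private:
      void RunNext() {
        if (queue_.empty()) {
          running_ = false;
          return;
        }
        running_ = true;
        auto op = std::move(queue_.front());
        queue_.pop_front();
        op([this] { RunNext(); });  // the operation signals completion explicitly
      }
      std::deque<std::function<void(std::function<void()>)>> queue_;
      bool running_ = false;
    };

    int main() {
      OperationsChain chain;
      chain.ChainOperation([](std::function<void()> done) {
        std::cout << "apply remote description\n";
        // Completing synchronously, rather than from a posted task, means a
        // follow-up operation queued by the observer is not blocked behind
        // this one.
        done();
      });
      chain.ChainOperation([](std::function<void()> done) {
        std::cout << "create answer\n";
        done();
      });
    }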
this_weak_ptr->DoSetRemoteDescription( - std::make_unique( - this_weak_ptr.get(), std::move(desc), - rtc::make_ref_counted( - this_weak_ptr, observer_refptr), - std::move(operations_chain_callback))); + std::move(desc), + rtc::make_ref_counted( + this_weak_ptr, observer_refptr)); + // For backwards-compatability reasons, we declare the operation as + // completed here (rather than in a post), so that the operation chain + // is not blocked by this operation when the observer is invoked. This + // allows the observer to trigger subsequent offer/answer operations + // synchronously if the operation chain is now empty. + operations_chain_callback(); }); } @@ -1759,12 +1522,6 @@ void SdpOfferAnswerHandler::SetRemoteDescription( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), observer, desc = std::move(desc)]( std::function operations_chain_callback) mutable { - if (!observer) { - RTC_DLOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; - operations_chain_callback(); - return; - } - // Abort early if `this_weak_ptr` is no longer valid. if (!this_weak_ptr) { observer->OnSetRemoteDescriptionComplete(RTCError( @@ -1773,80 +1530,107 @@ void SdpOfferAnswerHandler::SetRemoteDescription( operations_chain_callback(); return; } - - this_weak_ptr->DoSetRemoteDescription( - std::make_unique( - this_weak_ptr.get(), std::move(desc), std::move(observer), - std::move(operations_chain_callback))); + this_weak_ptr->DoSetRemoteDescription(std::move(desc), + std::move(observer)); + // DoSetRemoteDescription() is implemented as a synchronous operation. + // The `observer` will already have been informed that it completed, and + // we can mark this operation as complete without any loose ends. + operations_chain_callback(); }); } -RTCError SdpOfferAnswerHandler::ReplaceRemoteDescription( +RTCError SdpOfferAnswerHandler::ApplyRemoteDescription( std::unique_ptr desc, - SdpType sdp_type, - std::unique_ptr* replaced_description) { - RTC_DCHECK(replaced_description); - if (sdp_type == SdpType::kAnswer) { - *replaced_description = pending_remote_description_ - ? std::move(pending_remote_description_) - : std::move(current_remote_description_); + const std::map& + bundle_groups_by_mid) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription"); + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(desc); + + // Update stats here so that we have the most recent stats for tracks and + // streams that might be removed by updating the session description. + pc_->stats()->UpdateStats(PeerConnectionInterface::kStatsOutputLevelStandard); + + // Take a reference to the old remote description since it's used below to + // compare against the new remote description. When setting the new remote + // description, grab ownership of the replaced session description in case it + // is the same as `old_remote_description`, to keep it alive for the duration + // of the method. + const SessionDescriptionInterface* old_remote_description = + remote_description(); + std::unique_ptr replaced_remote_description; + SdpType type = desc->GetType(); + if (type == SdpType::kAnswer) { + replaced_remote_description = pending_remote_description_ + ? 
std::move(pending_remote_description_) + : std::move(current_remote_description_); current_remote_description_ = std::move(desc); pending_remote_description_ = nullptr; current_local_description_ = std::move(pending_local_description_); } else { - *replaced_description = std::move(pending_remote_description_); + replaced_remote_description = std::move(pending_remote_description_); pending_remote_description_ = std::move(desc); } - // The session description to apply now must be accessed by // `remote_description()`. - const cricket::SessionDescription* session_desc = - remote_description()->description(); + RTC_DCHECK(remote_description()); // Report statistics about any use of simulcast. ReportSimulcastApiVersion(kSimulcastVersionApplyRemoteDescription, - *session_desc); + *remote_description()->description()); - // NOTE: This will perform an Invoke() to the network thread. - return transport_controller_s()->SetRemoteDescription(sdp_type, session_desc); -} - -void SdpOfferAnswerHandler::ApplyRemoteDescription( - std::unique_ptr operation) { - TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::ApplyRemoteDescription"); - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(operation->description()); - - // Invalidate the [legacy] stats cache to make sure that it gets updated next - // time getStats() gets called, as updating the session description affects - // the stats. - pc_->stats()->InvalidateCache(); - - if (!operation->ReplaceRemoteDescriptionAndCheckEror()) - return; - - if (!operation->UpdateChannels()) - return; + RTCError error = PushdownTransportDescription(cricket::CS_REMOTE, type); + if (!error.ok()) { + return error; + } + // Transport and Media channels will be created only when offer is set. + if (IsUnifiedPlan()) { + RTCError error = UpdateTransceiversAndDataChannels( + cricket::CS_REMOTE, *remote_description(), local_description(), + old_remote_description, bundle_groups_by_mid); + if (!error.ok()) { + return error; + } + } else { + // Media channels will be created only when offer is set. These may use new + // transports just created by PushdownTransportDescription. + if (type == SdpType::kOffer) { + // TODO(mallinath) - Handle CreateChannel failure, as new local + // description is applied. Restore back to old description. + RTCError error = CreateChannels(*remote_description()->description()); + if (!error.ok()) { + return error; + } + } + // Remove unused channels if MediaContentDescription is rejected. + RemoveUnusedChannels(remote_description()->description()); + } // NOTE: Candidates allocation will be initiated only when // SetLocalDescription is called. - if (!operation->UpdateSessionState()) - return; + error = UpdateSessionState(type, cricket::CS_REMOTE, + remote_description()->description(), + bundle_groups_by_mid); + if (!error.ok()) { + return error; + } - if (!operation->UseCandidatesInRemoteDescription()) - return; + if (local_description() && + !UseCandidatesInSessionDescription(remote_description())) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidCandidates); + } - if (operation->old_remote_description()) { + if (old_remote_description) { for (const cricket::ContentInfo& content : - operation->old_remote_description()->description()->contents()) { + old_remote_description->description()->contents()) { // Check if this new SessionDescription contains new ICE ufrag and // password that indicates the remote peer requests an ICE restart. 
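[editor's note] The start of ApplyRemoteDescription() above restores the pending/current bookkeeping inline: an answer promotes the description to current_remote_description_ (and promotes the pending local description as well), while an offer or pranswer only replaces pending_remote_description_; the displaced object is kept alive as old_remote_description for the ICE-restart comparisons later in the function. Below is a condensed stand-alone model of just that slot swap, with strings standing in for SessionDescriptionInterface.

    #include <memory>
    #include <string>

    enum class SdpType { kOffer, kPrAnswer, kAnswer };

    struct RemoteDescriptionSlots {
      std::unique_ptr<std::string> pending;  // pending_remote_description_
      std::unique_ptr<std::string> current;  // current_remote_description_

      // Returns the replaced description so the caller can keep it alive while
      // it still needs to compare old vs. new state.
      std::unique_ptr<std::string> Apply(SdpType type,
                                         std::unique_ptr<std::string> desc) {
        std::unique_ptr<std::string> replaced;
        if (type == SdpType::kAnswer) {
          replaced = pending ? std::move(pending) : std::move(current);
          current = std::move(desc);
          pending = nullptr;
        } else {
          replaced = std::move(pending);
          pending = std::move(desc);
        }
        return replaced;
      }
    };

    int main() {
      // Mechanics only: the offer sits in `pending`, the answer then replaces
      // whatever was pending and becomes `current`.
      RemoteDescriptionSlots slots;
      auto old1 = slots.Apply(SdpType::kOffer,
                              std::make_unique<std::string>("remote offer"));
      auto old2 = slots.Apply(SdpType::kAnswer,
                              std::make_unique<std::string>("remote answer"));
    }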
// TODO(deadbeef): When we start storing both the current and pending // remote description, this should reset pending_ice_restarts and compare // against the current description. - if (CheckForRemoteIceRestart(operation->old_remote_description(), - remote_description(), content.name)) { - if (operation->type() == SdpType::kOffer) { + if (CheckForRemoteIceRestart(old_remote_description, remote_description(), + content.name)) { + if (type == SdpType::kOffer) { pending_ice_restarts_.insert(content.name); } } else { @@ -1858,14 +1642,14 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( // description plus any candidates added since then. We should remove // this once we're sure it won't break anything. WebRtcSessionDescriptionFactory::CopyCandidatesFromSessionDescription( - operation->old_remote_description(), content.name, - mutable_remote_description()); + old_remote_description, content.name, mutable_remote_description()); } } } - if (operation->HaveSessionError()) - return; + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } // Set the the ICE connection state to connecting since the connection may // become writable with peer reflexive candidates before any remote candidate @@ -1877,7 +1661,7 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( // actually means "gathering candidates", so cannot be be used here. if (remote_description()->GetType() != SdpType::kOffer && remote_description()->number_of_mediasections() > 0u && - pc_->ice_connection_state_internal() == + pc_->ice_connection_state() == PeerConnectionInterface::kIceConnectionNew) { pc_->SetIceConnectionState(PeerConnectionInterface::kIceConnectionChecking); } @@ -1889,10 +1673,122 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( data_channel_controller()->AllocateSctpSids(role); } - if (operation->unified_plan()) { - ApplyRemoteDescriptionUpdateTransceiverState(operation->type()); + if (IsUnifiedPlan()) { + std::vector> + now_receiving_transceivers; + std::vector> remove_list; + std::vector> added_streams; + std::vector> removed_streams; + for (const auto& transceiver_ext : transceivers()->List()) { + const auto transceiver = transceiver_ext->internal(); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, remote_description()); + if (!content) { + continue; + } + const MediaContentDescription* media_desc = content->media_description(); + RtpTransceiverDirection local_direction = + RtpTransceiverDirectionReversed(media_desc->direction()); + // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the + // RTCSessionDescription: Set the associated remote streams given + // transceiver.[[Receiver]], msids, addList, and removeList". + // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription + if (RtpTransceiverDirectionHasRecv(local_direction)) { + std::vector stream_ids; + if (!media_desc->streams().empty()) { + // The remote description has signaled the stream IDs. 
+ stream_ids = media_desc->streams()[0].stream_ids(); + } + transceivers() + ->StableState(transceiver_ext) + ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); + + RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name + << " (" << GetStreamIdsString(stream_ids) << ")."; + SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, + &added_streams, &removed_streams); + // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 + // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, + // and transceiver's current direction is neither sendrecv nor recvonly, + // process the addition of a remote track for the media description. + if (!transceiver->fired_direction() || + !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { + RTC_LOG(LS_INFO) + << "Processing the addition of a remote track for MID=" + << content->name << "."; + // Since the transceiver is passed to the user in an + // OnTrack event, we must use the proxied transceiver. + now_receiving_transceivers.push_back(transceiver_ext); + } + } + // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's + // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the + // removal of a remote track for the media description, given transceiver, + // removeList, and muteTracks. + if (!RtpTransceiverDirectionHasRecv(local_direction) && + (transceiver->fired_direction() && + RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { + ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, + &removed_streams); + } + // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. + transceiver->set_fired_direction(local_direction); + // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run + // the following steps: + if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { + // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to + // direction. + transceiver->set_current_direction(local_direction); + // 2.2.8.1.11.[3-6]: Set the transport internal slots. + if (transceiver->mid()) { + auto dtls_transport = LookupDtlsTransportByMid(pc_->network_thread(), + transport_controller(), + *transceiver->mid()); + transceiver->sender_internal()->set_transport(dtls_transport); + transceiver->receiver_internal()->set_transport(dtls_transport); + } + } + // 2.2.8.1.12: If the media description is rejected, and transceiver is + // not already stopped, stop the RTCRtpTransceiver transceiver. + if (content->rejected && !transceiver->stopped()) { + RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name + << " since the media section was rejected."; + transceiver->StopTransceiverProcedure(); + } + if (!content->rejected && + RtpTransceiverDirectionHasRecv(local_direction)) { + if (!media_desc->streams().empty() && + media_desc->streams()[0].has_ssrcs()) { + uint32_t ssrc = media_desc->streams()[0].first_ssrc(); + transceiver->receiver_internal()->SetupMediaChannel(ssrc); + } else { + transceiver->receiver_internal()->SetupUnsignaledMediaChannel(); + } + } + } + // Once all processing has finished, fire off callbacks. 
+ auto observer = pc_->Observer(); + for (const auto& transceiver : now_receiving_transceivers) { + pc_->stats()->AddTrack(transceiver->receiver()->track()); + observer->OnTrack(transceiver); + observer->OnAddTrack(transceiver->receiver(), + transceiver->receiver()->streams()); + } + for (const auto& stream : added_streams) { + observer->OnAddStream(stream); + } + for (const auto& transceiver : remove_list) { + observer->OnRemoveTrack(transceiver->receiver()); + } + for (const auto& stream : removed_streams) { + observer->OnRemoveStream(stream); + } } + const cricket::ContentInfo* audio_content = + GetFirstAudioContent(remote_description()->description()); + const cricket::ContentInfo* video_content = + GetFirstVideoContent(remote_description()->description()); const cricket::AudioContentDescription* audio_desc = GetFirstAudioContentDescription(remote_description()->description()); const cricket::VideoContentDescription* video_desc = @@ -1906,199 +1802,68 @@ void SdpOfferAnswerHandler::ApplyRemoteDescription( remote_peer_supports_msid_ = true; } - if (!operation->unified_plan()) { - PlanBUpdateSendersAndReceivers( - GetFirstAudioContent(remote_description()->description()), audio_desc, - GetFirstVideoContent(remote_description()->description()), video_desc); - } - - if (operation->type() == SdpType::kAnswer) { - if (local_ice_credentials_to_replace_->SatisfiesIceRestart( - *current_local_description_)) { - local_ice_credentials_to_replace_->ClearIceCredentials(); - } - - RemoveStoppedTransceivers(); - } - - // Consider the operation complete at this point. - operation->SignalCompletion(); - - SetRemoteDescriptionPostProcess(operation->type() == SdpType::kAnswer); -} - -void SdpOfferAnswerHandler::ApplyRemoteDescriptionUpdateTransceiverState( - SdpType sdp_type) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(IsUnifiedPlan()); - std::vector> - now_receiving_transceivers; - std::vector> remove_list; - std::vector> added_streams; - std::vector> removed_streams; - for (const auto& transceiver_ext : transceivers()->List()) { - const auto transceiver = transceiver_ext->internal(); - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, remote_description()); - if (!content) { - continue; - } - const MediaContentDescription* media_desc = content->media_description(); - RtpTransceiverDirection local_direction = - RtpTransceiverDirectionReversed(media_desc->direction()); - // Roughly the same as steps 2.2.8.6 of section 4.4.1.6 "Set the - // RTCSessionDescription: Set the associated remote streams given - // transceiver.[[Receiver]], msids, addList, and removeList". - // https://w3c.github.io/webrtc-pc/#set-the-rtcsessiondescription - if (RtpTransceiverDirectionHasRecv(local_direction)) { - std::vector stream_ids; - if (!media_desc->streams().empty()) { - // The remote description has signaled the stream IDs. 
- stream_ids = media_desc->streams()[0].stream_ids(); - } - transceivers() - ->StableState(transceiver_ext) - ->SetRemoteStreamIdsIfUnset(transceiver->receiver()->stream_ids()); - - RTC_LOG(LS_INFO) << "Processing the MSIDs for MID=" << content->name - << " (" << GetStreamIdsString(stream_ids) << ")."; - SetAssociatedRemoteStreams(transceiver->receiver_internal(), stream_ids, - &added_streams, &removed_streams); - // From the WebRTC specification, steps 2.2.8.5/6 of section 4.4.1.6 - // "Set the RTCSessionDescription: If direction is sendrecv or recvonly, - // and transceiver's current direction is neither sendrecv nor recvonly, - // process the addition of a remote track for the media description. - if (!transceiver->fired_direction() || - !RtpTransceiverDirectionHasRecv(*transceiver->fired_direction())) { - RTC_LOG(LS_INFO) << "Processing the addition of a remote track for MID=" - << content->name << "."; - // Since the transceiver is passed to the user in an - // OnTrack event, we must use the proxied transceiver. - now_receiving_transceivers.push_back(transceiver_ext); - } - } - // 2.2.8.1.9: If direction is "sendonly" or "inactive", and transceiver's - // [[FiredDirection]] slot is either "sendrecv" or "recvonly", process the - // removal of a remote track for the media description, given transceiver, - // removeList, and muteTracks. - if (!RtpTransceiverDirectionHasRecv(local_direction) && - (transceiver->fired_direction() && - RtpTransceiverDirectionHasRecv(*transceiver->fired_direction()))) { - ProcessRemovalOfRemoteTrack(transceiver_ext, &remove_list, - &removed_streams); - } - // 2.2.8.1.10: Set transceiver's [[FiredDirection]] slot to direction. - transceiver->set_fired_direction(local_direction); - // 2.2.8.1.11: If description is of type "answer" or "pranswer", then run - // the following steps: - if (sdp_type == SdpType::kPrAnswer || sdp_type == SdpType::kAnswer) { - // 2.2.8.1.11.1: Set transceiver's [[CurrentDirection]] slot to - // direction. - transceiver->set_current_direction(local_direction); - // 2.2.8.1.11.[3-6]: Set the transport internal slots. - if (transceiver->mid()) { - auto dtls_transport = LookupDtlsTransportByMid( - context_->network_thread(), transport_controller_s(), - *transceiver->mid()); - transceiver->sender_internal()->set_transport(dtls_transport); - transceiver->receiver_internal()->set_transport(dtls_transport); - } - } - // 2.2.8.1.12: If the media description is rejected, and transceiver is - // not already stopped, stop the RTCRtpTransceiver transceiver. - if (content->rejected && !transceiver->stopped()) { - RTC_LOG(LS_INFO) << "Stopping transceiver for MID=" << content->name - << " since the media section was rejected."; - transceiver->StopTransceiverProcedure(); - } - if (!content->rejected && RtpTransceiverDirectionHasRecv(local_direction)) { - if (!media_desc->streams().empty() && - media_desc->streams()[0].has_ssrcs()) { - uint32_t ssrc = media_desc->streams()[0].first_ssrc(); - transceiver->receiver_internal()->SetupMediaChannel(ssrc); - } else { - transceiver->receiver_internal()->SetupUnsignaledMediaChannel(); - } - } - } - // Once all processing has finished, fire off callbacks. 
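[editor's note] Throughout the transceiver-update code above, the remote description's direction is mirrored into a local direction with RtpTransceiverDirectionReversed() and then tested with RtpTransceiverDirectionHasRecv()/HasSend() (presumably the helpers from the pc/rtp_media_utils.h include earlier in this file). If it helps to see the relationship concretely, here is a small free-standing version of that send/recv flip; the enum and function names are simplified stand-ins, not the WebRTC declarations.

    enum class Direction { kSendRecv, kSendOnly, kRecvOnly, kInactive };

    constexpr bool HasSend(Direction d) {
      return d == Direction::kSendRecv || d == Direction::kSendOnly;
    }
    constexpr bool HasRecv(Direction d) {
      return d == Direction::kSendRecv || d == Direction::kRecvOnly;
    }

    // The local direction is the remote direction with send and recv swapped:
    // if the remote side sends, we receive, and vice versa.
    constexpr Direction Reversed(Direction d) {
      switch (d) {
        case Direction::kSendOnly: return Direction::kRecvOnly;
        case Direction::kRecvOnly: return Direction::kSendOnly;
        default: return d;  // sendrecv and inactive are symmetric
      }
    }

    static_assert(HasRecv(Reversed(Direction::kSendOnly)),
                  "remote send-only implies local receive");
    static_assert(!HasRecv(Reversed(Direction::kRecvOnly)),
                  "remote recv-only implies no local receive");
    static_assert(HasSend(Reversed(Direction::kSendRecv)),
                  "sendrecv stays sendrecv");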
- auto observer = pc_->Observer(); - for (const auto& transceiver : now_receiving_transceivers) { - pc_->stats()->AddTrack(transceiver->receiver()->track()); - observer->OnTrack(transceiver); - observer->OnAddTrack(transceiver->receiver(), - transceiver->receiver()->streams()); - } - for (const auto& stream : added_streams) { - observer->OnAddStream(stream); - } - for (const auto& transceiver : remove_list) { - observer->OnRemoveTrack(transceiver->receiver()); - } - for (const auto& stream : removed_streams) { - observer->OnRemoveStream(stream); - } -} - -void SdpOfferAnswerHandler::PlanBUpdateSendersAndReceivers( - const cricket::ContentInfo* audio_content, - const cricket::AudioContentDescription* audio_desc, - const cricket::ContentInfo* video_content, - const cricket::VideoContentDescription* video_desc) { - RTC_DCHECK_RUN_ON(signaling_thread()); - RTC_DCHECK(!IsUnifiedPlan()); - // We wait to signal new streams until we finish processing the description, // since only at that point will new streams have all their tracks. rtc::scoped_refptr new_streams(StreamCollection::Create()); - // TODO(steveanton): When removing RTP senders/receivers in response to a - // rejected media section, there is some cleanup logic that expects the - // voice/ video channel to still be set. But in this method the voice/video - // channel would have been destroyed by the SetRemoteDescription caller - // above so the cleanup that relies on them fails to run. The RemoveSenders - // calls should be moved to right before the DestroyChannel calls to fix - // this. + if (!IsUnifiedPlan()) { + // TODO(steveanton): When removing RTP senders/receivers in response to a + // rejected media section, there is some cleanup logic that expects the + // voice/ video channel to still be set. But in this method the voice/video + // channel would have been destroyed by the SetRemoteDescription caller + // above so the cleanup that relies on them fails to run. The RemoveSenders + // calls should be moved to right before the DestroyChannel calls to fix + // this. - // Find all audio rtp streams and create corresponding remote AudioTracks - // and MediaStreams. - if (audio_content) { - if (audio_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_AUDIO); - } else { - bool default_audio_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(audio_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(audio_desc), - default_audio_track_needed, audio_desc->type(), - new_streams); + // Find all audio rtp streams and create corresponding remote AudioTracks + // and MediaStreams. + if (audio_content) { + if (audio_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_AUDIO); + } else { + bool default_audio_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(audio_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(audio_desc), + default_audio_track_needed, audio_desc->type(), + new_streams); + } } - } - // Find all video rtp streams and create corresponding remote VideoTracks - // and MediaStreams. - if (video_content) { - if (video_content->rejected) { - RemoveSenders(cricket::MEDIA_TYPE_VIDEO); - } else { - bool default_video_track_needed = - !remote_peer_supports_msid_ && - RtpTransceiverDirectionHasSend(video_desc->direction()); - UpdateRemoteSendersList(GetActiveStreams(video_desc), - default_video_track_needed, video_desc->type(), - new_streams); + // Find all video rtp streams and create corresponding remote VideoTracks + // and MediaStreams. 
+ if (video_content) { + if (video_content->rejected) { + RemoveSenders(cricket::MEDIA_TYPE_VIDEO); + } else { + bool default_video_track_needed = + !remote_peer_supports_msid_ && + RtpTransceiverDirectionHasSend(video_desc->direction()); + UpdateRemoteSendersList(GetActiveStreams(video_desc), + default_video_track_needed, video_desc->type(), + new_streams); + } } + + // Iterate new_streams and notify the observer about new MediaStreams. + auto observer = pc_->Observer(); + for (size_t i = 0; i < new_streams->count(); ++i) { + MediaStreamInterface* new_stream = new_streams->at(i); + pc_->stats()->AddStream(new_stream); + observer->OnAddStream( + rtc::scoped_refptr(new_stream)); + } + + UpdateEndedRemoteMediaStreams(); } - // Iterate new_streams and notify the observer about new MediaStreams. - auto observer = pc_->Observer(); - for (size_t i = 0; i < new_streams->count(); ++i) { - MediaStreamInterface* new_stream = new_streams->at(i); - pc_->stats()->AddStream(new_stream); - observer->OnAddStream(rtc::scoped_refptr(new_stream)); + if (type == SdpType::kAnswer && + local_ice_credentials_to_replace_->SatisfiesIceRestart( + *current_local_description_)) { + local_ice_credentials_to_replace_->ClearIceCredentials(); } - UpdateEndedRemoteMediaStreams(); + return RTCError::OK(); } void SdpOfferAnswerHandler::DoSetLocalDescription( @@ -2179,7 +1944,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... - context_->network_thread()->Invoke( + pc_->network_thread()->Invoke( RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); // Make UMA notes about what was agreed to. ReportNegotiatedSdpSemantics(*local_description()); @@ -2206,7 +1971,7 @@ void SdpOfferAnswerHandler::DoSetLocalDescription( // MaybeStartGathering needs to be called after informing the observer so that // we don't signal any candidates before signaling that SetLocalDescription // completed. - transport_controller_s()->MaybeStartGathering(); + transport_controller()->MaybeStartGathering(); } void SdpOfferAnswerHandler::DoCreateOffer( @@ -2348,48 +2113,110 @@ void SdpOfferAnswerHandler::DoCreateAnswer( } void SdpOfferAnswerHandler::DoSetRemoteDescription( - std::unique_ptr operation) { + std::unique_ptr desc, + rtc::scoped_refptr observer) { RTC_DCHECK_RUN_ON(signaling_thread()); TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DoSetRemoteDescription"); - if (!operation->ok()) + if (!observer) { + RTC_LOG(LS_ERROR) << "SetRemoteDescription - observer is NULL."; return; + } - if (operation->HaveSessionError()) + if (!desc) { + observer->OnSetRemoteDescriptionComplete(RTCError( + RTCErrorType::INVALID_PARAMETER, "SessionDescription is NULL.")); return; + } - if (operation->MaybeRollback()) + // If a session error has occurred the PeerConnection is in a possibly + // inconsistent state so fail right away. + if (session_error() != SessionError::kNone) { + std::string error_message = GetSessionErrorMsg(); + RTC_LOG(LS_ERROR) << "SetRemoteDescription: " << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::INTERNAL_ERROR, std::move(error_message))); return; - - operation->ReportOfferAnswerUma(); - - // Handle remote descriptions missing a=mid lines for interop with legacy - // end points. 
- FillInMissingRemoteMids(operation->description()); - if (!operation->IsDescriptionValid()) + } + if (IsUnifiedPlan()) { + if (pc_->configuration()->enable_implicit_rollback) { + if (desc->GetType() == SdpType::kOffer && + signaling_state() == PeerConnectionInterface::kHaveLocalOffer) { + Rollback(desc->GetType()); + } + } + // Explicit rollback. + if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete(Rollback(desc->GetType())); + return; + } + } else if (desc->GetType() == SdpType::kRollback) { + observer->OnSetRemoteDescriptionComplete( + RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Rollback not supported in Plan B")); return; + } + if (desc->GetType() == SdpType::kOffer || + desc->GetType() == SdpType::kAnswer) { + // Report to UMA the format of the received offer or answer. + pc_->ReportSdpFormatReceived(*desc); + pc_->ReportSdpBundleUsage(*desc); + } - ApplyRemoteDescription(std::move(operation)); -} + // Handle remote descriptions missing a=mid lines for interop with legacy end + // points. + FillInMissingRemoteMids(desc->description()); -// Called after a DoSetRemoteDescription operation completes. -void SdpOfferAnswerHandler::SetRemoteDescriptionPostProcess(bool was_answer) { + std::map bundle_groups_by_mid = + GetBundleGroupsByMid(desc->description()); + RTCError error = ValidateSessionDescription(desc.get(), cricket::CS_REMOTE, + bundle_groups_by_mid); + if (!error.ok()) { + std::string error_message = GetSetDescriptionErrorMessage( + cricket::CS_REMOTE, desc->GetType(), error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } + + // Grab the description type before moving ownership to + // ApplyRemoteDescription, which may destroy it before returning. + const SdpType type = desc->GetType(); + + error = ApplyRemoteDescription(std::move(desc), bundle_groups_by_mid); + // `desc` may be destroyed at this point. + + if (!error.ok()) { + // If ApplyRemoteDescription fails, the PeerConnection could be in an + // inconsistent state, so act conservatively here and set the session error + // so that future calls to SetLocalDescription/SetRemoteDescription fail. + SetSessionError(SessionError::kContent, error.message()); + std::string error_message = + GetSetDescriptionErrorMessage(cricket::CS_REMOTE, type, error); + RTC_LOG(LS_ERROR) << error_message; + observer->OnSetRemoteDescriptionComplete( + RTCError(error.type(), std::move(error_message))); + return; + } RTC_DCHECK(remote_description()); - if (was_answer) { + if (type == SdpType::kAnswer) { + RemoveStoppedTransceivers(); // TODO(deadbeef): We already had to hop to the network thread for // MaybeStartGathering... - context_->network_thread()->Invoke( + pc_->network_thread()->Invoke( RTC_FROM_HERE, [this] { port_allocator()->DiscardCandidatePool(); }); // Make UMA notes about what was agreed to. ReportNegotiatedSdpSemantics(*remote_description()); } + observer->OnSetRemoteDescriptionComplete(RTCError::OK()); pc_->NoteUsageEvent(UsageEvent::SET_REMOTE_DESCRIPTION_SUCCEEDED); // Check if negotiation is needed. We must do this after informing the - // observer that SetRemoteDescription() has completed to ensure negotiation - // is not needed prior to the promise resolving. + // observer that SetRemoteDescription() has completed to ensure negotiation is + // not needed prior to the promise resolving. 
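[editor's note] The DoSetRemoteDescription() rewrite above also restores the rollback handling inline: under Unified Plan, an incoming offer while we are in have-local-offer triggers an implicit rollback (when enabled in the configuration), an explicit kRollback description is handled and answered directly, and under Plan B any rollback is rejected with "Rollback not supported in Plan B". The compact decision function below only captures that classification; the enums and the RollbackAction type are simplified stand-ins for illustration.

    enum class SdpType { kOffer, kPrAnswer, kAnswer, kRollback };
    enum class SignalingState { kStable, kHaveLocalOffer, kHaveRemoteOffer };

    enum class RollbackAction {
      kNone,              // continue applying the description normally
      kImplicitRollback,  // roll back our local offer first, then apply
      kExplicitRollback,  // the description itself is a rollback; stop here
      kRejectUnsupported  // Plan B: rollback is not supported
    };

    RollbackAction ClassifyRollback(SdpType type, bool unified_plan,
                                    bool enable_implicit_rollback,
                                    SignalingState state) {
      if (!unified_plan) {
        return type == SdpType::kRollback ? RollbackAction::kRejectUnsupported
                                          : RollbackAction::kNone;
      }
      if (type == SdpType::kRollback)
        return RollbackAction::kExplicitRollback;
      if (type == SdpType::kOffer && enable_implicit_rollback &&
          state == SignalingState::kHaveLocalOffer) {
        return RollbackAction::kImplicitRollback;
      }
      return RollbackAction::kNone;
    }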
if (IsUnifiedPlan()) { bool was_negotiation_needed = is_negotiation_needed_; UpdateNegotiationNeeded(); @@ -2411,8 +2238,8 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( RTC_DCHECK_RUN_ON(signaling_thread()); std::vector> media_streams; for (const std::string& stream_id : stream_ids) { - rtc::scoped_refptr stream( - remote_streams_->find(stream_id)); + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); if (!stream) { stream = MediaStreamProxy::Create(rtc::Thread::Current(), MediaStream::Create(stream_id)); @@ -2434,10 +2261,10 @@ void SdpOfferAnswerHandler::SetAssociatedRemoteStreams( } std::vector> previous_streams = receiver->streams(); - // SetStreams() will add/remove the receiver's track to/from the streams. - // This differs from the spec - the spec uses an "addList" and "removeList" - // to update the stream-track relationships in a later step. We do this - // earlier, changing the order of things, but the end-result is the same. + // SetStreams() will add/remove the receiver's track to/from the streams. This + // differs from the spec - the spec uses an "addList" and "removeList" to + // update the stream-track relationships in a later step. We do this earlier, + // changing the order of things, but the end-result is the same. // TODO(hbos): When we remove remote_streams(), use set_stream_ids() // instead. https://crbug.com/webrtc/9480 receiver->SetStreams(media_streams); @@ -2448,8 +2275,8 @@ bool SdpOfferAnswerHandler::AddIceCandidate( const IceCandidateInterface* ice_candidate) { const AddIceCandidateResult result = AddIceCandidateInternal(ice_candidate); NoteAddIceCandidateResult(result); - // If the return value is kAddIceCandidateFailNotReady, the candidate has - // been added, although not 'ready', but that's a success. + // If the return value is kAddIceCandidateFailNotReady, the candidate has been + // added, although not 'ready', but that's a success. return result == kAddIceCandidateSuccess || result == kAddIceCandidateFailNotReady; } @@ -2505,9 +2332,9 @@ void SdpOfferAnswerHandler::AddIceCandidate( std::function callback) { TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::AddIceCandidate"); RTC_DCHECK_RUN_ON(signaling_thread()); - // Chain this operation. If asynchronous operations are pending on the - // chain, this operation will be queued to be invoked, otherwise the - // contents of the lambda will execute immediately. + // Chain this operation. If asynchronous operations are pending on the chain, + // this operation will be queued to be invoked, otherwise the contents of the + // lambda will execute immediately. operations_chain_->ChainOperation( [this_weak_ptr = weak_ptr_factory_.GetWeakPtr(), candidate = std::move(candidate), callback = std::move(callback)]( @@ -2518,42 +2345,18 @@ void SdpOfferAnswerHandler::AddIceCandidate( : kAddIceCandidateFailClosed; NoteAddIceCandidateResult(result); operations_chain_callback(); - switch (result) { - case AddIceCandidateResult::kAddIceCandidateSuccess: - case AddIceCandidateResult::kAddIceCandidateFailNotReady: - // Success! - callback(RTCError::OK()); - break; - case AddIceCandidateResult::kAddIceCandidateFailClosed: - // Note that the spec says to just abort without resolving the - // promise in this case, but this layer must return an RTCError. 
- callback(RTCError( - RTCErrorType::INVALID_STATE, - "AddIceCandidate failed because the session was shut down")); - break; - case AddIceCandidateResult::kAddIceCandidateFailNoRemoteDescription: - // Spec: "If remoteDescription is null return a promise rejected - // with a newly created InvalidStateError." - callback(RTCError(RTCErrorType::INVALID_STATE, - "The remote description was null")); - break; - case AddIceCandidateResult::kAddIceCandidateFailNullCandidate: - // TODO(https://crbug.com/935898): Handle end-of-candidates instead - // of treating null candidate as an error. - callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Error processing ICE candidate")); - break; - case AddIceCandidateResult::kAddIceCandidateFailNotValid: - case AddIceCandidateResult::kAddIceCandidateFailInAddition: - case AddIceCandidateResult::kAddIceCandidateFailNotUsable: - // Spec: "If candidate could not be successfully added [...] Reject - // p with a newly created OperationError and abort these steps." - // UNSUPPORTED_OPERATION maps to OperationError. - callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, - "Error processing ICE candidate")); - break; - default: - RTC_DCHECK_NOTREACHED(); + if (result == kAddIceCandidateFailClosed) { + callback(RTCError( + RTCErrorType::INVALID_STATE, + "AddIceCandidate failed because the session was shut down")); + } else if (result != kAddIceCandidateSuccess && + result != kAddIceCandidateFailNotReady) { + // Fail with an error type and message consistent with Chromium. + // TODO(hbos): Fail with error types according to spec. + callback(RTCError(RTCErrorType::UNSUPPORTED_OPERATION, + "Error processing ICE candidate")); + } else { + callback(RTCError::OK()); } }); } @@ -2588,7 +2391,7 @@ bool SdpOfferAnswerHandler::RemoveIceCandidates( } // Remove the candidates from the transport controller. - RTCError error = transport_controller_s()->RemoveRemoteCandidates(candidates); + RTCError error = transport_controller()->RemoveRemoteCandidates(candidates); if (!error.ok()) { RTC_LOG(LS_ERROR) << "RemoveIceCandidates: Error when removing remote candidates: " @@ -2680,8 +2483,8 @@ RTCError SdpOfferAnswerHandler::UpdateSessionState( bundle_groups_by_mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - // If there's already a pending error then no state transition should - // happen. But all call-sites should be verifying this before calling us! + // If there's already a pending error then no state transition should happen. + // But all call-sites should be verifying this before calling us! RTC_DCHECK(session_error() == SessionError::kNone); // If this is answer-ish we're ready to let media flow. @@ -2727,10 +2530,10 @@ bool SdpOfferAnswerHandler::ShouldFireNegotiationNeededEvent( // one obsolete. if (!operations_chain_->IsEmpty()) { // Since we just suppressed an event that would have been fired, if - // negotiation is still needed by the time the chain becomes empty again, - // we must make sure to generate another event if negotiation is needed - // then. This happens when `is_negotiation_needed_` goes from false to - // true, so we set it to false until UpdateNegotiationNeeded() is called. + // negotiation is still needed by the time the chain becomes empty again, we + // must make sure to generate another event if negotiation is needed then. + // This happens when `is_negotiation_needed_` goes from false to true, so we + // set it to false until UpdateNegotiationNeeded() is called. 
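The replacement callback above collapses the per-enum-value switch into three buckets: a closed session maps to INVALID_STATE, any other failure maps to a generic UNSUPPORTED_OPERATION, and success or not-ready resolves OK. The same mapping in isolation, with placeholder enum and error types standing in for AddIceCandidateResult and RTCError:

#include <iostream>
#include <string>

enum class AddIceCandidateResult {
  kSuccess,
  kFailNotReady,   // Candidate stored for later use: still treated as success.
  kFailClosed,     // PeerConnection already closed.
  kFailNotValid,
  kFailNotUsable,
};

struct Error {
  bool ok;
  std::string message;
};

// Same three-way bucketing as the simplified callback in this hunk.
Error MapAddIceCandidateResult(AddIceCandidateResult result) {
  if (result == AddIceCandidateResult::kFailClosed) {
    return {false, "AddIceCandidate failed because the session was shut down"};
  }
  if (result != AddIceCandidateResult::kSuccess &&
      result != AddIceCandidateResult::kFailNotReady) {
    return {false, "Error processing ICE candidate"};
  }
  return {true, ""};
}

int main() {
  std::cout << MapAddIceCandidateResult(AddIceCandidateResult::kFailNotReady).ok << "\n";      // 1
  std::cout << MapAddIceCandidateResult(AddIceCandidateResult::kFailClosed).message << "\n";
}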
is_negotiation_needed_ = false; update_negotiation_needed_on_empty_chain_ = true; return false; @@ -2774,8 +2577,7 @@ bool SdpOfferAnswerHandler::AddStream(MediaStreamInterface* local_stream) { return false; } - local_streams_->AddStream( - rtc::scoped_refptr(local_stream)); + local_streams_->AddStream(local_stream); auto observer = std::make_unique( local_stream, [this](AudioTrackInterface* audio_track, @@ -2914,7 +2716,7 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { } RTC_DCHECK(transceiver->internal()->mid().has_value()); - transceiver->internal()->SetChannel(nullptr, nullptr); + DestroyTransceiverChannel(transceiver); if (signaling_state() == PeerConnectionInterface::kHaveRemoteOffer && transceiver->receiver()) { @@ -2936,7 +2738,7 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { transceiver->internal()->set_mid(state.mid()); transceiver->internal()->set_mline_index(state.mline_index()); } - RTCError e = transport_controller_s()->RollbackTransports(); + RTCError e = transport_controller()->RollbackTransports(); if (!e.ok()) { return e; } @@ -2956,8 +2758,8 @@ RTCError SdpOfferAnswerHandler::Rollback(SdpType desc_type) { pc_->Observer()->OnRemoveStream(stream); } - // The assumption is that in case of implicit rollback - // UpdateNegotiationNeeded gets called in SetRemoteDescription. + // The assumption is that in case of implicit rollback UpdateNegotiationNeeded + // gets called in SetRemoteDescription. if (desc_type == SdpType::kRollback) { UpdateNegotiationNeeded(); if (is_negotiation_needed_) { @@ -2979,9 +2781,9 @@ void SdpOfferAnswerHandler::OnOperationsChainEmpty() { if (pc_->IsClosed() || !update_negotiation_needed_on_empty_chain_) return; update_negotiation_needed_on_empty_chain_ = false; - // Firing when chain is empty is only supported in Unified Plan to avoid - // Plan B regressions. (In Plan B, onnegotiationneeded is already broken - // anyway, so firing it even more might just be confusing.) + // Firing when chain is empty is only supported in Unified Plan to avoid Plan + // B regressions. (In Plan B, onnegotiationneeded is already broken anyway, so + // firing it even more might just be confusing.) if (IsUnifiedPlan()) { UpdateNegotiationNeeded(); } @@ -3011,8 +2813,7 @@ bool SdpOfferAnswerHandler::NeedsIceRestart( absl::optional SdpOfferAnswerHandler::GetDtlsRole( const std::string& mid) const { - RTC_DCHECK_RUN_ON(signaling_thread()); - return transport_controller_s()->GetDtlsRole(mid); + return transport_controller()->GetDtlsRole(mid); } void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { @@ -3024,8 +2825,8 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { } // In the spec, a task is queued here to run the following steps - this is - // meant to ensure we do not fire onnegotiationneeded prematurely if - // multiple changes are being made at once. In order to support Chromium's + // meant to ensure we do not fire onnegotiationneeded prematurely if multiple + // changes are being made at once. In order to support Chromium's // implementation where the JavaScript representation of the PeerConnection // lives on a separate thread though, the queuing of a task is instead // performed by the PeerConnectionObserver posting from the signaling thread @@ -3048,8 +2849,8 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { // "stable", as part of the steps for setting an RTCSessionDescription. 
// If the result of checking if negotiation is needed is false, clear the - // negotiation-needed flag by setting connection's [[NegotiationNeeded]] - // slot to false, and abort these steps. + // negotiation-needed flag by setting connection's [[NegotiationNeeded]] slot + // to false, and abort these steps. bool is_negotiation_needed = CheckIfNegotiationIsNeeded(); if (!is_negotiation_needed) { is_negotiation_needed_ = false; @@ -3072,16 +2873,16 @@ void SdpOfferAnswerHandler::UpdateNegotiationNeeded() { // If connection's [[NegotiationNeeded]] slot is false, abort these steps. // Fire an event named negotiationneeded at connection. pc_->Observer()->OnRenegotiationNeeded(); - // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() - // is used in the task queued by the observer, this event will only fire - // when the chain is empty. + // Fire the spec-compliant version; when ShouldFireNegotiationNeededEvent() is + // used in the task queued by the observer, this event will only fire when the + // chain is empty. GenerateNegotiationNeededEvent(); } bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { RTC_DCHECK_RUN_ON(signaling_thread()); - // 1. If any implementation-specific negotiation is required, as described - // at the start of this section, return true. + // 1. If any implementation-specific negotiation is required, as described at + // the start of this section, return true. // 2. If connection.[[LocalIceCredentialsToReplace]] is not empty, return // true. @@ -3228,20 +3029,22 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( cricket::ContentSource source, const std::map& bundle_groups_by_mid) { - // An assumption is that a check for session error is done at a higher level. - RTC_DCHECK_EQ(SessionError::kNone, session_error()); + if (session_error() != SessionError::kNone) { + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, GetSessionErrorMsg()); + } if (!sdesc || !sdesc->description()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, kInvalidSdp); } SdpType type = sdesc->GetType(); if ((source == cricket::CS_LOCAL && !ExpectSetLocalDescription(type)) || (source == cricket::CS_REMOTE && !ExpectSetRemoteDescription(type))) { - return RTCError(RTCErrorType::INVALID_STATE, - (rtc::StringBuilder("Called in wrong state: ") - << PeerConnectionInterface::AsString(signaling_state())) - .Release()); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_STATE, + (rtc::StringBuilder("Called in wrong state: ") + << PeerConnectionInterface::AsString(signaling_state())) + .Release()); } RTCError error = ValidateMids(*sdesc->description()); @@ -3262,12 +3065,14 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Verify ice-ufrag and ice-pwd. if (!VerifyIceUfragPwdPresent(sdesc->description(), bundle_groups_by_mid)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kSdpWithoutIceUfragPwd); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kSdpWithoutIceUfragPwd); } if (!pc_->ValidateBundleSettings(sdesc->description(), bundle_groups_by_mid)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kBundleWithoutRtcpMux); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kBundleWithoutRtcpMux); } // TODO(skvlad): When the local rtcp-mux policy is Require, reject any @@ -3275,25 +3080,25 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( // Verify m-lines in Answer when compared against Offer. 
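ValidateSessionDescription in this older revision returns through LOG_AND_RETURN_ERROR, so each rejection is logged at the point where it happens before the error propagates. A hypothetical, simplified analogue of that macro (not the real WebRTC definition) illustrating the log-then-return pattern:

#include <iostream>
#include <string>

enum class ErrorType { kNone, kInvalidParameter, kInvalidState };

struct Error {
  ErrorType type = ErrorType::kNone;
  std::string message;
  bool ok() const { return type == ErrorType::kNone; }
};

// Hypothetical analogue: log the message at the rejection site, then return it.
#define LOG_AND_RETURN_ERROR(type, msg)              \
  do {                                               \
    std::cerr << __func__ << ": " << (msg) << "\n";  \
    return Error{(type), (msg)};                     \
  } while (0)

Error ValidateDescription(const std::string* sdp) {
  if (!sdp || sdp->empty()) {
    LOG_AND_RETURN_ERROR(ErrorType::kInvalidParameter, "SDP is empty.");
  }
  return Error{};
}

int main() {
  std::string sdp;  // empty -> rejected and logged
  std::cout << ValidateDescription(&sdp).ok() << "\n";  // 0
}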
if (type == SdpType::kPrAnswer || type == SdpType::kAnswer) { - // With an answer we want to compare the new answer session description - // with the offer's session description from the current negotiation. + // With an answer we want to compare the new answer session description with + // the offer's session description from the current negotiation. const cricket::SessionDescription* offer_desc = (source == cricket::CS_LOCAL) ? remote_description()->description() : local_description()->description(); if (!MediaSectionsHaveSameCount(*offer_desc, *sdesc->description()) || !MediaSectionsInSameOrder(*offer_desc, nullptr, *sdesc->description(), type)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, kMlineMismatchInAnswer); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInAnswer); } } else { // The re-offers should respect the order of m= sections in current // description. See RFC3264 Section 8 paragraph 4 for more details. - // With a re-offer, either the current local or current remote - // descriptions could be the most up to date, so we would like to check - // against both of them if they exist. It could be the case that one of - // them has a 0 port for a media section, but the other does not. This is - // important to check against in the case that we are recycling an m= - // section. + // With a re-offer, either the current local or current remote descriptions + // could be the most up to date, so we would like to check against both of + // them if they exist. It could be the case that one of them has a 0 port + // for a media section, but the other does not. This is important to check + // against in the case that we are recycling an m= section. const cricket::SessionDescription* current_desc = nullptr; const cricket::SessionDescription* secondary_current_desc = nullptr; if (local_description()) { @@ -3307,8 +3112,8 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if (current_desc && !MediaSectionsInSameOrder(*current_desc, secondary_current_desc, *sdesc->description(), type)) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - kMlineMismatchInSubsequentOffer); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + kMlineMismatchInSubsequentOffer); } } @@ -3323,10 +3128,10 @@ RTCError SdpOfferAnswerHandler::ValidateSessionDescription( if ((desc.type() == cricket::MEDIA_TYPE_AUDIO || desc.type() == cricket::MEDIA_TYPE_VIDEO) && desc.streams().size() > 1u) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "Media section has more than one track specified with a=ssrc lines " - "which is not supported with Unified Plan."); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Media section has more than one track specified " + "with a=ssrc lines which is not supported with " + "Unified Plan."); } } } @@ -3348,15 +3153,14 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( if (new_session.GetType() == SdpType::kOffer) { // If the BUNDLE policy is max-bundle, then we know for sure that all - // transports will be bundled from the start. Return an error if - // max-bundle is specified but the session description does not have a - // BUNDLE group. + // transports will be bundled from the start. Return an error if max-bundle + // is specified but the session description does not have a BUNDLE group. 
if (pc_->configuration()->bundle_policy == PeerConnectionInterface::kBundlePolicyMaxBundle && bundle_groups_by_mid.empty()) { - return RTCError( - RTCErrorType::INVALID_PARAMETER, - "max-bundle configured but session description has no BUNDLE group"); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "max-bundle configured but session description " + "has no BUNDLE group"); } } @@ -3414,7 +3218,8 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiversAndDataChannels( } else if (media_type == cricket::MEDIA_TYPE_UNSUPPORTED) { RTC_LOG(LS_INFO) << "Ignoring unsupported media type"; } else { - return RTCError(RTCErrorType::INTERNAL_ERROR, "Unknown section type."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Unknown section type."); } } @@ -3459,8 +3264,8 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (!transceiver) { // This may happen normally when media sections are rejected. - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Transceiver not found based on m-line index"); + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, + "Transceiver not found based on m-line index"); } } else { RTC_DCHECK_EQ(source, cricket::CS_REMOTE); @@ -3520,8 +3325,9 @@ SdpOfferAnswerHandler::AssociateTransceiver( } if (transceiver->media_type() != media_desc->type()) { - return RTCError(RTCErrorType::INVALID_PARAMETER, - "Transceiver type does not match media description type."); + LOG_AND_RETURN_ERROR( + RTCErrorType::INVALID_PARAMETER, + "Transceiver type does not match media description type."); } if (media_desc->HasSimulcast()) { @@ -3568,7 +3374,8 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (content.rejected) { if (channel) { - transceiver->internal()->SetChannel(nullptr, nullptr); + transceiver->internal()->SetChannel(nullptr); + DestroyChannelInterface(channel); } } else { if (!channel) { @@ -3579,15 +3386,11 @@ RTCError SdpOfferAnswerHandler::UpdateTransceiverChannel( channel = CreateVideoChannel(content.name); } if (!channel) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create channel for mid=" + content.name); + LOG_AND_RETURN_ERROR( + RTCErrorType::INTERNAL_ERROR, + "Failed to create channel for mid=" + content.name); } - // Note: this is a thread hop; the lambda will be executed - // on the network thread. 
- transceiver->internal()->SetChannel(channel, [&](const std::string& mid) { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_n()->GetRtpTransport(mid); - }); + transceiver->internal()->SetChannel(channel); } } return RTCError::OK(); @@ -3610,8 +3413,8 @@ RTCError SdpOfferAnswerHandler::UpdateDataChannel( if (!data_channel_controller()->data_channel_transport()) { RTC_LOG(LS_INFO) << "Creating data channel, mid=" << content.mid(); if (!CreateDataChannel(content.name)) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } } } @@ -3749,7 +3552,7 @@ void SdpOfferAnswerHandler::GetOptionsForOffer( session_options->rtcp_cname = rtcp_cname_; session_options->crypto_options = pc_->GetCryptoOptions(); session_options->pooled_ice_credentials = - context_->network_thread()->Invoke>( + pc_->network_thread()->Invoke>( RTC_FROM_HERE, [this] { return port_allocator()->GetPooledIceCredentials(); }); session_options->offer_extmap_allow_mixed = @@ -4004,7 +3807,7 @@ void SdpOfferAnswerHandler::GetOptionsForAnswer( session_options->rtcp_cname = rtcp_cname_; session_options->crypto_options = pc_->GetCryptoOptions(); session_options->pooled_ice_credentials = - context_->network_thread()->Invoke>( + pc_->network_thread()->Invoke>( RTC_FROM_HERE, [this] { return port_allocator()->GetPooledIceCredentials(); }); } @@ -4354,8 +4157,8 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( const std::string& sender_id = params.id; uint32_t ssrc = params.first_ssrc(); - rtc::scoped_refptr stream( - remote_streams_->find(stream_id)); + rtc::scoped_refptr stream = + remote_streams_->find(stream_id); if (!stream) { // This is a new MediaStream. Create a new remote MediaStream. stream = MediaStreamProxy::Create(rtc::Thread::Current(), @@ -4375,8 +4178,8 @@ void SdpOfferAnswerHandler::UpdateRemoteSendersList( // Add default sender if necessary. if (default_sender_needed) { - rtc::scoped_refptr default_stream( - remote_streams_->find(kDefaultStreamId)); + rtc::scoped_refptr default_stream = + remote_streams_->find(kDefaultStreamId); if (!default_stream) { // Create the new default MediaStream. default_stream = MediaStreamProxy::Create( @@ -4421,14 +4224,12 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( RTC_DCHECK_RUN_ON(signaling_thread()); RTC_DCHECK(sdesc); - // Note: This will perform an Invoke over to the worker thread, which we'll - // also do in a loop below. if (!UpdatePayloadTypeDemuxingState(source, bundle_groups_by_mid)) { // Note that this is never expected to fail, since RtpDemuxer doesn't return // an error when changing payload type demux criteria, which is all this // does. - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to update payload type demuxing state."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to update payload type demuxing state."); } // Push down the new SDP media section for each audio/video transceiver. @@ -4463,15 +4264,20 @@ RTCError SdpOfferAnswerHandler::PushdownMediaDescription( // - crbug.com/1157227 // - crbug.com/1187289 for (const auto& entry : channels) { - std::string error; - bool success = - context_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { - return (source == cricket::CS_LOCAL) - ? 
entry.first->SetLocalContent(entry.second, type, error) - : entry.first->SetRemoteContent(entry.second, type, error); + RTCError error = + pc_->worker_thread()->Invoke(RTC_FROM_HERE, [&]() { + std::string error; + bool success = + (source == cricket::CS_LOCAL) + ? entry.first->SetLocalContent(entry.second, type, &error) + : entry.first->SetRemoteContent(entry.second, type, &error); + if (!success) { + LOG_AND_RETURN_ERROR(RTCErrorType::INVALID_PARAMETER, error); + } + return RTCError::OK(); }); - if (!success) { - return RTCError(RTCErrorType::INVALID_PARAMETER, error); + if (!error.ok()) { + return error; } } @@ -4511,13 +4317,13 @@ RTCError SdpOfferAnswerHandler::PushdownTransportDescription( if (source == cricket::CS_LOCAL) { const SessionDescriptionInterface* sdesc = local_description(); RTC_DCHECK(sdesc); - return transport_controller_s()->SetLocalDescription(type, - sdesc->description()); + return transport_controller()->SetLocalDescription(type, + sdesc->description()); } else { const SessionDescriptionInterface* sdesc = remote_description(); RTC_DCHECK(sdesc); - return transport_controller_s()->SetRemoteDescription(type, - sdesc->description()); + return transport_controller()->SetRemoteDescription(type, + sdesc->description()); } } @@ -4566,14 +4372,12 @@ void SdpOfferAnswerHandler::RemoveUnusedChannels( // voice channel. const cricket::ContentInfo* video_info = cricket::GetFirstVideoContent(desc); if (!video_info || video_info->rejected) { - rtp_manager()->GetVideoTransceiver()->internal()->SetChannel(nullptr, - nullptr); + DestroyTransceiverChannel(rtp_manager()->GetVideoTransceiver()); } const cricket::ContentInfo* audio_info = cricket::GetFirstAudioContent(desc); if (!audio_info || audio_info->rejected) { - rtp_manager()->GetAudioTransceiver()->internal()->SetChannel(nullptr, - nullptr); + DestroyTransceiverChannel(rtp_manager()->GetAudioTransceiver()); } const cricket::ContentInfo* data_info = cricket::GetFirstDataContent(desc); @@ -4622,8 +4426,7 @@ void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { for (size_t i = 0; i < remote_streams_->count(); ++i) { MediaStreamInterface* stream = remote_streams_->at(i); if (stream->GetAudioTracks().empty() && stream->GetVideoTracks().empty()) { - streams_to_remove.push_back( - rtc::scoped_refptr(stream)); + streams_to_remove.push_back(stream); } } @@ -4633,9 +4436,9 @@ void SdpOfferAnswerHandler::UpdateEndedRemoteMediaStreams() { } } -bool SdpOfferAnswerHandler::UseCandidatesInRemoteDescription() { +bool SdpOfferAnswerHandler::UseCandidatesInSessionDescription( + const SessionDescriptionInterface* remote_desc) { RTC_DCHECK_RUN_ON(signaling_thread()); - auto* remote_desc = remote_description(); if (!remote_desc) { return true; } @@ -4649,7 +4452,7 @@ bool SdpOfferAnswerHandler::UseCandidatesInRemoteDescription() { if (!ReadyToUseRemoteCandidate(candidate, remote_desc, &valid)) { if (valid) { RTC_LOG(LS_INFO) - << "UseCandidatesInRemoteDescription: Not ready to use " + << "UseCandidatesInSessionDescription: Not ready to use " "candidate."; } continue; @@ -4764,14 +4567,10 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { !rtp_manager()->GetAudioTransceiver()->internal()->channel()) { cricket::VoiceChannel* voice_channel = CreateVoiceChannel(voice->name); if (!voice_channel) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create voice channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create voice channel."); } - 
rtp_manager()->GetAudioTransceiver()->internal()->SetChannel( - voice_channel, [&](const std::string& mid) { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_n()->GetRtpTransport(mid); - }); + rtp_manager()->GetAudioTransceiver()->internal()->SetChannel(voice_channel); } const cricket::ContentInfo* video = cricket::GetFirstVideoContent(&desc); @@ -4779,22 +4578,18 @@ RTCError SdpOfferAnswerHandler::CreateChannels(const SessionDescription& desc) { !rtp_manager()->GetVideoTransceiver()->internal()->channel()) { cricket::VideoChannel* video_channel = CreateVideoChannel(video->name); if (!video_channel) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create video channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create video channel."); } - rtp_manager()->GetVideoTransceiver()->internal()->SetChannel( - video_channel, [&](const std::string& mid) { - RTC_DCHECK_RUN_ON(network_thread()); - return transport_controller_n()->GetRtpTransport(mid); - }); + rtp_manager()->GetVideoTransceiver()->internal()->SetChannel(video_channel); } const cricket::ContentInfo* data = cricket::GetFirstDataContent(&desc); if (data && !data->rejected && !data_channel_controller()->data_channel_transport()) { if (!CreateDataChannel(data->name)) { - return RTCError(RTCErrorType::INTERNAL_ERROR, - "Failed to create data channel."); + LOG_AND_RETURN_ERROR(RTCErrorType::INTERNAL_ERROR, + "Failed to create data channel."); } } @@ -4809,12 +4604,15 @@ cricket::VoiceChannel* SdpOfferAnswerHandler::CreateVoiceChannel( if (!channel_manager()->media_engine()) return nullptr; + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + // TODO(bugs.webrtc.org/11992): CreateVoiceChannel internally switches to the // worker thread. We shouldn't be using the `call_ptr_` hack here but simply // be on the worker thread and use `call_` (update upstream code). return channel_manager()->CreateVoiceChannel( - pc_->call_ptr(), pc_->configuration()->media_config, mid, - pc_->SrtpRequired(), pc_->GetCryptoOptions(), audio_options()); + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, audio_options()); } // TODO(steveanton): Perhaps this should be managed by the RtpTransceiver. @@ -4825,19 +4623,23 @@ cricket::VideoChannel* SdpOfferAnswerHandler::CreateVideoChannel( if (!channel_manager()->media_engine()) return nullptr; + // NOTE: This involves a non-ideal hop (Invoke) over to the network thread. + RtpTransportInternal* rtp_transport = pc_->GetRtpTransport(mid); + // TODO(bugs.webrtc.org/11992): CreateVideoChannel internally switches to the // worker thread. We shouldn't be using the `call_ptr_` hack here but simply // be on the worker thread and use `call_` (update upstream code). 
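The NOTE about a "non-ideal hop (Invoke) over to the network thread" refers to a blocking cross-thread call that returns a value to the calling thread. A generic sketch of that pattern using only the standard library; the TaskThread class below is a toy stand-in, not rtc::Thread:

#include <condition_variable>
#include <functional>
#include <future>
#include <iostream>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>

// A toy single-threaded task runner that only exists to show the blocking
// Invoke pattern; it is not any real WebRTC API.
class TaskThread {
 public:
  TaskThread() : worker_([this] { Run(); }) {}
  ~TaskThread() {
    Post([this] { done_ = true; });
    worker_.join();
  }

  // Blocking "Invoke": run `fn` on the worker thread and wait for its result,
  // much like the GetRtpTransport hop to the network thread.
  template <typename R>
  R Invoke(std::function<R()> fn) {
    auto task = std::make_shared<std::packaged_task<R()>>(std::move(fn));
    std::future<R> result = task->get_future();
    Post([task] { (*task)(); });
    return result.get();  // Caller blocks here until the worker has run `fn`.
  }

 private:
  void Post(std::function<void()> fn) {
    {
      std::lock_guard<std::mutex> lock(mutex_);
      queue_.push(std::move(fn));
    }
    cv_.notify_one();
  }
  void Run() {
    while (!done_) {
      std::function<void()> fn;
      {
        std::unique_lock<std::mutex> lock(mutex_);
        cv_.wait(lock, [this] { return !queue_.empty(); });
        fn = std::move(queue_.front());
        queue_.pop();
      }
      fn();
    }
  }

  std::mutex mutex_;
  std::condition_variable cv_;
  std::queue<std::function<void()>> queue_;
  bool done_ = false;
  std::thread worker_;
};

int main() {
  TaskThread network_thread;
  // Fetch a value owned by the "network thread" before using it on this thread.
  int transport_id = network_thread.Invoke<int>([] { return 42; });
  std::cout << "got transport " << transport_id << "\n";
}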
return channel_manager()->CreateVideoChannel( - pc_->call_ptr(), pc_->configuration()->media_config, mid, - pc_->SrtpRequired(), pc_->GetCryptoOptions(), video_options(), + pc_->call_ptr(), pc_->configuration()->media_config, rtp_transport, + signaling_thread(), mid, pc_->SrtpRequired(), pc_->GetCryptoOptions(), + &ssrc_generator_, video_options(), video_bitrate_allocator_factory_.get()); } bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { RTC_DCHECK_RUN_ON(signaling_thread()); - if (!context_->network_thread()->Invoke(RTC_FROM_HERE, [this, &mid] { - RTC_DCHECK_RUN_ON(context_->network_thread()); + if (!pc_->network_thread()->Invoke(RTC_FROM_HERE, [this, &mid] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); return pc_->SetupDataChannelTransport_n(mid); })) { return false; @@ -4851,6 +4653,40 @@ bool SdpOfferAnswerHandler::CreateDataChannel(const std::string& mid) { return true; } +void SdpOfferAnswerHandler::DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyTransceiverChannel"); + RTC_DCHECK(transceiver); + RTC_LOG_THREAD_BLOCK_COUNT(); + + // TODO(tommi): We're currently on the signaling thread. + // There are multiple hops to the worker ahead. + // Consider if we can make the call to SetChannel() on the worker thread + // (and require that to be the context it's always called in) and also + // call DestroyChannelInterface there, since it also needs to hop to the + // worker. + + cricket::ChannelInterface* channel = transceiver->internal()->channel(); + RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(0); + if (channel) { + // TODO(tommi): VideoRtpReceiver::SetMediaChannel blocks and jumps to the + // worker thread. When being set to nullptr, there are additional + // blocking calls to e.g. ClearRecordableEncodedFrameCallback which triggers + // another blocking call or Stop() for video channels. + // The channel object also needs to be de-initialized on the network thread + // so if ownership of the channel object lies with the transceiver, we could + // un-set the channel pointer and uninitialize/destruct the channel object + // at the same time, rather than in separate steps. + transceiver->internal()->SetChannel(nullptr); + // TODO(tommi): All channel objects end up getting deleted on the + // worker thread (ideally should be on the network thread but the + // MediaChannel objects are tied to the worker. Can the teardown be done + // asynchronously across the threads rather than blocking? 
+ DestroyChannelInterface(channel); + } +} + void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { RTC_DCHECK_RUN_ON(signaling_thread()); const bool has_sctp = pc_->sctp_mid().has_value(); @@ -4858,8 +4694,8 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { if (has_sctp) data_channel_controller()->OnTransportChannelClosed(error); - context_->network_thread()->Invoke(RTC_FROM_HERE, [this] { - RTC_DCHECK_RUN_ON(context_->network_thread()); + pc_->network_thread()->Invoke(RTC_FROM_HERE, [this] { + RTC_DCHECK_RUN_ON(pc_->network_thread()); pc_->TeardownDataChannelTransport_n(); }); @@ -4867,6 +4703,45 @@ void SdpOfferAnswerHandler::DestroyDataChannelTransport(RTCError error) { pc_->ResetSctpDataMid(); } +void SdpOfferAnswerHandler::DestroyChannelInterface( + cricket::ChannelInterface* channel) { + TRACE_EVENT0("webrtc", "SdpOfferAnswerHandler::DestroyChannelInterface"); + RTC_DCHECK_RUN_ON(signaling_thread()); + RTC_DCHECK(channel_manager()->media_engine()); + RTC_DCHECK(channel); + + // TODO(bugs.webrtc.org/11992): All the below methods should be called on the + // worker thread. (they switch internally anyway). Change + // DestroyChannelInterface to either be called on the worker thread, or do + // this asynchronously on the worker. + RTC_LOG_THREAD_BLOCK_COUNT(); + + switch (channel->media_type()) { + case cricket::MEDIA_TYPE_AUDIO: + channel_manager()->DestroyVoiceChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_VIDEO: + channel_manager()->DestroyVideoChannel( + static_cast(channel)); + break; + case cricket::MEDIA_TYPE_DATA: + RTC_DCHECK_NOTREACHED() + << "Trying to destroy datachannel through DestroyChannelInterface"; + break; + default: + RTC_DCHECK_NOTREACHED() + << "Unknown media type: " << channel->media_type(); + break; + } + + // TODO(tommi): Figure out why we can get 2 blocking calls when running + // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles. + // and 3 when running + // PeerConnectionCryptoTest.CreateAnswerWithDifferentSslRoles + // RTC_DCHECK_BLOCK_COUNT_NO_MORE_THAN(1); +} + void SdpOfferAnswerHandler::DestroyAllChannels() { RTC_DCHECK_RUN_ON(signaling_thread()); if (!transceivers()) { @@ -4882,12 +4757,12 @@ void SdpOfferAnswerHandler::DestroyAllChannels() { for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_VIDEO) { - transceiver->internal()->SetChannel(nullptr, nullptr); + DestroyTransceiverChannel(transceiver); } } for (const auto& transceiver : list) { if (transceiver->media_type() == cricket::MEDIA_TYPE_AUDIO) { - transceiver->internal()->SetChannel(nullptr, nullptr); + DestroyTransceiverChannel(transceiver); } } @@ -5072,6 +4947,29 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( } } + // Gather all updates ahead of time so that all channels can be updated in a + // single Invoke; necessary due to thread guards. 
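The comment about gathering all updates so the channels can be touched in a single Invoke describes a plain gather-then-apply batching pattern: decide everything on the signaling thread first, then cross threads once. A stand-alone sketch with a hypothetical Channel type and an ordinary function call standing in for the worker-thread Invoke:

#include <iostream>
#include <string>
#include <utility>
#include <vector>

// Hypothetical channel: only what the sketch needs.
struct Channel {
  std::string mid;
  bool demux_enabled = false;
};

// Runs on the "signaling thread": decide what to change, but do not touch the
// channels yet.
std::vector<std::pair<Channel*, bool>> GatherUpdates(std::vector<Channel>& channels) {
  std::vector<std::pair<Channel*, bool>> updates;
  for (Channel& ch : channels) {
    bool enable = ch.mid.empty();  // Placeholder policy: no MID -> demux by payload type.
    updates.emplace_back(&ch, enable);
  }
  return updates;
}

// Stands in for the single worker-thread Invoke: apply everything in one go.
bool ApplyUpdates(const std::vector<std::pair<Channel*, bool>>& updates) {
  for (const auto& [channel, enable] : updates) {
    channel->demux_enabled = enable;
  }
  return true;
}

int main() {
  std::vector<Channel> channels = {{"audio0"}, {""}, {"video0"}};
  auto updates = GatherUpdates(channels);  // one pass to decide
  ApplyUpdates(updates);                   // one "hop" to apply
  for (const Channel& ch : channels) {
    std::cout << (ch.mid.empty() ? "<none>" : ch.mid) << ": " << ch.demux_enabled << "\n";
  }
}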
+ std::vector> + channels_to_update; + for (const auto& transceiver : transceivers()->ListInternal()) { + cricket::ChannelInterface* channel = transceiver->channel(); + const ContentInfo* content = + FindMediaSectionForTransceiver(transceiver, sdesc); + if (!channel || !content) { + continue; + } + RtpTransceiverDirection local_direction = + content->media_description()->direction(); + if (source == cricket::CS_REMOTE) { + local_direction = RtpTransceiverDirectionReversed(local_direction); + } + channels_to_update.emplace_back(local_direction, transceiver->channel()); + } + + if (channels_to_update.empty()) { + return true; + } + // In Unified Plan, payload type demuxing is useful for legacy endpoints that // don't support the MID header extension, but it can also cause incorrrect // forwarding of packets when going from one m= section to multiple m= @@ -5098,71 +4996,44 @@ bool SdpOfferAnswerHandler::UpdatePayloadTypeDemuxingState( bundled_pt_demux_allowed_video = true; } - // Gather all updates ahead of time so that all channels can be updated in a - // single Invoke; necessary due to thread guards. - std::vector> channels_to_update; - for (const auto& transceiver : transceivers()->ListInternal()) { - cricket::ChannelInterface* channel = transceiver->channel(); - const ContentInfo* content = - FindMediaSectionForTransceiver(transceiver, sdesc); - if (!channel || !content) { - continue; - } - - const cricket::MediaType media_type = channel->media_type(); - if (media_type != cricket::MediaType::MEDIA_TYPE_AUDIO && - media_type != cricket::MediaType::MEDIA_TYPE_VIDEO) { - continue; - } - - RtpTransceiverDirection local_direction = - content->media_description()->direction(); - if (source == cricket::CS_REMOTE) { - local_direction = RtpTransceiverDirectionReversed(local_direction); - } - - auto bundle_it = bundle_groups_by_mid.find(channel->mid()); - const cricket::ContentGroup* bundle_group = - bundle_it != bundle_groups_by_mid.end() ? bundle_it->second : nullptr; - bool pt_demux_enabled = RtpTransceiverDirectionHasRecv(local_direction); - if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { - pt_demux_enabled &= - !bundle_group || - (bundled_pt_demux_allowed_audio && - payload_types_by_bundle[bundle_group].pt_demuxing_possible_audio); - if (pt_demux_enabled) { - pt_demuxing_has_been_used_audio_ = true; - } - } else { - RTC_DCHECK_EQ(media_type, cricket::MediaType::MEDIA_TYPE_VIDEO); - pt_demux_enabled &= - !bundle_group || - (bundled_pt_demux_allowed_video && - payload_types_by_bundle[bundle_group].pt_demuxing_possible_video); - if (pt_demux_enabled) { - pt_demuxing_has_been_used_video_ = true; - } - } - - channels_to_update.emplace_back(pt_demux_enabled, transceiver->channel()); - } - - if (channels_to_update.empty()) { - return true; - } - - // TODO(bugs.webrtc.org/11993): This Invoke() will also invoke on the network - // thread for every demuxer sink that needs to be updated. The demuxer state - // needs to be fully (and only) managed on the network thread and once that's - // the case, there's no need to stop by on the worker. Ideally we could also - // do this without blocking. 
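Inside the worker-thread lambda that this older code path uses (see the hunk that follows), whether payload-type demuxing stays enabled for a channel reduces to a small boolean condition: the channel must receive media, and if it is bundled, the whole BUNDLE group must still be demuxable by payload type. Restated with placeholder inputs, not the real types:

#include <iostream>

// Placeholder inputs for one channel in one BUNDLE group.
struct DemuxInputs {
  bool has_recv_direction;      // RtpTransceiverDirectionHasRecv(local_direction)
  bool in_bundle_group;         // the channel's mid is part of a BUNDLE group
  bool bundle_allows_pt_demux;  // no payload-type collisions inside that group
};

// Payload-type demuxing may stay enabled only if we receive on this channel
// and, when bundled, the group as a whole can still be demuxed by payload type.
bool PayloadTypeDemuxEnabled(const DemuxInputs& in) {
  return in.has_recv_direction &&
         (!in.in_bundle_group || in.bundle_allows_pt_demux);
}

int main() {
  std::cout << PayloadTypeDemuxEnabled({true, true, false}) << "\n";   // 0: bundled, collision
  std::cout << PayloadTypeDemuxEnabled({true, false, false}) << "\n";  // 1: not bundled
  std::cout << PayloadTypeDemuxEnabled({false, false, true}) << "\n";  // 0: send-only
}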
- return context_->worker_thread()->Invoke( - RTC_FROM_HERE, [&channels_to_update]() { + return pc_->worker_thread()->Invoke( + RTC_FROM_HERE, + [&channels_to_update, &bundle_groups_by_mid, &payload_types_by_bundle, + bundled_pt_demux_allowed_audio, bundled_pt_demux_allowed_video, + pt_demuxing_has_been_used_audio = &pt_demuxing_has_been_used_audio_, + pt_demuxing_has_been_used_video = &pt_demuxing_has_been_used_video_]() { for (const auto& it : channels_to_update) { - if (!it.second->SetPayloadTypeDemuxingEnabled(it.first)) { - // Note that the state has already been irrevocably changed at this - // point. Is it useful to stop the loop? - return false; + RtpTransceiverDirection local_direction = it.first; + cricket::ChannelInterface* channel = it.second; + cricket::MediaType media_type = channel->media_type(); + auto bundle_it = bundle_groups_by_mid.find(channel->content_name()); + const cricket::ContentGroup* bundle_group = + bundle_it != bundle_groups_by_mid.end() ? bundle_it->second + : nullptr; + if (media_type == cricket::MediaType::MEDIA_TYPE_AUDIO) { + bool pt_demux_enabled = + RtpTransceiverDirectionHasRecv(local_direction) && + (!bundle_group || (bundled_pt_demux_allowed_audio && + payload_types_by_bundle[bundle_group] + .pt_demuxing_possible_audio)); + if (pt_demux_enabled) { + *pt_demuxing_has_been_used_audio = true; + } + if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) { + return false; + } + } else if (media_type == cricket::MediaType::MEDIA_TYPE_VIDEO) { + bool pt_demux_enabled = + RtpTransceiverDirectionHasRecv(local_direction) && + (!bundle_group || (bundled_pt_demux_allowed_video && + payload_types_by_bundle[bundle_group] + .pt_demuxing_possible_video)); + if (pt_demux_enabled) { + *pt_demuxing_has_been_used_video = true; + } + if (!channel->SetPayloadTypeDemuxingEnabled(pt_demux_enabled)) { + return false; + } } } return true; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h index 67ead4724..6c116f660 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_offer_answer.h @@ -47,12 +47,12 @@ #include "pc/channel.h" #include "pc/channel_interface.h" #include "pc/channel_manager.h" -#include "pc/connection_context.h" #include "pc/data_channel_controller.h" #include "pc/ice_server_parsing.h" #include "pc/jsep_transport_controller.h" #include "pc/media_session.h" #include "pc/media_stream_observer.h" +#include "pc/peer_connection_factory.h" #include "pc/peer_connection_internal.h" #include "pc/rtc_stats_collector.h" #include "pc/rtp_receiver.h" @@ -94,10 +94,9 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // Creates an SdpOfferAnswerHandler. Modifies dependencies. 
static std::unique_ptr Create( - PeerConnectionSdpMethods* pc, + PeerConnection* pc, const PeerConnectionInterface::RTCConfiguration& configuration, - PeerConnectionDependencies& dependencies, - ConnectionContext* context); + PeerConnectionDependencies& dependencies); void ResetSessionDescFactory() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -182,16 +181,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, rtc::scoped_refptr local_streams(); rtc::scoped_refptr remote_streams(); - bool initial_offerer() { - RTC_DCHECK_RUN_ON(signaling_thread()); - if (initial_offerer_) { - return *initial_offerer_; - } - return false; - } - private: - class RemoteDescriptionOperation; class ImplicitCreateSessionDescriptionObserver; friend class ImplicitCreateSessionDescriptionObserver; @@ -214,8 +204,7 @@ class SdpOfferAnswerHandler : public SdpStateProvider, class LocalIceCredentialsToReplace; // Only called by the Create() function. - explicit SdpOfferAnswerHandler(PeerConnectionSdpMethods* pc, - ConnectionContext* context); + explicit SdpOfferAnswerHandler(PeerConnection* pc); // Called from the `Create()` function. Can only be called // once. Modifies dependencies. void Initialize( @@ -223,7 +212,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider, PeerConnectionDependencies& dependencies); rtc::Thread* signaling_thread() const; - rtc::Thread* network_thread() const; // Non-const versions of local_description()/remote_description(), for use // internally. SessionDescriptionInterface* mutable_local_description() @@ -243,24 +231,10 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr desc, const std::map& bundle_groups_by_mid); - void ApplyRemoteDescription( - std::unique_ptr operation); - - RTCError ReplaceRemoteDescription( + RTCError ApplyRemoteDescription( std::unique_ptr desc, - SdpType sdp_type, - std::unique_ptr* replaced_description) - RTC_RUN_ON(signaling_thread()); - - // Part of ApplyRemoteDescription steps specific to Unified Plan. - void ApplyRemoteDescriptionUpdateTransceiverState(SdpType sdp_type); - - // Part of ApplyRemoteDescription steps specific to plan b. - void PlanBUpdateSendersAndReceivers( - const cricket::ContentInfo* audio_content, - const cricket::AudioContentDescription* audio_desc, - const cricket::ContentInfo* video_content, - const cricket::VideoContentDescription* video_desc); + const std::map& + bundle_groups_by_mid); // Implementation of the offer/answer exchange operations. These are chained // onto the `operations_chain_` when the public CreateOffer(), CreateAnswer(), @@ -275,11 +249,8 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr desc, rtc::scoped_refptr observer); void DoSetRemoteDescription( - std::unique_ptr operation); - - // Called after a DoSetRemoteDescription operation completes. - void SetRemoteDescriptionPostProcess(bool was_answer) - RTC_RUN_ON(signaling_thread()); + std::unique_ptr desc, + rtc::scoped_refptr observer); // Update the state, signaling if necessary. void ChangeSignalingState( @@ -511,11 +482,9 @@ class SdpOfferAnswerHandler : public SdpStateProvider, // exist. void UpdateEndedRemoteMediaStreams(); - // Uses all remote candidates in the currently set remote_description(). - // If no remote description is currently set (nullptr), the return value will - // be true. If `UseCandidate()` fails for any candidate in the remote - // description, the return value will be false. - bool UseCandidatesInRemoteDescription(); + // Uses all remote candidates in `remote_desc` in this session. 
+ bool UseCandidatesInSessionDescription( + const SessionDescriptionInterface* remote_desc); // Uses `candidate` in this session. bool UseCandidate(const IceCandidateInterface* candidate); // Returns true if we are ready to push down the remote candidate. @@ -544,12 +513,22 @@ class SdpOfferAnswerHandler : public SdpStateProvider, cricket::VideoChannel* CreateVideoChannel(const std::string& mid); bool CreateDataChannel(const std::string& mid); + // Destroys and clears the BaseChannel associated with the given transceiver, + // if such channel is set. + void DestroyTransceiverChannel( + rtc::scoped_refptr> + transceiver); + // Destroys the RTP data channel transport and/or the SCTP data channel // transport and clears it. void DestroyDataChannelTransport(RTCError error); + // Destroys the given ChannelInterface. + // The channel cannot be accessed after this method is called. + void DestroyChannelInterface(cricket::ChannelInterface* channel); // Generates MediaDescriptionOptions for the `session_opts` based on existing // local description or remote description. + void GenerateMediaDescriptionOptions( const SessionDescriptionInterface* session_desc, RtpTransceiverDirection audio_direction, @@ -596,20 +575,13 @@ class SdpOfferAnswerHandler : public SdpStateProvider, const cricket::PortAllocator* port_allocator() const; RtpTransmissionManager* rtp_manager(); const RtpTransmissionManager* rtp_manager() const; - JsepTransportController* transport_controller_s() - RTC_RUN_ON(signaling_thread()); - const JsepTransportController* transport_controller_s() const - RTC_RUN_ON(signaling_thread()); - JsepTransportController* transport_controller_n() - RTC_RUN_ON(network_thread()); - const JsepTransportController* transport_controller_n() const - RTC_RUN_ON(network_thread()); + JsepTransportController* transport_controller(); + const JsepTransportController* transport_controller() const; // =================================================================== const cricket::AudioOptions& audio_options() { return audio_options_; } const cricket::VideoOptions& video_options() { return video_options_; } - PeerConnectionSdpMethods* const pc_; - ConnectionContext* const context_; + PeerConnection* const pc_; std::unique_ptr webrtc_session_desc_factory_ RTC_GUARDED_BY(signaling_thread()); @@ -690,6 +662,14 @@ class SdpOfferAnswerHandler : public SdpStateProvider, cricket::AudioOptions audio_options_ RTC_GUARDED_BY(signaling_thread()); cricket::VideoOptions video_options_ RTC_GUARDED_BY(signaling_thread()); + // This object should be used to generate any SSRC that is not explicitly + // specified by the user (or by the remote party). + // The generator is not used directly, instead it is passed on to the + // channel manager and the session description factory. + // TODO(bugs.webrtc.org/12666): This variable is used from both the signaling + // and worker threads. See if we can't restrict usage to a single thread. + rtc::UniqueRandomIdGenerator ssrc_generator_; + // A video bitrate allocator factory. // This can be injected using the PeerConnectionDependencies, // or else the CreateBuiltinVideoBitrateAllocatorFactory() will be called. @@ -698,10 +678,6 @@ class SdpOfferAnswerHandler : public SdpStateProvider, std::unique_ptr video_bitrate_allocator_factory_ RTC_GUARDED_BY(signaling_thread()); - // Whether we are the initial offerer on the association. This - // determines the SSL role. 
- absl::optional initial_offerer_ RTC_GUARDED_BY(signaling_thread()); - rtc::WeakPtrFactory weak_ptr_factory_ RTC_GUARDED_BY(signaling_thread()); }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc index 6d405d07a..3f46db5bb 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_serializer.cc @@ -10,9 +10,9 @@ #include "pc/sdp_serializer.h" +#include #include #include -#include #include #include @@ -101,33 +101,33 @@ rtc::StringBuilder& operator<<(rtc::StringBuilder& builder, // sc-id = [sc-id-paused] rid-id // rid-id = 1*(alpha-numeric / "-" / "_") ; see: I-D.ietf-mmusic-rid RTCErrorOr ParseSimulcastLayerList(const std::string& str) { - std::vector tokens = - rtc::split(str, kDelimiterSemicolonChar); + std::vector tokens; + rtc::split(str, kDelimiterSemicolonChar, &tokens); if (tokens.empty()) { return ParseError("Layer list cannot be empty."); } SimulcastLayerList result; - for (const absl::string_view& token : tokens) { + for (const std::string& token : tokens) { if (token.empty()) { return ParseError("Simulcast alternative layer list is empty."); } - std::vector rid_tokens = - rtc::split(token, kDelimiterCommaChar); + std::vector rid_tokens; + rtc::split(token, kDelimiterCommaChar, &rid_tokens); if (rid_tokens.empty()) { return ParseError("Simulcast alternative layer list is malformed."); } std::vector layers; - for (const absl::string_view& rid_token : rid_tokens) { + for (const std::string& rid_token : rid_tokens) { if (rid_token.empty() || rid_token == kSimulcastPausedStream) { return ParseError("Rid must not be empty."); } bool paused = rid_token[0] == kSimulcastPausedStreamChar; - absl::string_view rid = paused ? rid_token.substr(1) : rid_token; + std::string rid = paused ? 
rid_token.substr(1) : rid_token; layers.push_back(SimulcastLayer(rid, paused)); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc index ca61f0013..b750b04a4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/sdp_utils.cc @@ -10,8 +10,8 @@ #include "pc/sdp_utils.h" +#include #include -#include #include "api/jsep_session_description.h" #include "rtc_base/checks.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc index c1feedbf5..7b878cbf7 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.cc @@ -10,10 +10,11 @@ #include "pc/session_description.h" +#include + #include "absl/algorithm/container.h" #include "absl/memory/memory.h" #include "rtc_base/checks.h" -#include "rtc_base/strings/string_builder.h" namespace cricket { namespace { diff --git a/TMessagesProj/jni/voip/webrtc/pc/session_description.h b/TMessagesProj/jni/voip/webrtc/pc/session_description.h index a68c312f4..ee7a91c84 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/session_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/session_description.h @@ -15,9 +15,9 @@ #include #include +#include #include #include -#include #include #include diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc index ec8741567..0ae3e2074 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.cc @@ -14,7 +14,7 @@ namespace cricket { -SimulcastLayer::SimulcastLayer(absl::string_view rid, bool is_paused) +SimulcastLayer::SimulcastLayer(const std::string& rid, bool is_paused) : rid{rid}, is_paused{is_paused} { RTC_DCHECK(!rid.empty()); } diff --git a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h index 7caf164de..f7ae28837 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h +++ b/TMessagesProj/jni/voip/webrtc/pc/simulcast_description.h @@ -16,8 +16,6 @@ #include #include -#include "absl/strings/string_view.h" - namespace cricket { // Describes a Simulcast Layer. @@ -25,7 +23,7 @@ namespace cricket { // See also: https://tools.ietf.org/html/draft-ietf-mmusic-rid-15 for // an explanation about rids. 
struct SimulcastLayer final { - SimulcastLayer(absl::string_view rid, bool is_paused); + SimulcastLayer(const std::string& rid, bool is_paused); SimulcastLayer(const SimulcastLayer& other) = default; SimulcastLayer& operator=(const SimulcastLayer& other) = default; diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc index 9d7f39a7a..c48dfdb4c 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.cc @@ -11,8 +11,8 @@ #include "pc/srtp_filter.h" #include - -#include +#include +#include #include "absl/strings/match.h" #include "rtc_base/logging.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h index e2848a109..f1e164936 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_filter.h @@ -27,6 +27,7 @@ #include "api/sequence_checker.h" #include "pc/session_description.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" // Forward declaration to avoid pulling in libsrtp headers here diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc index a81f2415a..76ab3a8fe 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.cc @@ -10,18 +10,12 @@ #include "pc/srtp_session.h" -#include - #include -#include #include "absl/base/attributes.h" -#include "absl/base/const_init.h" #include "api/array_view.h" #include "modules/rtp_rtcp/source/rtp_util.h" #include "pc/external_hmac.h" -#include "rtc_base/byte_order.h" -#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h index d88eaae31..89fab0daf 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h +++ b/TMessagesProj/jni/voip/webrtc/pc/srtp_session.h @@ -11,13 +11,11 @@ #ifndef PC_SRTP_SESSION_H_ #define PC_SRTP_SESSION_H_ -#include -#include - #include #include "api/scoped_refptr.h" #include "api/sequence_checker.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" // Forward declaration to avoid pulling in libsrtp headers here @@ -37,9 +35,6 @@ class SrtpSession { SrtpSession(); ~SrtpSession(); - SrtpSession(const SrtpSession&) = delete; - SrtpSession& operator=(const SrtpSession&) = delete; - // Configures the session for sending data using the specified // cipher-suite and key. Receiving must be done by a separate session. 
bool SetSend(int cs, @@ -146,6 +141,7 @@ class SrtpSession { bool external_auth_enabled_ = false; int decryption_failure_count_ = 0; bool dump_plain_rtp_ = false; + RTC_DISALLOW_COPY_AND_ASSIGN(SrtpSession); }; } // namespace cricket diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc index 6b1cda3ad..dc172599d 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.cc @@ -13,19 +13,18 @@ #include #include -#include #include -#include +#include #include #include #include -#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/audio_codecs/audio_encoder.h" #include "api/candidate.h" #include "api/data_channel_interface.h" #include "api/media_types.h" +#include "api/rtp_receiver_interface.h" #include "api/rtp_sender_interface.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" @@ -40,8 +39,6 @@ #include "pc/channel_interface.h" #include "pc/data_channel_utils.h" #include "pc/rtp_receiver.h" -#include "pc/rtp_receiver_proxy.h" -#include "pc/rtp_sender_proxy.h" #include "pc/rtp_transceiver.h" #include "pc/transport_stats.h" #include "rtc_base/checks.h" @@ -889,8 +886,8 @@ StatsCollector::SessionStats StatsCollector::ExtractSessionInfo_n( for (auto& transceiver : transceivers) { cricket::ChannelInterface* channel = transceiver->internal()->channel(); if (channel) { - stats.transport_names_by_mid[channel->mid()] = - std::string(channel->transport_name()); + stats.transport_names_by_mid[channel->content_name()] = + channel->transport_name(); } } @@ -1185,7 +1182,7 @@ void StatsCollector::ExtractMediaInfo( } std::unique_ptr gatherer = CreateMediaChannelStatsGatherer(channel->media_channel()); - gatherer->mid = channel->mid(); + gatherer->mid = channel->content_name(); gatherer->transport_name = transport_names_by_mid.at(gatherer->mid); for (const auto& sender : transceiver->internal()->senders()) { @@ -1212,7 +1209,7 @@ void StatsCollector::ExtractMediaInfo( if (!channel) continue; MediaChannelStatsGatherer* gatherer = gatherers[i++].get(); - RTC_DCHECK_EQ(gatherer->mid, channel->mid()); + RTC_DCHECK_EQ(gatherer->mid, channel->content_name()); for (const auto& receiver : transceiver->internal()->receivers()) { gatherer->receiver_track_id_by_ssrc.insert(std::make_pair( @@ -1370,8 +1367,7 @@ void StatsCollector::UpdateTrackReports() { } } -void StatsCollector::InvalidateCache() { - RTC_DCHECK_RUN_ON(pc_->signaling_thread()); +void StatsCollector::ClearUpdateStatsCacheForTest() { cache_timestamp_ms_ = 0; } diff --git a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h index 751a2de09..2fd5d9d8f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stats_collector.h @@ -21,24 +21,18 @@ #include #include #include -#include #include #include -#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/peer_connection_interface.h" -#include "api/scoped_refptr.h" #include "api/stats_types.h" #include "p2p/base/connection_info.h" #include "p2p/base/port.h" #include "pc/peer_connection_internal.h" -#include "pc/rtp_transceiver.h" #include "pc/stats_collector_interface.h" -#include "pc/transport_stats.h" #include "rtc_base/network_constants.h" #include "rtc_base/ssl_certificate.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -102,11 +96,10 @@ class StatsCollector : public 
StatsCollectorInterface { // A track is invalid if there is no report data for it. bool IsValidTrack(const std::string& track_id); - // Reset the internal cache timestamp to force an update of the stats next - // time UpdateStats() is called. This call needs to be made on the signaling - // thread and should be made every time configuration changes that affect - // stats have been made. - void InvalidateCache(); + // Method used by the unittest to force a update of stats since UpdateStats() + // that occur less than kMinGatherStatsPeriod number of ms apart will be + // ignored. + void ClearUpdateStatsCacheForTest(); bool UseStandardBytesStats() const { return use_standard_bytes_stats_; } @@ -199,7 +192,7 @@ class StatsCollector : public StatsCollectorInterface { TrackIdMap track_ids_; // Raw pointer to the peer connection the statistics are gathered from. PeerConnectionInternal* const pc_; - int64_t cache_timestamp_ms_ RTC_GUARDED_BY(pc_->signaling_thread()) = 0; + int64_t cache_timestamp_ms_ = 0; double stats_gathering_started_; const bool use_standard_bytes_stats_; diff --git a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h index ed85947bf..9bbf957ef 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h +++ b/TMessagesProj/jni/voip/webrtc/pc/stream_collection.h @@ -12,7 +12,6 @@ #define PC_STREAM_COLLECTION_H_ #include -#include #include #include "api/peer_connection_interface.h" @@ -67,13 +66,13 @@ class StreamCollection : public StreamCollectionInterface { return NULL; } - void AddStream(rtc::scoped_refptr stream) { + void AddStream(MediaStreamInterface* stream) { for (StreamVector::iterator it = media_streams_.begin(); it != media_streams_.end(); ++it) { if ((*it)->id().compare(stream->id()) == 0) return; } - media_streams_.push_back(std::move(stream)); + media_streams_.push_back(stream); } void RemoveStream(MediaStreamInterface* remove_stream) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc index 12670dda2..e68f2f7a5 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/track_media_info_map.cc @@ -12,7 +12,7 @@ #include #include -#include +#include #include #include "api/media_types.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc index 139c49863..235c9af03 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/transceiver_list.cc @@ -10,8 +10,6 @@ #include "pc/transceiver_list.h" -#include - #include "rtc_base/checks.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h index 1236a786d..e88927aaf 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/used_ids.h +++ b/TMessagesProj/jni/voip/webrtc/pc/used_ids.h @@ -52,7 +52,8 @@ class UsedIds { if (IsIdUsed(original_id)) { new_id = FindUnusedId(); - // Duplicate id found. Reassign from the original id to the new. + RTC_LOG(LS_WARNING) << "Duplicate id found. 
Reassigning from " + << original_id << " to " << new_id; idstruct->id = new_id; } SetIdUsed(new_id); diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc index 7659d7c2f..8db4d9f02 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.cc @@ -12,16 +12,15 @@ #include -#include #include #include #include "api/video/recordable_encoded_frame.h" +#include "api/video_track_source_proxy_factory.h" #include "pc/video_track.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" -#include "rtc_base/ref_counted_object.h" namespace webrtc { @@ -42,15 +41,20 @@ VideoRtpReceiver::VideoRtpReceiver( track_(VideoTrackProxyWithInternal::Create( rtc::Thread::Current(), worker_thread, - VideoTrack::Create(receiver_id, source_, worker_thread))), + VideoTrack::Create(receiver_id, + CreateVideoTrackSourceProxy(rtc::Thread::Current(), + worker_thread, + source_), + worker_thread))), attachment_id_(GenerateUniqueId()) { RTC_DCHECK(worker_thread_); SetStreams(streams); - RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing); + RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kLive); } VideoRtpReceiver::~VideoRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + RTC_DCHECK(stopped_); RTC_DCHECK(!media_channel_); } @@ -110,66 +114,87 @@ void VideoRtpReceiver::SetDepacketizerToDecoderFrameTransformer( void VideoRtpReceiver::Stop() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - source_->SetState(MediaSourceInterface::kEnded); + // TODO(deadbeef): Need to do more here to fully stop receiving packets. + + if (!stopped_) { + source_->SetState(MediaSourceInterface::kEnded); + stopped_ = true; + } + + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + if (media_channel_) { + SetSink(nullptr); + SetMediaChannel_w(nullptr); + } + source_->ClearCallback(); + }); +} + +void VideoRtpReceiver::StopAndEndTrack() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + Stop(); track_->internal()->set_ended(); } -void VideoRtpReceiver::SetSourceEnded() { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - source_->SetState(MediaSourceInterface::kEnded); -} - -// RTC_RUN_ON(&signaling_thread_checker_) void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { - MediaSourceInterface::SourceState state = source_->state(); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + + // `stopped_` will be `true` on construction. RestartMediaChannel + // can in this case function like "ensure started" and flip `stopped_` + // to false. + // TODO(tommi): Can we restart the media channel without blocking? - worker_thread_->Invoke(RTC_FROM_HERE, [&] { + bool ok = worker_thread_->Invoke(RTC_FROM_HERE, [&, was_stopped = + stopped_] { RTC_DCHECK_RUN_ON(worker_thread_); - RestartMediaChannel_w(std::move(ssrc), state); - }); - source_->SetState(MediaSourceInterface::kLive); -} - -// RTC_RUN_ON(worker_thread_) -void VideoRtpReceiver::RestartMediaChannel_w( - absl::optional ssrc, - MediaSourceInterface::SourceState state) { - if (!media_channel_) { - return; // Can't restart. - } - - const bool encoded_sink_enabled = saved_encoded_sink_enabled_; - - if (state != MediaSourceInterface::kInitializing) { - if (ssrc == ssrc_) - return; - - // Disconnect from a previous ssrc. - SetSink(nullptr); - - if (encoded_sink_enabled) - SetEncodedSinkEnabled(false); - } - - // Set up the new ssrc. 
- ssrc_ = std::move(ssrc); - SetSink(source_->sink()); - if (encoded_sink_enabled) { - SetEncodedSinkEnabled(true); - } - - if (frame_transformer_ && media_channel_) { - media_channel_->SetDepacketizerToDecoderFrameTransformer( - ssrc_.value_or(0), frame_transformer_); - } - - if (media_channel_ && ssrc_) { - if (frame_decryptor_) { - media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); + if (!media_channel_) { + // Ignore further negotiations if we've already been stopped and don't + // have an associated media channel. + RTC_DCHECK(was_stopped); + return false; // Can't restart. } - media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); - } + if (!was_stopped && ssrc_ == ssrc) { + // Already running with that ssrc. + return true; + } + + // Disconnect from the previous ssrc. + if (!was_stopped) { + SetSink(nullptr); + } + + bool encoded_sink_enabled = saved_encoded_sink_enabled_; + SetEncodedSinkEnabled(false); + + // Set up the new ssrc. + ssrc_ = std::move(ssrc); + SetSink(source_->sink()); + if (encoded_sink_enabled) { + SetEncodedSinkEnabled(true); + } + + if (frame_transformer_ && media_channel_) { + media_channel_->SetDepacketizerToDecoderFrameTransformer( + ssrc_.value_or(0), frame_transformer_); + } + + if (media_channel_ && ssrc_) { + if (frame_decryptor_) { + media_channel_->SetFrameDecryptor(*ssrc_, frame_decryptor_); + } + + media_channel_->SetBaseMinimumPlayoutDelayMs(*ssrc_, delay_.GetMs()); + } + + return true; + }); + + if (!ok) + return; + + stopped_ = false; } // RTC_RUN_ON(worker_thread_) @@ -259,11 +284,17 @@ void VideoRtpReceiver::SetJitterBufferMinimumDelay( } void VideoRtpReceiver::SetMediaChannel(cricket::MediaChannel* media_channel) { - RTC_DCHECK_RUN_ON(worker_thread_); + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(media_channel == nullptr || media_channel->media_type() == media_type()); - SetMediaChannel_w(media_channel); + if (stopped_ && !media_channel) + return; + + worker_thread_->Invoke(RTC_FROM_HERE, [&] { + RTC_DCHECK_RUN_ON(worker_thread_); + SetMediaChannel_w(media_channel); + }); } // RTC_RUN_ON(worker_thread_) @@ -271,10 +302,6 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { if (media_channel == media_channel_) return; - if (!media_channel) { - SetSink(nullptr); - } - bool encoded_sink_enabled = saved_encoded_sink_enabled_; if (encoded_sink_enabled && media_channel_) { // Turn off the old sink, if any. 
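The RestartMediaChannel hunk above reshapes the ssrc switch into a single blocking hop onto the worker thread whose lambda reports whether a media channel is still attached; only on success does the signaling thread flip stopped_ back to false. A condensed sketch of that pattern, assuming rtc::Thread::Invoke<bool> as used in this file; the lower-case names are illustrative stand-ins, not the real members:

// Run the reconfiguration synchronously on the worker thread and surface a bool.
bool ok = worker_thread->Invoke<bool>(RTC_FROM_HERE, [&] {
  if (!media_channel)
    return false;  // already stopped and detached; nothing to restart
  // ... detach the old sink, move in the new ssrc, re-attach sink/decryptor ...
  return true;
});
if (!ok)
  return;
stopped = false;  // signaling-thread state, updated only after the hop succeeds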
@@ -297,9 +324,6 @@ void VideoRtpReceiver::SetMediaChannel_w(cricket::MediaChannel* media_channel) { ssrc_.value_or(0), frame_transformer_); } } - - if (!media_channel) - source_->ClearCallback(); } void VideoRtpReceiver::NotifyFirstPacketReceived() { @@ -317,19 +341,6 @@ std::vector VideoRtpReceiver::GetSources() const { return media_channel_->GetSources(*ssrc_); } -void VideoRtpReceiver::SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - RTC_DCHECK(media_channel); - MediaSourceInterface::SourceState state = source_->state(); - worker_thread_->Invoke(RTC_FROM_HERE, [&] { - RTC_DCHECK_RUN_ON(worker_thread_); - SetMediaChannel_w(media_channel); - RestartMediaChannel_w(std::move(ssrc), state); - }); - source_->SetState(MediaSourceInterface::kLive); -} - void VideoRtpReceiver::OnGenerateKeyFrame() { RTC_DCHECK_RUN_ON(worker_thread_); if (!media_channel_) { diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h index 4261e417d..b5381860b 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_receiver.h @@ -88,7 +88,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // RtpReceiverInternal implementation. void Stop() override; - void SetSourceEnded() override; + void StopAndEndTrack() override; void SetupMediaChannel(uint32_t ssrc) override; void SetupUnsignaledMediaChannel() override; uint32_t ssrc() const override; @@ -110,17 +110,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { std::vector GetSources() const override; - // Combines SetMediaChannel, SetupMediaChannel and - // SetupUnsignaledMediaChannel. - void SetupMediaChannel(absl::optional ssrc, - cricket::MediaChannel* media_channel); - private: - void RestartMediaChannel(absl::optional ssrc) - RTC_RUN_ON(&signaling_thread_checker_); - void RestartMediaChannel_w(absl::optional ssrc, - MediaSourceInterface::SourceState state) - RTC_RUN_ON(worker_thread_); + void RestartMediaChannel(absl::optional ssrc); void SetSink(rtc::VideoSinkInterface* sink) RTC_RUN_ON(worker_thread_); void SetMediaChannel_w(cricket::MediaChannel* media_channel) @@ -150,6 +141,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::Thread* const worker_thread_; const std::string id_; + // See documentation for `stopped_` below for when a valid media channel + // has been assigned and when this pointer will be null. cricket::VideoMediaChannel* media_channel_ RTC_GUARDED_BY(worker_thread_) = nullptr; absl::optional ssrc_ RTC_GUARDED_BY(worker_thread_); @@ -159,6 +152,15 @@ class VideoRtpReceiver : public RtpReceiverInternal { const rtc::scoped_refptr> track_; std::vector> streams_ RTC_GUARDED_BY(&signaling_thread_checker_); + // `stopped` is state that's used on the signaling thread to indicate whether + // a valid `media_channel_` has been assigned and configured. When an instance + // of VideoRtpReceiver is initially created, `stopped_` is true and will + // remain true until either `SetupMediaChannel` or + // `SetupUnsignaledMediaChannel` is called after assigning a media channel. + // After that, `stopped_` will remain false until `Stop()` is called. + // Note, for checking the state of the class on the worker thread, + // check `media_channel_` instead, as that's the main worker thread state. 
+ bool stopped_ RTC_GUARDED_BY(&signaling_thread_checker_) = true; RtpReceiverObserverInterface* observer_ RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h index a9e43f666..23a7cd224 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_rtp_track_source.h @@ -20,6 +20,7 @@ #include "api/video/video_source_interface.h" #include "media/base/video_broadcaster.h" #include "pc/video_track_source.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -44,9 +45,6 @@ class VideoRtpTrackSource : public VideoTrackSource { explicit VideoRtpTrackSource(Callback* callback); - VideoRtpTrackSource(const VideoRtpTrackSource&) = delete; - VideoRtpTrackSource& operator=(const VideoRtpTrackSource&) = delete; - // Call before the object implementing Callback finishes it's destructor. No // more callbacks will be fired after completion. Must be called on the // worker thread @@ -85,6 +83,8 @@ class VideoRtpTrackSource : public VideoTrackSource { std::vector*> encoded_sinks_ RTC_GUARDED_BY(mu_); Callback* callback_ RTC_GUARDED_BY(worker_sequence_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(VideoRtpTrackSource); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc index 744800c9f..d0246faa8 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.cc @@ -10,6 +10,7 @@ #include "pc/video_track.h" +#include #include #include @@ -21,14 +22,12 @@ namespace webrtc { -VideoTrack::VideoTrack( - const std::string& label, - rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> source, - rtc::Thread* worker_thread) +VideoTrack::VideoTrack(const std::string& label, + VideoTrackSourceInterface* video_source, + rtc::Thread* worker_thread) : MediaStreamTrack(label), worker_thread_(worker_thread), - video_source_(std::move(source)), + video_source_(video_source), content_hint_(ContentHint::kNone) { RTC_DCHECK_RUN_ON(&signaling_thread_); // Detach the thread checker for VideoSourceBaseGuarded since we'll make calls @@ -54,19 +53,14 @@ void VideoTrack::AddOrUpdateSink(rtc::VideoSinkInterface* sink, RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::AddOrUpdateSink(sink, wants); rtc::VideoSinkWants modified_wants = wants; - modified_wants.black_frames = !enabled_w_; - video_source_->internal()->AddOrUpdateSink(sink, modified_wants); + modified_wants.black_frames = !enabled(); + video_source_->AddOrUpdateSink(sink, modified_wants); } void VideoTrack::RemoveSink(rtc::VideoSinkInterface* sink) { RTC_DCHECK_RUN_ON(worker_thread_); VideoSourceBaseGuarded::RemoveSink(sink); - video_source_->internal()->RemoveSink(sink); -} - -void VideoTrack::RequestRefreshFrame() { - RTC_DCHECK_RUN_ON(worker_thread_); - video_source_->internal()->RequestRefreshFrame(); + video_source_->RemoveSink(sink); } VideoTrackSourceInterface* VideoTrack::GetSource() const { @@ -74,17 +68,13 @@ VideoTrackSourceInterface* VideoTrack::GetSource() const { return video_source_.get(); } -VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const { - return video_source_->internal(); -} - VideoTrackInterface::ContentHint 
VideoTrack::content_hint() const { - RTC_DCHECK_RUN_ON(&signaling_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); return content_hint_; } void VideoTrack::set_content_hint(ContentHint hint) { - RTC_DCHECK_RUN_ON(&signaling_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); if (content_hint_ == hint) return; content_hint_ = hint; @@ -92,29 +82,17 @@ void VideoTrack::set_content_hint(ContentHint hint) { } bool VideoTrack::set_enabled(bool enable) { - RTC_DCHECK_RUN_ON(&signaling_thread_); - - bool ret = MediaStreamTrack::set_enabled(enable); - - worker_thread_->Invoke(RTC_FROM_HERE, [&]() { - RTC_DCHECK_RUN_ON(worker_thread_); - enabled_w_ = enable; - for (auto& sink_pair : sink_pairs()) { - rtc::VideoSinkWants modified_wants = sink_pair.wants; - modified_wants.black_frames = !enable; - video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants); - } - }); - - return ret; + RTC_DCHECK_RUN_ON(worker_thread_); + for (auto& sink_pair : sink_pairs()) { + rtc::VideoSinkWants modified_wants = sink_pair.wants; + modified_wants.black_frames = !enable; + video_source_->AddOrUpdateSink(sink_pair.sink, modified_wants); + } + return MediaStreamTrack::set_enabled(enable); } bool VideoTrack::enabled() const { - if (worker_thread_->IsCurrent()) { - RTC_DCHECK_RUN_ON(worker_thread_); - return enabled_w_; - } - RTC_DCHECK_RUN_ON(&signaling_thread_); + RTC_DCHECK_RUN_ON(worker_thread_); return MediaStreamTrack::enabled(); } @@ -125,22 +103,22 @@ MediaStreamTrackInterface::TrackState VideoTrack::state() const { void VideoTrack::OnChanged() { RTC_DCHECK_RUN_ON(&signaling_thread_); - rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; - MediaSourceInterface::SourceState state = video_source_->state(); - set_state(state == MediaSourceInterface::kEnded ? kEnded : kLive); + worker_thread_->Invoke( + RTC_FROM_HERE, [this, state = video_source_->state()]() { + // TODO(tommi): Calling set_state() this way isn't ideal since we're + // currently blocking the signaling thread and set_state() may + // internally fire notifications via `FireOnChanged()` which may further + // amplify the blocking effect on the signaling thread. + rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; + set_state(state == MediaSourceInterface::kEnded ? 
kEnded : kLive); + }); } rtc::scoped_refptr VideoTrack::Create( const std::string& id, VideoTrackSourceInterface* source, rtc::Thread* worker_thread) { - rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> - source_proxy = VideoTrackSourceProxy::Create(rtc::Thread::Current(), - worker_thread, source); - - return rtc::make_ref_counted(id, std::move(source_proxy), - worker_thread); + return rtc::make_ref_counted(id, source, worker_thread); } } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track.h b/TMessagesProj/jni/voip/webrtc/pc/video_track.h index f938b3362..49deaee76 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track.h @@ -13,7 +13,6 @@ #include -#include "absl/types/optional.h" #include "api/media_stream_interface.h" #include "api/media_stream_track.h" #include "api/scoped_refptr.h" @@ -22,17 +21,11 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/video_source_base.h" -#include "pc/video_track_source_proxy.h" -#include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread.h" #include "rtc_base/thread_annotations.h" namespace webrtc { -// TODO(tommi): Instead of inheriting from `MediaStreamTrack<>`, implement the -// properties directly in this class. `MediaStreamTrack` doesn't guard against -// conflicting access, so we'd need to override those methods anyway in this -// class in order to make sure things are correctly checked. class VideoTrack : public MediaStreamTrack, public rtc::VideoSourceBaseGuarded, public ObserverInterface { @@ -45,7 +38,6 @@ class VideoTrack : public MediaStreamTrack, void AddOrUpdateSink(rtc::VideoSinkInterface* sink, const rtc::VideoSinkWants& wants) override; void RemoveSink(rtc::VideoSinkInterface* sink) override; - void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; ContentHint content_hint() const override; @@ -55,15 +47,10 @@ class VideoTrack : public MediaStreamTrack, MediaStreamTrackInterface::TrackState state() const override; std::string kind() const override; - // Direct access to the non-proxied source object for internal implementation. - VideoTrackSourceInterface* GetSourceInternal() const; - protected: - VideoTrack( - const std::string& id, - rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> source, - rtc::Thread* worker_thread); + VideoTrack(const std::string& id, + VideoTrackSourceInterface* video_source, + rtc::Thread* worker_thread); ~VideoTrack(); private: @@ -72,15 +59,8 @@ class VideoTrack : public MediaStreamTrack, RTC_NO_UNIQUE_ADDRESS webrtc::SequenceChecker signaling_thread_; rtc::Thread* const worker_thread_; - const rtc::scoped_refptr< - VideoTrackSourceProxyWithInternal> - video_source_; - ContentHint content_hint_ RTC_GUARDED_BY(&signaling_thread_); - // Cached `enabled` state for the worker thread. This is kept in sync with - // the state maintained on the signaling thread via set_enabled() but can - // be queried without blocking on the worker thread by callers that don't - // use an api proxy to call the `enabled()` method. 
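With the video_track.cc hunk above, enabled(), set_enabled() and content_hint() are DCHECKed against the worker thread again instead of the signaling thread, and set_enabled() pushes black_frames into every registered sink's VideoSinkWants. A hypothetical call site, hedged: application code normally goes through the track proxy, which performs this thread hop itself.

// Illustrative direct (non-proxied) use of a VideoTrack after this revert:
worker_thread->Invoke<void>(RTC_FROM_HERE, [&] {
  video_track->set_enabled(false);  // sinks now receive black frames
  video_track->set_content_hint(webrtc::VideoTrackInterface::ContentHint::kText);
});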
- bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; + const rtc::scoped_refptr video_source_; + ContentHint content_hint_ RTC_GUARDED_BY(worker_thread_); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc index 64e99cc06..d15eaaf43 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.cc @@ -15,12 +15,11 @@ namespace webrtc { VideoTrackSource::VideoTrackSource(bool remote) - : state_(kInitializing), remote_(remote) { + : state_(kLive), remote_(remote) { worker_thread_checker_.Detach(); } void VideoTrackSource::SetState(SourceState new_state) { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); if (state_ != new_state) { state_ = new_state; FireOnChanged(); diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h index 723b10d8f..4a29381c4 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source.h @@ -20,10 +20,7 @@ #include "api/video/video_sink_interface.h" #include "api/video/video_source_interface.h" #include "media/base/media_channel.h" -#include "rtc_base/checks.h" -#include "rtc_base/system/no_unique_address.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/thread_annotations.h" namespace webrtc { @@ -34,10 +31,7 @@ class RTC_EXPORT VideoTrackSource : public Notifier { explicit VideoTrackSource(bool remote); void SetState(SourceState new_state); - SourceState state() const override { - RTC_DCHECK_RUN_ON(&signaling_thread_checker_); - return state_; - } + SourceState state() const override { return state_; } bool remote() const override { return remote_; } bool is_screencast() const override { return false; } @@ -62,9 +56,8 @@ class RTC_EXPORT VideoTrackSource : public Notifier { virtual rtc::VideoSourceInterface* source() = 0; private: - RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_thread_checker_; - RTC_NO_UNIQUE_ADDRESS SequenceChecker signaling_thread_checker_; - SourceState state_ RTC_GUARDED_BY(&signaling_thread_checker_); + SequenceChecker worker_thread_checker_; + SourceState state_; const bool remote_; }; diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc index 26f0ecec9..309c1f20f 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.cc @@ -11,9 +11,7 @@ #include "pc/video_track_source_proxy.h" #include "api/media_stream_interface.h" -#include "api/scoped_refptr.h" #include "api/video_track_source_proxy_factory.h" -#include "rtc_base/thread.h" namespace webrtc { diff --git a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h index 8500a9876..6e71bb161 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h +++ b/TMessagesProj/jni/voip/webrtc/pc/video_track_source_proxy.h @@ -11,13 +11,7 @@ #ifndef PC_VIDEO_TRACK_SOURCE_PROXY_H_ #define PC_VIDEO_TRACK_SOURCE_PROXY_H_ -#include "absl/types/optional.h" #include "api/media_stream_interface.h" -#include "api/video/recordable_encoded_frame.h" -#include "api/video/video_frame.h" -#include "api/video/video_sink_interface.h" -#include "api/video/video_source_interface.h" -#include "api/video_track_source_constraints.h" #include "pc/proxy.h" namespace webrtc { @@ 
-27,7 +21,6 @@ namespace webrtc { // TODO(deadbeef): Move this to .cc file. What threads methods are called on is // an implementation detail. BEGIN_PROXY_MAP(VideoTrackSource) - PROXY_PRIMARY_THREAD_DESTRUCTOR() PROXY_CONSTMETHOD0(SourceState, state) BYPASS_PROXY_CONSTMETHOD0(bool, remote) @@ -39,7 +32,6 @@ PROXY_SECONDARY_METHOD2(void, rtc::VideoSinkInterface*, const rtc::VideoSinkWants&) PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) -PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) PROXY_CONSTMETHOD0(bool, SupportsEncodedOutput) diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc index ea7a148fe..3f06f307a 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_sdp.cc @@ -12,9 +12,9 @@ #include #include +#include #include -#include #include #include #include @@ -31,7 +31,6 @@ #include "api/jsep_session_description.h" #include "api/media_types.h" // for RtpExtension -#include "absl/strings/string_view.h" #include "absl/types/optional.h" #include "api/rtc_error.h" #include "api/rtp_parameters.h" diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc index 82ba84954..ac20308df 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.cc @@ -11,8 +11,8 @@ #include "pc/webrtc_session_description_factory.h" #include - -#include +#include +#include #include #include #include @@ -23,7 +23,6 @@ #include "api/jsep.h" #include "api/jsep_session_description.h" #include "api/rtc_error.h" -#include "api/sequence_checker.h" #include "pc/sdp_state_provider.h" #include "pc/session_description.h" #include "rtc_base/checks.h" @@ -33,7 +32,6 @@ #include "rtc_base/ssl_identity.h" #include "rtc_base/ssl_stream_adapter.h" #include "rtc_base/string_encode.h" -#include "rtc_base/unique_id_generator.h" using cricket::MediaSessionOptions; using rtc::UniqueRandomIdGenerator; @@ -134,10 +132,13 @@ WebRtcSessionDescriptionFactory::WebRtcSessionDescriptionFactory( bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, + UniqueRandomIdGenerator* ssrc_generator, std::function&)> on_certificate_ready) : signaling_thread_(signaling_thread), - session_desc_factory_(channel_manager, &transport_desc_factory_), + session_desc_factory_(channel_manager, + &transport_desc_factory_, + ssrc_generator), // RFC 4566 suggested a Network Time Protocol (NTP) format timestamp // as the session id and session version. 
To simplify, it should be fine // to just use a random number as session id and start version from diff --git a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h index 6a6e8efa5..8e80fb556 100644 --- a/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h +++ b/TMessagesProj/jni/voip/webrtc/pc/webrtc_session_description_factory.h @@ -26,6 +26,7 @@ #include "pc/channel_manager.h" #include "pc/media_session.h" #include "pc/sdp_state_provider.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/message_handler.h" #include "rtc_base/rtc_certificate.h" #include "rtc_base/rtc_certificate_generator.h" @@ -86,15 +87,11 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, bool dtls_enabled, std::unique_ptr cert_generator, const rtc::scoped_refptr& certificate, + rtc::UniqueRandomIdGenerator* ssrc_generator, std::function&)> on_certificate_ready); virtual ~WebRtcSessionDescriptionFactory(); - WebRtcSessionDescriptionFactory(const WebRtcSessionDescriptionFactory&) = - delete; - WebRtcSessionDescriptionFactory& operator=( - const WebRtcSessionDescriptionFactory&) = delete; - static void CopyCandidatesFromSessionDescription( const SessionDescriptionInterface* source_desc, const std::string& content_name, @@ -162,6 +159,8 @@ class WebRtcSessionDescriptionFactory : public rtc::MessageHandler, std::function&)> on_certificate_ready_; + + RTC_DISALLOW_COPY_AND_ASSIGN(WebRtcSessionDescriptionFactory); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h index c1f206013..e5a3c15ca 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_invoker.h @@ -18,6 +18,7 @@ #include "absl/base/attributes.h" #include "api/scoped_refptr.h" #include "rtc_base/async_invoker_inl.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -93,9 +94,6 @@ class DEPRECATED_AsyncInvoker : public MessageHandlerAutoCleanup { DEPRECATED_AsyncInvoker(); ~DEPRECATED_AsyncInvoker() override; - DEPRECATED_AsyncInvoker(const DEPRECATED_AsyncInvoker&) = delete; - DEPRECATED_AsyncInvoker& operator=(const DEPRECATED_AsyncInvoker&) = delete; - // Call `functor` asynchronously on `thread`, with no callback upon // completion. Returns immediately. template @@ -161,6 +159,8 @@ class DEPRECATED_AsyncInvoker : public MessageHandlerAutoCleanup { std::atomic destroying_; friend class AsyncClosure; + + RTC_DISALLOW_COPY_AND_ASSIGN(DEPRECATED_AsyncInvoker); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h index 2e334ec36..b5fcf8edb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_packet_socket.h @@ -13,6 +13,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/dscp.h" #include "rtc_base/network/sent_packet.h" #include "rtc_base/socket.h" @@ -68,9 +69,6 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { AsyncPacketSocket(); ~AsyncPacketSocket() override; - AsyncPacketSocket(const AsyncPacketSocket&) = delete; - AsyncPacketSocket& operator=(const AsyncPacketSocket&) = delete; - // Returns current local address. 
Address may be set to null if the // socket is not bound yet (GetState() returns STATE_BINDING). virtual SocketAddress GetLocalAddress() const = 0; @@ -129,6 +127,9 @@ class RTC_EXPORT AsyncPacketSocket : public sigslot::has_slots<> { // Emitted for client TCP sockets when state is changed from // CONNECTED to CLOSED. sigslot::signal2 SignalClose; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncPacketSocket); }; // Listen socket, producing an AsyncPacketSocket when a peer connects. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h index 541080fba..ca61b54d7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/async_tcp_socket.h @@ -17,6 +17,7 @@ #include "rtc_base/async_packet_socket.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" @@ -30,9 +31,6 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { AsyncTCPSocketBase(Socket* socket, size_t max_packet_size); ~AsyncTCPSocketBase() override; - AsyncTCPSocketBase(const AsyncTCPSocketBase&) = delete; - AsyncTCPSocketBase& operator=(const AsyncTCPSocketBase&) = delete; - // Pure virtual methods to send and recv data. int Send(const void* pv, size_t cb, @@ -80,6 +78,8 @@ class AsyncTCPSocketBase : public AsyncPacketSocket { Buffer outbuf_; size_t max_insize_; size_t max_outsize_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncTCPSocketBase); }; class AsyncTCPSocket : public AsyncTCPSocketBase { @@ -93,13 +93,13 @@ class AsyncTCPSocket : public AsyncTCPSocketBase { explicit AsyncTCPSocket(Socket* socket); ~AsyncTCPSocket() override {} - AsyncTCPSocket(const AsyncTCPSocket&) = delete; - AsyncTCPSocket& operator=(const AsyncTCPSocket&) = delete; - int Send(const void* pv, size_t cb, const rtc::PacketOptions& options) override; void ProcessInput(char* data, size_t* len) override; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncTCPSocket); }; class AsyncTcpListenSocket : public AsyncListenSocket { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h index 21e0e9e00..2cdac88e2 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/bit_buffer.h @@ -14,6 +14,8 @@ #include // For size_t. #include // For integer types. +#include "rtc_base/constructor_magic.h" + namespace rtc { // A BitBuffer API for write operations. Supports symmetric write APIs to the @@ -25,9 +27,6 @@ class BitBufferWriter { // Constructs a bit buffer for the writable buffer of `bytes`. BitBufferWriter(uint8_t* bytes, size_t byte_count); - BitBufferWriter(const BitBufferWriter&) = delete; - BitBufferWriter& operator=(const BitBufferWriter&) = delete; - // Gets the current offset, in bytes/bits, from the start of the buffer. The // bit offset is the offset into the current byte, in the range [0,7]. void GetCurrentOffset(size_t* out_byte_offset, size_t* out_bit_offset); @@ -149,6 +148,8 @@ class BitBufferWriter { size_t byte_offset_; // The current offset, in bits, into the current byte. 
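For context on the bit_buffer.h hunk above, a short usage sketch of the writer whose copy operations move back to the macro; the constructor and GetCurrentOffset are declared in this header, WriteBits is assumed from the rest of the class, and the values are made up:

uint8_t buf[8] = {0};
rtc::BitBufferWriter writer(buf, sizeof(buf));
writer.WriteBits(0x3, 2);   // write two bits
writer.WriteBits(0x15, 5);  // write five more bits
size_t byte_offset = 0, bit_offset = 0;
writer.GetCurrentOffset(&byte_offset, &bit_offset);  // expect 0 bytes, 7 bits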
size_t bit_offset_; + + RTC_DISALLOW_COPY_AND_ASSIGN(BitBufferWriter); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h index 8b4577a17..40a4bd8f3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_certificate.h @@ -19,6 +19,7 @@ #include #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -42,9 +43,6 @@ class BoringSSLCertificate final : public SSLCertificate { ~BoringSSLCertificate() override; - BoringSSLCertificate(const BoringSSLCertificate&) = delete; - BoringSSLCertificate& operator=(const BoringSSLCertificate&) = delete; - std::unique_ptr Clone() const override; CRYPTO_BUFFER* cert_buffer() const { return cert_buffer_.get(); } @@ -74,6 +72,7 @@ class BoringSSLCertificate final : public SSLCertificate { private: // A handle to the DER encoded certificate data. bssl::UniquePtr cert_buffer_; + RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLCertificate); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h index e322afaba..71b29b486 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/boringssl_identity.h @@ -18,6 +18,7 @@ #include #include "rtc_base/boringssl_certificate.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -43,9 +44,6 @@ class BoringSSLIdentity final : public SSLIdentity { const std::string& certificate_chain); ~BoringSSLIdentity() override; - BoringSSLIdentity(const BoringSSLIdentity&) = delete; - BoringSSLIdentity& operator=(const BoringSSLIdentity&) = delete; - const BoringSSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; @@ -69,6 +67,8 @@ class BoringSSLIdentity final : public SSLIdentity { std::unique_ptr key_pair_; std::unique_ptr cert_chain_; + + RTC_DISALLOW_COPY_AND_ASSIGN(BoringSSLIdentity); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h index b018e160a..ee435f469 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/buffer_queue.h @@ -18,6 +18,7 @@ #include "api/sequence_checker.h" #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/no_unique_address.h" #include "rtc_base/thread_annotations.h" @@ -29,9 +30,6 @@ class BufferQueue final { BufferQueue(size_t capacity, size_t default_size); ~BufferQueue(); - BufferQueue(const BufferQueue&) = delete; - BufferQueue& operator=(const BufferQueue&) = delete; - // Return number of queued buffers. 
size_t size() const; @@ -63,6 +61,8 @@ class BufferQueue final { const size_t default_size_; std::deque queue_ RTC_GUARDED_BY(sequence_checker_); std::vector free_list_ RTC_GUARDED_BY(sequence_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(BufferQueue); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h index d2dda3c8e..fc383f0a3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/byte_buffer.h @@ -18,6 +18,7 @@ #include "rtc_base/buffer.h" #include "rtc_base/byte_order.h" +#include "rtc_base/constructor_magic.h" // Reads/Writes from/to buffer using network byte order (big endian) namespace rtc { @@ -28,9 +29,6 @@ class ByteBufferWriterT { ByteBufferWriterT() { Construct(nullptr, kDefaultCapacity); } ByteBufferWriterT(const char* bytes, size_t len) { Construct(bytes, len); } - ByteBufferWriterT(const ByteBufferWriterT&) = delete; - ByteBufferWriterT& operator=(const ByteBufferWriterT&) = delete; - const char* Data() const { return buffer_.data(); } size_t Length() const { return buffer_.size(); } size_t Capacity() const { return buffer_.capacity(); } @@ -106,6 +104,7 @@ class ByteBufferWriterT { // There are sensible ways to define these, but they aren't needed in our code // base. + RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferWriterT); }; class ByteBufferWriter : public ByteBufferWriterT> { @@ -113,8 +112,8 @@ class ByteBufferWriter : public ByteBufferWriterT> { ByteBufferWriter(); ByteBufferWriter(const char* bytes, size_t len); - ByteBufferWriter(const ByteBufferWriter&) = delete; - ByteBufferWriter& operator=(const ByteBufferWriter&) = delete; + private: + RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferWriter); }; // The ByteBufferReader references the passed data, i.e. the pointer must be @@ -130,9 +129,6 @@ class ByteBufferReader { explicit ByteBufferReader(const ByteBufferWriter& buf); - ByteBufferReader(const ByteBufferReader&) = delete; - ByteBufferReader& operator=(const ByteBufferReader&) = delete; - // Returns start of unprocessed data. const char* Data() const { return bytes_ + start_; } // Returns number of unprocessed bytes. @@ -165,6 +161,9 @@ class ByteBufferReader { size_t size_; size_t start_; size_t end_; + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(ByteBufferReader); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/BUILD.gn b/TMessagesProj/jni/voip/webrtc/rtc_base/containers/BUILD.gn deleted file mode 100644 index f303e706e..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/containers/BUILD.gn +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (c) 2021 The WebRTC project authors. All Rights Reserved. -# -# Use of this source code is governed by a BSD-style license -# that can be found in the LICENSE file in the root of the source -# tree. An additional intellectual property rights grant can be found -# in the file PATENTS. All contributing project authors may -# be found in the AUTHORS file in the root of the source tree. 
- -import("../../webrtc.gni") - -rtc_library("flat_containers_internal") { - sources = [ - "as_const.h", - "flat_tree.cc", - "flat_tree.h", - "identity.h", - "invoke.h", - "move_only_int.h", - "not_fn.h", - "void_t.h", - ] - deps = [ - "..:checks", - "../system:no_unique_address", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] - visibility = [ ":*" ] -} - -rtc_source_set("flat_set") { - sources = [ "flat_set.h" ] - deps = [ ":flat_containers_internal" ] -} - -rtc_source_set("flat_map") { - sources = [ "flat_map.h" ] - deps = [ - ":flat_containers_internal", - "..:checks", - ] -} - -rtc_library("unittests") { - testonly = true - sources = [ - "flat_map_unittest.cc", - "flat_set_unittest.cc", - "flat_tree_unittest.cc", - ] - deps = [ - ":flat_containers_internal", - ":flat_map", - ":flat_set", - "../../test:test_support", - "//testing/gmock:gmock", - "//testing/gtest:gtest", - ] - absl_deps = [ "//third_party/abseil-cpp/absl/algorithm:container" ] -} diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h index cc308e45b..15b1f97e9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/deprecated/recursive_critical_section.h @@ -11,6 +11,7 @@ #ifndef RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ #define RTC_BASE_DEPRECATED_RECURSIVE_CRITICAL_SECTION_H_ +#include "rtc_base/constructor_magic.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/thread_annotations.h" @@ -93,11 +94,9 @@ class RTC_SCOPED_LOCKABLE CritScope { RTC_EXCLUSIVE_LOCK_FUNCTION(cs); ~CritScope() RTC_UNLOCK_FUNCTION(); - CritScope(const CritScope&) = delete; - CritScope& operator=(const CritScope&) = delete; - private: const RecursiveCriticalSection* const cs_; + RTC_DISALLOW_COPY_AND_ASSIGN(CritScope); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h index 68aaf0d8a..4bbda579b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/event_tracer.h @@ -28,8 +28,6 @@ #include -#include "rtc_base/system/rtc_export.h" - namespace webrtc { typedef const unsigned char* (*GetCategoryEnabledPtr)(const char* name); @@ -72,12 +70,12 @@ class EventTracer { namespace rtc { namespace tracing { // Set up internal event tracer. -RTC_EXPORT void SetupInternalTracer(); -RTC_EXPORT bool StartInternalCapture(const char* filename); -RTC_EXPORT void StartInternalCaptureToFile(FILE* file); -RTC_EXPORT void StopInternalCapture(); +void SetupInternalTracer(); +bool StartInternalCapture(const char* filename); +void StartInternalCaptureToFile(FILE* file); +void StopInternalCapture(); // Make sure we run this, this will tear down the internal tracing. 
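The event_tracer.h hunk above only removes RTC_EXPORT from the internal tracing entry points; their usage is unchanged. A minimal sketch built from the functions declared in this header (the output path is a placeholder):

rtc::tracing::SetupInternalTracer();
rtc::tracing::StartInternalCapture("/sdcard/webrtc_trace.json");
// ... run the scenario being profiled ...
rtc::tracing::StopInternalCapture();
rtc::tracing::ShutdownInternalTracer();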
-RTC_EXPORT void ShutdownInternalTracer(); +void ShutdownInternalTracer(); } // namespace tracing } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h index 00425be7b..877e29a69 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_list.h @@ -58,9 +58,7 @@ class FieldTrialList : public FieldTrialListBase { std::vector Get() const { return values_; } operator std::vector() const { return Get(); } - typename std::vector::const_reference operator[](size_t index) const { - return values_[index]; - } + const T& operator[](size_t index) const { return values_[index]; } const std::vector* operator->() const { return &values_; } protected: diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h index bd11eea20..c67ef542d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/field_trial_parser.h @@ -220,7 +220,7 @@ class FieldTrialFlag : public FieldTrialParameterInterface { explicit FieldTrialFlag(std::string key); FieldTrialFlag(std::string key, bool default_value); bool Get() const; - explicit operator bool() const; + operator bool() const; protected: bool Parse(absl::optional str_value) override; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h index f5f834020..523ecfb05 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/experiments/struct_parameters_parser.h @@ -28,7 +28,7 @@ namespace webrtc { namespace struct_parser_impl { struct TypedMemberParser { public: - bool (*parse)(absl::string_view src, void* target); + bool (*parse)(const absl::string_view src, void* target); void (*encode)(const void* src, std::string* target); }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h index 6aaa0bd78..beb47c83d 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/file_rotating_stream.h @@ -17,6 +17,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/file_wrapper.h" namespace rtc { @@ -36,9 +37,6 @@ class FileRotatingStream { virtual ~FileRotatingStream(); - FileRotatingStream(const FileRotatingStream&) = delete; - FileRotatingStream& operator=(const FileRotatingStream&) = delete; - bool IsOpen() const; bool Write(const void* data, size_t data_len); @@ -102,6 +100,8 @@ class FileRotatingStream { // buffering the file size read from disk might not be accurate. 
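The field_trial_parser.h change above makes FieldTrialFlag's conversion to bool implicit again, and field_trial_list.h's operator[] goes back to returning const T&. A hedged sketch of typical parsing with these helpers, assuming the ParseFieldTrial function from the same directory; the trial string and keys are made up:

webrtc::FieldTrialFlag enabled("Enabled");
webrtc::FieldTrialParameter<double> rate("rate", /*default=*/1.0);
webrtc::ParseFieldTrial({&enabled, &rate}, "Enabled,rate:0.5");
if (enabled) {             // compiles without a cast once the conversion is implicit
  double r = rate.Get();   // 0.5
  (void)r;
}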
size_t current_bytes_written_; bool disable_buffering_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FileRotatingStream); }; // CallSessionFileRotatingStream is meant to be used in situations where we will @@ -130,10 +130,6 @@ class CallSessionFileRotatingStream : public FileRotatingStream { size_t max_total_log_size); ~CallSessionFileRotatingStream() override {} - CallSessionFileRotatingStream(const CallSessionFileRotatingStream&) = delete; - CallSessionFileRotatingStream& operator=( - const CallSessionFileRotatingStream&) = delete; - protected: void OnRotation() override; @@ -144,6 +140,8 @@ class CallSessionFileRotatingStream : public FileRotatingStream { const size_t max_total_log_size_; size_t num_rotations_; + + RTC_DISALLOW_COPY_AND_ASSIGN(CallSessionFileRotatingStream); }; // This is a convenience class, to read all files produced by a diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc index 6474fb724..1cc63fe9f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ifaddrs_android.cc @@ -24,8 +24,6 @@ #include #include -#include "absl/cleanup/cleanup.h" - namespace { struct netlinkrequest { @@ -140,12 +138,10 @@ int populate_ifaddrs(struct ifaddrs* ifaddr, } int getifaddrs(struct ifaddrs** result) { - *result = nullptr; int fd = socket(PF_NETLINK, SOCK_RAW, NETLINK_ROUTE); if (fd < 0) { return -1; } - absl::Cleanup close_file = [fd] { close(fd); }; netlinkrequest ifaddr_request; memset(&ifaddr_request, 0, sizeof(ifaddr_request)); @@ -155,10 +151,10 @@ int getifaddrs(struct ifaddrs** result) { ssize_t count = send(fd, &ifaddr_request, ifaddr_request.header.nlmsg_len, 0); if (static_cast(count) != ifaddr_request.header.nlmsg_len) { + close(fd); return -1; } struct ifaddrs* start = nullptr; - absl::Cleanup cleanup_start = [&start] { freeifaddrs(start); }; struct ifaddrs* current = nullptr; char buf[kMaxReadSize]; ssize_t amount_read = recv(fd, &buf, kMaxReadSize, 0); @@ -169,12 +165,13 @@ int getifaddrs(struct ifaddrs** result) { header = NLMSG_NEXT(header, header_size)) { switch (header->nlmsg_type) { case NLMSG_DONE: - // Success. Return `start`. Cancel `start` cleanup because it - // becomes callers responsibility. - std::move(cleanup_start).Cancel(); + // Success. Return. 
*result = start; + close(fd); return 0; case NLMSG_ERROR: + close(fd); + freeifaddrs(start); return -1; case RTM_NEWADDR: { ifaddrmsg* address_msg = @@ -195,6 +192,8 @@ int getifaddrs(struct ifaddrs** result) { } if (populate_ifaddrs(newest, address_msg, RTA_DATA(rta), RTA_PAYLOAD(rta)) != 0) { + freeifaddrs(start); + *result = nullptr; return -1; } current = newest; @@ -207,6 +206,8 @@ int getifaddrs(struct ifaddrs** result) { } amount_read = recv(fd, &buf, kMaxReadSize, 0); } + close(fd); + freeifaddrs(start); return -1; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h index d2c286a86..87bec6dba 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/log_sinks.h @@ -16,6 +16,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/file_rotating_stream.h" #include "rtc_base/logging.h" @@ -33,9 +34,6 @@ class FileRotatingLogSink : public LogSink { size_t num_log_files); ~FileRotatingLogSink() override; - FileRotatingLogSink(const FileRotatingLogSink&) = delete; - FileRotatingLogSink& operator=(const FileRotatingLogSink&) = delete; - // Writes the message to the current file. It will spill over to the next // file if needed. void OnLogMessage(const std::string& message) override; @@ -54,6 +52,8 @@ class FileRotatingLogSink : public LogSink { private: std::unique_ptr stream_; + + RTC_DISALLOW_COPY_AND_ASSIGN(FileRotatingLogSink); }; // Log sink that uses a CallSessionFileRotatingStream to write to disk. @@ -64,10 +64,8 @@ class CallSessionFileRotatingLogSink : public FileRotatingLogSink { size_t max_total_log_size); ~CallSessionFileRotatingLogSink() override; - CallSessionFileRotatingLogSink(const CallSessionFileRotatingLogSink&) = - delete; - CallSessionFileRotatingLogSink& operator=( - const CallSessionFileRotatingLogSink&) = delete; + private: + RTC_DISALLOW_COPY_AND_ASSIGN(CallSessionFileRotatingLogSink); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc index 2b5c80b33..4ebeebebf 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.cc @@ -55,15 +55,13 @@ namespace rtc { namespace { // By default, release builds don't log, debug builds at info level #if !defined(NDEBUG) -constexpr LoggingSeverity kDefaultLoggingSeverity = LS_INFO; +static LoggingSeverity g_min_sev = LS_INFO; +static LoggingSeverity g_dbg_sev = LS_INFO; #else -constexpr LoggingSeverity kDefaultLoggingSeverity = LS_NONE; +static LoggingSeverity g_min_sev = LS_NONE; +static LoggingSeverity g_dbg_sev = LS_NONE; #endif -// Note: `g_min_sev` and `g_dbg_sev` can be changed while running. -LoggingSeverity g_min_sev = kDefaultLoggingSeverity; -LoggingSeverity g_dbg_sev = kDefaultLoggingSeverity; - // Return the filename portion of the string (that following the last slash). 
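The log_sinks.h and logging.cc hunks above touch the rotating file sink and the mutable g_min_sev/g_dbg_sev severity globals. A hypothetical setup combining the two, hedged: AddLogToStream and LogToDebug are the existing rtc::LogMessage entry points, while the path, sizes, and the Init() call are assumptions about the sink's setup sequence.

rtc::LogMessage::LogToDebug(rtc::LS_WARNING);  // raises the debug-log threshold
rtc::FileRotatingLogSink sink("/sdcard/logs", "webrtc",
                              /*max_file_size=*/10 * 1024 * 1024,
                              /*num_log_files=*/4);
sink.Init();  // assumed to open the first file in the rotation
rtc::LogMessage::AddLogToStream(&sink, rtc::LS_INFO);
RTC_LOG(LS_INFO) << "file logging enabled";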
const char* FilenameFromPath(const char* file) { const char* end1 = ::strrchr(file, '/'); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h index eaa84e893..3ac12d7e6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/logging.h @@ -54,6 +54,7 @@ #include "absl/base/attributes.h" #include "absl/meta/type_traits.h" #include "absl/strings/string_view.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/system/inline.h" @@ -88,6 +89,11 @@ enum LoggingSeverity { LS_WARNING, LS_ERROR, LS_NONE, + // Compatibility aliases, to be deleted. + // TODO(bugs.webrtc.org/13362): Remove usage and delete. + INFO [[deprecated("Use LS_INFO")]] = LS_INFO, + WARNING [[deprecated("Use LS_WARNING")]] = LS_WARNING, + LERROR [[deprecated("Use LS_ERROR")]] = LS_ERROR }; // LogErrorContext assists in interpreting the meaning of an error value. @@ -437,9 +443,6 @@ class LogMessage { const std::string& tag); ~LogMessage(); - LogMessage(const LogMessage&) = delete; - LogMessage& operator=(const LogMessage&) = delete; - void AddTag(const char* tag); rtc::StringBuilder& stream(); // Returns the time at which this function was called for the first time. @@ -599,6 +602,8 @@ class LogMessage { // The stringbuilder that buffers the formatted message before output rtc::StringBuilder print_stream_; + + RTC_DISALLOW_COPY_AND_ASSIGN(LogMessage); }; ////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h index e3d29f4b6..0bc943ac3 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/memory/fifo_buffer.h @@ -29,10 +29,6 @@ class FifoBuffer final : public StreamInterface { // Creates a FIFO buffer with the specified capacity and owner FifoBuffer(size_t length, Thread* owner); ~FifoBuffer() override; - - FifoBuffer(const FifoBuffer&) = delete; - FifoBuffer& operator=(const FifoBuffer&) = delete; - // Gets the amount of data currently readable from the buffer. 
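The logging.h hunk above restores the deprecated INFO/WARNING/LERROR aliases inside LoggingSeverity, so legacy call sites keep compiling (with a deprecation warning) alongside the LS_-prefixed names:

RTC_LOG(LS_WARNING) << "preferred spelling";
RTC_LOG(WARNING) << "legacy alias, still accepted but marked [[deprecated]]";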
bool GetBuffered(size_t* data_len) const; @@ -114,6 +110,7 @@ class FifoBuffer final : public StreamInterface { Thread* const owner_; // object lock mutable webrtc::Mutex mutex_; + RTC_DISALLOW_COPY_AND_ASSIGN(FifoBuffer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h index c5e05cad6..62c8344e1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/message_handler.h @@ -14,6 +14,7 @@ #include #include "api/function_view.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -36,12 +37,11 @@ class RTC_EXPORT MessageHandlerAutoCleanup : public MessageHandler { public: ~MessageHandlerAutoCleanup() override; - MessageHandlerAutoCleanup(const MessageHandlerAutoCleanup&) = delete; - MessageHandlerAutoCleanup& operator=(const MessageHandlerAutoCleanup&) = - delete; - protected: MessageHandlerAutoCleanup(); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerAutoCleanup); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h index acbd62a09..5078fbb2c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_server.h @@ -15,6 +15,7 @@ #include #include "rtc_base/async_udp_socket.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/nat_types.h" #include "rtc_base/proxy_server.h" #include "rtc_base/socket_address_pair.h" @@ -68,9 +69,6 @@ class NATServer : public sigslot::has_slots<> { const SocketAddress& external_ip); ~NATServer() override; - NATServer(const NATServer&) = delete; - NATServer& operator=(const NATServer&) = delete; - SocketAddress internal_udp_address() const { return udp_server_socket_->GetLocalAddress(); } @@ -124,6 +122,7 @@ class NATServer : public sigslot::has_slots<> { ProxyServer* tcp_proxy_server_; InternalMap* int_map_; ExternalMap* ext_map_; + RTC_DISALLOW_COPY_AND_ASSIGN(NATServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h index 70cb303de..9b1d2f09e 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/nat_socket_factory.h @@ -17,6 +17,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/nat_server.h" #include "rtc_base/nat_types.h" #include "rtc_base/socket.h" @@ -49,9 +50,6 @@ class NATSocketFactory : public SocketFactory, public NATInternalSocketFactory { const SocketAddress& nat_udp_addr, const SocketAddress& nat_tcp_addr); - NATSocketFactory(const NATSocketFactory&) = delete; - NATSocketFactory& operator=(const NATSocketFactory&) = delete; - // SocketFactory implementation Socket* CreateSocket(int family, int type) override; @@ -65,6 +63,7 @@ class NATSocketFactory : public SocketFactory, public NATInternalSocketFactory { SocketFactory* factory_; SocketAddress nat_udp_addr_; SocketAddress nat_tcp_addr_; + RTC_DISALLOW_COPY_AND_ASSIGN(NATSocketFactory); }; // Creates sockets that will send traffic through a NAT depending on what @@ -136,9 +135,6 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { explicit NATSocketServer(SocketServer* ss); - NATSocketServer(const NATSocketServer&) = delete; - NATSocketServer& operator=(const NATSocketServer&) = delete; - SocketServer* 
socketserver() { return server_; } Thread* queue() { return msg_queue_; } @@ -165,6 +161,7 @@ class NATSocketServer : public SocketServer, public NATInternalSocketFactory { SocketServer* server_; Thread* msg_queue_; TranslatorMap nats_; + RTC_DISALLOW_COPY_AND_ASSIGN(NATSocketServer); }; // Free-standing NAT helper functions. diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc index 295d39c5d..870f22a3a 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.cc @@ -34,7 +34,6 @@ #include "rtc_base/string_encode.h" #include "rtc_base/string_utils.h" #include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread.h" #include "system_wrappers/include/field_trial.h" @@ -49,6 +48,9 @@ constexpr uint8_t kVpns[2][6] = { {0x2, 0x50, 0x41, 0x0, 0x0, 0x1}, }; +const uint32_t kUpdateNetworksMessage = 1; +const uint32_t kSignalNetworksMessage = 2; + // Fetch list of networks every two seconds. const int kNetworksUpdateIntervalMs = 2000; @@ -526,11 +528,7 @@ BasicNetworkManager::BasicNetworkManager( bind_using_ifname_( !webrtc::field_trial::IsDisabled("WebRTC-BindUsingInterfaceName")) {} -BasicNetworkManager::~BasicNetworkManager() { - if (task_safety_flag_) { - task_safety_flag_->SetNotAlive(); - } -} +BasicNetworkManager::~BasicNetworkManager() {} void BasicNetworkManager::OnNetworksChanged() { RTC_DCHECK_RUN_ON(thread_); @@ -899,17 +897,9 @@ void BasicNetworkManager::StartUpdating() { // we should trigger network signal immediately for the new clients // to start allocating ports. if (sent_first_update_) - thread_->PostTask(ToQueuedTask(task_safety_flag_, [this] { - RTC_DCHECK_RUN_ON(thread_); - SignalNetworksChanged(); - })); + thread_->Post(RTC_FROM_HERE, this, kSignalNetworksMessage); } else { - RTC_DCHECK(task_safety_flag_ == nullptr); - task_safety_flag_ = webrtc::PendingTaskSafetyFlag::Create(); - thread_->PostTask(ToQueuedTask(task_safety_flag_, [this] { - RTC_DCHECK_RUN_ON(thread_); - UpdateNetworksContinually(); - })); + thread_->Post(RTC_FROM_HERE, this, kUpdateNetworksMessage); StartNetworkMonitor(); } ++start_count_; @@ -922,8 +912,7 @@ void BasicNetworkManager::StopUpdating() { --start_count_; if (!start_count_) { - task_safety_flag_->SetNotAlive(); - task_safety_flag_ = nullptr; + thread_->Clear(this); sent_first_update_ = false; StopNetworkMonitor(); } @@ -967,6 +956,22 @@ void BasicNetworkManager::StopNetworkMonitor() { } } +void BasicNetworkManager::OnMessage(Message* msg) { + RTC_DCHECK_RUN_ON(thread_); + switch (msg->message_id) { + case kUpdateNetworksMessage: { + UpdateNetworksContinually(); + break; + } + case kSignalNetworksMessage: { + SignalNetworksChanged(); + break; + } + default: + RTC_DCHECK_NOTREACHED(); + } +} + IPAddress BasicNetworkManager::QueryDefaultLocalAddress(int family) const { RTC_DCHECK(family == AF_INET || family == AF_INET6); @@ -1021,12 +1026,8 @@ void BasicNetworkManager::UpdateNetworksOnce() { void BasicNetworkManager::UpdateNetworksContinually() { UpdateNetworksOnce(); - thread_->PostDelayedTask(ToQueuedTask(task_safety_flag_, - [this] { - RTC_DCHECK_RUN_ON(thread_); - UpdateNetworksContinually(); - }), - kNetworksUpdateIntervalMs); + thread_->PostDelayed(RTC_FROM_HERE, kNetworksUpdateIntervalMs, this, + kUpdateNetworksMessage); } void BasicNetworkManager::DumpNetworks() { @@ -1167,51 +1168,6 @@ uint16_t Network::GetCost() const { add_network_cost_to_vpn_); } -// This is the 
inverse of ComputeNetworkCostByType(). -std::pair -Network::GuessAdapterFromNetworkCost(int network_cost) { - switch (network_cost) { - case kNetworkCostMin: - return {rtc::ADAPTER_TYPE_ETHERNET, false}; - case kNetworkCostMin + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_ETHERNET, true}; - case kNetworkCostLow: - return {rtc::ADAPTER_TYPE_WIFI, false}; - case kNetworkCostLow + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_WIFI, true}; - case kNetworkCostCellular: - return {rtc::ADAPTER_TYPE_CELLULAR, false}; - case kNetworkCostCellular + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR, true}; - case kNetworkCostCellular2G: - return {rtc::ADAPTER_TYPE_CELLULAR_2G, false}; - case kNetworkCostCellular2G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_2G, true}; - case kNetworkCostCellular3G: - return {rtc::ADAPTER_TYPE_CELLULAR_3G, false}; - case kNetworkCostCellular3G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_3G, true}; - case kNetworkCostCellular4G: - return {rtc::ADAPTER_TYPE_CELLULAR_4G, false}; - case kNetworkCostCellular4G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_4G, true}; - case kNetworkCostCellular5G: - return {rtc::ADAPTER_TYPE_CELLULAR_5G, false}; - case kNetworkCostCellular5G + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_CELLULAR_5G, true}; - case kNetworkCostUnknown: - return {rtc::ADAPTER_TYPE_UNKNOWN, false}; - case kNetworkCostUnknown + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_UNKNOWN, true}; - case kNetworkCostMax: - return {rtc::ADAPTER_TYPE_ANY, false}; - case kNetworkCostMax + kNetworkCostVpn: - return {rtc::ADAPTER_TYPE_ANY, true}; - } - RTC_LOG(LS_VERBOSE) << "Unknown network cost: " << network_cost; - return {rtc::ADAPTER_TYPE_UNKNOWN, false}; -} - std::string Network::ToString() const { rtc::StringBuilder ss; // Print out the first space-terminated token of the network desc, plus diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h index 83a2f7d27..0b462bded 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network.h @@ -23,11 +23,11 @@ #include "api/sequence_checker.h" #include "rtc_base/ip_address.h" #include "rtc_base/mdns_responder_interface.h" +#include "rtc_base/message_handler.h" #include "rtc_base/network_monitor.h" #include "rtc_base/network_monitor_factory.h" #include "rtc_base/socket_factory.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/third_party/sigslot/sigslot.h" #include "rtc_base/thread_annotations.h" @@ -251,6 +251,7 @@ class RTC_EXPORT NetworkManagerBase : public NetworkManager { // Basic implementation of the NetworkManager interface that gets list // of networks using OS APIs. class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, + public MessageHandlerAutoCleanup, public NetworkBinderInterface, public sigslot::has_slots<> { public: @@ -270,6 +271,8 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, void DumpNetworks() override; + // MessageHandler interface. + void OnMessage(Message* msg) override; bool started() { return start_count_ > 0; } // Sets the network ignore list, which is empty by default. 
Any network on the @@ -344,7 +347,6 @@ class RTC_EXPORT BasicNetworkManager : public NetworkManagerBase, bool bind_using_ifname_ RTC_GUARDED_BY(thread_) = false; std::vector vpn_; - rtc::scoped_refptr task_safety_flag_; }; // Represents a Unix-type network interface, with a name and single address. @@ -524,9 +526,6 @@ class RTC_EXPORT Network { SignalNetworkPreferenceChanged(this); } - static std::pair - GuessAdapterFromNetworkCost(int network_cost); - // Debugging description of this network std::string ToString() const; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h b/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h index 578b9710d..0495afdcc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/network_constants.h @@ -57,16 +57,6 @@ enum AdapterType { std::string AdapterTypeToString(AdapterType type); -// Useful for testing! -constexpr AdapterType kAllAdapterTypes[] = { - ADAPTER_TYPE_UNKNOWN, ADAPTER_TYPE_ETHERNET, - ADAPTER_TYPE_WIFI, ADAPTER_TYPE_CELLULAR, - ADAPTER_TYPE_VPN, ADAPTER_TYPE_LOOPBACK, - ADAPTER_TYPE_ANY, ADAPTER_TYPE_CELLULAR_2G, - ADAPTER_TYPE_CELLULAR_3G, ADAPTER_TYPE_CELLULAR_4G, - ADAPTER_TYPE_CELLULAR_5G, -}; - } // namespace rtc #endif // RTC_BASE_NETWORK_CONSTANTS_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h index 5eb45d392..26dd506d6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_max_counter.h @@ -19,6 +19,7 @@ #include "absl/types/optional.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" namespace rtc { @@ -33,10 +34,6 @@ template class MovingMaxCounter { public: explicit MovingMaxCounter(int64_t window_length_ms); - - MovingMaxCounter(const MovingMaxCounter&) = delete; - MovingMaxCounter& operator=(const MovingMaxCounter&) = delete; - // Advances the current time, and adds a new sample. The new current time must // be at least as large as the old current time. void Add(const T& sample, int64_t current_time_ms); @@ -60,6 +57,7 @@ class MovingMaxCounter { #if RTC_DCHECK_IS_ON int64_t last_call_time_ms_ = std::numeric_limits::min(); #endif + RTC_DISALLOW_COPY_AND_ASSIGN(MovingMaxCounter); }; template diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h index 2a8ea7d62..157eb152c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/numerics/moving_median_filter.h @@ -17,6 +17,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/percentile_filter.h" namespace webrtc { @@ -29,9 +30,6 @@ class MovingMedianFilter { // used to take median. `window_size` must be positive. explicit MovingMedianFilter(size_t window_size); - MovingMedianFilter(const MovingMedianFilter&) = delete; - MovingMedianFilter& operator=(const MovingMedianFilter&) = delete; - // Insert a new sample. 
void Insert(const T& value); @@ -49,6 +47,8 @@ class MovingMedianFilter { std::list samples_; size_t samples_stored_; const size_t window_size_; + + RTC_DISALLOW_COPY_AND_ASSIGN(MovingMedianFilter); }; template diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h index b2debbee8..c317a7211 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_certificate.h @@ -18,6 +18,7 @@ #include #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" @@ -41,9 +42,6 @@ class OpenSSLCertificate final : public SSLCertificate { ~OpenSSLCertificate() override; - OpenSSLCertificate(const OpenSSLCertificate&) = delete; - OpenSSLCertificate& operator=(const OpenSSLCertificate&) = delete; - std::unique_ptr Clone() const override; X509* x509() const { return x509_; } @@ -72,6 +70,7 @@ class OpenSSLCertificate final : public SSLCertificate { private: X509* x509_; // NOT OWNED + RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLCertificate); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h index 63f46b374..00d6c7492 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_identity.h @@ -17,6 +17,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/openssl_certificate.h" #include "rtc_base/openssl_key_pair.h" #include "rtc_base/ssl_certificate.h" @@ -42,9 +43,6 @@ class OpenSSLIdentity final : public SSLIdentity { const std::string& certificate_chain); ~OpenSSLIdentity() override; - OpenSSLIdentity(const OpenSSLIdentity&) = delete; - OpenSSLIdentity& operator=(const OpenSSLIdentity&) = delete; - const OpenSSLCertificate& certificate() const override; const SSLCertChain& cert_chain() const override; @@ -68,6 +66,8 @@ class OpenSSLIdentity final : public SSLIdentity { std::unique_ptr key_pair_; std::unique_ptr cert_chain_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLIdentity); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h index d9a4939a7..a84c43b6b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_key_pair.h @@ -17,6 +17,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_identity.h" namespace rtc { @@ -38,9 +39,6 @@ class OpenSSLKeyPair final { ~OpenSSLKeyPair(); - OpenSSLKeyPair(const OpenSSLKeyPair&) = delete; - OpenSSLKeyPair& operator=(const OpenSSLKeyPair&) = delete; - std::unique_ptr Clone(); EVP_PKEY* pkey() const { return pkey_; } @@ -53,6 +51,8 @@ class OpenSSLKeyPair final { void AddReference(); EVP_PKEY* pkey_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLKeyPair); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h index b801ec7b0..b049e64dd 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/openssl_session_cache.h @@ -16,6 +16,7 @@ #include #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/ssl_stream_adapter.h" #ifndef OPENSSL_IS_BORINGSSL @@ -35,10 +36,6 @@ 
class OpenSSLSessionCache final { OpenSSLSessionCache(SSLMode ssl_mode, SSL_CTX* ssl_ctx); // Frees the cached SSL_SESSIONS and then frees the SSL_CTX. ~OpenSSLSessionCache(); - - OpenSSLSessionCache(const OpenSSLSessionCache&) = delete; - OpenSSLSessionCache& operator=(const OpenSSLSessionCache&) = delete; - // Looks up a session by hostname. The returned SSL_SESSION is not up_refed. SSL_SESSION* LookupSession(const std::string& hostname) const; // Adds a session to the cache, and up_refs it. Any existing session with the @@ -63,6 +60,7 @@ class OpenSSLSessionCache final { // TODO(juberti): Add LRU eviction to keep the cache from growing forever. std::map sessions_; // The cache should never be copied or assigned directly. + RTC_DISALLOW_COPY_AND_ASSIGN(OpenSSLSessionCache); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc index f42482b3f..59d30d350 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.cc @@ -37,11 +37,10 @@ void OperationsChain::CallbackHandle::OnOperationComplete() { // static scoped_refptr OperationsChain::Create() { - // Explicit new, to access private constructor. - return rtc::scoped_refptr(new OperationsChain()); + return new OperationsChain(); } -OperationsChain::OperationsChain() { +OperationsChain::OperationsChain() : RefCountedObject() { RTC_DCHECK_RUN_ON(&sequence_checker_); } @@ -64,10 +63,8 @@ bool OperationsChain::IsEmpty() const { } std::function OperationsChain::CreateOperationsChainCallback() { - return [handle = rtc::make_ref_counted( - rtc::scoped_refptr(this))]() { - handle->OnOperationComplete(); - }; + return [handle = rtc::scoped_refptr( + new CallbackHandle(this))]() { handle->OnOperationComplete(); }; } void OperationsChain::OnOperationComplete() { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h index 0e8c0681b..7823f6e23 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/operations_chain.h @@ -19,10 +19,10 @@ #include #include "absl/types/optional.h" -#include "api/ref_counted_base.h" #include "api/scoped_refptr.h" #include "api/sequence_checker.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counted_object.h" #include "rtc_base/system/no_unique_address.h" @@ -113,14 +113,11 @@ class OperationWithFunctor final : public Operation { // The OperationsChain is kept-alive through reference counting if there are // operations pending. This, together with the contract, guarantees that all // operations that are chained get executed. -class OperationsChain final : public RefCountedNonVirtual { +class OperationsChain final : public RefCountedObject { public: static scoped_refptr Create(); ~OperationsChain(); - OperationsChain(const OperationsChain&) = delete; - OperationsChain& operator=(const OperationsChain&) = delete; - void SetOnChainEmptyCallback(std::function on_chain_empty_callback); bool IsEmpty() const; @@ -166,14 +163,11 @@ class OperationsChain final : public RefCountedNonVirtual { // std::function, which is a copyable type. To allow the callback to // be copyable, it is backed up by this reference counted handle. See // CreateOperationsChainCallback(). 
- class CallbackHandle final : public RefCountedNonVirtual { + class CallbackHandle final : public RefCountedObject { public: explicit CallbackHandle(scoped_refptr operations_chain); ~CallbackHandle(); - CallbackHandle(const CallbackHandle&) = delete; - CallbackHandle& operator=(const CallbackHandle&) = delete; - void OnOperationComplete(); private: @@ -181,6 +175,8 @@ class OperationsChain final : public RefCountedNonVirtual { #if RTC_DCHECK_IS_ON bool has_run_ = false; #endif // RTC_DCHECK_IS_ON + + RTC_DISALLOW_COPY_AND_ASSIGN(CallbackHandle); }; OperationsChain(); @@ -196,6 +192,8 @@ class OperationsChain final : public RefCountedNonVirtual { chained_operations_ RTC_GUARDED_BY(sequence_checker_); absl::optional> on_chain_empty_callback_ RTC_GUARDED_BY(sequence_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(OperationsChain); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h index 0b9b655a5..6db0e1289 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/proxy_server.h @@ -15,6 +15,7 @@ #include #include "absl/memory/memory.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/memory/fifo_buffer.h" #include "rtc_base/server_socket_adapters.h" #include "rtc_base/socket.h" @@ -35,10 +36,6 @@ class ProxyBinding : public sigslot::has_slots<> { public: ProxyBinding(AsyncProxyServerSocket* in_socket, Socket* out_socket); ~ProxyBinding() override; - - ProxyBinding(const ProxyBinding&) = delete; - ProxyBinding& operator=(const ProxyBinding&) = delete; - sigslot::signal1 SignalDestroyed; private: @@ -62,6 +59,7 @@ class ProxyBinding : public sigslot::has_slots<> { bool connected_; FifoBuffer out_buffer_; FifoBuffer in_buffer_; + RTC_DISALLOW_COPY_AND_ASSIGN(ProxyBinding); }; class ProxyServer : public sigslot::has_slots<> { @@ -72,9 +70,6 @@ class ProxyServer : public sigslot::has_slots<> { const SocketAddress& ext_ip); ~ProxyServer() override; - ProxyServer(const ProxyServer&) = delete; - ProxyServer& operator=(const ProxyServer&) = delete; - // Returns the address to which the proxy server is bound SocketAddress GetServerAddress(); @@ -87,6 +82,7 @@ class ProxyServer : public sigslot::has_slots<> { SocketAddress ext_ip_; std::unique_ptr server_socket_; std::vector> bindings_; + RTC_DISALLOW_COPY_AND_ASSIGN(ProxyServer); }; // SocksProxyServer is a simple extension of ProxyServer to implement SOCKS. @@ -98,11 +94,9 @@ class SocksProxyServer : public ProxyServer { const SocketAddress& ext_ip) : ProxyServer(int_factory, int_addr, ext_factory, ext_ip) {} - SocksProxyServer(const SocksProxyServer&) = delete; - SocksProxyServer& operator=(const SocksProxyServer&) = delete; - protected: AsyncProxyServerSocket* WrapSocket(Socket* socket) override; + RTC_DISALLOW_COPY_AND_ASSIGN(SocksProxyServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h index 54aac952c..2a55d863c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ref_counted_object.h @@ -14,36 +14,17 @@ #include #include "api/scoped_refptr.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/ref_count.h" #include "rtc_base/ref_counter.h" namespace rtc { -namespace webrtc_make_ref_counted_internal { -// Determines if the given class has AddRef and Release methods. 
-template -class HasAddRefAndRelease { - private: - template ().AddRef())* = nullptr, - decltype(std::declval().Release())* = nullptr> - static int Test(int); - template - static char Test(...); - - public: - static constexpr bool value = std::is_same_v(0)), int>; -}; -} // namespace webrtc_make_ref_counted_internal - template class RefCountedObject : public T { public: RefCountedObject() {} - RefCountedObject(const RefCountedObject&) = delete; - RefCountedObject& operator=(const RefCountedObject&) = delete; - template explicit RefCountedObject(P0&& p0) : T(std::forward(p0)) {} @@ -75,15 +56,18 @@ class RefCountedObject : public T { ~RefCountedObject() override {} mutable webrtc::webrtc_impl::RefCounter ref_count_{0}; + + RTC_DISALLOW_COPY_AND_ASSIGN(RefCountedObject); }; template class FinalRefCountedObject final : public T { public: using T::T; - // Above using declaration propagates a default move constructor - // FinalRefCountedObject(FinalRefCountedObject&& other), but we also need - // move construction from T. + // Until c++17 compilers are allowed not to inherit the default constructors. + // Thus the default constructors are forwarded explicitly. + FinalRefCountedObject() = default; + explicit FinalRefCountedObject(const T& other) : T(other) {} explicit FinalRefCountedObject(T&& other) : T(std::move(other)) {} FinalRefCountedObject(const FinalRefCountedObject&) = delete; FinalRefCountedObject& operator=(const FinalRefCountedObject&) = delete; @@ -122,13 +106,8 @@ class FinalRefCountedObject final : public T { // // auto p = scoped_refptr(new RefCountedObject("bar", 123)); // -// If the class does not inherit from RefCountInterface, but does have -// AddRef/Release methods (so a T* is convertible to rtc::scoped_refptr), this -// is equivalent to just -// -// auto p = scoped_refptr(new Foo("bar", 123)); -// -// Otherwise, the example is equivalent to: +// If the class does not inherit from RefCountInterface, the example is +// equivalent to: // // auto p = scoped_refptr>( // new FinalRefCountedObject("bar", 123)); @@ -143,40 +122,24 @@ class FinalRefCountedObject final : public T { // needed. // `make_ref_counted` for classes that are convertible to RefCountInterface. -template , - T>::type* = nullptr> -scoped_refptr make_ref_counted(Args&&... args) { - return scoped_refptr(new RefCountedObject(std::forward(args)...)); -} - -// `make_ref_counted` for complete classes that are not convertible to -// RefCountInterface and already carry a ref count. template < typename T, typename... Args, - typename std::enable_if< - !std::is_convertible_v && - webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, - T>::type* = nullptr> + typename std::enable_if::value, + T>::type* = nullptr> scoped_refptr make_ref_counted(Args&&... args) { - return scoped_refptr(new T(std::forward(args)...)); + return new RefCountedObject(std::forward(args)...); } // `make_ref_counted` for complete classes that are not convertible to -// RefCountInterface and have no ref count of their own. +// RefCountInterface. template < typename T, typename... Args, - typename std::enable_if< - !std::is_convertible_v && - !webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, - - T>::type* = nullptr> + typename std::enable_if::value, + T>::type* = nullptr> scoped_refptr> make_ref_counted(Args&&... 
args) { - return scoped_refptr>( - new FinalRefCountedObject(std::forward(args)...)); + return new FinalRefCountedObject(std::forward(args)...); } // `Ref<>`, `Ref<>::Type` and `Ref<>::Ptr`: @@ -225,7 +188,7 @@ scoped_refptr> make_ref_counted(Args&&... args) { template struct Ref { typedef typename std::conditional< - webrtc_make_ref_counted_internal::HasAddRefAndRelease::value, + std::is_convertible::value, T, FinalRefCountedObject>::type Type; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h index 84d791edd..241bd72a1 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rolling_accumulator.h @@ -17,6 +17,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/numerics/running_statistics.h" namespace rtc { @@ -34,9 +35,6 @@ class RollingAccumulator { } ~RollingAccumulator() {} - RollingAccumulator(const RollingAccumulator&) = delete; - RollingAccumulator& operator=(const RollingAccumulator&) = delete; - size_t max_count() const { return samples_.size(); } size_t count() const { return static_cast(stats_.Size()); } @@ -138,6 +136,8 @@ class RollingAccumulator { mutable T min_; mutable bool min_stale_; std::vector samples_; + + RTC_DISALLOW_COPY_AND_ASSIGN(RollingAccumulator); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc index e9137f4ab..496b4ac4b 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate.cc @@ -13,7 +13,6 @@ #include #include "rtc_base/checks.h" -#include "rtc_base/ref_counted_object.h" #include "rtc_base/ssl_certificate.h" #include "rtc_base/ssl_identity.h" #include "rtc_base/time_utils.h" @@ -22,9 +21,7 @@ namespace rtc { scoped_refptr RTCCertificate::Create( std::unique_ptr identity) { - // Explicit new to access proteced constructor. - return rtc::scoped_refptr( - new RTCCertificate(identity.release())); + return new RTCCertificate(identity.release()); } RTCCertificate::RTCCertificate(SSLIdentity* identity) : identity_(identity) { @@ -64,7 +61,7 @@ scoped_refptr RTCCertificate::FromPEM( SSLIdentity::CreateFromPEMStrings(pem.private_key(), pem.certificate())); if (!identity) return nullptr; - return RTCCertificate::Create(std::move(identity)); + return new RTCCertificate(identity.release()); } bool RTCCertificate::operator==(const RTCCertificate& certificate) const { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc index 09cd279aa..16ff23c74 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/rtc_certificate_generator.cc @@ -79,13 +79,13 @@ void RTCCertificateGenerator::GenerateCertificateAsync( // Create a new `RTCCertificateGenerationTask` for this generation request. It // is reference counted and referenced by the message data, ensuring it lives // until the task has completed (independent of `RTCCertificateGenerator`). 
- worker_thread_->PostTask([key_params, expires_ms, - signaling_thread = signaling_thread_, - cb = callback]() { + worker_thread_->PostTask(RTC_FROM_HERE, [key_params, expires_ms, + signaling_thread = signaling_thread_, + cb = callback]() { scoped_refptr certificate = RTCCertificateGenerator::GenerateCertificate(key_params, expires_ms); signaling_thread->PostTask( - [cert = std::move(certificate), cb = std::move(cb)]() { + RTC_FROM_HERE, [cert = std::move(certificate), cb = std::move(cb)]() { cert ? cb->OnSuccess(cert) : cb->OnFailure(); }); }); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h b/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h index b18c7a6a6..07e963675 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/server_socket_adapters.h @@ -31,11 +31,9 @@ class AsyncSSLServerSocket : public BufferedReadAdapter { public: explicit AsyncSSLServerSocket(Socket* socket); - AsyncSSLServerSocket(const AsyncSSLServerSocket&) = delete; - AsyncSSLServerSocket& operator=(const AsyncSSLServerSocket&) = delete; - protected: void ProcessInput(char* data, size_t* len) override; + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSSLServerSocket); }; // Implements a proxy server socket for the SOCKS protocol. @@ -43,10 +41,6 @@ class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket { public: explicit AsyncSocksProxyServerSocket(Socket* socket); - AsyncSocksProxyServerSocket(const AsyncSocksProxyServerSocket&) = delete; - AsyncSocksProxyServerSocket& operator=(const AsyncSocksProxyServerSocket&) = - delete; - private: void ProcessInput(char* data, size_t* len) override; void DirectSend(const ByteBufferWriter& buf); @@ -70,6 +64,7 @@ class AsyncSocksProxyServerSocket : public AsyncProxyServerSocket { SS_ERROR }; State state_; + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSocksProxyServerSocket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h index 92483c0b8..58be511ef 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslot_tester.h @@ -38,6 +38,7 @@ // EXPECT_EQ("hello", capture); // /* See unit-tests for more examples */ +#include "rtc_base/constructor_magic.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -49,14 +50,13 @@ class SigslotTester0 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester0::OnSignalCallback); } - SigslotTester0(const SigslotTester0&) = delete; - SigslotTester0& operator=(const SigslotTester0&) = delete; - int callback_count() const { return callback_count_; } private: void OnSignalCallback() { callback_count_++; } int callback_count_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester0); }; // Versions below are for testing signals that pass arguments. 
For all the @@ -74,9 +74,6 @@ class SigslotTester1 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester1::OnSignalCallback); } - SigslotTester1(const SigslotTester1&) = delete; - SigslotTester1& operator=(const SigslotTester1&) = delete; - int callback_count() const { return callback_count_; } private: @@ -87,6 +84,8 @@ class SigslotTester1 : public sigslot::has_slots<> { int callback_count_; C1* capture1_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester1); }; template @@ -97,9 +96,6 @@ class SigslotTester2 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester2::OnSignalCallback); } - SigslotTester2(const SigslotTester2&) = delete; - SigslotTester2& operator=(const SigslotTester2&) = delete; - int callback_count() const { return callback_count_; } private: @@ -112,6 +108,8 @@ class SigslotTester2 : public sigslot::has_slots<> { int callback_count_; C1* capture1_; C2* capture2_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester2); }; template @@ -128,9 +126,6 @@ class SigslotTester3 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester3::OnSignalCallback); } - SigslotTester3(const SigslotTester3&) = delete; - SigslotTester3& operator=(const SigslotTester3&) = delete; - int callback_count() const { return callback_count_; } private: @@ -145,6 +140,8 @@ class SigslotTester3 : public sigslot::has_slots<> { C1* capture1_; C2* capture2_; C3* capture3_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester3); }; template { signal->connect(this, &SigslotTester4::OnSignalCallback); } - SigslotTester4(const SigslotTester4&) = delete; - SigslotTester4& operator=(const SigslotTester4&) = delete; - int callback_count() const { return callback_count_; } private: @@ -189,6 +183,8 @@ class SigslotTester4 : public sigslot::has_slots<> { C2* capture2_; C3* capture3_; C4* capture4_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester4); }; template { signal->connect(this, &SigslotTester5::OnSignalCallback); } - SigslotTester5(const SigslotTester5&) = delete; - SigslotTester5& operator=(const SigslotTester5&) = delete; - int callback_count() const { return callback_count_; } private: @@ -239,6 +232,8 @@ class SigslotTester5 : public sigslot::has_slots<> { C3* capture3_; C4* capture4_; C5* capture5_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester5); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump index c3d2d6e99..0a1f41128 100755 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/sigslottester.h.pump @@ -35,6 +35,7 @@ // EXPECT_EQ("hello", capture); // /* See unit-tests for more examples */ +#include "rtc_base/constructor_magic.h" #include "rtc_base/third_party/sigslot/sigslot.h" namespace rtc { @@ -46,14 +47,13 @@ class SigslotTester0 : public sigslot::has_slots<> { signal->connect(this, &SigslotTester0::OnSignalCallback); } - SigslotTester0(const SigslotTester0&) = delete; - SigslotTester0& operator=(const SigslotTester0&) = delete; - int callback_count() const { return callback_count_; } private: void OnSignalCallback() { callback_count_++; } int callback_count_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester0); }; // Versions below are for testing signals that pass arguments. 
For all the @@ -78,9 +78,6 @@ class SigslotTester$i : public sigslot::has_slots<> { signal->connect(this, &SigslotTester$i::OnSignalCallback); } - SigslotTester$i(const SigslotTester$i&) = delete; - SigslotTester$i& operator=(const SigslotTester$i&) = delete; - int callback_count() const { return callback_count_; } private: @@ -94,6 +91,9 @@ class SigslotTester$i : public sigslot::has_slots<> { int callback_count_;$for j [[ C$j* capture$j[[]]_;]] + + + RTC_DISALLOW_COPY_AND_ASSIGN(SigslotTester$i); }; ]] diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h index 0ed3a7fa6..648211763 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket.h @@ -25,6 +25,7 @@ #include "rtc_base/win32.h" #endif +#include "rtc_base/constructor_magic.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -82,9 +83,6 @@ class Socket { public: virtual ~Socket() {} - Socket(const Socket&) = delete; - Socket& operator=(const Socket&) = delete; - // Returns the address to which the socket is bound. If the socket is not // bound, then the any-address is returned. virtual SocketAddress GetLocalAddress() const = 0; @@ -140,6 +138,9 @@ class Socket { protected: Socket() {} + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(Socket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h index 55f62115d..67d3bbff7 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_adapters.h @@ -15,6 +15,7 @@ #include "api/array_view.h" #include "rtc_base/async_socket.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/crypt_string.h" namespace rtc { @@ -33,9 +34,6 @@ class BufferedReadAdapter : public AsyncSocketAdapter { BufferedReadAdapter(Socket* socket, size_t buffer_size); ~BufferedReadAdapter() override; - BufferedReadAdapter(const BufferedReadAdapter&) = delete; - BufferedReadAdapter& operator=(const BufferedReadAdapter&) = delete; - int Send(const void* pv, size_t cb) override; int Recv(void* pv, size_t cb, int64_t* timestamp) override; @@ -53,6 +51,7 @@ class BufferedReadAdapter : public AsyncSocketAdapter { char* buffer_; size_t buffer_size_, data_len_; bool buffering_; + RTC_DISALLOW_COPY_AND_ASSIGN(BufferedReadAdapter); }; /////////////////////////////////////////////////////////////////////////////// @@ -66,14 +65,12 @@ class AsyncSSLSocket : public BufferedReadAdapter { explicit AsyncSSLSocket(Socket* socket); - AsyncSSLSocket(const AsyncSSLSocket&) = delete; - AsyncSSLSocket& operator=(const AsyncSSLSocket&) = delete; - int Connect(const SocketAddress& addr) override; protected: void OnConnectEvent(Socket* socket) override; void ProcessInput(char* data, size_t* len) override; + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSSLSocket); }; /////////////////////////////////////////////////////////////////////////////// @@ -88,9 +85,6 @@ class AsyncHttpsProxySocket : public BufferedReadAdapter { const CryptString& password); ~AsyncHttpsProxySocket() override; - AsyncHttpsProxySocket(const AsyncHttpsProxySocket&) = delete; - AsyncHttpsProxySocket& operator=(const AsyncHttpsProxySocket&) = delete; - // If connect is forced, the adapter will always issue an HTTP CONNECT to the // target address. Otherwise, it will connect only if the destination port // is not port 80. 
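Most of the hunks in this stretch of the patch replace explicitly deleted copy operations with the RTC_DISALLOW_COPY_AND_ASSIGN macro from rtc_base/constructor_magic.h; this part of the patch appears to roll these files back to an older WebRTC revision where the macro was still in use. The two spellings are equivalent. The sketch below, using a hypothetical Widget class that is not part of the patch, shows what the swap amounts to.

// Hypothetical illustration; only the macro and its header come from the hunks above.
#include "rtc_base/constructor_magic.h"

class Widget {
 public:
  Widget() = default;

  // Spelling removed by the patch (current upstream style):
  // Widget(const Widget&) = delete;
  // Widget& operator=(const Widget&) = delete;

 private:
  // Spelling restored by the patch. The macro expands to the same two deleted
  // members, so copying a Widget fails to compile either way.
  RTC_DISALLOW_COPY_AND_ASSIGN(Widget);
};

The only visible difference is placement: the macro conventionally sits at the end of the private section, while the deleted members were declared public. That changes the diagnostic a caller sees, not whether copying is allowed.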
@@ -134,6 +128,7 @@ class AsyncHttpsProxySocket : public BufferedReadAdapter { } state_; HttpAuthContext* context_; std::string unknown_mechanisms_; + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncHttpsProxySocket); }; /////////////////////////////////////////////////////////////////////////////// @@ -147,9 +142,6 @@ class AsyncSocksProxySocket : public BufferedReadAdapter { const CryptString& password); ~AsyncSocksProxySocket() override; - AsyncSocksProxySocket(const AsyncSocksProxySocket&) = delete; - AsyncSocksProxySocket& operator=(const AsyncSocksProxySocket&) = delete; - int Connect(const SocketAddress& addr) override; SocketAddress GetRemoteAddress() const override; int Close() override; @@ -170,6 +162,7 @@ class AsyncSocksProxySocket : public BufferedReadAdapter { SocketAddress proxy_, dest_; std::string user_; CryptString pass_; + RTC_DISALLOW_COPY_AND_ASSIGN(AsyncSocksProxySocket); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h index f678f805d..266a6e6fe 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/socket_stream.h @@ -13,6 +13,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/stream.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -26,9 +27,6 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { explicit SocketStream(Socket* socket); ~SocketStream() override; - SocketStream(const SocketStream&) = delete; - SocketStream& operator=(const SocketStream&) = delete; - void Attach(Socket* socket); Socket* Detach(); @@ -55,6 +53,8 @@ class SocketStream : public StreamInterface, public sigslot::has_slots<> { void OnCloseEvent(Socket* socket, int err); Socket* socket_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SocketStream); }; /////////////////////////////////////////////////////////////////////////////// diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h index d0e60ee9c..3b3f24fb9 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/ssl_certificate.h @@ -17,12 +17,12 @@ #include #include - #include #include #include #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -101,9 +101,6 @@ class RTC_EXPORT SSLCertChain final { ~SSLCertChain(); - SSLCertChain(const SSLCertChain&) = delete; - SSLCertChain& operator=(const SSLCertChain&) = delete; - // Vector access methods. 
size_t GetSize() const { return certs_.size(); } @@ -121,6 +118,8 @@ class RTC_EXPORT SSLCertChain final { private: std::vector> certs_; + + RTC_DISALLOW_COPY_AND_ASSIGN(SSLCertChain); }; // SSLCertificateVerifier provides a simple interface to allow third parties to diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h index c0ceb4e36..70de65a75 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/stream.h @@ -14,6 +14,7 @@ #include #include "rtc_base/buffer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/message_handler.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -51,9 +52,6 @@ class RTC_EXPORT StreamInterface { public: virtual ~StreamInterface() {} - StreamInterface(const StreamInterface&) = delete; - StreamInterface& operator=(const StreamInterface&) = delete; - virtual StreamState GetState() const = 0; // Read attempts to fill buffer of size buffer_len. Write attempts to send @@ -112,6 +110,9 @@ class RTC_EXPORT StreamInterface { protected: StreamInterface(); + + private: + RTC_DISALLOW_COPY_AND_ASSIGN(StreamInterface); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc index 85fb99250..364eaa0f0 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.cc @@ -214,27 +214,19 @@ std::string join(const std::vector& source, char delimiter) { return joined_string; } -std::vector split(absl::string_view source, char delimiter) { - std::vector fields; - size_t last = 0; - for (size_t i = 0; i < source.length(); ++i) { - if (source[i] == delimiter) { - fields.push_back(source.substr(last, i - last)); - last = i + 1; - } - } - fields.push_back(source.substr(last)); - return fields; -} - size_t split(absl::string_view source, char delimiter, std::vector* fields) { RTC_DCHECK(fields); fields->clear(); - for (const absl::string_view field_view : split(source, delimiter)) { - fields->emplace_back(field_view); + size_t last = 0; + for (size_t i = 0; i < source.length(); ++i) { + if (source[i] == delimiter) { + fields->emplace_back(source.substr(last, i - last)); + last = i + 1; + } } + fields->emplace_back(source.substr(last)); return fields->size(); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h index c63d5271f..356fdfaaf 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_encode.h @@ -62,10 +62,7 @@ size_t hex_decode_with_delimiter(char* buffer, std::string join(const std::vector& source, char delimiter); // Splits the source string into multiple fields separated by delimiter, -// with duplicates of delimiter creating empty fields. Empty input produces a -// single, empty, field. -std::vector split(absl::string_view source, char delimiter); - +// with duplicates of delimiter creating empty fields. size_t split(absl::string_view source, char delimiter, std::vector* fields); @@ -80,7 +77,7 @@ size_t tokenize(absl::string_view source, // duplicates of delimiter ignored. Return false if the delimiter could not be // found, otherwise return true. 
bool tokenize_first(absl::string_view source, - char delimiter, + const char delimiter, std::string* token, std::string* rest); diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h index 6e8b0b599..d844e5e12 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/string_utils.h @@ -86,7 +86,7 @@ inline std::string ToUtf8(const std::wstring& wstr) { std::string string_trim(const std::string& s); // TODO(jonasolsson): replace with absl::Hex when that becomes available. -std::string ToHex(int i); +std::string ToHex(const int i); // CompileTimeString comprises of a string-like object which can be used as a // regular const char* in compile time and supports concatenation. Useful for diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc index f0dbdb3ae..965a4d8c6 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.cc @@ -38,10 +38,4 @@ void TaskQueue::PostDelayedTask(std::unique_ptr task, return impl_->PostDelayedTask(std::move(task), milliseconds); } -void TaskQueue::PostDelayedHighPrecisionTask( - std::unique_ptr task, - uint32_t milliseconds) { - return impl_->PostDelayedHighPrecisionTask(std::move(task), milliseconds); -} - } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h index 4ad3fedd9..86d35976c 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue.h @@ -20,6 +20,7 @@ #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "api/task_queue/task_queue_factory.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" #include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" @@ -82,9 +83,6 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueue { webrtc::TaskQueueDeleter> task_queue); ~TaskQueue(); - TaskQueue(const TaskQueue&) = delete; - TaskQueue& operator=(const TaskQueue&) = delete; - // Used for DCHECKing the current queue. bool IsCurrent() const; @@ -95,11 +93,14 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueue { // Ownership of the task is passed to PostTask. void PostTask(std::unique_ptr task); - // See webrtc::TaskQueueBase for precision expectations. + + // Schedules a task to execute a specified number of milliseconds from when + // the call is made. The precision should be considered as "best effort" + // and in some cases, such as on Windows when all high precision timers have + // been used up, can be off by as much as 15 millseconds (although 8 would be + // more likely). This can be mitigated by limiting the use of delayed tasks. void PostDelayedTask(std::unique_ptr task, uint32_t milliseconds); - void PostDelayedHighPrecisionTask(std::unique_ptr task, - uint32_t milliseconds); // std::enable_if is used here to make sure that calls to PostTask() with // std::unique_ptr would not end up being @@ -111,6 +112,8 @@ class RTC_LOCKABLE RTC_EXPORT TaskQueue { void PostTask(Closure&& closure) { PostTask(webrtc::ToQueuedTask(std::forward(closure))); } + + // See documentation above for performance expectations. 
template (closure)), milliseconds); } - template >::value>::type* = nullptr> - void PostDelayedHighPrecisionTask(Closure&& closure, uint32_t milliseconds) { - PostDelayedHighPrecisionTask( - webrtc::ToQueuedTask(std::forward(closure)), milliseconds); - } private: webrtc::TaskQueueBase* const impl_; + + RTC_DISALLOW_COPY_AND_ASSIGN(TaskQueue); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc deleted file mode 100644 index cb6b23cea..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.cc +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "rtc_base/task_queue_for_test.h" - -#include "api/task_queue/default_task_queue_factory.h" - -namespace webrtc { - -TaskQueueForTest::TaskQueueForTest(absl::string_view name, Priority priority) - : TaskQueue( - CreateDefaultTaskQueueFactory()->CreateTaskQueue(name, priority)) {} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h deleted file mode 100644 index 97ca254d6..000000000 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_for_test.h +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright 2018 The WebRTC Project Authors. All rights reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef RTC_BASE_TASK_QUEUE_FOR_TEST_H_ -#define RTC_BASE_TASK_QUEUE_FOR_TEST_H_ - -#include - -#include "absl/strings/string_view.h" -#include "api/task_queue/task_queue_base.h" -#include "rtc_base/checks.h" -#include "rtc_base/event.h" -#include "rtc_base/location.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread_annotations.h" - -namespace webrtc { - -template -void SendTask(rtc::Location loc, TaskQueueBase* task_queue, Closure&& task) { - RTC_CHECK(!task_queue->IsCurrent()) - << "Called SendTask to a queue from the same queue at " << loc.ToString(); - rtc::Event event; - task_queue->PostTask( - ToQueuedTask(std::forward(task), [&event] { event.Set(); })); - RTC_CHECK(event.Wait(/*give_up_after_ms=*/rtc::Event::kForever, - /*warn_after_ms=*/10'000)) - << "Waited too long at " << loc.ToString(); -} - -class RTC_LOCKABLE TaskQueueForTest : public rtc::TaskQueue { - public: - using rtc::TaskQueue::TaskQueue; - explicit TaskQueueForTest(absl::string_view name = "TestQueue", - Priority priority = Priority::NORMAL); - TaskQueueForTest(const TaskQueueForTest&) = delete; - TaskQueueForTest& operator=(const TaskQueueForTest&) = delete; - ~TaskQueueForTest() = default; - - // A convenience, test-only method that blocks the current thread while - // a task executes on the task queue. 
- // This variant is specifically for posting custom QueuedTask derived - // implementations that tests do not want to pass ownership of over to the - // task queue (i.e. the Run() method always returns `false`.). - template - void SendTask(Closure* task) { - RTC_CHECK(!IsCurrent()); - rtc::Event event; - PostTask(ToQueuedTask( - [&task] { RTC_CHECK_EQ(false, static_cast(task)->Run()); }, - [&event] { event.Set(); })); - event.Wait(rtc::Event::kForever); - } - - // A convenience, test-only method that blocks the current thread while - // a task executes on the task queue. - template - void SendTask(Closure&& task, rtc::Location loc) { - ::webrtc::SendTask(loc, Get(), std::forward(task)); - } - - // Wait for the completion of all tasks posted prior to the - // WaitForPreviouslyPostedTasks() call. - void WaitForPreviouslyPostedTasks() { - // Post an empty task on the queue and wait for it to finish, to ensure - // that all already posted tasks on the queue get executed. - SendTask([]() {}, RTC_FROM_HERE); - } -}; - -} // namespace webrtc - -#endif // RTC_BASE_TASK_QUEUE_FOR_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc index dd14a7d8e..6382d6b15 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_queue_win.cc @@ -34,6 +34,7 @@ #include "api/task_queue/task_queue_base.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" @@ -121,9 +122,6 @@ class MultimediaTimer { ::CloseHandle(event_); } - MultimediaTimer(const MultimediaTimer&) = delete; - MultimediaTimer& operator=(const MultimediaTimer&) = delete; - bool StartOneShotTimer(UINT delay_ms) { RTC_DCHECK_EQ(0, timer_id_); RTC_DCHECK(event_ != nullptr); @@ -150,6 +148,8 @@ class MultimediaTimer { private: HANDLE event_ = nullptr; MMRESULT timer_id_ = 0; + + RTC_DISALLOW_COPY_AND_ASSIGN(MultimediaTimer); }; class TaskQueueWin : public TaskQueueBase { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc index 8bff213a0..57b3f6ce8 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.cc @@ -12,29 +12,23 @@ namespace webrtc { -// static -rtc::scoped_refptr PendingTaskSafetyFlag::CreateInternal( - bool alive) { - // Explicit new, to access private constructor. 
- return rtc::scoped_refptr( - new PendingTaskSafetyFlag(alive)); -} - // static rtc::scoped_refptr PendingTaskSafetyFlag::Create() { - return CreateInternal(true); + return new PendingTaskSafetyFlag(true); } rtc::scoped_refptr PendingTaskSafetyFlag::CreateDetached() { - rtc::scoped_refptr safety_flag = CreateInternal(true); + rtc::scoped_refptr safety_flag( + new PendingTaskSafetyFlag(true)); safety_flag->main_sequence_.Detach(); return safety_flag; } rtc::scoped_refptr PendingTaskSafetyFlag::CreateDetachedInactive() { - rtc::scoped_refptr safety_flag = CreateInternal(false); + rtc::scoped_refptr safety_flag( + new PendingTaskSafetyFlag(false)); safety_flag->main_sequence_.Detach(); return safety_flag; } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h index 58772bcbb..6446bfe55 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/pending_task_safety_flag.h @@ -93,8 +93,6 @@ class PendingTaskSafetyFlag final explicit PendingTaskSafetyFlag(bool alive) : alive_(alive) {} private: - static rtc::scoped_refptr CreateInternal(bool alive); - bool alive_ = true; RTC_NO_UNIQUE_ADDRESS SequenceChecker main_sequence_; }; diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc index c07df4567..1f3eb1d06 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.cc @@ -21,12 +21,10 @@ namespace webrtc_repeating_task_impl { RepeatingTaskBase::RepeatingTaskBase( TaskQueueBase* task_queue, - TaskQueueBase::DelayPrecision precision, TimeDelta first_delay, Clock* clock, rtc::scoped_refptr alive_flag) : task_queue_(task_queue), - precision_(precision), clock_(clock), next_run_time_(clock_->CurrentTime() + first_delay), alive_flag_(std::move(alive_flag)) {} @@ -40,21 +38,19 @@ bool RepeatingTaskBase::Run() { return true; TimeDelta delay = RunClosure(); - RTC_DCHECK_GE(delay, TimeDelta::Zero()); - // A delay of +infinity means that the task should not be run again. - // Alternatively, the closure might have stopped this task. In either which - // case we return true to destruct this object. - if (delay.IsPlusInfinity() || !alive_flag_->alive()) + // The closure might have stopped this task, in which case we return true to + // destruct this object. + if (!alive_flag_->alive()) return true; + RTC_DCHECK(delay.IsFinite()); TimeDelta lost_time = clock_->CurrentTime() - next_run_time_; next_run_time_ += delay; delay -= lost_time; delay = std::max(delay, TimeDelta::Zero()); - task_queue_->PostDelayedTaskWithPrecision(precision_, absl::WrapUnique(this), - delay.ms()); + task_queue_->PostDelayedTask(absl::WrapUnique(this), delay.ms()); // Return false to tell the TaskQueue to not destruct this object since we // have taken ownership with absl::WrapUnique. 
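The pending_task_safety_flag hunks above drop the private CreateInternal() helper and construct the flag directly, and the earlier network.cc hunks remove the flag from BasicNetworkManager in favour of MessageHandler-style posting. For readers unfamiliar with the pattern, a minimal usage sketch follows. The Pinger class, its worker_thread_ member and OnTick() method are invented for illustration; PendingTaskSafetyFlag, Create(), SetNotAlive() and ToQueuedTask() are the APIs visible in the hunks above.

#include "api/scoped_refptr.h"
#include "rtc_base/task_utils/pending_task_safety_flag.h"
#include "rtc_base/task_utils/to_queued_task.h"
#include "rtc_base/thread.h"

class Pinger {
 public:
  explicit Pinger(rtc::Thread* worker_thread)
      : worker_thread_(worker_thread),
        safety_(webrtc::PendingTaskSafetyFlag::Create()) {}

  ~Pinger() {
    // Once the flag is marked not-alive, any still-pending guarded task
    // becomes a no-op instead of touching the destroyed object.
    safety_->SetNotAlive();
  }

  void Ping() {
    // The task only runs if the flag is still alive when it is dequeued.
    worker_thread_->PostTask(
        webrtc::ToQueuedTask(safety_, [this] { OnTick(); }));
  }

 private:
  void OnTick() { /* runs on worker_thread_ while the object is alive */ }

  rtc::Thread* const worker_thread_;
  rtc::scoped_refptr<webrtc::PendingTaskSafetyFlag> safety_;
};

Either mechanism guards against use-after-free: the flag-based form that the network.cc hunks remove needs no cleanup beyond SetNotAlive(), while the MessageHandler form restored there relies on the thread_->Clear(this) call in StopUpdating() and matches the older rtc::Thread API used throughout this snapshot.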
diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h index 20f28d54f..91a40e071 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/task_utils/repeating_task.h @@ -34,7 +34,6 @@ void RepeatingTaskImplDTraceProbeRun(); class RepeatingTaskBase : public QueuedTask { public: RepeatingTaskBase(TaskQueueBase* task_queue, - TaskQueueBase::DelayPrecision precision, TimeDelta first_delay, Clock* clock, rtc::scoped_refptr alive_flag); @@ -46,7 +45,6 @@ class RepeatingTaskBase : public QueuedTask { bool Run() final; TaskQueueBase* const task_queue_; - const TaskQueueBase::DelayPrecision precision_; Clock* const clock_; // This is always finite. Timestamp next_run_time_ RTC_GUARDED_BY(task_queue_); @@ -54,21 +52,16 @@ class RepeatingTaskBase : public QueuedTask { RTC_GUARDED_BY(task_queue_); }; -// The template closure pattern is based on rtc::ClosureTask. The provided -// closure should have a TimeDelta return value, specifing the desired -// non-negative interval to next repetition, or TimeDelta::PlusInfinity to -// indicate that the task should be deleted and not called again. +// The template closure pattern is based on rtc::ClosureTask. template class RepeatingTaskImpl final : public RepeatingTaskBase { public: RepeatingTaskImpl(TaskQueueBase* task_queue, - TaskQueueBase::DelayPrecision precision, TimeDelta first_delay, Closure&& closure, Clock* clock, rtc::scoped_refptr alive_flag) : RepeatingTaskBase(task_queue, - precision, first_delay, clock, std::move(alive_flag)), @@ -110,20 +103,17 @@ class RepeatingTaskHandle { // owned by the TaskQueue and will live until it has been stopped or the // TaskQueue deletes it. It's perfectly fine to destroy the handle while the // task is running, since the repeated task is owned by the TaskQueue. - // The tasks are scheduled onto the task queue using the specified precision. 
template static RepeatingTaskHandle Start(TaskQueueBase* task_queue, Closure&& closure, - TaskQueueBase::DelayPrecision precision = - TaskQueueBase::DelayPrecision::kLow, Clock* clock = Clock::GetRealTimeClock()) { auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeStart(); task_queue->PostTask( std::make_unique< webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, precision, TimeDelta::Zero(), - std::forward(closure), clock, alive_flag)); + task_queue, TimeDelta::Zero(), std::forward(closure), + clock, alive_flag)); return RepeatingTaskHandle(std::move(alive_flag)); } @@ -134,17 +124,14 @@ class RepeatingTaskHandle { TaskQueueBase* task_queue, TimeDelta first_delay, Closure&& closure, - TaskQueueBase::DelayPrecision precision = - TaskQueueBase::DelayPrecision::kLow, Clock* clock = Clock::GetRealTimeClock()) { auto alive_flag = PendingTaskSafetyFlag::CreateDetached(); webrtc_repeating_task_impl::RepeatingTaskHandleDTraceProbeDelayedStart(); - task_queue->PostDelayedTaskWithPrecision( - precision, + task_queue->PostDelayedTask( std::make_unique< webrtc_repeating_task_impl::RepeatingTaskImpl>( - task_queue, precision, first_delay, std::forward(closure), - clock, alive_flag), + task_queue, first_delay, std::forward(closure), clock, + alive_flag), first_delay.ms()); return RepeatingTaskHandle(std::move(alive_flag)); } diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h b/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h index dd91d37ab..6989fe1d5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/test_client.h @@ -15,6 +15,7 @@ #include #include "rtc_base/async_udp_socket.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/fake_clock.h" #include "rtc_base/synchronization/mutex.h" @@ -52,9 +53,6 @@ class TestClient : public sigslot::has_slots<> { ThreadProcessingFakeClock* fake_clock); ~TestClient() override; - TestClient(const TestClient&) = delete; - TestClient& operator=(const TestClient&) = delete; - SocketAddress address() const { return socket_->GetLocalAddress(); } SocketAddress remote_address() const { return socket_->GetRemoteAddress(); } @@ -112,6 +110,7 @@ class TestClient : public sigslot::has_slots<> { std::vector> packets_; int ready_to_send_count_ = 0; int64_t prev_packet_timestamp_; + RTC_DISALLOW_COPY_AND_ASSIGN(TestClient); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h index ba5f99728..a061ed0ce 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/test_echo_server.h @@ -20,6 +20,7 @@ #include "absl/algorithm/container.h" #include "rtc_base/async_packet_socket.h" #include "rtc_base/async_tcp_socket.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/socket.h" #include "rtc_base/socket_address.h" #include "rtc_base/third_party/sigslot/sigslot.h" @@ -34,9 +35,6 @@ class TestEchoServer : public sigslot::has_slots<> { TestEchoServer(Thread* thread, const SocketAddress& addr); ~TestEchoServer() override; - TestEchoServer(const TestEchoServer&) = delete; - TestEchoServer& operator=(const TestEchoServer&) = delete; - SocketAddress address() const { return server_socket_->GetLocalAddress(); } private: @@ -66,6 +64,7 @@ class TestEchoServer : public sigslot::has_slots<> { typedef std::list ClientList; std::unique_ptr server_socket_; ClientList 
client_sockets_; + RTC_DISALLOW_COPY_AND_ASSIGN(TestEchoServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc index 307d49925..46e082f8f 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.cc @@ -75,9 +75,6 @@ class MessageHandlerWithTask final : public MessageHandler { public: MessageHandlerWithTask() {} - MessageHandlerWithTask(const MessageHandlerWithTask&) = delete; - MessageHandlerWithTask& operator=(const MessageHandlerWithTask&) = delete; - void OnMessage(Message* msg) override { static_cast(msg->pdata)->Run(); delete msg->pdata; @@ -85,6 +82,8 @@ class MessageHandlerWithTask final : public MessageHandler { private: ~MessageHandlerWithTask() override {} + + RTC_DISALLOW_COPY_AND_ASSIGN(MessageHandlerWithTask); }; class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { @@ -101,12 +100,11 @@ class RTC_SCOPED_LOCKABLE MarkProcessingCritScope { cs_->Leave(); } - MarkProcessingCritScope(const MarkProcessingCritScope&) = delete; - MarkProcessingCritScope& operator=(const MarkProcessingCritScope&) = delete; - private: const RecursiveCriticalSection* const cs_; size_t* processing_; + + RTC_DISALLOW_COPY_AND_ASSIGN(MarkProcessingCritScope); }; } // namespace @@ -255,11 +253,19 @@ Thread* Thread::Current() { ThreadManager* manager = ThreadManager::Instance(); Thread* thread = manager->CurrentThread(); +#ifndef NO_MAIN_THREAD_WRAPPING + // Only autowrap the thread which instantiated the ThreadManager. + if (!thread && manager->IsMainThread()) { + thread = new Thread(CreateDefaultSocketServer()); + thread->WrapCurrentWithThreadManager(manager, true); + } +#endif + return thread; } #if defined(WEBRTC_POSIX) -ThreadManager::ThreadManager() { +ThreadManager::ThreadManager() : main_thread_ref_(CurrentThreadRef()) { #if defined(WEBRTC_MAC) InitCocoaMultiThreading(); #endif @@ -276,7 +282,8 @@ void ThreadManager::SetCurrentThreadInternal(Thread* thread) { #endif #if defined(WEBRTC_WIN) -ThreadManager::ThreadManager() : key_(TlsAlloc()) {} +ThreadManager::ThreadManager() + : key_(TlsAlloc()), main_thread_ref_(CurrentThreadRef()) {} Thread* ThreadManager::CurrentThread() { return static_cast(TlsGetValue(key_)); @@ -332,6 +339,10 @@ void ThreadManager::UnwrapCurrentThread() { } } +bool ThreadManager::IsMainThread() { + return IsThreadRefEqual(CurrentThreadRef(), main_thread_ref_); +} + Thread::ScopedDisallowBlockingCalls::ScopedDisallowBlockingCalls() : thread_(Thread::Current()), previous_state_(thread_->SetAllowBlockingCalls(false)) {} @@ -1096,16 +1107,10 @@ void Thread::PostTask(std::unique_ptr task) { void Thread::PostDelayedTask(std::unique_ptr task, uint32_t milliseconds) { - // This implementation does not support low precision yet. - PostDelayedHighPrecisionTask(std::move(task), milliseconds); -} - -void Thread::PostDelayedHighPrecisionTask( - std::unique_ptr task, - uint32_t milliseconds) { // Though PostDelayed takes MessageData by raw pointer (last parameter), // it still takes it with ownership. - PostDelayed(RTC_FROM_HERE, milliseconds, &queued_task_handler_, /*id=*/0, + PostDelayed(RTC_FROM_HERE, milliseconds, &queued_task_handler_, + /*id=*/0, new ScopedMessageData(std::move(task))); } @@ -1223,6 +1228,11 @@ AutoSocketServerThread::AutoSocketServerThread(SocketServer* ss) AutoSocketServerThread::~AutoSocketServerThread() { RTC_DCHECK(ThreadManager::Instance()->CurrentThread() == this); + // Some tests post destroy messages to this thread. 
To avoid memory + // leaks, we have to process those messages. In particular + // P2PTransportChannelPingTest, relying on the message posted in + // cricket::Connection::Destroy. + ProcessMessages(0); // Stop and destroy the thread before clearing it as the current thread. // Sometimes there are messages left in the Thread that will be // destroyed by DoDestroy, and sometimes the destructors of the message and/or diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h index 3c4ed558c..38e9732fb 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/thread.h @@ -25,18 +25,17 @@ #if defined(WEBRTC_POSIX) #include #endif -#include "absl/base/attributes.h" #include "api/function_view.h" #include "api/task_queue/queued_task.h" #include "api/task_queue/task_queue_base.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/deprecated/recursive_critical_section.h" #include "rtc_base/location.h" #include "rtc_base/message_handler.h" #include "rtc_base/platform_thread_types.h" #include "rtc_base/socket_server.h" #include "rtc_base/system/rtc_export.h" -#include "rtc_base/task_utils/to_queued_task.h" #include "rtc_base/thread_annotations.h" #include "rtc_base/thread_message.h" @@ -90,15 +89,14 @@ class MessageWithFunctor final : public MessageLikeTask { explicit MessageWithFunctor(FunctorT&& functor) : functor_(std::forward(functor)) {} - MessageWithFunctor(const MessageWithFunctor&) = delete; - MessageWithFunctor& operator=(const MessageWithFunctor&) = delete; - void Run() override { functor_(); } private: ~MessageWithFunctor() override {} typename std::remove_reference::type functor_; + + RTC_DISALLOW_COPY_AND_ASSIGN(MessageWithFunctor); }; } // namespace rtc_thread_internal @@ -141,6 +139,8 @@ class RTC_EXPORT ThreadManager { Thread* WrapCurrentThread(); void UnwrapCurrentThread(); + bool IsMainThread(); + #if RTC_DCHECK_IS_ON // Registers that a Send operation is to be performed between `source` and // `target`, while checking that this does not cause a send cycle that could @@ -152,9 +152,6 @@ class RTC_EXPORT ThreadManager { ThreadManager(); ~ThreadManager(); - ThreadManager(const ThreadManager&) = delete; - ThreadManager& operator=(const ThreadManager&) = delete; - void SetCurrentThreadInternal(Thread* thread); void AddInternal(Thread* message_queue); void RemoveInternal(Thread* message_queue); @@ -186,6 +183,11 @@ class RTC_EXPORT ThreadManager { #if defined(WEBRTC_WIN) const DWORD key_; #endif + + // The thread to potentially autowrap. + const PlatformThreadRef main_thread_ref_; + + RTC_DISALLOW_COPY_AND_ASSIGN(ThreadManager); }; // WARNING! SUBCLASSES MUST CALL Stop() IN THEIR DESTRUCTORS! See ~Thread(). @@ -219,9 +221,6 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // calling Clear on the object from a different thread. ~Thread() override; - Thread(const Thread&) = delete; - Thread& operator=(const Thread&) = delete; - static std::unique_ptr CreateWithSocketServer(); static std::unique_ptr Create(); static Thread* Current(); @@ -415,39 +414,62 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { // true. bool IsInvokeToThreadAllowed(rtc::Thread* target); + // Posts a task to invoke the functor on `this` thread asynchronously, i.e. + // without blocking the thread that invoked PostTask(). Ownership of `functor` + // is passed and (usually, see below) destroyed on `this` thread after it is + // invoked. 
+ // Requirements of FunctorT: + // - FunctorT is movable. + // - FunctorT implements "T operator()()" or "T operator()() const" for some T + // (if T is not void, the return value is discarded on `this` thread). + // - FunctorT has a public destructor that can be invoked from `this` thread + // after operation() has been invoked. + // - The functor must not cause the thread to quit before PostTask() is done. + // + // Destruction of the functor/task mimics what TaskQueue::PostTask does: If + // the task is run, it will be destroyed on `this` thread. However, if there + // are pending tasks by the time the Thread is destroyed, or a task is posted + // to a thread that is quitting, the task is destroyed immediately, on the + // calling thread. Destroying the Thread only blocks for any currently running + // task to complete. Note that TQ abstraction is even vaguer on how + // destruction happens in these cases, allowing destruction to happen + // asynchronously at a later time and on some arbitrary thread. So to ease + // migration, don't depend on Thread::PostTask destroying un-run tasks + // immediately. + // + // Example - Calling a class method: + // class Foo { + // public: + // void DoTheThing(); + // }; + // Foo foo; + // thread->PostTask(RTC_FROM_HERE, Bind(&Foo::DoTheThing, &foo)); + // + // Example - Calling a lambda function: + // thread->PostTask(RTC_FROM_HERE, + // [&x, &y] { x.TrackComputations(y.Compute()); }); + template + void PostTask(const Location& posted_from, FunctorT&& functor) { + Post(posted_from, GetPostTaskMessageHandler(), /*id=*/0, + new rtc_thread_internal::MessageWithFunctor( + std::forward(functor))); + } + template + void PostDelayedTask(const Location& posted_from, + FunctorT&& functor, + uint32_t milliseconds) { + PostDelayed(posted_from, milliseconds, GetPostTaskMessageHandler(), + /*id=*/0, + new rtc_thread_internal::MessageWithFunctor( + std::forward(functor))); + } + // From TaskQueueBase void PostTask(std::unique_ptr task) override; void PostDelayedTask(std::unique_ptr task, uint32_t milliseconds) override; - void PostDelayedHighPrecisionTask(std::unique_ptr task, - uint32_t milliseconds) override; void Delete() override; - // Helper methods to avoid having to do ToQueuedTask() at the calling places. 
- template >::value>::type* = nullptr> - void PostTask(Closure&& closure) { - PostTask(webrtc::ToQueuedTask(std::forward(closure))); - } - template >::value>::type* = nullptr> - void PostDelayedTask(Closure&& closure, uint32_t milliseconds) { - PostDelayedTask(webrtc::ToQueuedTask(std::forward(closure)), - milliseconds); - } - template >::value>::type* = nullptr> - void PostDelayedHighPrecisionTask(Closure&& closure, uint32_t milliseconds) { - PostDelayedHighPrecisionTask( - webrtc::ToQueuedTask(std::forward(closure)), milliseconds); - } - // ProcessMessages will process I/O and dispatch messages until: // 1) cms milliseconds have elapsed (returns true) // 2) Stop() is called (returns false) @@ -663,6 +685,8 @@ class RTC_LOCKABLE RTC_EXPORT Thread : public webrtc::TaskQueueBase { friend class ThreadManager; int dispatch_warning_ms_ RTC_GUARDED_BY(this) = kSlowDispatchLoggingThreshold; + + RTC_DISALLOW_COPY_AND_ASSIGN(Thread); }; // AutoThread automatically installs itself at construction @@ -676,8 +700,8 @@ class AutoThread : public Thread { AutoThread(); ~AutoThread() override; - AutoThread(const AutoThread&) = delete; - AutoThread& operator=(const AutoThread&) = delete; + private: + RTC_DISALLOW_COPY_AND_ASSIGN(AutoThread); }; // AutoSocketServerThread automatically installs itself at @@ -690,11 +714,10 @@ class AutoSocketServerThread : public Thread { explicit AutoSocketServerThread(SocketServer* ss); ~AutoSocketServerThread() override; - AutoSocketServerThread(const AutoSocketServerThread&) = delete; - AutoSocketServerThread& operator=(const AutoSocketServerThread&) = delete; - private: rtc::Thread* old_thread_; + + RTC_DISALLOW_COPY_AND_ASSIGN(AutoSocketServerThread); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h index 6a3cfda3d..de3c58c81 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/time_utils.h @@ -31,12 +31,6 @@ static const int64_t kNumNanosecsPerMillisec = static const int64_t kNumNanosecsPerMicrosec = kNumNanosecsPerSec / kNumMicrosecsPerSec; -// Elapsed milliseconds between NTP base, 1900 January 1 00:00 GMT -// (see https://tools.ietf.org/html/rfc868), and January 1 00:00 GMT 1970 -// epoch. This is useful when converting between the NTP time base and the -// time base used in RTCP reports. -constexpr int64_t kNtpJan1970Millisecs = 2'208'988'800 * kNumMillisecsPerSec; - // TODO(honghaiz): Define a type for the time value specifically. class ClockInterface { diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h b/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h index 138e936af..73af9debf 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/timestamp_aligner.h @@ -13,6 +13,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/system/rtc_export.h" namespace rtc { @@ -34,9 +35,6 @@ class RTC_EXPORT TimestampAligner { TimestampAligner(); ~TimestampAligner(); - TimestampAligner(const TimestampAligner&) = delete; - TimestampAligner& operator=(const TimestampAligner&) = delete; - public: // Translates timestamps of a capture system to the same timescale as is used // by rtc::TimeMicros(). `capturer_time_us` is assumed to be accurate, but @@ -79,6 +77,7 @@ class RTC_EXPORT TimestampAligner { // Offset between `prev_translated_time_us_` and the corresponding capturer // time. 
int64_t prev_time_offset_us_; + RTC_DISALLOW_COPY_AND_ASSIGN(TimestampAligner); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h b/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h index 4ccd9b750..7196bae34 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/units/unit_base.h @@ -298,14 +298,6 @@ template inline constexpr Unit_T operator*(int32_t scalar, RelativeUnit other) { return other * scalar; } -template -inline constexpr Unit_T operator-(RelativeUnit other) { - if (other.IsPlusInfinity()) - return UnitBase::MinusInfinity(); - if (other.IsMinusInfinity()) - return UnitBase::PlusInfinity(); - return -1 * other; -} } // namespace rtc_units_impl diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h index b17256793..8873f18dc 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/virtual_socket_server.h @@ -16,6 +16,7 @@ #include #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/fake_clock.h" #include "rtc_base/message_handler.h" @@ -162,9 +163,6 @@ class VirtualSocketServer : public SocketServer { explicit VirtualSocketServer(ThreadProcessingFakeClock* fake_clock); ~VirtualSocketServer() override; - VirtualSocketServer(const VirtualSocketServer&) = delete; - VirtualSocketServer& operator=(const VirtualSocketServer&) = delete; - // The default source address specifies which local address to use when a // socket is bound to the 'any' address, e.g. 0.0.0.0. (If not set, the 'any' // address is used as the source address on outgoing virtual packets, exposed @@ -421,6 +419,7 @@ class VirtualSocketServer : public SocketServer { size_t max_udp_payload_ RTC_GUARDED_BY(mutex_) = 65507; bool sending_blocked_ RTC_GUARDED_BY(mutex_) = false; + RTC_DISALLOW_COPY_AND_ASSIGN(VirtualSocketServer); }; } // namespace rtc diff --git a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h index 3636eabf1..dbb0d8eb5 100644 --- a/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h +++ b/TMessagesProj/jni/voip/webrtc/rtc_base/win/windows_version.h @@ -15,6 +15,8 @@ #include +#include "rtc_base/constructor_magic.h" + typedef void* HANDLE; namespace rtc { @@ -103,9 +105,6 @@ class OSInfo { WOW64_UNKNOWN, }; - OSInfo(const OSInfo&) = delete; - OSInfo& operator=(const OSInfo&) = delete; - static OSInfo* GetInstance(); Version version() const { return version_; } @@ -141,6 +140,8 @@ class OSInfo { size_t allocation_granularity_; WOW64Status wow64_status_; std::string processor_model_name_; + + RTC_DISALLOW_COPY_AND_ASSIGN(OSInfo); }; // Because this is by far the most commonly-requested value from the above diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h index a22b23591..c69234d04 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/generated_java_audio_device_module_native_jni/WebRtcAudioRecord_jni.h @@ -53,12 +53,10 @@ JNI_GENERATOR_EXPORT void Java_org_webrtc_audio_WebRtcAudioRecord_nativeDataIsRe 
JNIEnv* env, jobject jcaller, jlong nativeAudioRecordJni, - jint bytes, - jlong captureTimestampNs) { + jint bytes) { AudioRecordJni* native = reinterpret_cast(nativeAudioRecordJni); CHECK_NATIVE_PTR(env, jcaller, native, "DataIsRecorded"); - return native->DataIsRecorded(env, base::android::JavaParamRef(env, jcaller), bytes, - captureTimestampNs); + return native->DataIsRecorded(env, base::android::JavaParamRef(env, jcaller), bytes); } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc index 4f3152dc6..ae9d651d6 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/android_video_track_source.cc @@ -67,7 +67,7 @@ void AndroidVideoTrackSource::SetState(JNIEnv* env, } else { // TODO(sakal): Is this even necessary, does FireOnChanged have to be // called from signaling thread? - signaling_thread_->PostTask([this] { FireOnChanged(); }); + signaling_thread_->PostTask(RTC_FROM_HERE, [this] { FireOnChanged(); }); } } } diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS deleted file mode 100644 index 9a3adee68..000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/DEPS +++ /dev/null @@ -1,4 +0,0 @@ -include_rules = [ - "+base/android/jni_android.h", - "+modules/audio_device", -] diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc index f5f10895e..170c81af4 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.cc @@ -245,15 +245,14 @@ void AudioRecordJni::CacheDirectBufferAddress( // the thread is 'AudioRecordThread'. void AudioRecordJni::DataIsRecorded(JNIEnv* env, const JavaParamRef& j_caller, - int length, - int64_t capture_timestamp_ns) { + int length) { RTC_DCHECK(thread_checker_java_.IsCurrent()); if (!audio_device_buffer_) { RTC_LOG(LS_ERROR) << "AttachAudioBuffer has not been called"; return; } - audio_device_buffer_->SetRecordedBuffer( - direct_buffer_address_, frames_per_buffer_, capture_timestamp_ns); + audio_device_buffer_->SetRecordedBuffer(direct_buffer_address_, + frames_per_buffer_); // We provide one (combined) fixed delay estimate for the APM and use the // `playDelayMs` parameter only. Components like the AEC only sees the sum // of `playDelayMs` and `recDelayMs`, hence the distributions does not matter. diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h index 49c905daa..1ff62f8dc 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/audio_device/audio_record_jni.h @@ -90,8 +90,7 @@ class AudioRecordJni : public AudioInput { // the thread is 'AudioRecordThread'. void DataIsRecorded(JNIEnv* env, const JavaParamRef& j_caller, - int length, - int64_t capture_timestamp_ns); + int length); private: // Stores thread ID in constructor. 
diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_codec.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc similarity index 87% rename from TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_codec.cc rename to TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc index e4332a651..02070f790 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/libaom_av1_codec.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/av1_codec.cc @@ -11,7 +11,7 @@ #include #include "modules/video_coding/codecs/av1/libaom_av1_decoder.h" -#include "modules/video_coding/codecs/av1/libaom_av1_encoder_supported.h" +#include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" #include "sdk/android/generated_libaom_av1_jni/LibaomAv1Decoder_jni.h" #include "sdk/android/generated_libaom_av1_jni/LibaomAv1Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -20,8 +20,7 @@ namespace webrtc { namespace jni { static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { - return jlongFromPointer( - webrtc::CreateLibaomAv1EncoderIfSupported().release()); + return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); } static jboolean JNI_LibaomAv1Encoder_IsSupported(JNIEnv* jni) { diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc deleted file mode 100644 index 1246d88c0..000000000 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/dav1d_codec.cc +++ /dev/null @@ -1,25 +0,0 @@ -/* - * Copyright 2021 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#include - -#include "modules/video_coding/codecs/av1/dav1d_decoder.h" -#include "sdk/android/generated_dav1d_jni/Dav1dDecoder_jni.h" -#include "sdk/android/src/jni/jni_helpers.h" - -namespace webrtc { -namespace jni { - -static jlong JNI_Dav1dDecoder_CreateDecoder(JNIEnv* jni) { - return jlongFromPointer(webrtc::CreateDav1dDecoder().release()); -} - -} // namespace jni -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc index 2aa1c9e70..9c73b9400 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.cc @@ -41,7 +41,6 @@ #include "rtc_base/logging.h" #include "rtc_base/numerics/safe_conversions.h" #include "sdk/android/generated_peerconnection_jni/CandidatePairChangeEvent_jni.h" -#include "sdk/android/generated_peerconnection_jni/IceCandidateErrorEvent_jni.h" #include "sdk/android/generated_peerconnection_jni/PeerConnection_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" @@ -119,7 +118,7 @@ SdpSemantics JavaToNativeSdpSemantics(JNIEnv* jni, return SdpSemantics::kUnifiedPlan; RTC_DCHECK_NOTREACHED(); - return SdpSemantics::kUnifiedPlan; + return SdpSemantics::kPlanB_DEPRECATED; } ScopedJavaLocalRef NativeToJavaCandidatePairChange( @@ -307,19 +306,6 @@ void PeerConnectionObserverJni::OnIceCandidate( NativeToJavaIceCandidate(env, *candidate)); } -void PeerConnectionObserverJni::OnIceCandidateError( - const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) { - JNIEnv* env = AttachCurrentThreadIfNeeded(); - ScopedJavaLocalRef event = Java_IceCandidateErrorEvent_Constructor( - env, NativeToJavaString(env, address), port, NativeToJavaString(env, url), - error_code, NativeToJavaString(env, error_text)); - Java_Observer_onIceCandidateError(env, j_observer_global_, event); -} - void PeerConnectionObserverJni::OnIceCandidatesRemoved( const std::vector& candidates) { JNIEnv* env = AttachCurrentThreadIfNeeded(); @@ -761,8 +747,7 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTrack( const JavaParamRef& j_stream_labels) { RTCErrorOr> result = ExtractNativePC(jni, j_pc)->AddTrack( - rtc::scoped_refptr( - reinterpret_cast(native_track)), + reinterpret_cast(native_track), JavaListToNativeVector(jni, j_stream_labels, &JavaToNativeString)); if (!result.ok()) { @@ -777,10 +762,8 @@ static jboolean JNI_PeerConnection_RemoveTrack( JNIEnv* jni, const JavaParamRef& j_pc, jlong native_sender) { - return ExtractNativePC(jni, j_pc) - ->RemoveTrackOrError(rtc::scoped_refptr( - reinterpret_cast(native_sender))) - .ok(); + return ExtractNativePC(jni, j_pc)->RemoveTrack( + reinterpret_cast(native_sender)); } static ScopedJavaLocalRef JNI_PeerConnection_AddTransceiverWithTrack( @@ -790,8 +773,7 @@ static ScopedJavaLocalRef JNI_PeerConnection_AddTransceiverWithTrack( const JavaParamRef& j_init) { RTCErrorOr> result = ExtractNativePC(jni, j_pc)->AddTransceiver( - rtc::scoped_refptr( - reinterpret_cast(native_track)), + reinterpret_cast(native_track), JavaToNativeRtpTransceiverInit(jni, j_init)); if (!result.ok()) { RTC_LOG(LS_ERROR) << "Failed to add transceiver: " diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h index 9976e8e4f..86d99f31c 100644 --- 
a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection.h @@ -48,12 +48,6 @@ class PeerConnectionObserverJni : public PeerConnectionObserver { // Implementation of PeerConnectionObserver interface, which propagates // the callbacks to the Java observer. void OnIceCandidate(const IceCandidateInterface* candidate) override; - void OnIceCandidateError(const std::string& address, - int port, - const std::string& url, - int error_code, - const std::string& error_text) override; - void OnIceCandidatesRemoved( const std::vector& candidates) override; void OnSignalingChange( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc index 08af07a5e..5330cbd63 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -351,12 +351,11 @@ JNI_PeerConnectionFactory_CreatePeerConnectionFactory( jlong native_network_controller_factory, jlong native_network_state_predictor_factory, jlong native_neteq_factory) { - rtc::scoped_refptr audio_processor( - reinterpret_cast(native_audio_processor)); + rtc::scoped_refptr audio_processor = + reinterpret_cast(native_audio_processor); return CreatePeerConnectionFactoryForJava( jni, jcontext, joptions, - rtc::scoped_refptr( - reinterpret_cast(native_audio_device_module)), + reinterpret_cast(native_audio_device_module), TakeOwnershipOfRefPtr(native_audio_encoder_factory), TakeOwnershipOfRefPtr(native_audio_decoder_factory), jencoder_factory, jdecoder_factory, diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc index 7a3600b42..4d7e95487 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_receiver.cc @@ -118,9 +118,8 @@ static void JNI_RtpReceiver_SetFrameDecryptor(JNIEnv* jni, jlong j_rtp_sender_pointer, jlong j_frame_decryptor_pointer) { reinterpret_cast(j_rtp_sender_pointer) - ->SetFrameDecryptor(rtc::scoped_refptr( - reinterpret_cast( - j_frame_decryptor_pointer))); + ->SetFrameDecryptor(reinterpret_cast( + j_frame_decryptor_pointer)); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc index 233a35365..411e5dc8c 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/android/src/jni/pc/rtp_sender.cc @@ -105,9 +105,8 @@ static void JNI_RtpSender_SetFrameEncryptor(JNIEnv* jni, jlong j_rtp_sender_pointer, jlong j_frame_encryptor_pointer) { reinterpret_cast(j_rtp_sender_pointer) - ->SetFrameEncryptor(rtc::scoped_refptr( - reinterpret_cast( - j_frame_encryptor_pointer))); + ->SetFrameEncryptor(reinterpret_cast( + j_frame_encryptor_pointer)); } } // namespace jni diff --git a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc index c77bf8892..08bd9dc66 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.cc @@ -95,13 +95,17 @@ const char MediaConstraints::kValueFalse[] = "false"; // Audio constraints. 
const char MediaConstraints::kGoogEchoCancellation[] = "googEchoCancellation"; const char MediaConstraints::kAutoGainControl[] = "googAutoGainControl"; +const char MediaConstraints::kExperimentalAutoGainControl[] = + "googAutoGainControl2"; const char MediaConstraints::kNoiseSuppression[] = "googNoiseSuppression"; +const char MediaConstraints::kExperimentalNoiseSuppression[] = + "googNoiseSuppression2"; const char MediaConstraints::kHighpassFilter[] = "googHighpassFilter"; +const char MediaConstraints::kTypingNoiseDetection[] = + "googTypingNoiseDetection"; const char MediaConstraints::kAudioMirroring[] = "googAudioMirroring"; const char MediaConstraints::kAudioNetworkAdaptorConfig[] = "googAudioNetworkAdaptorConfig"; -const char MediaConstraints::kInitAudioRecordingOnSend[] = - "InitAudioRecordingOnSend"; // Constraint keys for CreateOffer / CreateAnswer defined in W3C specification. const char MediaConstraints::kOfferToReceiveAudio[] = "OfferToReceiveAudio"; @@ -184,10 +188,19 @@ void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints, &options->echo_cancellation); ConstraintToOptional(constraints, MediaConstraints::kAutoGainControl, &options->auto_gain_control); + ConstraintToOptional(constraints, + MediaConstraints::kExperimentalAutoGainControl, + &options->experimental_agc); ConstraintToOptional(constraints, MediaConstraints::kNoiseSuppression, &options->noise_suppression); + ConstraintToOptional(constraints, + MediaConstraints::kExperimentalNoiseSuppression, + &options->experimental_ns); ConstraintToOptional(constraints, MediaConstraints::kHighpassFilter, &options->highpass_filter); + ConstraintToOptional(constraints, + MediaConstraints::kTypingNoiseDetection, + &options->typing_detection); ConstraintToOptional(constraints, MediaConstraints::kAudioMirroring, &options->stereo_swapping); ConstraintToOptional( @@ -198,9 +211,6 @@ void CopyConstraintsIntoAudioOptions(const MediaConstraints* constraints, if (options->audio_network_adaptor_config) { options->audio_network_adaptor = true; } - ConstraintToOptional(constraints, - MediaConstraints::kInitAudioRecordingOnSend, - &options->init_recording_on_send); } bool CopyConstraintsIntoOfferAnswerOptions( diff --git a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h index c946e4fab..fd95a6023 100644 --- a/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h +++ b/TMessagesProj/jni/voip/webrtc/sdk/media_constraints.h @@ -59,13 +59,15 @@ class MediaConstraints { // These keys are google specific. 
static const char kGoogEchoCancellation[]; // googEchoCancellation - static const char kAutoGainControl[]; // googAutoGainControl - static const char kNoiseSuppression[]; // googNoiseSuppression - static const char kHighpassFilter[]; // googHighpassFilter + static const char kAutoGainControl[]; // googAutoGainControl + static const char kExperimentalAutoGainControl[]; // googAutoGainControl2 + static const char kNoiseSuppression[]; // googNoiseSuppression + static const char kExperimentalNoiseSuppression[]; // googNoiseSuppression2 + static const char kHighpassFilter[]; // googHighpassFilter + static const char kTypingNoiseDetection[]; // googTypingNoiseDetection static const char kAudioMirroring[]; // googAudioMirroring static const char - kAudioNetworkAdaptorConfig[]; // googAudioNetworkAdaptorConfig - static const char kInitAudioRecordingOnSend[]; // InitAudioRecordingOnSend; + kAudioNetworkAdaptorConfig[]; // goodAudioNetworkAdaptorConfig // Constraint keys for CreateOffer / CreateAnswer // Specified by the W3C PeerConnection spec diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc index eff720371..7288c67ef 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/cpu_info.cc @@ -32,7 +32,7 @@ static int DetectNumberOfCores() { number_of_cores = static_cast(si.dwNumberOfProcessors); #elif defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) number_of_cores = static_cast(sysconf(_SC_NPROCESSORS_ONLN)); - if (number_of_cores <= 0) { + if (number_of_cores < 0) { RTC_LOG(LS_ERROR) << "Failed to get number of cores"; number_of_cores = 1; } diff --git a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc index 8c9cf0c57..b14eef461 100644 --- a/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc +++ b/TMessagesProj/jni/voip/webrtc/system_wrappers/source/metrics.cc @@ -11,6 +11,7 @@ #include +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/thread_annotations.h" @@ -34,9 +35,6 @@ class RtcHistogram { RTC_DCHECK_GT(bucket_count, 0); } - RtcHistogram(const RtcHistogram&) = delete; - RtcHistogram& operator=(const RtcHistogram&) = delete; - void Add(int sample) { sample = std::min(sample, max_); sample = std::max(sample, min_ - 1); // Underflow bucket. @@ -101,6 +99,8 @@ class RtcHistogram { const int min_; const int max_; SampleInfo info_ RTC_GUARDED_BY(mutex_); + + RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogram); }; class RtcHistogramMap { @@ -108,9 +108,6 @@ class RtcHistogramMap { RtcHistogramMap() {} ~RtcHistogramMap() {} - RtcHistogramMap(const RtcHistogramMap&) = delete; - RtcHistogramMap& operator=(const RtcHistogramMap&) = delete; - Histogram* GetCountsHistogram(const std::string& name, int min, int max, @@ -181,6 +178,8 @@ class RtcHistogramMap { mutable Mutex mutex_; std::map> map_ RTC_GUARDED_BY(mutex_); + + RTC_DISALLOW_COPY_AND_ASSIGN(RtcHistogramMap); }; // RtcHistogramMap is allocated upon call to Enable(). 
diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h index caefda186..2b4dd61d2 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/overuse_frame_detector.h @@ -18,6 +18,7 @@ #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" #include "api/video/video_stream_encoder_observer.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/exp_filter.h" #include "rtc_base/system/no_unique_address.h" @@ -67,9 +68,6 @@ class OveruseFrameDetector { explicit OveruseFrameDetector(CpuOveruseMetricsObserver* metrics_observer); virtual ~OveruseFrameDetector(); - OveruseFrameDetector(const OveruseFrameDetector&) = delete; - OveruseFrameDetector& operator=(const OveruseFrameDetector&) = delete; - // Start to periodically check for overuse. void StartCheckForOveruse( TaskQueueBase* task_queue_base, @@ -163,6 +161,8 @@ class OveruseFrameDetector { // If set by field trial, overrides CpuOveruseOptions::filter_time_ms. FieldTrialOptional filter_time_constant_{"tau"}; + + RTC_DISALLOW_COPY_AND_ASSIGN(OveruseFrameDetector); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc index 70820e51a..36c93cd4f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/pixel_limit_resource.cc @@ -84,11 +84,11 @@ void PixelLimitResource::SetResourceListener(ResourceListener* listener) { int target_pixels_lower_bounds = GetLowerResolutionThan(target_pixel_upper_bounds); if (current_pixels > target_pixel_upper_bounds) { - listener_->OnResourceUsageStateMeasured( - rtc::scoped_refptr(this), ResourceUsageState::kOveruse); + listener_->OnResourceUsageStateMeasured(this, + ResourceUsageState::kOveruse); } else if (current_pixels < target_pixels_lower_bounds) { - listener_->OnResourceUsageStateMeasured( - rtc::scoped_refptr(this), ResourceUsageState::kUnderuse); + listener_->OnResourceUsageStateMeasured(this, + ResourceUsageState::kUnderuse); } return kResourceUsageCheckIntervalMs; }); diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc index ad89aef52..d26da708b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource.cc @@ -51,8 +51,7 @@ void VideoStreamEncoderResource::OnResourceUsageStateMeasured( ResourceUsageState usage_state) { MutexLock crit(&lock_); if (listener_) { - listener_->OnResourceUsageStateMeasured(rtc::scoped_refptr(this), - usage_state); + listener_->OnResourceUsageStateMeasured(this, usage_state); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc index 2847cdbb8..6a1e9215a 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.cc @@ -288,8 +288,6 @@ 
VideoStreamEncoderResourceManager::VideoStreamEncoderResourceManager( initial_frame_dropper_( std::make_unique(quality_scaler_resource_)), quality_scaling_experiment_enabled_(QualityScalingExperiment::Enabled()), - pixel_limit_resource_experiment_enabled_( - field_trial::IsEnabled(kPixelLimitResourceFieldTrialName)), encoder_target_bitrate_bps_(absl::nullopt), quality_rampup_experiment_( QualityRampUpExperimentHelper::CreateIfEnabled(this, clock_)), @@ -352,7 +350,7 @@ void VideoStreamEncoderResourceManager::MaybeInitializePixelLimitResource() { RTC_DCHECK_RUN_ON(encoder_queue_); RTC_DCHECK(adaptation_processor_); RTC_DCHECK(!pixel_limit_resource_); - if (!pixel_limit_resource_experiment_enabled_) { + if (!field_trial::IsEnabled(kPixelLimitResourceFieldTrialName)) { // The field trial is not running. return; } diff --git a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h index 26b8db3ff..f1bc8854b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h +++ b/TMessagesProj/jni/voip/webrtc/video/adaptation/video_stream_encoder_resource_manager.h @@ -213,8 +213,6 @@ class VideoStreamEncoderResourceManager const std::unique_ptr initial_frame_dropper_ RTC_GUARDED_BY(encoder_queue_); const bool quality_scaling_experiment_enabled_ RTC_GUARDED_BY(encoder_queue_); - const bool pixel_limit_resource_experiment_enabled_ - RTC_GUARDED_BY(encoder_queue_); absl::optional encoder_target_bitrate_bps_ RTC_GUARDED_BY(encoder_queue_); absl::optional encoder_rates_ diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats.h b/TMessagesProj/jni/voip/webrtc/video/call_stats.h index d198223a9..0c8e26741 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats.h +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats.h @@ -18,6 +18,7 @@ #include "modules/include/module.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/synchronization/mutex.h" #include "system_wrappers/include/clock.h" @@ -34,9 +35,6 @@ class CallStats : public Module, public RtcpRttStats { CallStats(Clock* clock, ProcessThread* process_thread); ~CallStats() override; - CallStats(const CallStats&) = delete; - CallStats& operator=(const CallStats&) = delete; - // Registers/deregisters a new observer to receive statistics updates. // Must be called from the construction thread. 
void RegisterStatsObserver(CallStatsObserver* observer); @@ -116,6 +114,8 @@ class CallStats : public Module, public RtcpRttStats { SequenceChecker process_thread_checker_; ProcessThread* const process_thread_; bool process_thread_running_ RTC_GUARDED_BY(construction_thread_checker_); + + RTC_DISALLOW_COPY_AND_ASSIGN(CallStats); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc index e2d7b3e76..2b7c61e0f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.cc @@ -15,6 +15,7 @@ #include #include "absl/algorithm/container.h" +#include "modules/utility/include/process_thread.h" #include "rtc_base/checks.h" #include "rtc_base/location.h" #include "rtc_base/task_utils/to_queued_task.h" diff --git a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h index b626e4884..74bd3482d 100644 --- a/TMessagesProj/jni/voip/webrtc/video/call_stats2.h +++ b/TMessagesProj/jni/voip/webrtc/video/call_stats2.h @@ -17,6 +17,7 @@ #include "api/units/timestamp.h" #include "modules/include/module_common_types.h" #include "modules/rtp_rtcp/include/rtp_rtcp_defines.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/task_queue.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/repeating_task.h" @@ -34,9 +35,6 @@ class CallStats { CallStats(Clock* clock, TaskQueueBase* task_queue); ~CallStats(); - CallStats(const CallStats&) = delete; - CallStats& operator=(const CallStats&) = delete; - // Ensure that necessary repeating tasks are started. void EnsureStarted(); @@ -127,6 +125,8 @@ class CallStats { // Used to signal destruction to potentially pending tasks. ScopedTaskSafety task_safety_; + + RTC_DISALLOW_COPY_AND_ASSIGN(CallStats); }; } // namespace internal diff --git a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc deleted file mode 100644 index 9f22c4958..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.cc +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/decode_synchronizer.h" - -#include -#include -#include -#include - -#include "api/sequence_checker.h" -#include "api/units/time_delta.h" -#include "api/units/timestamp.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "video/frame_decode_scheduler.h" -#include "video/frame_decode_timing.h" - -namespace webrtc { - -DecodeSynchronizer::ScheduledFrame::ScheduledFrame( - uint32_t rtp_timestamp, - FrameDecodeTiming::FrameSchedule schedule, - FrameDecodeScheduler::FrameReleaseCallback callback) - : rtp_timestamp_(rtp_timestamp), - schedule_(std::move(schedule)), - callback_(std::move(callback)) {} - -void DecodeSynchronizer::ScheduledFrame::RunFrameReleaseCallback() && { - // Inspiration from Chromium base::OnceCallback. Move `*this` to a local - // before execution to ensure internal state is cleared after callback - // execution. 
- auto sf = std::move(*this); - sf.callback_(sf.rtp_timestamp_, sf.schedule_.render_time); -} - -Timestamp DecodeSynchronizer::ScheduledFrame::LatestDecodeTime() const { - return schedule_.latest_decode_time; -} - -DecodeSynchronizer::SynchronizedFrameDecodeScheduler:: - SynchronizedFrameDecodeScheduler(DecodeSynchronizer* sync) - : sync_(sync) { - RTC_DCHECK(sync_); -} - -DecodeSynchronizer::SynchronizedFrameDecodeScheduler:: - ~SynchronizedFrameDecodeScheduler() { - RTC_DCHECK(!next_frame_); - RTC_DCHECK(stopped_); -} - -absl::optional -DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ScheduledRtpTimestamp() { - return next_frame_.has_value() - ? absl::make_optional(next_frame_->rtp_timestamp()) - : absl::nullopt; -} - -DecodeSynchronizer::ScheduledFrame -DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ReleaseNextFrame() { - RTC_DCHECK(next_frame_); - auto res = std::move(*next_frame_); - next_frame_.reset(); - return res; -} - -Timestamp -DecodeSynchronizer::SynchronizedFrameDecodeScheduler::LatestDecodeTime() { - RTC_DCHECK(next_frame_); - return next_frame_->LatestDecodeTime(); -} - -void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::ScheduleFrame( - uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback cb) { - RTC_DCHECK(!next_frame_) << "Can not schedule two frames at once."; - next_frame_ = ScheduledFrame(rtp, std::move(schedule), std::move(cb)); - sync_->OnFrameScheduled(this); -} - -void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::CancelOutstanding() { - next_frame_.reset(); -} - -void DecodeSynchronizer::SynchronizedFrameDecodeScheduler::Stop() { - CancelOutstanding(); - stopped_ = true; - sync_->RemoveFrameScheduler(this); -} - -DecodeSynchronizer::DecodeSynchronizer(Clock* clock, - Metronome* metronome, - TaskQueueBase* worker_queue) - : clock_(clock), worker_queue_(worker_queue), metronome_(metronome) { - RTC_DCHECK(metronome_); - RTC_DCHECK(worker_queue_); -} - -DecodeSynchronizer::~DecodeSynchronizer() { - RTC_DCHECK(schedulers_.empty()); -} - -std::unique_ptr -DecodeSynchronizer::CreateSynchronizedFrameScheduler() { - RTC_DCHECK_RUN_ON(worker_queue_); - auto scheduler = std::make_unique(this); - auto [it, inserted] = schedulers_.emplace(scheduler.get()); - // If this is the first `scheduler` added, start listening to the metronome. - if (inserted && schedulers_.size() == 1) { - RTC_DLOG(LS_VERBOSE) << "Listening to metronome"; - metronome_->AddListener(this); - } - - return std::move(scheduler); -} - -void DecodeSynchronizer::OnFrameScheduled( - SynchronizedFrameDecodeScheduler* scheduler) { - RTC_DCHECK_RUN_ON(worker_queue_); - RTC_DCHECK(scheduler->ScheduledRtpTimestamp()); - - Timestamp now = clock_->CurrentTime(); - Timestamp next_tick = expected_next_tick_; - // If no tick has registered yet assume it will occur in the tick period. - if (next_tick.IsInfinite()) { - next_tick = now + metronome_->TickPeriod(); - } - - // Release the frame right away if the decode time is too soon. Otherwise - // the stream may fall behind too much. - bool decode_before_next_tick = - scheduler->LatestDecodeTime() < - (next_tick - FrameDecodeTiming::kMaxAllowedFrameDelay); - // Decode immediately if the decode time is in the past. 
- bool decode_time_in_past = scheduler->LatestDecodeTime() < now; - - if (decode_before_next_tick || decode_time_in_past) { - ScheduledFrame scheduled_frame = scheduler->ReleaseNextFrame(); - std::move(scheduled_frame).RunFrameReleaseCallback(); - } -} - -void DecodeSynchronizer::RemoveFrameScheduler( - SynchronizedFrameDecodeScheduler* scheduler) { - RTC_DCHECK_RUN_ON(worker_queue_); - RTC_DCHECK(scheduler); - auto it = schedulers_.find(scheduler); - if (it == schedulers_.end()) { - return; - } - schedulers_.erase(it); - // If there are no more schedulers active, stop listening for metronome ticks. - if (schedulers_.empty()) { - RTC_DLOG(LS_VERBOSE) << "Not listening to metronome"; - metronome_->RemoveListener(this); - expected_next_tick_ = Timestamp::PlusInfinity(); - } -} - -void DecodeSynchronizer::OnTick() { - RTC_DCHECK_RUN_ON(worker_queue_); - expected_next_tick_ = clock_->CurrentTime() + metronome_->TickPeriod(); - - for (auto* scheduler : schedulers_) { - if (scheduler->ScheduledRtpTimestamp() && - scheduler->LatestDecodeTime() < expected_next_tick_) { - auto scheduled_frame = scheduler->ReleaseNextFrame(); - std::move(scheduled_frame).RunFrameReleaseCallback(); - } - } -} - -TaskQueueBase* DecodeSynchronizer::OnTickTaskQueue() { - return worker_queue_; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h b/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h deleted file mode 100644 index bcbde4f41..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/decode_synchronizer.h +++ /dev/null @@ -1,137 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_DECODE_SYNCHRONIZER_H_ -#define VIDEO_DECODE_SYNCHRONIZER_H_ - -#include - -#include -#include -#include -#include - -#include "absl/types/optional.h" -#include "api/metronome/metronome.h" -#include "api/sequence_checker.h" -#include "api/task_queue/task_queue_base.h" -#include "api/units/timestamp.h" -#include "rtc_base/checks.h" -#include "rtc_base/thread_annotations.h" -#include "video/frame_decode_scheduler.h" -#include "video/frame_decode_timing.h" - -namespace webrtc { - -// DecodeSynchronizer synchronizes the frame scheduling by coalescing decoding -// on the metronome. -// -// A video receive stream can use the DecodeSynchronizer by receiving a -// FrameDecodeScheduler instance with `CreateSynchronizedFrameScheduler()`. -// This instance implements FrameDecodeScheduler and can be used as a normal -// scheduler. This instance is owned by the receive stream, and is borrowed by -// the DecodeSynchronizer. The DecodeSynchronizer will stop borrowing the -// instance when `FrameDecodeScheduler::Stop()` is called, after which the -// scheduler may be destroyed by the receive stream. -// -// When a frame is scheduled for decode by a receive stream using the -// DecodeSynchronizer, it will instead be executed on the metronome during the -// tick interval where `max_decode_time` occurs. For example, if a frame is -// scheduled for decode in 50ms and the tick interval is 20ms, then the frame -// will be released for decoding in 2 ticks. 
See below for illustation, -// -// In the case where the decode time is in the past, or must occur before the -// next metronome tick then the frame will be released right away, allowing a -// delayed stream to catch up quickly. -// -// DecodeSynchronizer is single threaded - all method calls must run on the -// `worker_queue_`. -class DecodeSynchronizer : private Metronome::TickListener { - public: - DecodeSynchronizer(Clock* clock, - Metronome* metronome, - TaskQueueBase* worker_queue); - ~DecodeSynchronizer() override; - DecodeSynchronizer(const DecodeSynchronizer&) = delete; - DecodeSynchronizer& operator=(const DecodeSynchronizer&) = delete; - - std::unique_ptr CreateSynchronizedFrameScheduler(); - - private: - class ScheduledFrame { - public: - ScheduledFrame(uint32_t rtp_timestamp, - FrameDecodeTiming::FrameSchedule schedule, - FrameDecodeScheduler::FrameReleaseCallback callback); - - // Disallow copy since `callback` should only be moved. - ScheduledFrame(const ScheduledFrame&) = delete; - ScheduledFrame& operator=(const ScheduledFrame&) = delete; - ScheduledFrame(ScheduledFrame&&) = default; - ScheduledFrame& operator=(ScheduledFrame&&) = default; - - // Executes `callback_`. - void RunFrameReleaseCallback() &&; - - uint32_t rtp_timestamp() const { return rtp_timestamp_; } - Timestamp LatestDecodeTime() const; - - private: - uint32_t rtp_timestamp_; - FrameDecodeTiming::FrameSchedule schedule_; - FrameDecodeScheduler::FrameReleaseCallback callback_; - }; - - class SynchronizedFrameDecodeScheduler : public FrameDecodeScheduler { - public: - explicit SynchronizedFrameDecodeScheduler(DecodeSynchronizer* sync); - ~SynchronizedFrameDecodeScheduler() override; - - // Releases the outstanding frame for decoding. This invalidates - // `next_frame_`. There must be a frame scheduled. - ScheduledFrame ReleaseNextFrame(); - - // Returns `next_frame_.schedule.max_decode_time`. There must be a frame - // scheduled when this is called. - Timestamp LatestDecodeTime(); - - // FrameDecodeScheduler implementation. - absl::optional ScheduledRtpTimestamp() override; - void ScheduleFrame(uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback cb) override; - void CancelOutstanding() override; - void Stop() override; - - private: - DecodeSynchronizer* sync_; - absl::optional next_frame_; - bool stopped_ = false; - }; - - void OnFrameScheduled(SynchronizedFrameDecodeScheduler* scheduler); - void RemoveFrameScheduler(SynchronizedFrameDecodeScheduler* scheduler); - - // Metronome::TickListener implementation. - void OnTick() override; - TaskQueueBase* OnTickTaskQueue() override; - - Clock* const clock_; - TaskQueueBase* const worker_queue_; - Metronome* const metronome_; - - Timestamp expected_next_tick_ = Timestamp::PlusInfinity(); - std::set schedulers_ - RTC_GUARDED_BY(worker_queue_); -}; - -} // namespace webrtc - -#endif // VIDEO_DECODE_SYNCHRONIZER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.cc b/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.cc deleted file mode 100644 index a2e5406ec..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.cc +++ /dev/null @@ -1,574 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/frame_buffer_proxy.h" - -#include -#include -#include - -#include "absl/base/attributes.h" -#include "absl/functional/bind_front.h" -#include "api/sequence_checker.h" -#include "modules/video_coding/frame_buffer2.h" -#include "modules/video_coding/frame_buffer3.h" -#include "modules/video_coding/frame_helpers.h" -#include "rtc_base/checks.h" -#include "rtc_base/logging.h" -#include "rtc_base/thread_annotations.h" -#include "system_wrappers/include/field_trial.h" -#include "video/frame_decode_timing.h" -#include "video/task_queue_frame_decode_scheduler.h" -#include "video/video_receive_stream_timeout_tracker.h" - -namespace webrtc { - -namespace { - -class FrameBuffer2Proxy : public FrameBufferProxy { - public: - FrameBuffer2Proxy(Clock* clock, - VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_proxy, - rtc::TaskQueue* decode_queue, - FrameSchedulingReceiver* receiver, - TimeDelta max_wait_for_keyframe, - TimeDelta max_wait_for_frame) - : max_wait_for_keyframe_(max_wait_for_keyframe), - max_wait_for_frame_(max_wait_for_frame), - frame_buffer_(clock, timing, stats_proxy), - decode_queue_(decode_queue), - stats_proxy_(stats_proxy), - receiver_(receiver) { - RTC_DCHECK(decode_queue_); - RTC_DCHECK(stats_proxy_); - RTC_DCHECK(receiver_); - } - - void StopOnWorker() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - decode_queue_->PostTask([this] { - frame_buffer_.Stop(); - decode_safety_->SetNotAlive(); - }); - } - - void SetProtectionMode(VCMVideoProtection protection_mode) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - frame_buffer_.SetProtectionMode(kProtectionNackFEC); - } - - void Clear() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - frame_buffer_.Clear(); - } - - absl::optional InsertFrame( - std::unique_ptr frame) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - int64_t last_continuous_pid = frame_buffer_.InsertFrame(std::move(frame)); - if (last_continuous_pid != -1) - return last_continuous_pid; - return absl::nullopt; - } - - void UpdateRtt(int64_t max_rtt_ms) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - frame_buffer_.UpdateRtt(max_rtt_ms); - } - - void StartNextDecode(bool keyframe_required) override { - if (!decode_queue_->IsCurrent()) { - decode_queue_->PostTask(ToQueuedTask( - decode_safety_, - [this, keyframe_required] { StartNextDecode(keyframe_required); })); - return; - } - RTC_DCHECK_RUN_ON(decode_queue_); - - frame_buffer_.NextFrame( - MaxWait(keyframe_required).ms(), keyframe_required, decode_queue_, - /* encoded frame handler */ - [this, keyframe_required](std::unique_ptr frame) { - RTC_DCHECK_RUN_ON(decode_queue_); - if (!decode_safety_->alive()) - return; - if (frame) { - receiver_->OnEncodedFrame(std::move(frame)); - } else { - receiver_->OnDecodableFrameTimeout(MaxWait(keyframe_required)); - } - }); - } - - int Size() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - return frame_buffer_.Size(); - } - - private: - TimeDelta MaxWait(bool keyframe_required) const { - return keyframe_required ? 
max_wait_for_keyframe_ : max_wait_for_frame_; - } - - RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; - const TimeDelta max_wait_for_keyframe_; - const TimeDelta max_wait_for_frame_; - video_coding::FrameBuffer frame_buffer_; - rtc::TaskQueue* const decode_queue_; - VCMReceiveStatisticsCallback* const stats_proxy_; - FrameSchedulingReceiver* const receiver_; - rtc::scoped_refptr decode_safety_ = - PendingTaskSafetyFlag::CreateDetached(); -}; - -// Max number of frames the buffer will hold. -static constexpr size_t kMaxFramesBuffered = 800; -// Max number of decoded frame info that will be saved. -static constexpr int kMaxFramesHistory = 1 << 13; - -// Default value for the maximum decode queue size that is used when the -// low-latency renderer is used. -static constexpr size_t kZeroPlayoutDelayDefaultMaxDecodeQueueSize = 8; - -// Encapsulates use of the new frame buffer for use in VideoReceiveStream. This -// behaves the same as the FrameBuffer2Proxy but uses frame_buffer3 instead. -// Responsibilities from frame_buffer2, like stats, jitter and frame timing -// accounting are moved into this pro -class FrameBuffer3Proxy : public FrameBufferProxy { - public: - FrameBuffer3Proxy( - Clock* clock, - TaskQueueBase* worker_queue, - VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_proxy, - rtc::TaskQueue* decode_queue, - FrameSchedulingReceiver* receiver, - TimeDelta max_wait_for_keyframe, - TimeDelta max_wait_for_frame, - std::unique_ptr frame_decode_scheduler) - : max_wait_for_keyframe_(max_wait_for_keyframe), - max_wait_for_frame_(max_wait_for_frame), - clock_(clock), - worker_queue_(worker_queue), - decode_queue_(decode_queue), - stats_proxy_(stats_proxy), - receiver_(receiver), - timing_(timing), - frame_decode_scheduler_(std::move(frame_decode_scheduler)), - jitter_estimator_(clock_), - inter_frame_delay_(clock_->TimeInMilliseconds()), - buffer_(std::make_unique(kMaxFramesBuffered, - kMaxFramesHistory)), - decode_timing_(clock_, timing_), - timeout_tracker_(clock_, - worker_queue_, - VideoReceiveStreamTimeoutTracker::Timeouts{ - .max_wait_for_keyframe = max_wait_for_keyframe, - .max_wait_for_frame = max_wait_for_frame}, - absl::bind_front(&FrameBuffer3Proxy::OnTimeout, this)), - zero_playout_delay_max_decode_queue_size_( - "max_decode_queue_size", - kZeroPlayoutDelayDefaultMaxDecodeQueueSize) { - RTC_DCHECK(decode_queue_); - RTC_DCHECK(stats_proxy_); - RTC_DCHECK(receiver_); - RTC_DCHECK(timing_); - RTC_DCHECK(worker_queue_); - RTC_DCHECK(clock_); - RTC_DCHECK(frame_decode_scheduler_); - RTC_LOG(LS_WARNING) << "Using FrameBuffer3"; - - ParseFieldTrial({&zero_playout_delay_max_decode_queue_size_}, - field_trial::FindFullName("WebRTC-ZeroPlayoutDelay")); - } - - // FrameBufferProxy implementation. 
- void StopOnWorker() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - frame_decode_scheduler_->Stop(); - timeout_tracker_.Stop(); - decoder_ready_for_new_frame_ = false; - decode_queue_->PostTask([this] { - RTC_DCHECK_RUN_ON(decode_queue_); - decode_safety_->SetNotAlive(); - }); - } - - void SetProtectionMode(VCMVideoProtection protection_mode) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - protection_mode_ = kProtectionNackFEC; - } - - void Clear() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - stats_proxy_->OnDroppedFrames(buffer_->CurrentSize()); - buffer_ = - std::make_unique(kMaxFramesBuffered, kMaxFramesHistory); - frame_decode_scheduler_->CancelOutstanding(); - } - - absl::optional InsertFrame( - std::unique_ptr frame) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - if (frame->is_last_spatial_layer) - stats_proxy_->OnCompleteFrame(frame->is_keyframe(), frame->size(), - frame->contentType()); - if (!frame->delayed_by_retransmission()) - timing_->IncomingTimestamp(frame->Timestamp(), frame->ReceivedTime()); - - buffer_->InsertFrame(std::move(frame)); - MaybeScheduleFrameForRelease(); - - return buffer_->LastContinuousFrameId(); - } - - void UpdateRtt(int64_t max_rtt_ms) override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - jitter_estimator_.UpdateRtt(max_rtt_ms); - } - - void StartNextDecode(bool keyframe_required) override { - if (!worker_queue_->IsCurrent()) { - worker_queue_->PostTask(ToQueuedTask( - worker_safety_, - [this, keyframe_required] { StartNextDecode(keyframe_required); })); - return; - } - - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - if (!timeout_tracker_.Running()) - timeout_tracker_.Start(keyframe_required); - keyframe_required_ = keyframe_required; - if (keyframe_required_) { - timeout_tracker_.SetWaitingForKeyframe(); - } - decoder_ready_for_new_frame_ = true; - MaybeScheduleFrameForRelease(); - } - - int Size() override { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - return buffer_->CurrentSize(); - } - - void OnFrameReady( - absl::InlinedVector, 4> frames, - Timestamp render_time) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - RTC_DCHECK(!frames.empty()); - - timeout_tracker_.OnEncodedFrameReleased(); - - int64_t now_ms = clock_->TimeInMilliseconds(); - bool superframe_delayed_by_retransmission = false; - size_t superframe_size = 0; - const EncodedFrame& first_frame = *frames.front(); - int64_t receive_time_ms = first_frame.ReceivedTime(); - - if (first_frame.is_keyframe()) - keyframe_required_ = false; - - // Gracefully handle bad RTP timestamps and render time issues. - if (FrameHasBadRenderTiming(render_time.ms(), now_ms, - timing_->TargetVideoDelay())) { - jitter_estimator_.Reset(); - timing_->Reset(); - render_time = Timestamp::Millis( - timing_->RenderTimeMs(first_frame.Timestamp(), now_ms)); - } - - for (std::unique_ptr& frame : frames) { - frame->SetRenderTime(render_time.ms()); - - superframe_delayed_by_retransmission |= - frame->delayed_by_retransmission(); - receive_time_ms = std::max(receive_time_ms, frame->ReceivedTime()); - superframe_size += frame->size(); - } - - if (!superframe_delayed_by_retransmission) { - int64_t frame_delay; - - if (inter_frame_delay_.CalculateDelay(first_frame.Timestamp(), - &frame_delay, receive_time_ms)) { - jitter_estimator_.UpdateEstimate(frame_delay, superframe_size); - } - - float rtt_mult = protection_mode_ == kProtectionNackFEC ? 
0.0 : 1.0; - absl::optional rtt_mult_add_cap_ms = absl::nullopt; - if (rtt_mult_settings_.has_value()) { - rtt_mult = rtt_mult_settings_->rtt_mult_setting; - rtt_mult_add_cap_ms = rtt_mult_settings_->rtt_mult_add_cap_ms; - } - timing_->SetJitterDelay( - jitter_estimator_.GetJitterEstimate(rtt_mult, rtt_mult_add_cap_ms)); - timing_->UpdateCurrentDelay(render_time.ms(), now_ms); - } else if (RttMultExperiment::RttMultEnabled()) { - jitter_estimator_.FrameNacked(); - } - - // Update stats. - UpdateDroppedFrames(); - UpdateJitterDelay(); - UpdateTimingFrameInfo(); - - std::unique_ptr frame = - CombineAndDeleteFrames(std::move(frames)); - - timing_->SetLastDecodeScheduledTimestamp(now_ms); - - decoder_ready_for_new_frame_ = false; - // VideoReceiveStream2 wants frames on the decoder thread. - decode_queue_->PostTask(ToQueuedTask( - decode_safety_, [this, frame = std::move(frame)]() mutable { - receiver_->OnEncodedFrame(std::move(frame)); - })); - } - - void OnTimeout() { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - // If the stream is paused then ignore the timeout. - if (!decoder_ready_for_new_frame_) { - timeout_tracker_.Stop(); - return; - } - receiver_->OnDecodableFrameTimeout(MaxWait()); - // Stop sending timeouts until receive starts waiting for a new frame. - timeout_tracker_.Stop(); - decoder_ready_for_new_frame_ = false; - } - - private: - void FrameReadyForDecode(uint32_t rtp_timestamp, Timestamp render_time) { - RTC_DCHECK_RUN_ON(&worker_sequence_checker_); - RTC_DCHECK(buffer_->NextDecodableTemporalUnitRtpTimestamp() == - rtp_timestamp) - << "Frame buffer's next decodable frame was not the one sent for " - "extraction rtp=" - << rtp_timestamp << " next=" - << buffer_->NextDecodableTemporalUnitRtpTimestamp().value_or(-1); - auto frames = buffer_->ExtractNextDecodableTemporalUnit(); - OnFrameReady(std::move(frames), render_time); - } - - TimeDelta MaxWait() const RTC_RUN_ON(&worker_sequence_checker_) { - return keyframe_required_ ? max_wait_for_keyframe_ : max_wait_for_frame_; - } - - void UpdateDroppedFrames() RTC_RUN_ON(&worker_sequence_checker_) { - const int dropped_frames = buffer_->GetTotalNumberOfDroppedFrames() - - frames_dropped_before_last_new_frame_; - if (dropped_frames > 0) - stats_proxy_->OnDroppedFrames(dropped_frames); - frames_dropped_before_last_new_frame_ = - buffer_->GetTotalNumberOfDroppedFrames(); - } - - void UpdateJitterDelay() { - int max_decode_ms; - int current_delay_ms; - int target_delay_ms; - int jitter_buffer_ms; - int min_playout_delay_ms; - int render_delay_ms; - if (timing_->GetTimings(&max_decode_ms, ¤t_delay_ms, &target_delay_ms, - &jitter_buffer_ms, &min_playout_delay_ms, - &render_delay_ms)) { - stats_proxy_->OnFrameBufferTimingsUpdated( - max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, - min_playout_delay_ms, render_delay_ms); - } - } - - void UpdateTimingFrameInfo() { - absl::optional info = timing_->GetTimingFrameInfo(); - if (info) - stats_proxy_->OnTimingFrameInfoUpdated(*info); - } - - bool IsTooManyFramesQueued() const RTC_RUN_ON(&worker_sequence_checker_) { - return buffer_->CurrentSize() > zero_playout_delay_max_decode_queue_size_; - } - - void ForceKeyFrameReleaseImmediately() RTC_RUN_ON(&worker_sequence_checker_) { - RTC_DCHECK(keyframe_required_); - // Iterate through the frame buffer until there is a complete keyframe and - // release this right away. 
- while (buffer_->NextDecodableTemporalUnitRtpTimestamp()) { - auto next_frame = buffer_->ExtractNextDecodableTemporalUnit(); - if (next_frame.empty()) { - RTC_DCHECK_NOTREACHED() - << "Frame buffer should always return at least 1 frame."; - continue; - } - // Found keyframe - decode right away. - if (next_frame.front()->is_keyframe()) { - auto render_time = Timestamp::Millis(timing_->RenderTimeMs( - next_frame.front()->Timestamp(), clock_->TimeInMilliseconds())); - OnFrameReady(std::move(next_frame), render_time); - return; - } - } - } - - void MaybeScheduleFrameForRelease() RTC_RUN_ON(&worker_sequence_checker_) { - if (!decoder_ready_for_new_frame_ || - !buffer_->NextDecodableTemporalUnitRtpTimestamp()) - return; - - if (keyframe_required_) { - return ForceKeyFrameReleaseImmediately(); - } - - // TODO(https://bugs.webrtc.org/13343): Make [next,last] decodable returned - // as an optional pair and remove this check. - RTC_CHECK(buffer_->LastDecodableTemporalUnitRtpTimestamp()); - auto last_rtp = *buffer_->LastDecodableTemporalUnitRtpTimestamp(); - - // If already scheduled then abort. - if (frame_decode_scheduler_->ScheduledRtpTimestamp() == - buffer_->NextDecodableTemporalUnitRtpTimestamp()) - return; - - absl::optional schedule; - while (buffer_->NextDecodableTemporalUnitRtpTimestamp()) { - auto next_rtp = *buffer_->NextDecodableTemporalUnitRtpTimestamp(); - schedule = decode_timing_.OnFrameBufferUpdated(next_rtp, last_rtp, - IsTooManyFramesQueued()); - if (schedule) { - // Don't schedule if already waiting for the same frame. - if (frame_decode_scheduler_->ScheduledRtpTimestamp() != next_rtp) { - frame_decode_scheduler_->CancelOutstanding(); - frame_decode_scheduler_->ScheduleFrame( - next_rtp, *schedule, - absl::bind_front(&FrameBuffer3Proxy::FrameReadyForDecode, this)); - } - return; - } - // If no schedule for current rtp, drop and try again. - buffer_->DropNextDecodableTemporalUnit(); - } - } - - RTC_NO_UNIQUE_ADDRESS SequenceChecker worker_sequence_checker_; - const TimeDelta max_wait_for_keyframe_; - const TimeDelta max_wait_for_frame_; - const absl::optional rtt_mult_settings_ = - RttMultExperiment::GetRttMultValue(); - Clock* const clock_; - TaskQueueBase* const worker_queue_; - rtc::TaskQueue* const decode_queue_; - VCMReceiveStatisticsCallback* const stats_proxy_; - FrameSchedulingReceiver* const receiver_; - VCMTiming* const timing_; - const std::unique_ptr frame_decode_scheduler_ - RTC_GUARDED_BY(&worker_sequence_checker_); - - VCMJitterEstimator jitter_estimator_ - RTC_GUARDED_BY(&worker_sequence_checker_); - VCMInterFrameDelay inter_frame_delay_ - RTC_GUARDED_BY(&worker_sequence_checker_); - bool keyframe_required_ RTC_GUARDED_BY(&worker_sequence_checker_) = false; - std::unique_ptr buffer_ - RTC_GUARDED_BY(&worker_sequence_checker_); - FrameDecodeTiming decode_timing_ RTC_GUARDED_BY(&worker_sequence_checker_); - VideoReceiveStreamTimeoutTracker timeout_tracker_ - RTC_GUARDED_BY(&worker_sequence_checker_); - int frames_dropped_before_last_new_frame_ - RTC_GUARDED_BY(&worker_sequence_checker_) = 0; - VCMVideoProtection protection_mode_ - RTC_GUARDED_BY(&worker_sequence_checker_) = kProtectionNack; - - // This flag guards frames from queuing in front of the decoder. Without this - // guard, encoded frames will not wait for the decoder to finish decoding a - // frame and just queue up, meaning frames will not be dropped or - // fast-forwarded when the decoder is slow or hangs. 
- bool decoder_ready_for_new_frame_ RTC_GUARDED_BY(&worker_sequence_checker_) = - false; - - // Maximum number of frames in the decode queue to allow pacing. If the - // queue grows beyond the max limit, pacing will be disabled and frames will - // be pushed to the decoder as soon as possible. This only has an effect - // when the low-latency rendering path is active, which is indicated by - // the frame's render time == 0. - FieldTrialParameter zero_playout_delay_max_decode_queue_size_; - - rtc::scoped_refptr decode_safety_ = - PendingTaskSafetyFlag::CreateDetached(); - ScopedTaskSafety worker_safety_; -}; - -enum class FrameBufferArm { - kFrameBuffer2, - kFrameBuffer3, - kSyncDecode, -}; - -constexpr const char* kFrameBufferFieldTrial = "WebRTC-FrameBuffer3"; - -FrameBufferArm ParseFrameBufferFieldTrial() { - webrtc::FieldTrialEnum arm( - "arm", FrameBufferArm::kFrameBuffer2, - { - {"FrameBuffer2", FrameBufferArm::kFrameBuffer2}, - {"FrameBuffer3", FrameBufferArm::kFrameBuffer3}, - {"SyncDecoding", FrameBufferArm::kSyncDecode}, - }); - ParseFieldTrial({&arm}, field_trial::FindFullName(kFrameBufferFieldTrial)); - return arm.Get(); -} - -} // namespace - -std::unique_ptr FrameBufferProxy::CreateFromFieldTrial( - Clock* clock, - TaskQueueBase* worker_queue, - VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_proxy, - rtc::TaskQueue* decode_queue, - FrameSchedulingReceiver* receiver, - TimeDelta max_wait_for_keyframe, - TimeDelta max_wait_for_frame, - DecodeSynchronizer* decode_sync) { - switch (ParseFrameBufferFieldTrial()) { - case FrameBufferArm::kFrameBuffer3: { - auto scheduler = - std::make_unique(clock, worker_queue); - return std::make_unique( - clock, worker_queue, timing, stats_proxy, decode_queue, receiver, - max_wait_for_keyframe, max_wait_for_frame, std::move(scheduler)); - } - case FrameBufferArm::kSyncDecode: { - std::unique_ptr scheduler; - if (decode_sync) { - scheduler = decode_sync->CreateSynchronizedFrameScheduler(); - } else { - RTC_LOG(LS_ERROR) << "In FrameBuffer with sync decode trial, but " - "no DecodeSynchronizer was present!"; - // Crash in debug, but in production use the task queue scheduler. - RTC_DCHECK_NOTREACHED(); - scheduler = std::make_unique( - clock, worker_queue); - } - return std::make_unique( - clock, worker_queue, timing, stats_proxy, decode_queue, receiver, - max_wait_for_keyframe, max_wait_for_frame, std::move(scheduler)); - } - case FrameBufferArm::kFrameBuffer2: - ABSL_FALLTHROUGH_INTENDED; - default: - return std::make_unique( - clock, timing, stats_proxy, decode_queue, receiver, - max_wait_for_keyframe, max_wait_for_frame); - } -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.h b/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.h deleted file mode 100644 index b419aedb7..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/frame_buffer_proxy.h +++ /dev/null @@ -1,68 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef VIDEO_FRAME_BUFFER_PROXY_H_ -#define VIDEO_FRAME_BUFFER_PROXY_H_ - -#include - -#include "api/metronome/metronome.h" -#include "api/task_queue/task_queue_base.h" -#include "api/video/encoded_frame.h" -#include "modules/video_coding/include/video_coding_defines.h" -#include "modules/video_coding/timing.h" -#include "rtc_base/task_queue.h" -#include "system_wrappers/include/clock.h" -#include "video/decode_synchronizer.h" - -namespace webrtc { - -class FrameSchedulingReceiver { - public: - virtual ~FrameSchedulingReceiver() = default; - - virtual void OnEncodedFrame(std::unique_ptr frame) = 0; - virtual void OnDecodableFrameTimeout(TimeDelta wait_time) = 0; -}; - -// Temporary class to enable replacement of frame_buffer2 with frame_buffer3. -// Once frame_buffer3 has shown to work with a field trial, frame_buffer2 will -// be removed and this class should be directly integrated into -// video_receive_stream2. bugs.webrtc.org/13343 tracks this integration. -class FrameBufferProxy { - public: - static std::unique_ptr CreateFromFieldTrial( - Clock* clock, - TaskQueueBase* worker_queue, - VCMTiming* timing, - VCMReceiveStatisticsCallback* stats_proxy, - rtc::TaskQueue* decode_queue, - FrameSchedulingReceiver* receiver, - TimeDelta max_wait_for_keyframe, - TimeDelta max_wait_for_frame, - DecodeSynchronizer* decode_sync); - virtual ~FrameBufferProxy() = default; - - // Run on the worker thread. - virtual void StopOnWorker() = 0; - virtual void SetProtectionMode(VCMVideoProtection protection_mode) = 0; - virtual void Clear() = 0; - virtual absl::optional InsertFrame( - std::unique_ptr frame) = 0; - virtual void UpdateRtt(int64_t max_rtt_ms) = 0; - virtual int Size() = 0; - - // Run on either the worker thread or the decode thread. - virtual void StartNextDecode(bool keyframe_required) = 0; -}; - -} // namespace webrtc - -#endif // VIDEO_FRAME_BUFFER_PROXY_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc index 30c16b1db..c46790992 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.cc @@ -11,18 +11,11 @@ #include "video/frame_cadence_adapter.h" #include -#include #include #include -#include -#include "absl/algorithm/container.h" -#include "absl/base/attributes.h" #include "api/sequence_checker.h" #include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" -#include "api/video/video_frame.h" -#include "rtc_base/checks.h" #include "rtc_base/logging.h" #include "rtc_base/race_checker.h" #include "rtc_base/rate_statistics.h" @@ -30,12 +23,9 @@ #include "rtc_base/system/no_unique_address.h" #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread_annotations.h" -#include "rtc_base/time_utils.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" #include "system_wrappers/include/metrics.h" -#include "system_wrappers/include/ntp_time.h" namespace webrtc { namespace { @@ -96,22 +86,9 @@ class PassthroughAdapterMode : public AdapterMode { // Implements a frame cadence adapter supporting zero-hertz input. 
class ZeroHertzAdapterMode : public AdapterMode { public: - ZeroHertzAdapterMode(TaskQueueBase* queue, - Clock* clock, - FrameCadenceAdapterInterface::Callback* callback, + ZeroHertzAdapterMode(FrameCadenceAdapterInterface::Callback* callback, double max_fps); - // Reconfigures according to parameters. - // All spatial layer trackers are initialized as unconverged by this method. - void ReconfigureParameters( - const FrameCadenceAdapterInterface::ZeroHertzModeParams& params); - - // Updates spatial layer quality convergence status. - void UpdateLayerQualityConvergence(int spatial_index, bool quality_converged); - - // Updates spatial layer enabled status. - void UpdateLayerStatus(int spatial_index, bool enabled); - // Adapter overrides. void OnFrame(Timestamp post_time, int frames_scheduled_for_processing, @@ -119,109 +96,23 @@ class ZeroHertzAdapterMode : public AdapterMode { absl::optional GetInputFrameRateFps() override; void UpdateFrameRate() override {} - // Conditionally requests a refresh frame via - // Callback::RequestRefreshFrame. - void ProcessKeyFrameRequest(); - private: - // The tracking state of each spatial layer. Used for determining when to - // stop repeating frames. - struct SpatialLayerTracker { - // If unset, the layer is disabled. Otherwise carries the quality - // convergence status of the layer. - absl::optional quality_converged; - }; - // The state of a scheduled repeat. - struct ScheduledRepeat { - ScheduledRepeat(Timestamp origin, - int64_t origin_timestamp_us, - int64_t origin_ntp_time_ms) - : scheduled(origin), - idle(false), - origin(origin), - origin_timestamp_us(origin_timestamp_us), - origin_ntp_time_ms(origin_ntp_time_ms) {} - // The instant when the repeat was scheduled. - Timestamp scheduled; - // True if the repeat was scheduled as an idle repeat (long), false - // otherwise. - bool idle; - // The moment we decided to start repeating. - Timestamp origin; - // The timestamp_us of the frame when we started repeating. - int64_t origin_timestamp_us; - // The ntp_times_ms of the frame when we started repeating. - int64_t origin_ntp_time_ms; - }; - - // Returns true if all spatial layers can be considered to be converged in - // terms of quality. - // Convergence means QP has dropped to a low-enough level to warrant ceasing - // to send identical frames at high frequency. - bool HasQualityConverged() const RTC_RUN_ON(sequence_checker_); - // Resets quality convergence information. HasQualityConverged() returns false - // after this call. - void ResetQualityConvergenceInfo() RTC_RUN_ON(sequence_checker_); - // Processes incoming frames on a delayed cadence. - void ProcessOnDelayedCadence() RTC_RUN_ON(sequence_checker_); - // Schedules a later repeat with delay depending on state of layer trackers. - // If true is passed in `idle_repeat`, the repeat is going to be - // kZeroHertzIdleRepeatRatePeriod. Otherwise it'll be the value of - // `frame_delay`. - void ScheduleRepeat(int frame_id, bool idle_repeat) - RTC_RUN_ON(sequence_checker_); - // Repeats a frame in the abscence of incoming frames. Slows down when quality - // convergence is attained, and stops the cadence terminally when new frames - // have arrived. - void ProcessRepeatedFrameOnDelayedCadence(int frame_id) - RTC_RUN_ON(sequence_checker_); - // Sends a frame, updating the timestamp to the current time. - void SendFrameNow(const VideoFrame& frame) const - RTC_RUN_ON(sequence_checker_); - // Returns the repeat duration depending on if it's an idle repeat or not. 
- TimeDelta RepeatDuration(bool idle_repeat) const - RTC_RUN_ON(sequence_checker_); - - TaskQueueBase* const queue_; - Clock* const clock_; FrameCadenceAdapterInterface::Callback* const callback_; // The configured max_fps. // TODO(crbug.com/1255737): support max_fps updates. const double max_fps_; - // How much the incoming frame sequence is delayed by. - const TimeDelta frame_delay_ = TimeDelta::Seconds(1) / max_fps_; - RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_; - // A queue of incoming frames and repeated frames. - std::deque queued_frames_ RTC_GUARDED_BY(sequence_checker_); - // The current frame ID to use when starting to repeat frames. This is used - // for cancelling deferred repeated frame processing happening. - int current_frame_id_ RTC_GUARDED_BY(sequence_checker_) = 0; - // Has content when we are repeating frames. - absl::optional scheduled_repeat_ - RTC_GUARDED_BY(sequence_checker_); - // Convergent state of each of the configured simulcast layers. - std::vector layer_trackers_ - RTC_GUARDED_BY(sequence_checker_); - - ScopedTaskSafety safety_; }; class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { public: FrameCadenceAdapterImpl(Clock* clock, TaskQueueBase* queue); - ~FrameCadenceAdapterImpl(); // FrameCadenceAdapterInterface overrides. void Initialize(Callback* callback) override; - void SetZeroHertzModeEnabled( - absl::optional params) override; + void SetZeroHertzModeEnabled(bool enabled) override; absl::optional GetInputFrameRateFps() override; void UpdateFrameRate() override; - void UpdateLayerQualityConvergence(int spatial_index, - bool quality_converged) override; - void UpdateLayerStatus(int spatial_index, bool enabled) override; - void ProcessKeyFrameRequest() override; // VideoFrameSink overrides. void OnFrame(const VideoFrame& frame) override; @@ -258,8 +149,6 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { // The two possible modes we're under. absl::optional passthrough_adapter_; absl::optional zero_hertz_adapter_; - // If set, zero-hertz mode has been enabled. - absl::optional zero_hertz_params_; // Cache for the current adapter mode. AdapterMode* current_adapter_mode_ = nullptr; @@ -270,102 +159,34 @@ class FrameCadenceAdapterImpl : public FrameCadenceAdapterInterface { absl::optional source_constraints_ RTC_GUARDED_BY(queue_); + // Whether zero-hertz and UMA reporting is enabled. + bool zero_hertz_and_uma_reporting_enabled_ RTC_GUARDED_BY(queue_) = false; + // Race checker for incoming frames. This is the network thread in chromium, // but may vary from test contexts. rtc::RaceChecker incoming_frame_race_checker_; bool has_reported_screenshare_frame_rate_umas_ RTC_GUARDED_BY(queue_) = false; // Number of frames that are currently scheduled for processing on the - // `queue_`. + // |queue_|. std::atomic frames_scheduled_for_processing_{0}; - // Whether to ask for a refresh frame on activation of zero-hertz mode. 
- bool should_request_refresh_frame_ RTC_GUARDED_BY(queue_) = false; - ScopedTaskSafetyDetached safety_; }; ZeroHertzAdapterMode::ZeroHertzAdapterMode( - TaskQueueBase* queue, - Clock* clock, FrameCadenceAdapterInterface::Callback* callback, double max_fps) - : queue_(queue), clock_(clock), callback_(callback), max_fps_(max_fps) { + : callback_(callback), max_fps_(max_fps) { sequence_checker_.Detach(); } -void ZeroHertzAdapterMode::ReconfigureParameters( - const FrameCadenceAdapterInterface::ZeroHertzModeParams& params) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_LOG(LS_INFO) << __func__ << " this " << this << " num_simulcast_layers " - << params.num_simulcast_layers; - - // Start as unconverged. - layer_trackers_.clear(); - layer_trackers_.resize(params.num_simulcast_layers, - SpatialLayerTracker{false}); -} - -void ZeroHertzAdapterMode::UpdateLayerQualityConvergence( - int spatial_index, - bool quality_converged) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK_LT(spatial_index, layer_trackers_.size()); - RTC_LOG(LS_INFO) << __func__ << " this " << this << " layer " << spatial_index - << " quality has converged: " << quality_converged; - if (layer_trackers_[spatial_index].quality_converged.has_value()) - layer_trackers_[spatial_index].quality_converged = quality_converged; -} - -void ZeroHertzAdapterMode::UpdateLayerStatus(int spatial_index, bool enabled) { - RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DCHECK_LT(spatial_index, layer_trackers_.size()); - if (enabled) { - if (!layer_trackers_[spatial_index].quality_converged.has_value()) { - // Assume quality has not converged until hearing otherwise. - layer_trackers_[spatial_index].quality_converged = false; - } - } else { - layer_trackers_[spatial_index].quality_converged = absl::nullopt; - } - RTC_LOG(LS_INFO) - << __func__ << " this " << this << " layer " << spatial_index - << (enabled - ? (layer_trackers_[spatial_index].quality_converged.has_value() - ? " enabled." - : " enabled and it's assumed quality has not converged.") - : " disabled."); -} - void ZeroHertzAdapterMode::OnFrame(Timestamp post_time, int frames_scheduled_for_processing, const VideoFrame& frame) { RTC_DCHECK_RUN_ON(&sequence_checker_); - RTC_DLOG(LS_VERBOSE) << "ZeroHertzAdapterMode::" << __func__ << " this " - << this; - - // Assume all enabled layers are unconverged after frame entry. - ResetQualityConvergenceInfo(); - - // Remove stored repeating frame if needed. - if (scheduled_repeat_.has_value()) { - RTC_DCHECK(queued_frames_.size() == 1); - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this - << " cancel repeat and restart with original"; - queued_frames_.pop_front(); - } - - // Store the frame in the queue and schedule deferred processing. - queued_frames_.push_back(frame); - current_frame_id_++; - scheduled_repeat_ = absl::nullopt; - queue_->PostDelayedHighPrecisionTask( - ToQueuedTask(safety_, - [this] { - RTC_DCHECK_RUN_ON(&sequence_checker_); - ProcessOnDelayedCadence(); - }), - frame_delay_.ms()); + // TODO(crbug.com/1255737): fill with meaningful implementation. + callback_->OnFrame(post_time, frames_scheduled_for_processing, frame); } absl::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { @@ -373,185 +194,12 @@ absl::optional ZeroHertzAdapterMode::GetInputFrameRateFps() { return max_fps_; } -void ZeroHertzAdapterMode::ProcessKeyFrameRequest() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - - // If no frame was ever passed to us, request a refresh frame from the source. 
- if (current_frame_id_ == 0) { - RTC_LOG(LS_INFO) - << __func__ << " this " << this - << " requesting refresh frame due to no frames received yet."; - callback_->RequestRefreshFrame(); - return; - } - - // The next frame encoded will be a key frame. Reset quality convergence so we - // don't get idle repeats shortly after, because key frames need a lot of - // refinement frames. - ResetQualityConvergenceInfo(); - - // If we're not repeating, or we're repeating with short duration, we will - // very soon send out a frame and don't need a refresh frame. - if (!scheduled_repeat_.has_value() || !scheduled_repeat_->idle) { - RTC_LOG(LS_INFO) << __func__ << " this " << this - << " not requesting refresh frame because of recently " - "incoming frame or short repeating."; - return; - } - - // If the repeat is scheduled within a short (i.e. frame_delay_) interval, we - // will very soon send out a frame and don't need a refresh frame. - Timestamp now = clock_->CurrentTime(); - if (scheduled_repeat_->scheduled + RepeatDuration(/*idle_repeat=*/true) - - now <= - frame_delay_) { - RTC_LOG(LS_INFO) << __func__ << " this " << this - << " not requesting refresh frame because of soon " - "happening idle repeat"; - return; - } - - // Cancel the current repeat and reschedule a short repeat now. No need for a - // new refresh frame. - RTC_LOG(LS_INFO) << __func__ << " this " << this - << " not requesting refresh frame and scheduling a short " - "repeat due to key frame request"; - ScheduleRepeat(++current_frame_id_, /*idle_repeat=*/false); - return; -} - -// RTC_RUN_ON(&sequence_checker_) -bool ZeroHertzAdapterMode::HasQualityConverged() const { - // 1. Define ourselves as unconverged with no spatial layers configured. This - // is to keep short repeating until the layer configuration comes. - // 2. Unset layers implicitly imply that they're converged to support - // disabling layers when they're not needed. - const bool quality_converged = - !layer_trackers_.empty() && - absl::c_all_of(layer_trackers_, [](const SpatialLayerTracker& tracker) { - return tracker.quality_converged.value_or(true); - }); - return quality_converged; -} - -// RTC_RUN_ON(&sequence_checker_) -void ZeroHertzAdapterMode::ResetQualityConvergenceInfo() { - RTC_DLOG(LS_INFO) << __func__ << " this " << this; - for (auto& layer_tracker : layer_trackers_) { - if (layer_tracker.quality_converged.has_value()) - layer_tracker.quality_converged = false; - } -} - -// RTC_RUN_ON(&sequence_checker_) -void ZeroHertzAdapterMode::ProcessOnDelayedCadence() { - RTC_DCHECK(!queued_frames_.empty()); - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; - - SendFrameNow(queued_frames_.front()); - - // If there were two or more frames stored, we do not have to schedule repeats - // of the front frame. - if (queued_frames_.size() > 1) { - queued_frames_.pop_front(); - return; - } - - // There's only one frame to send. Schedule a repeat sequence, which is - // cancelled by `current_frame_id_` getting incremented should new frames - // arrive. 
- ScheduleRepeat(current_frame_id_, HasQualityConverged()); -} - -// RTC_RUN_ON(&sequence_checker_) -void ZeroHertzAdapterMode::ScheduleRepeat(int frame_id, bool idle_repeat) { - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " frame_id " - << frame_id; - Timestamp now = clock_->CurrentTime(); - if (!scheduled_repeat_.has_value()) { - scheduled_repeat_.emplace(now, queued_frames_.front().timestamp_us(), - queued_frames_.front().ntp_time_ms()); - } - scheduled_repeat_->scheduled = now; - scheduled_repeat_->idle = idle_repeat; - - TimeDelta repeat_delay = RepeatDuration(idle_repeat); - queue_->PostDelayedHighPrecisionTask( - ToQueuedTask(safety_, - [this, frame_id] { - RTC_DCHECK_RUN_ON(&sequence_checker_); - ProcessRepeatedFrameOnDelayedCadence(frame_id); - }), - repeat_delay.ms()); -} - -// RTC_RUN_ON(&sequence_checker_) -void ZeroHertzAdapterMode::ProcessRepeatedFrameOnDelayedCadence(int frame_id) { - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " frame_id " - << frame_id; - RTC_DCHECK(!queued_frames_.empty()); - - // Cancel this invocation if new frames turned up. - if (frame_id != current_frame_id_) - return; - RTC_DCHECK(scheduled_repeat_.has_value()); - - VideoFrame& frame = queued_frames_.front(); - - // Since this is a repeated frame, nothing changed compared to before. - VideoFrame::UpdateRect empty_update_rect; - empty_update_rect.MakeEmptyUpdate(); - frame.set_update_rect(empty_update_rect); - - // Adjust timestamps of the frame of the repeat, accounting for the actual - // delay since we started repeating. - // - // NOTE: No need to update the RTP timestamp as the VideoStreamEncoder - // overwrites it based on its chosen NTP timestamp source. - TimeDelta total_delay = clock_->CurrentTime() - scheduled_repeat_->origin; - if (frame.timestamp_us() > 0) { - frame.set_timestamp_us(scheduled_repeat_->origin_timestamp_us + - total_delay.us()); - } - if (frame.ntp_time_ms()) { - frame.set_ntp_time_ms(scheduled_repeat_->origin_ntp_time_ms + - total_delay.ms()); - } - SendFrameNow(frame); - - // Schedule another repeat. - ScheduleRepeat(frame_id, HasQualityConverged()); -} - -// RTC_RUN_ON(&sequence_checker_) -void ZeroHertzAdapterMode::SendFrameNow(const VideoFrame& frame) const { - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this << " timestamp " - << frame.timestamp() << " timestamp_us " - << frame.timestamp_us() << " ntp_time_ms " - << frame.ntp_time_ms(); - // TODO(crbug.com/1255737): figure out if frames_scheduled_for_processing - // makes sense to compute in this implementation. - callback_->OnFrame(/*post_time=*/clock_->CurrentTime(), - /*frames_scheduled_for_processing=*/1, frame); -} - -// RTC_RUN_ON(&sequence_checker_) -TimeDelta ZeroHertzAdapterMode::RepeatDuration(bool idle_repeat) const { - return idle_repeat - ? 
FrameCadenceAdapterInterface::kZeroHertzIdleRepeatRatePeriod - : frame_delay_; -} - FrameCadenceAdapterImpl::FrameCadenceAdapterImpl(Clock* clock, TaskQueueBase* queue) : clock_(clock), queue_(queue), zero_hertz_screenshare_enabled_( - !field_trial::IsDisabled("WebRTC-ZeroHertzScreenshare")) {} - -FrameCadenceAdapterImpl::~FrameCadenceAdapterImpl() { - RTC_DLOG(LS_VERBOSE) << __func__ << " this " << this; -} + field_trial::IsEnabled("WebRTC-ZeroHertzScreenshare")) {} void FrameCadenceAdapterImpl::Initialize(Callback* callback) { callback_ = callback; @@ -559,13 +207,12 @@ void FrameCadenceAdapterImpl::Initialize(Callback* callback) { current_adapter_mode_ = &passthrough_adapter_.value(); } -void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled( - absl::optional params) { +void FrameCadenceAdapterImpl::SetZeroHertzModeEnabled(bool enabled) { RTC_DCHECK_RUN_ON(queue_); - bool was_zero_hertz_enabled = zero_hertz_params_.has_value(); - if (params.has_value() && !was_zero_hertz_enabled) + bool was_zero_hertz_enabled = zero_hertz_and_uma_reporting_enabled_; + if (enabled && !zero_hertz_and_uma_reporting_enabled_) has_reported_screenshare_frame_rate_umas_ = false; - zero_hertz_params_ = params; + zero_hertz_and_uma_reporting_enabled_ = enabled; MaybeReconfigureAdapters(was_zero_hertz_enabled); } @@ -582,34 +229,10 @@ void FrameCadenceAdapterImpl::UpdateFrameRate() { passthrough_adapter_->UpdateFrameRate(); } -void FrameCadenceAdapterImpl::UpdateLayerQualityConvergence( - int spatial_index, - bool quality_converged) { - if (zero_hertz_adapter_.has_value()) - zero_hertz_adapter_->UpdateLayerQualityConvergence(spatial_index, - quality_converged); -} - -void FrameCadenceAdapterImpl::UpdateLayerStatus(int spatial_index, - bool enabled) { - if (zero_hertz_adapter_.has_value()) - zero_hertz_adapter_->UpdateLayerStatus(spatial_index, enabled); -} - -void FrameCadenceAdapterImpl::ProcessKeyFrameRequest() { - RTC_DCHECK_RUN_ON(queue_); - if (zero_hertz_adapter_) - zero_hertz_adapter_->ProcessKeyFrameRequest(); - else - should_request_refresh_frame_ = true; -} - void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { // This method is called on the network thread under Chromium, or other // various contexts in test. RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_); - RTC_DLOG(LS_VERBOSE) << "FrameCadenceAdapterImpl::" << __func__ << " this " - << this; // Local time in webrtc time base. 
Timestamp post_time = clock_->CurrentTime(); @@ -627,7 +250,7 @@ void FrameCadenceAdapterImpl::OnFrame(const VideoFrame& frame) { void FrameCadenceAdapterImpl::OnConstraintsChanged( const VideoTrackSourceConstraints& constraints) { - RTC_LOG(LS_INFO) << __func__ << " this " << this << " min_fps " + RTC_LOG(LS_INFO) << __func__ << " min_fps " << constraints.min_fps.value_or(-1) << " max_fps " << constraints.max_fps.value_or(-1); queue_->PostTask(ToQueuedTask(safety_.flag(), [this, constraints] { @@ -652,7 +275,7 @@ bool FrameCadenceAdapterImpl::IsZeroHertzScreenshareEnabled() const { return zero_hertz_screenshare_enabled_ && source_constraints_.has_value() && source_constraints_->max_fps.value_or(-1) > 0 && source_constraints_->min_fps.value_or(-1) == 0 && - zero_hertz_params_.has_value(); + zero_hertz_and_uma_reporting_enabled_; } // RTC_RUN_ON(queue_) @@ -661,17 +284,9 @@ void FrameCadenceAdapterImpl::MaybeReconfigureAdapters( bool is_zero_hertz_enabled = IsZeroHertzScreenshareEnabled(); if (is_zero_hertz_enabled) { if (!was_zero_hertz_enabled) { - zero_hertz_adapter_.emplace(queue_, clock_, callback_, + zero_hertz_adapter_.emplace(callback_, source_constraints_->max_fps.value()); - RTC_LOG(LS_INFO) << "Zero hertz mode activated."; - - if (should_request_refresh_frame_) { - // Ensure we get a first frame to work with. - should_request_refresh_frame_ = false; - callback_->RequestRefreshFrame(); - } } - zero_hertz_adapter_->ReconfigureParameters(zero_hertz_params_.value()); current_adapter_mode_ = &zero_hertz_adapter_.value(); } else { if (was_zero_hertz_enabled) @@ -685,7 +300,7 @@ void FrameCadenceAdapterImpl::MaybeReportFrameRateConstraintUmas() { if (has_reported_screenshare_frame_rate_umas_) return; has_reported_screenshare_frame_rate_umas_ = true; - if (!zero_hertz_params_.has_value()) + if (!zero_hertz_and_uma_reporting_enabled_) return; RTC_HISTOGRAM_BOOLEAN("WebRTC.Screenshare.FrameRateConstraints.Exists", source_constraints_.has_value()); diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h index a881c6153..8685f37f9 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h +++ b/TMessagesProj/jni/voip/webrtc/video/frame_cadence_adapter.h @@ -13,9 +13,7 @@ #include -#include "absl/base/attributes.h" #include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" #include "api/video/video_frame.h" #include "api/video/video_sink_interface.h" #include "rtc_base/synchronization/mutex.h" @@ -33,18 +31,7 @@ class FrameCadenceAdapterInterface public: // Averaging window spanning 90 frames at default 30fps, matching old media // optimization module defaults. - // TODO(crbug.com/1255737): Use TimeDelta. static constexpr int64_t kFrameRateAveragingWindowSizeMs = (1000 / 30) * 90; - // In zero-hertz mode, the idle repeat rate is a compromise between - // RTP receiver keyframe-requesting timeout (3s), other backend limitations - // and some worst case RTT. - static constexpr TimeDelta kZeroHertzIdleRepeatRatePeriod = - TimeDelta::Millis(1000); - - struct ZeroHertzModeParams { - // The number of simulcast layers used in this configuration. - int num_simulcast_layers = 0; - }; // Callback interface used to inform instance owners. class Callback { @@ -68,9 +55,6 @@ class FrameCadenceAdapterInterface // Called when the source has discarded a frame. virtual void OnDiscardedFrame() = 0; - - // Called when the adapter needs the source to send a refresh frame. 
- virtual void RequestRefreshFrame() = 0; }; // Factory function creating a production instance. Deletion of the returned @@ -84,11 +68,8 @@ class FrameCadenceAdapterInterface // Call before using the rest of the API. virtual void Initialize(Callback* callback) = 0; - // Pass zero hertz parameters in |params| as a prerequisite to enable - // zero-hertz operation. If absl:::nullopt is passed, the cadence adapter will - // switch to passthrough mode. - virtual void SetZeroHertzModeEnabled( - absl::optional params) = 0; + // Pass true in |enabled| as a prerequisite to enable zero-hertz operation. + virtual void SetZeroHertzModeEnabled(bool enabled) = 0; // Returns the input framerate. This is measured by RateStatistics when // zero-hertz mode is off, and returns the max framerate in zero-hertz mode. @@ -97,19 +78,6 @@ class FrameCadenceAdapterInterface // Updates frame rate. This is done unconditionally irrespective of adapter // mode. virtual void UpdateFrameRate() = 0; - - // Updates quality convergence status for an enabled spatial layer. - // Convergence means QP has dropped to a low-enough level to warrant ceasing - // to send identical frames at high frequency. - virtual void UpdateLayerQualityConvergence(int spatial_index, - bool converged) = 0; - - // Updates spatial layer enabled status. - virtual void UpdateLayerStatus(int spatial_index, bool enabled) = 0; - - // Conditionally requests a refresh frame via - // Callback::RequestRefreshFrame. - virtual void ProcessKeyFrameRequest() = 0; }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h b/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h deleted file mode 100644 index 5387e5429..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/frame_decode_scheduler.h +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#ifndef VIDEO_FRAME_DECODE_SCHEDULER_H_ -#define VIDEO_FRAME_DECODE_SCHEDULER_H_ - -#include - -#include - -#include "absl/types/optional.h" -#include "api/units/timestamp.h" -#include "video/frame_decode_timing.h" - -namespace webrtc { - -class FrameDecodeScheduler { - public: - // Invoked when a frame with `rtp_timestamp` is ready for decoding. - using FrameReleaseCallback = - std::function; - - virtual ~FrameDecodeScheduler() = default; - - // Returns the rtp timestamp of the next frame scheduled for release, or - // `nullopt` if no frame is currently scheduled. - virtual absl::optional ScheduledRtpTimestamp() = 0; - - // Shedules a frame for release based on `schedule`. When released, `callback` - // will be invoked with the `rtp` timestamp of the frame and the `render_time` - virtual void ScheduleFrame(uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback callback) = 0; - - // Cancels all scheduled frames. - virtual void CancelOutstanding() = 0; - - // Stop() Must be called before destruction. 
- virtual void Stop() = 0; -}; - -} // namespace webrtc - -#endif // VIDEO_FRAME_DECODE_SCHEDULER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc deleted file mode 100644 index ddc60302e..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.cc +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/frame_decode_timing.h" - -#include - -#include "absl/types/optional.h" -#include "api/units/time_delta.h" -#include "rtc_base/logging.h" - -namespace webrtc { - -FrameDecodeTiming::FrameDecodeTiming(Clock* clock, - webrtc::VCMTiming const* timing) - : clock_(clock), timing_(timing) { - RTC_DCHECK(clock_); - RTC_DCHECK(timing_); -} - -absl::optional -FrameDecodeTiming::OnFrameBufferUpdated(uint32_t next_temporal_unit_rtp, - uint32_t last_temporal_unit_rtp, - bool too_many_frames_queued) { - const Timestamp now = clock_->CurrentTime(); - Timestamp render_time = Timestamp::Millis( - timing_->RenderTimeMs(next_temporal_unit_rtp, now.ms())); - TimeDelta max_wait = TimeDelta::Millis(timing_->MaxWaitingTime( - render_time.ms(), now.ms(), too_many_frames_queued)); - - // If the delay is not too far in the past, or this is the last decodable - // frame then it is the best frame to be decoded. Otherwise, fast-forward - // to the next frame in the buffer. - if (max_wait <= -kMaxAllowedFrameDelay && - next_temporal_unit_rtp != last_temporal_unit_rtp) { - RTC_DLOG(LS_VERBOSE) << "Fast-forwarded frame " << next_temporal_unit_rtp - << " render time " << render_time.ms() - << " with delay " << max_wait.ms() << "ms"; - return absl::nullopt; - } - RTC_DLOG(LS_VERBOSE) << "Selected frame with rtp " << next_temporal_unit_rtp - << " render time " << render_time.ms() - << " with a max wait of " << max_wait.ms() << "ms"; - - Timestamp latest_decode_time = now + std::max(max_wait, TimeDelta::Zero()); - return FrameSchedule{.latest_decode_time = latest_decode_time, - .render_time = render_time}; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h b/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h deleted file mode 100644 index ff67ace3b..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/frame_decode_timing.h +++ /dev/null @@ -1,53 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef VIDEO_FRAME_DECODE_TIMING_H_ -#define VIDEO_FRAME_DECODE_TIMING_H_ - -#include - -#include - -#include "api/task_queue/task_queue_base.h" -#include "modules/video_coding/timing.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -class FrameDecodeTiming { - public: - FrameDecodeTiming(Clock* clock, webrtc::VCMTiming const* timing); - ~FrameDecodeTiming() = default; - FrameDecodeTiming(const FrameDecodeTiming&) = delete; - FrameDecodeTiming& operator=(const FrameDecodeTiming&) = delete; - - // Any frame that has decode delay more than this in the past can be - // fast-forwarded. - static constexpr TimeDelta kMaxAllowedFrameDelay = TimeDelta::Millis(5); - - struct FrameSchedule { - Timestamp latest_decode_time; - Timestamp render_time; - }; - - absl::optional OnFrameBufferUpdated( - uint32_t next_temporal_unit_rtp, - uint32_t last_temporal_unit_rtp, - bool too_many_frames_queued); - - private: - Clock* const clock_; - webrtc::VCMTiming const* const timing_; -}; - -} // namespace webrtc - -#endif // VIDEO_FRAME_DECODE_TIMING_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc index 930577983..51f09b09c 100644 --- a/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc +++ b/TMessagesProj/jni/voip/webrtc/video/frame_encode_metadata_writer.cc @@ -70,13 +70,8 @@ void FrameEncodeMetadataWriter::OnEncoderInit(const VideoCodec& codec) { codec_settings_.ScalabilityMode() != "") { std::unique_ptr structure = CreateScalabilityStructure(codec_settings_.ScalabilityMode()); - if (structure) { - num_spatial_layers = structure->StreamConfig().num_spatial_layers; - } else { - // |structure| maybe nullptr if the scalability mode is invalid. - RTC_LOG(LS_WARNING) << "Cannot create ScalabilityStructure, since the " - "scalability mode is invalid"; - } + RTC_DCHECK(structure); + num_spatial_layers = structure->StreamConfig().num_spatial_layers; } num_spatial_layers_ = std::max(num_spatial_layers, size_t{1}); } diff --git a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc index 1f5667456..6728e4bd1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/pc_full_stack_tests.cc @@ -132,7 +132,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_VP9) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -163,7 +166,10 @@ TEST(PCGenericDescriptorTest, {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } // VP9 2nd profile isn't supported on android arm and arm 64. 
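The pc_full_stack_tests.cc hunks above and below all apply one pattern: with the per-peer PeerConfigurer::SetUseUlpFEC / SetUseFlexFEC setters gone, FEC is chosen once per test run through the use_flex_fec and use_ulp_fec fields of RunParams before the fixture is started. A minimal sketch of that pattern for a ULPFEC-protected run, assuming a fixture already built by CreateTestFixture as in the surrounding tests (the fixture variable and kTestDurationSec are taken from those tests, not introduced here):

    RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
    run_params.use_flex_fec = false;  // no FlexFEC for this run
    run_params.use_ulp_fec = true;    // protect the run with ULPFEC instead
    fixture->Run(std::move(run_params));

Tests that want no FEC at all simply set both flags to false, as the plain delay/loss tests above do.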
@@ -201,7 +207,10 @@ TEST(PCFullStackTest, MAYBE_Pc_Generator_Net_Delay_0_0_Plr_0_VP9Profile2) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile2)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } /* @@ -251,7 +260,10 @@ TEST(PCFullStackTest, Pc_Net_Delay_0_0_Plr_0) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -271,7 +283,10 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -298,6 +313,8 @@ TEST(PCGenericDescriptorTest, }, [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } @@ -319,7 +336,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_150kbps_Net_Delay_0_0_Plr_0) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { @@ -339,10 +359,12 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_130kbps_Delay100ms_Loss1_Ulpfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = true; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { @@ -362,10 +384,12 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_50kbps_Delay100ms_Loss1_Ulpfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = true; + fixture->Run(std::move(run_params)); } // Restricted network and encoder overproducing by 30%. 
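The two "overproducing encoder" tests that follow migrate the overshoot knob the same way: instead of calling SetVideoEncoderBitrateMultiplier(1.30) on both PeerConfigurer instances, the multiplier now travels with RunParams. A short sketch of the run setup those hunks use, with the FEC flags and the 1.30 multiplier copied from the diff below (a sketch of the test-side pattern only, not of the fixture internals):

    RunParams run_params(TimeDelta::Seconds(kTestDurationSec));
    run_params.use_flex_fec = false;
    run_params.use_ulp_fec = false;
    // Let the encoder overshoot its target bitrate by 30% on the restricted link.
    run_params.video_encoder_bitrate_multiplier = 1.30;
    fixture->Run(std::move(run_params));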
@@ -387,10 +411,13 @@ TEST(PCFullStackTest, auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoEncoderBitrateMultiplier(1.30); }, - [](PeerConfigurer* bob) { bob->SetVideoEncoderBitrateMultiplier(1.30); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + run_params.video_encoder_bitrate_multiplier = 1.30; + fixture->Run(std::move(run_params)); } // Weak 3G-style link: 250kbps, 1% loss, 100ms delay, 15 packets queue. @@ -415,10 +442,13 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Link_250kbps_Delay100ms_10pkts_Loss1) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetVideoEncoderBitrateMultiplier(1.30); }, - [](PeerConfigurer* bob) { bob->SetVideoEncoderBitrateMultiplier(1.30); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + run_params.video_encoder_bitrate_multiplier = 1.30; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -440,7 +470,10 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -460,10 +493,12 @@ TEST(PCGenericDescriptorTest, auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = true; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { @@ -482,11 +517,11 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_Flexfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseFlexFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseFlexFEC(true); }); + [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.enable_flex_fec_support = true; + run_params.use_flex_fec = true; + run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } @@ -507,11 +542,11 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Flexfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseFlexFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseFlexFEC(true); }); + [](PeerConfigurer* bob) {}); RunParams 
run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.enable_flex_fec_support = true; + run_params.use_flex_fec = true; + run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } @@ -532,10 +567,12 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_Delay_50_0_Plr_3_Ulpfec) { auto frame_generator = CreateFromYuvFileFrameGenerator( video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); - alice->SetUseUlpFEC(true); }, - [](PeerConfigurer* bob) { bob->SetUseUlpFEC(true); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + [](PeerConfigurer* bob) {}); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = true; + fixture->Run(std::move(run_params)); } #if defined(WEBRTC_USE_H264) @@ -558,7 +595,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Net_Delay_0_0_Plr_0_H264) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { @@ -586,7 +626,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_30kbps_Net_Delay_0_0_Plr_0_H264) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -611,7 +654,10 @@ TEST(PCGenericDescriptorTest, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { @@ -638,7 +684,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Sps_Pps_Idr) { [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { @@ -658,14 +707,13 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Flexfec) { video, ClipNameToClipPath("foreman_cif")); alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); - alice->SetUseFlexFEC(true); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); - bob->SetUseFlexFEC(true); }); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); - run_params.enable_flex_fec_support = true; + run_params.use_flex_fec = true; + run_params.use_ulp_fec = false; fixture->Run(std::move(run_params)); } @@ -688,13 +736,14 @@ TEST(PCFullStackTest, DISABLED_Pc_Foreman_Cif_Delay_50_0_Plr_5_H264_Ulpfec) { video, ClipNameToClipPath("foreman_cif")); 
alice->AddVideoConfig(std::move(video), std::move(frame_generator)); alice->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); - alice->SetUseUlpFEC(true); }, [](PeerConfigurer* bob) { bob->SetVideoCodecs({VideoCodecConfig(cricket::kH264CodecName)}); - bob->SetUseUlpFEC(true); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = true; + fixture->Run(std::move(run_params)); } #endif // defined(WEBRTC_USE_H264) @@ -716,7 +765,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { @@ -738,7 +790,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_32pkts_Queue) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { @@ -760,7 +815,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_500kbps_100ms) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCGenericDescriptorTest, @@ -783,7 +841,10 @@ TEST(PCGenericDescriptorTest, alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } /* @@ -826,7 +887,10 @@ TEST(PCFullStackTest, Pc_Foreman_Cif_1000kbps_100ms_32pkts_Queue) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } // TODO(sprang): Remove this if we have the similar ModerateLimits below? 
@@ -849,7 +913,10 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } /* @@ -1007,7 +1074,10 @@ TEST(PCFullStackTest, Pc_Conference_Motion_Hd_2000kbps_100ms_32pkts_Queue_Vp9) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } #endif @@ -1028,7 +1098,10 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_No_Conference_Mode) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Screenshare_Slides) { @@ -1048,6 +1121,8 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides) { }, [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; run_params.use_conference_mode = true; fixture->Run(std::move(run_params)); } @@ -1073,7 +1148,10 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast_No_Conference_Mode) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast) { @@ -1096,6 +1174,8 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Simulcast) { }, [](PeerConfigurer* bob) {}); RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; run_params.use_conference_mode = true; fixture->Run(std::move(run_params)); } @@ -1308,7 +1388,10 @@ TEST(PCFullStackTest, Pc_Screenshare_Slides_Vp9_3sl_High_Fps) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { @@ -1340,7 +1423,10 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_High) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { @@ -1372,7 +1458,10 @@ TEST(PCFullStackTest, Pc_Vp9svc_3sl_Low) { {kVP9FmtpProfileId, VP9ProfileToString(VP9Profile::kProfile0)}})}); }); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams 
run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } #endif // defined(RTC_ENABLE_VP9) @@ -1493,7 +1582,10 @@ TEST(PCFullStackTest, MAYBE_Pc_Simulcast_HD_High) { alice->AddVideoConfig(std::move(video)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { @@ -1515,7 +1607,10 @@ TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_High) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { @@ -1536,7 +1631,10 @@ TEST(PCFullStackTest, Pc_Simulcast_Vp8_3sl_Low) { alice->AddVideoConfig(std::move(video), std::move(frame_generator)); }, [](PeerConfigurer* bob) {}); - fixture->Run(RunParams(TimeDelta::Seconds(kTestDurationSec))); + RunParams run_params(TimeDelta::Seconds(kTestDurationSec)); + run_params.use_flex_fec = false; + run_params.use_ulp_fec = false; + fixture->Run(std::move(run_params)); } /* diff --git a/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h b/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h index 15bc90773..22816a827 100644 --- a/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h +++ b/TMessagesProj/jni/voip/webrtc/video/quality_limitation_reason_tracker.h @@ -24,8 +24,6 @@ namespace webrtc { // duration of time spent in each reason. See qualityLimitationReason[1], // qualityLimitationDurations[2], and qualityLimitationResolutionChanges[3] in // the webrtc-stats spec. -// Note that the specification defines the durations in seconds while the -// internal data structures defines it in milliseconds. 
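In the receive_statistics_proxy2.cc hunks that follow, the "run inline when already on the worker thread" fast path is dropped: OnFrameBufferTimingsUpdated and OnTimingFrameInfoUpdated now always run on decode_queue_ and post the stats update to worker_thread_ under the proxy's task-safety flag. A condensed sketch of that posting pattern, using member names taken from the hunks (the surrounding class and a single target_delay_ms argument are assumed for brevity):

    RTC_DCHECK_RUN_ON(&decode_queue_);                 // caller is on the decode queue
    worker_thread_->PostTask(ToQueuedTask(
        task_safety_,                                  // task is dropped if the proxy is gone
        [target_delay_ms, this]() {
          RTC_DCHECK_RUN_ON(&main_thread_);            // stats_ is only touched on the worker thread
          stats_.target_delay_ms = target_delay_ms;
        }));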
// [1] // https://w3c.github.io/webrtc-stats/#dom-rtcoutboundrtpstreamstats-qualitylimitationreason // [2] diff --git a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc index aabfa5290..22da793cd 100644 --- a/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/receive_statistics_proxy2.cc @@ -725,33 +725,25 @@ void ReceiveStatisticsProxy::OnFrameBufferTimingsUpdated( int jitter_buffer_ms, int min_playout_delay_ms, int render_delay_ms) { - // Only called on main_thread_ with FrameBuffer3 - if (!worker_thread_->IsCurrent()) { - RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, - [max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, - min_playout_delay_ms, render_delay_ms, this]() { - OnFrameBufferTimingsUpdated(max_decode_ms, current_delay_ms, - target_delay_ms, jitter_buffer_ms, - min_playout_delay_ms, render_delay_ms); - })); - return; - } - - RTC_DCHECK_RUN_ON(&main_thread_); - stats_.max_decode_ms = max_decode_ms; - stats_.current_delay_ms = current_delay_ms; - stats_.target_delay_ms = target_delay_ms; - stats_.jitter_buffer_ms = jitter_buffer_ms; - stats_.min_playout_delay_ms = min_playout_delay_ms; - stats_.render_delay_ms = render_delay_ms; - jitter_buffer_delay_counter_.Add(jitter_buffer_ms); - target_delay_counter_.Add(target_delay_ms); - current_delay_counter_.Add(current_delay_ms); - // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + - // render delay). - delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask( + task_safety_, + [max_decode_ms, current_delay_ms, target_delay_ms, jitter_buffer_ms, + min_playout_delay_ms, render_delay_ms, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + stats_.max_decode_ms = max_decode_ms; + stats_.current_delay_ms = current_delay_ms; + stats_.target_delay_ms = target_delay_ms; + stats_.jitter_buffer_ms = jitter_buffer_ms; + stats_.min_playout_delay_ms = min_playout_delay_ms; + stats_.render_delay_ms = render_delay_ms; + jitter_buffer_delay_counter_.Add(jitter_buffer_ms); + target_delay_counter_.Add(target_delay_ms); + current_delay_counter_.Add(current_delay_ms); + // Network delay (rtt/2) + target_delay_ms (jitter delay + decode time + + // render delay). 
+ delay_counter_.Add(target_delay_ms + avg_rtt_ms_ / 2); + })); } void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { @@ -761,29 +753,25 @@ void ReceiveStatisticsProxy::OnUniqueFramesCounted(int num_unique_frames) { void ReceiveStatisticsProxy::OnTimingFrameInfoUpdated( const TimingFrameInfo& info) { - // Only called on main_thread_ with FrameBuffer3 - if (!worker_thread_->IsCurrent()) { - RTC_DCHECK_RUN_ON(&decode_queue_); - worker_thread_->PostTask(ToQueuedTask( - task_safety_, [info, this]() { OnTimingFrameInfoUpdated(info); })); - return; - } - RTC_DCHECK_RUN_ON(&main_thread_); - if (info.flags != VideoSendTiming::kInvalid) { - int64_t now_ms = clock_->TimeInMilliseconds(); - timing_frame_info_counter_.Add(info, now_ms); - } + RTC_DCHECK_RUN_ON(&decode_queue_); + worker_thread_->PostTask(ToQueuedTask(task_safety_, [info, this]() { + RTC_DCHECK_RUN_ON(&main_thread_); + if (info.flags != VideoSendTiming::kInvalid) { + int64_t now_ms = clock_->TimeInMilliseconds(); + timing_frame_info_counter_.Add(info, now_ms); + } - // Measure initial decoding latency between the first frame arriving and - // the first frame being decoded. - if (!first_frame_received_time_ms_.has_value()) { - first_frame_received_time_ms_ = info.receive_finish_ms; - } - if (stats_.first_frame_received_to_decoded_ms == -1 && - first_decoded_frame_time_ms_) { - stats_.first_frame_received_to_decoded_ms = - *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; - } + // Measure initial decoding latency between the first frame arriving and + // the first frame being decoded. + if (!first_frame_received_time_ms_.has_value()) { + first_frame_received_time_ms_ = info.receive_finish_ms; + } + if (stats_.first_frame_received_to_decoded_ms == -1 && + first_decoded_frame_time_ms_) { + stats_.first_frame_received_to_decoded_ms = + *first_decoded_frame_time_ms_ - *first_frame_received_time_ms_; + } + })); } void ReceiveStatisticsProxy::RtcpPacketTypesCounterUpdated( diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc index 5fa12a037..32a438de1 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.cc @@ -17,6 +17,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/macros.h" #include "absl/memory/memory.h" #include "absl/types/optional.h" #include "media/base/media_constants.h" @@ -619,7 +620,7 @@ void RtpVideoStreamReceiver::OnReceivedPayloadData( case video_coding::H264SpsPpsTracker::kRequestKeyframe: rtcp_feedback_buffer_.RequestKeyFrame(); rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case video_coding::H264SpsPpsTracker::kDrop: return; case video_coding::H264SpsPpsTracker::kInsert: diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h index 0f754992d..d78b76a85 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver.h @@ -49,6 +49,7 @@ #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "modules/video_coding/unique_timestamp_counter.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include 
"rtc_base/synchronization/mutex.h" diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc index d8c2f0c57..ce0b7a14f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.cc @@ -17,6 +17,7 @@ #include #include "absl/algorithm/container.h" +#include "absl/base/macros.h" #include "absl/memory/memory.h" #include "absl/types/optional.h" #include "api/video/video_codec_type.h" @@ -242,7 +243,6 @@ RtpVideoStreamReceiver2::RtpVideoStreamReceiver2( config_.rtp.local_ssrc)), complete_frame_callback_(complete_frame_callback), keyframe_request_sender_(keyframe_request_sender), - keyframe_request_method_(config_.rtp.keyframe_method), // TODO(bugs.webrtc.org/10336): Let `rtcp_feedback_buffer_` communicate // directly with `rtp_rtcp_`. rtcp_feedback_buffer_(this, nack_sender, this), @@ -603,7 +603,7 @@ void RtpVideoStreamReceiver2::OnReceivedPayloadData( case video_coding::H264SpsPpsTracker::kRequestKeyframe: rtcp_feedback_buffer_.RequestKeyFrame(); rtcp_feedback_buffer_.SendBufferedRtcpFeedback(); - [[fallthrough]]; + ABSL_FALLTHROUGH_INTENDED; case video_coding::H264SpsPpsTracker::kDrop: return; case video_coding::H264SpsPpsTracker::kInsert: @@ -701,10 +701,8 @@ void RtpVideoStreamReceiver2::RequestKeyFrame() { // sender) is relying on LNTF alone. if (keyframe_request_sender_) { keyframe_request_sender_->RequestKeyFrame(); - } else if (keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp) { + } else { rtp_rtcp_->SendPictureLossIndication(); - } else if (keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp) { - rtp_rtcp_->SendFullIntraRequest(); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h index 76bbece66..54eb7502a 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver2.h @@ -46,6 +46,7 @@ #include "modules/video_coding/packet_buffer.h" #include "modules/video_coding/rtp_frame_reference_finder.h" #include "modules/video_coding/unique_timestamp_counter.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/numerics/sequence_number_util.h" #include "rtc_base/system/no_unique_address.h" @@ -321,7 +322,6 @@ class RtpVideoStreamReceiver2 : public LossNotificationSender, OnCompleteFrameCallback* complete_frame_callback_; KeyFrameRequestSender* const keyframe_request_sender_; - const KeyFrameReqMethod keyframe_request_method_; RtcpFeedbackBuffer rtcp_feedback_buffer_; const std::unique_ptr nack_module_; diff --git a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc index 90f9ad94f..c54939fe5 100644 --- a/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/TMessagesProj/jni/voip/webrtc/video/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -102,8 +102,8 @@ void RtpVideoStreamReceiverFrameTransformerDelegate::TransformFrame( void RtpVideoStreamReceiverFrameTransformerDelegate::OnTransformedFrame( std::unique_ptr frame) { - rtc::scoped_refptr delegate( - this); + rtc::scoped_refptr delegate = + this; network_thread_->PostTask(ToQueuedTask( [delegate = std::move(delegate), frame = 
std::move(frame)]() mutable { delegate->ManageFrame(std::move(frame)); diff --git a/TMessagesProj/jni/voip/webrtc/video/stats_counter.h b/TMessagesProj/jni/voip/webrtc/video/stats_counter.h index 9e2b8702d..9c3f6f815 100644 --- a/TMessagesProj/jni/voip/webrtc/video/stats_counter.h +++ b/TMessagesProj/jni/voip/webrtc/video/stats_counter.h @@ -14,6 +14,8 @@ #include #include +#include "rtc_base/constructor_magic.h" + namespace webrtc { class AggregatedCounter; @@ -154,9 +156,6 @@ class AvgCounter : public StatsCounter { bool include_empty_intervals); ~AvgCounter() override {} - AvgCounter(const AvgCounter&) = delete; - AvgCounter& operator=(const AvgCounter&) = delete; - void Add(int sample); private: @@ -164,6 +163,8 @@ class AvgCounter : public StatsCounter { // Returns the last computed metric (i.e. from GetMetric). int GetValueForEmptyInterval() const override; + + RTC_DISALLOW_COPY_AND_ASSIGN(AvgCounter); }; // MaxCounter: maximum of samples @@ -179,14 +180,13 @@ class MaxCounter : public StatsCounter { int64_t process_intervals_ms); ~MaxCounter() override {} - MaxCounter(const MaxCounter&) = delete; - MaxCounter& operator=(const MaxCounter&) = delete; - void Add(int sample); private: bool GetMetric(int* metric) const override; int GetValueForEmptyInterval() const override; + + RTC_DISALLOW_COPY_AND_ASSIGN(MaxCounter); }; // PercentCounter: percentage of samples @@ -200,14 +200,13 @@ class PercentCounter : public StatsCounter { PercentCounter(Clock* clock, StatsCounterObserver* observer); ~PercentCounter() override {} - PercentCounter(const PercentCounter&) = delete; - PercentCounter& operator=(const PercentCounter&) = delete; - void Add(bool sample); private: bool GetMetric(int* metric) const override; int GetValueForEmptyInterval() const override; + + RTC_DISALLOW_COPY_AND_ASSIGN(PercentCounter); }; // PermilleCounter: permille of samples @@ -221,14 +220,13 @@ class PermilleCounter : public StatsCounter { PermilleCounter(Clock* clock, StatsCounterObserver* observer); ~PermilleCounter() override {} - PermilleCounter(const PermilleCounter&) = delete; - PermilleCounter& operator=(const PermilleCounter&) = delete; - void Add(bool sample); private: bool GetMetric(int* metric) const override; int GetValueForEmptyInterval() const override; + + RTC_DISALLOW_COPY_AND_ASSIGN(PermilleCounter); }; // RateCounter: units per second @@ -249,14 +247,13 @@ class RateCounter : public StatsCounter { bool include_empty_intervals); ~RateCounter() override {} - RateCounter(const RateCounter&) = delete; - RateCounter& operator=(const RateCounter&) = delete; - void Add(int sample); private: bool GetMetric(int* metric) const override; int GetValueForEmptyInterval() const override; // Returns zero. + + RTC_DISALLOW_COPY_AND_ASSIGN(RateCounter); }; // RateAccCounter: units per second (used for counters) @@ -277,9 +274,6 @@ class RateAccCounter : public StatsCounter { bool include_empty_intervals); ~RateAccCounter() override {} - RateAccCounter(const RateAccCounter&) = delete; - RateAccCounter& operator=(const RateAccCounter&) = delete; - void Set(int64_t sample, uint32_t stream_id); // Sets the value for previous interval. @@ -289,6 +283,8 @@ class RateAccCounter : public StatsCounter { private: bool GetMetric(int* metric) const override; int GetValueForEmptyInterval() const override; // Returns zero. 
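The rtp_video_stream_receiver*.cc and stats_counter.h hunks around here swap two newer idioms back to macro form: [[fallthrough]] becomes ABSL_FALLTHROUGH_INTENDED (hence the added absl/base/macros.h includes), and the explicitly deleted copy constructor/assignment pairs become RTC_DISALLOW_COPY_AND_ASSIGN via the rtc_base/constructor_magic.h include. A small self-contained illustration; the Example class is hypothetical and not part of the patch:

    #include "absl/base/macros.h"            // ABSL_FALLTHROUGH_INTENDED
    #include "rtc_base/constructor_magic.h"  // RTC_DISALLOW_COPY_AND_ASSIGN

    class Example {
     public:
      Example() = default;

      int Map(int state) {
        switch (state) {
          case 0:
            ++hits_;
            ABSL_FALLTHROUGH_INTENDED;  // same intent as [[fallthrough]]
          case 1:
            return hits_;
          default:
            return -1;
        }
      }

     private:
      int hits_ = 0;

      // Expands to a deleted copy constructor and copy-assignment operator.
      RTC_DISALLOW_COPY_AND_ASSIGN(Example);
    };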
+ + RTC_DISALLOW_COPY_AND_ASSIGN(RateAccCounter); }; } // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc deleted file mode 100644 index 72de3c3ec..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.cc +++ /dev/null @@ -1,76 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/task_queue_frame_decode_scheduler.h" - -#include -#include - -#include "api/sequence_checker.h" -#include "rtc_base/checks.h" -#include "rtc_base/task_utils/to_queued_task.h" - -namespace webrtc { - -TaskQueueFrameDecodeScheduler::TaskQueueFrameDecodeScheduler( - Clock* clock, - TaskQueueBase* const bookkeeping_queue) - : clock_(clock), bookkeeping_queue_(bookkeeping_queue) { - RTC_DCHECK(clock_); - RTC_DCHECK(bookkeeping_queue_); -} - -TaskQueueFrameDecodeScheduler::~TaskQueueFrameDecodeScheduler() { - RTC_DCHECK(stopped_); - RTC_DCHECK(!scheduled_rtp_) << "Outstanding scheduled rtp=" << *scheduled_rtp_ - << ". Call CancelOutstanding before destruction."; -} - -void TaskQueueFrameDecodeScheduler::ScheduleFrame( - uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback cb) { - RTC_DCHECK(!stopped_) << "Can not schedule frames after stopped."; - RTC_DCHECK(!scheduled_rtp_.has_value()) - << "Can not schedule two frames for release at the same time."; - RTC_DCHECK(cb); - scheduled_rtp_ = rtp; - - TimeDelta wait = std::max( - TimeDelta::Zero(), schedule.latest_decode_time - clock_->CurrentTime()); - bookkeeping_queue_->PostDelayedTask( - ToQueuedTask(task_safety_.flag(), - [this, rtp, schedule, cb = std::move(cb)] { - RTC_DCHECK_RUN_ON(bookkeeping_queue_); - // If the next frame rtp has changed since this task was - // this scheduled release should be skipped. - if (scheduled_rtp_ != rtp) - return; - scheduled_rtp_ = absl::nullopt; - cb(rtp, schedule.render_time); - }), - wait.ms()); -} - -void TaskQueueFrameDecodeScheduler::CancelOutstanding() { - scheduled_rtp_ = absl::nullopt; -} - -absl::optional -TaskQueueFrameDecodeScheduler::ScheduledRtpTimestamp() { - return scheduled_rtp_; -} - -void TaskQueueFrameDecodeScheduler::Stop() { - CancelOutstanding(); - stopped_ = true; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h b/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h deleted file mode 100644 index 69c6dae63..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/task_queue_frame_decode_scheduler.h +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#ifndef VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_ -#define VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_ - -#include "video/frame_decode_scheduler.h" - -namespace webrtc { - -// An implementation of FrameDecodeScheduler that is based on TaskQueues. This -// is the default implementation for general use. -class TaskQueueFrameDecodeScheduler : public FrameDecodeScheduler { - public: - TaskQueueFrameDecodeScheduler(Clock* clock, - TaskQueueBase* const bookkeeping_queue); - ~TaskQueueFrameDecodeScheduler() override; - TaskQueueFrameDecodeScheduler(const TaskQueueFrameDecodeScheduler&) = delete; - TaskQueueFrameDecodeScheduler& operator=( - const TaskQueueFrameDecodeScheduler&) = delete; - - // FrameDecodeScheduler implementation. - absl::optional ScheduledRtpTimestamp() override; - void ScheduleFrame(uint32_t rtp, - FrameDecodeTiming::FrameSchedule schedule, - FrameReleaseCallback cb) override; - void CancelOutstanding() override; - void Stop() override; - - private: - Clock* const clock_; - TaskQueueBase* const bookkeeping_queue_; - - absl::optional scheduled_rtp_; - ScopedTaskSafetyDetached task_safety_; - bool stopped_ = false; -}; - -} // namespace webrtc - -#endif // VIDEO_TASK_QUEUE_FRAME_DECODE_SCHEDULER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc deleted file mode 100644 index 25d50b076..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.cc +++ /dev/null @@ -1,1568 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#include "video/video_quality_test.h" - -#include - -#if defined(WEBRTC_WIN) -#include -#endif - -#include -#include -#include -#include -#include -#include - -#include "api/fec_controller_override.h" -#include "api/rtc_event_log_output_file.h" -#include "api/task_queue/default_task_queue_factory.h" -#include "api/task_queue/task_queue_base.h" -#include "api/test/create_frame_generator.h" -#include "api/video/builtin_video_bitrate_allocator_factory.h" -#include "api/video_codecs/video_encoder.h" -#include "call/fake_network_pipe.h" -#include "call/simulated_network.h" -#include "media/base/media_constants.h" -#include "media/engine/adm_helpers.h" -#include "media/engine/encoder_simulcast_proxy.h" -#include "media/engine/fake_video_codec_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "media/engine/webrtc_video_engine.h" -#include "modules/audio_device/include/audio_device.h" -#include "modules/audio_mixer/audio_mixer_impl.h" -#include "modules/video_coding/codecs/h264/include/h264.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h" -#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h" -#include "modules/video_coding/codecs/vp8/include/vp8.h" -#include "modules/video_coding/codecs/vp9/include/vp9.h" -#include "modules/video_coding/utility/ivf_file_writer.h" -#include "rtc_base/strings/string_builder.h" -#include "rtc_base/task_queue_for_test.h" -#include "test/platform_video_capturer.h" -#include "test/testsupport/file_utils.h" -#include "test/video_renderer.h" -#include "video/frame_dumping_decoder.h" -#ifdef WEBRTC_WIN -#include "modules/audio_device/include/audio_device_factory.h" -#endif - -namespace webrtc { - -namespace { -enum : int { // The first valid value is 1. 
- kAbsSendTimeExtensionId = 1, - kGenericFrameDescriptorExtensionId00, - kGenericFrameDescriptorExtensionId01, - kTransportSequenceNumberExtensionId, - kVideoContentTypeExtensionId, - kVideoTimingExtensionId, -}; - -constexpr char kSyncGroup[] = "av_sync"; -constexpr int kOpusMinBitrateBps = 6000; -constexpr int kOpusBitrateFbBps = 32000; -constexpr int kFramesSentInQuickTest = 1; -constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000; -constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000; - -constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax; - -const VideoEncoder::Capabilities kCapabilities(false); - -std::pair GetMinMaxBitratesBps(const VideoCodec& codec, - size_t spatial_idx) { - uint32_t min_bitrate = codec.minBitrate; - uint32_t max_bitrate = codec.maxBitrate; - if (spatial_idx < codec.numberOfSimulcastStreams) { - min_bitrate = - std::max(min_bitrate, codec.simulcastStream[spatial_idx].minBitrate); - max_bitrate = - std::min(max_bitrate, codec.simulcastStream[spatial_idx].maxBitrate); - } - if (codec.codecType == VideoCodecType::kVideoCodecVP9 && - spatial_idx < codec.VP9().numberOfSpatialLayers) { - min_bitrate = - std::max(min_bitrate, codec.spatialLayers[spatial_idx].minBitrate); - max_bitrate = - std::min(max_bitrate, codec.spatialLayers[spatial_idx].maxBitrate); - } - max_bitrate = std::max(max_bitrate, min_bitrate); - return {min_bitrate * 1000, max_bitrate * 1000}; -} - -class VideoStreamFactory - : public VideoEncoderConfig::VideoStreamFactoryInterface { - public: - explicit VideoStreamFactory(const std::vector& streams) - : streams_(streams) {} - - private: - std::vector CreateEncoderStreams( - int width, - int height, - const VideoEncoderConfig& encoder_config) override { - // The highest layer must match the incoming resolution. - std::vector streams = streams_; - streams[streams_.size() - 1].height = height; - streams[streams_.size() - 1].width = width; - - streams[0].bitrate_priority = encoder_config.bitrate_priority; - return streams; - } - - std::vector streams_; -}; - -// This wrapper provides two features needed by the video quality tests: -// 1. Invoke VideoAnalyzer callbacks before and after encoding each frame. -// 2. Write the encoded frames to file, one file per simulcast layer. -class QualityTestVideoEncoder : public VideoEncoder, - private EncodedImageCallback { - public: - QualityTestVideoEncoder(std::unique_ptr encoder, - VideoAnalyzer* analyzer, - std::vector files, - double overshoot_factor) - : encoder_(std::move(encoder)), - overshoot_factor_(overshoot_factor), - analyzer_(analyzer) { - for (FileWrapper& file : files) { - writers_.push_back( - IvfFileWriter::Wrap(std::move(file), /* byte_limit= */ 100000000)); - } - } - - // Implement VideoEncoder - void SetFecControllerOverride( - FecControllerOverride* fec_controller_override) { - // Ignored. 
- } - - int32_t InitEncode(const VideoCodec* codec_settings, - const Settings& settings) override { - codec_settings_ = *codec_settings; - return encoder_->InitEncode(codec_settings, settings); - } - - int32_t RegisterEncodeCompleteCallback( - EncodedImageCallback* callback) override { - callback_ = callback; - return encoder_->RegisterEncodeCompleteCallback(this); - } - - int32_t Release() override { return encoder_->Release(); } - - int32_t Encode(const VideoFrame& frame, - const std::vector* frame_types) { - if (analyzer_) { - analyzer_->PreEncodeOnFrame(frame); - } - return encoder_->Encode(frame, frame_types); - } - - void SetRates(const RateControlParameters& parameters) override { - RTC_DCHECK_GT(overshoot_factor_, 0.0); - if (overshoot_factor_ == 1.0) { - encoder_->SetRates(parameters); - return; - } - - // Simulating encoder overshooting target bitrate, by configuring actual - // encoder too high. Take care not to adjust past limits of config, - // otherwise encoders may crash on DCHECK. - VideoBitrateAllocation overshot_allocation; - for (size_t si = 0; si < kMaxSpatialLayers; ++si) { - const uint32_t spatial_layer_bitrate_bps = - parameters.bitrate.GetSpatialLayerSum(si); - if (spatial_layer_bitrate_bps == 0) { - continue; - } - - uint32_t min_bitrate_bps; - uint32_t max_bitrate_bps; - std::tie(min_bitrate_bps, max_bitrate_bps) = - GetMinMaxBitratesBps(codec_settings_, si); - double overshoot_factor = overshoot_factor_; - const uint32_t corrected_bitrate = rtc::checked_cast( - overshoot_factor * spatial_layer_bitrate_bps); - if (corrected_bitrate < min_bitrate_bps) { - overshoot_factor = min_bitrate_bps / spatial_layer_bitrate_bps; - } else if (corrected_bitrate > max_bitrate_bps) { - overshoot_factor = max_bitrate_bps / spatial_layer_bitrate_bps; - } - - for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) { - if (parameters.bitrate.HasBitrate(si, ti)) { - overshot_allocation.SetBitrate( - si, ti, - rtc::checked_cast( - overshoot_factor * parameters.bitrate.GetBitrate(si, ti))); - } - } - } - - return encoder_->SetRates( - RateControlParameters(overshot_allocation, parameters.framerate_fps, - parameters.bandwidth_allocation)); - } - - void OnPacketLossRateUpdate(float packet_loss_rate) override { - encoder_->OnPacketLossRateUpdate(packet_loss_rate); - } - - void OnRttUpdate(int64_t rtt_ms) override { encoder_->OnRttUpdate(rtt_ms); } - - void OnLossNotification(const LossNotification& loss_notification) override { - encoder_->OnLossNotification(loss_notification); - } - - EncoderInfo GetEncoderInfo() const override { - EncoderInfo info = encoder_->GetEncoderInfo(); - if (overshoot_factor_ != 1.0) { - // We're simulating bad encoder, don't forward trusted setting - // from eg libvpx. 
- info.has_trusted_rate_controller = false; - } - return info; - } - - private: - // Implement EncodedImageCallback - Result OnEncodedImage(const EncodedImage& encoded_image, - const CodecSpecificInfo* codec_specific_info) override { - if (codec_specific_info) { - int simulcast_index; - if (codec_specific_info->codecType == kVideoCodecVP9) { - simulcast_index = 0; - } else { - simulcast_index = encoded_image.SpatialIndex().value_or(0); - } - RTC_DCHECK_GE(simulcast_index, 0); - if (analyzer_) { - analyzer_->PostEncodeOnFrame(simulcast_index, - encoded_image.Timestamp()); - } - if (static_cast(simulcast_index) < writers_.size()) { - writers_[simulcast_index]->WriteFrame(encoded_image, - codec_specific_info->codecType); - } - } - - return callback_->OnEncodedImage(encoded_image, codec_specific_info); - } - - void OnDroppedFrame(DropReason reason) override { - callback_->OnDroppedFrame(reason); - } - - const std::unique_ptr encoder_; - const double overshoot_factor_; - VideoAnalyzer* const analyzer_; - std::vector> writers_; - EncodedImageCallback* callback_ = nullptr; - VideoCodec codec_settings_; -}; - -#if defined(WEBRTC_WIN) && !defined(WINUWP) -void PressEnterToContinue(TaskQueueBase* task_queue) { - puts(">> Press ENTER to continue..."); - - while (!_kbhit() || _getch() != '\r') { - // Drive the message loop for the thread running the task_queue - SendTask(RTC_FROM_HERE, task_queue, [&]() { - MSG msg; - if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) { - TranslateMessage(&msg); - DispatchMessage(&msg); - } - }); - } -} -#else -void PressEnterToContinue(TaskQueueBase* /*task_queue*/) { - puts(">> Press ENTER to continue..."); - while (getc(stdin) != '\n' && !feof(stdin)) - ; // NOLINT -} -#endif - -} // namespace - -std::unique_ptr VideoQualityTest::CreateVideoDecoder( - const SdpVideoFormat& format) { - std::unique_ptr decoder; - if (format.name == "multiplex") { - decoder = std::make_unique( - decoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); - } else if (format.name == "FakeCodec") { - decoder = webrtc::FakeVideoDecoderFactory::CreateVideoDecoder(); - } else { - decoder = decoder_factory_->CreateVideoDecoder(format); - } - if (!params_.logging.encoded_frame_base_path.empty()) { - rtc::StringBuilder str; - str << receive_logs_++; - std::string path = - params_.logging.encoded_frame_base_path + "." + str.str() + ".recv.ivf"; - decoder = CreateFrameDumpingDecoderWrapper( - std::move(decoder), FileWrapper::OpenWriteOnly(path)); - } - return decoder; -} - -std::unique_ptr VideoQualityTest::CreateVideoEncoder( - const SdpVideoFormat& format, - VideoAnalyzer* analyzer) { - std::unique_ptr encoder; - if (format.name == "VP8") { - encoder = - std::make_unique(encoder_factory_.get(), format); - } else if (format.name == "multiplex") { - encoder = std::make_unique( - encoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName)); - } else if (format.name == "FakeCodec") { - encoder = webrtc::FakeVideoEncoderFactory::CreateVideoEncoder(); - } else { - encoder = encoder_factory_->CreateVideoEncoder(format); - } - - std::vector encoded_frame_dump_files; - if (!params_.logging.encoded_frame_base_path.empty()) { - char ss_buf[100]; - rtc::SimpleStringBuilder sb(ss_buf); - sb << send_logs_++; - std::string prefix = - params_.logging.encoded_frame_base_path + "." 
+ sb.str() + ".send."; - encoded_frame_dump_files.push_back( - FileWrapper::OpenWriteOnly(prefix + "1.ivf")); - encoded_frame_dump_files.push_back( - FileWrapper::OpenWriteOnly(prefix + "2.ivf")); - encoded_frame_dump_files.push_back( - FileWrapper::OpenWriteOnly(prefix + "3.ivf")); - } - - double overshoot_factor = 1.0; - // Match format to either of the streams in dual-stream mode in order to get - // the overshoot factor. This is not very robust but we can't know for sure - // which stream this encoder is meant for, from within the factory. - if (format == - SdpVideoFormat(params_.video[0].codec, params_.video[0].sdp_params)) { - overshoot_factor = params_.video[0].encoder_overshoot_factor; - } else if (format == SdpVideoFormat(params_.video[1].codec, - params_.video[1].sdp_params)) { - overshoot_factor = params_.video[1].encoder_overshoot_factor; - } - if (overshoot_factor == 0.0) { - // If params were zero-initialized, set to 1.0 instead. - overshoot_factor = 1.0; - } - - if (analyzer || !encoded_frame_dump_files.empty() || overshoot_factor > 1.0) { - encoder = std::make_unique( - std::move(encoder), analyzer, std::move(encoded_frame_dump_files), - overshoot_factor); - } - - return encoder; -} - -VideoQualityTest::VideoQualityTest( - std::unique_ptr injection_components) - : clock_(Clock::GetRealTimeClock()), - task_queue_factory_(CreateDefaultTaskQueueFactory()), - rtc_event_log_factory_(task_queue_factory_.get()), - video_decoder_factory_([this](const SdpVideoFormat& format) { - return this->CreateVideoDecoder(format); - }), - video_encoder_factory_([this](const SdpVideoFormat& format) { - return this->CreateVideoEncoder(format, nullptr); - }), - video_encoder_factory_with_analyzer_( - [this](const SdpVideoFormat& format) { - return this->CreateVideoEncoder(format, analyzer_.get()); - }), - video_bitrate_allocator_factory_( - CreateBuiltinVideoBitrateAllocatorFactory()), - receive_logs_(0), - send_logs_(0), - injection_components_(std::move(injection_components)), - num_video_streams_(0) { - if (injection_components_ == nullptr) { - injection_components_ = std::make_unique(); - } - if (injection_components_->video_decoder_factory != nullptr) { - decoder_factory_ = std::move(injection_components_->video_decoder_factory); - } else { - decoder_factory_ = std::make_unique(); - } - if (injection_components_->video_encoder_factory != nullptr) { - encoder_factory_ = std::move(injection_components_->video_encoder_factory); - } else { - encoder_factory_ = std::make_unique(); - } - - payload_type_map_ = test::CallTest::payload_type_map_; - RTC_DCHECK(payload_type_map_.find(kPayloadTypeH264) == - payload_type_map_.end()); - RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP8) == - payload_type_map_.end()); - RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP9) == - payload_type_map_.end()); - RTC_DCHECK(payload_type_map_.find(kPayloadTypeGeneric) == - payload_type_map_.end()); - payload_type_map_[kPayloadTypeH264] = webrtc::MediaType::VIDEO; - payload_type_map_[kPayloadTypeVP8] = webrtc::MediaType::VIDEO; - payload_type_map_[kPayloadTypeVP9] = webrtc::MediaType::VIDEO; - payload_type_map_[kPayloadTypeGeneric] = webrtc::MediaType::VIDEO; - - fec_controller_factory_ = - std::move(injection_components_->fec_controller_factory); - network_state_predictor_factory_ = - std::move(injection_components_->network_state_predictor_factory); - network_controller_factory_ = - std::move(injection_components_->network_controller_factory); -} - -VideoQualityTest::InjectionComponents::InjectionComponents() = 
default; - -VideoQualityTest::InjectionComponents::~InjectionComponents() = default; - -void VideoQualityTest::TestBody() {} - -std::string VideoQualityTest::GenerateGraphTitle() const { - rtc::StringBuilder ss; - ss << params_.video[0].codec; - ss << " (" << params_.video[0].target_bitrate_bps / 1000 << "kbps"; - ss << ", " << params_.video[0].fps << " FPS"; - if (params_.screenshare[0].scroll_duration) - ss << ", " << params_.screenshare[0].scroll_duration << "s scroll"; - if (params_.ss[0].streams.size() > 1) - ss << ", Stream #" << params_.ss[0].selected_stream; - if (params_.ss[0].num_spatial_layers > 1) - ss << ", Layer #" << params_.ss[0].selected_sl; - ss << ")"; - return ss.Release(); -} - -void VideoQualityTest::CheckParamsAndInjectionComponents() { - if (injection_components_ == nullptr) { - injection_components_ = std::make_unique(); - } - if (!params_.config && injection_components_->sender_network == nullptr && - injection_components_->receiver_network == nullptr) { - params_.config = BuiltInNetworkBehaviorConfig(); - } - RTC_CHECK( - (params_.config && injection_components_->sender_network == nullptr && - injection_components_->receiver_network == nullptr) || - (!params_.config && injection_components_->sender_network != nullptr && - injection_components_->receiver_network != nullptr)); - for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { - // Iterate over primary and secondary video streams. - if (!params_.video[video_idx].enabled) - return; - // Add a default stream in none specified. - if (params_.ss[video_idx].streams.empty()) - params_.ss[video_idx].streams.push_back( - VideoQualityTest::DefaultVideoStream(params_, video_idx)); - if (params_.ss[video_idx].num_spatial_layers == 0) - params_.ss[video_idx].num_spatial_layers = 1; - - if (params_.config) { - if (params_.config->loss_percent != 0 || - params_.config->queue_length_packets != 0) { - // Since LayerFilteringTransport changes the sequence numbers, we can't - // use that feature with pack loss, since the NACK request would end up - // retransmitting the wrong packets. - RTC_CHECK(params_.ss[video_idx].selected_sl == -1 || - params_.ss[video_idx].selected_sl == - params_.ss[video_idx].num_spatial_layers - 1); - RTC_CHECK(params_.video[video_idx].selected_tl == -1 || - params_.video[video_idx].selected_tl == - params_.video[video_idx].num_temporal_layers - 1); - } - } - - // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as - // it does in some parts of the code? 
- RTC_CHECK_GE(params_.video[video_idx].max_bitrate_bps, - params_.video[video_idx].target_bitrate_bps); - RTC_CHECK_GE(params_.video[video_idx].target_bitrate_bps, - params_.video[video_idx].min_bitrate_bps); - int selected_stream = params_.ss[video_idx].selected_stream; - if (params_.video[video_idx].selected_tl > -1) { - RTC_CHECK_LT(selected_stream, params_.ss[video_idx].streams.size()) - << "Can not use --selected_tl when --selected_stream is all streams"; - int stream_tl = params_.ss[video_idx] - .streams[selected_stream] - .num_temporal_layers.value_or(1); - RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl); - } - RTC_CHECK_LE(params_.ss[video_idx].selected_stream, - params_.ss[video_idx].streams.size()); - for (const VideoStream& stream : params_.ss[video_idx].streams) { - RTC_CHECK_GE(stream.min_bitrate_bps, 0); - RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps); - RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps); - } - // TODO(ivica): Should we check if the sum of all streams/layers is equal to - // the total bitrate? We anyway have to update them in the case bitrate - // estimator changes the total bitrates. - RTC_CHECK_GE(params_.ss[video_idx].num_spatial_layers, 1); - RTC_CHECK_LE(params_.ss[video_idx].selected_sl, - params_.ss[video_idx].num_spatial_layers); - RTC_CHECK( - params_.ss[video_idx].spatial_layers.empty() || - params_.ss[video_idx].spatial_layers.size() == - static_cast(params_.ss[video_idx].num_spatial_layers)); - if (params_.video[video_idx].codec == "VP8") { - RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1); - } else if (params_.video[video_idx].codec == "VP9") { - RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 1); - } - RTC_CHECK_GE(params_.call.num_thumbnails, 0); - if (params_.call.num_thumbnails > 0) { - RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1); - RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 3); - RTC_CHECK_EQ(params_.video[video_idx].num_temporal_layers, 3); - RTC_CHECK_EQ(params_.video[video_idx].codec, "VP8"); - } - // Dual streams with FEC not supported in tests yet. - RTC_CHECK(!params_.video[video_idx].flexfec || num_video_streams_ == 1); - RTC_CHECK(!params_.video[video_idx].ulpfec || num_video_streams_ == 1); - } -} - -// Static. -std::vector VideoQualityTest::ParseCSV(const std::string& str) { - // Parse comma separated nonnegative integers, where some elements may be - // empty. The empty values are replaced with -1. - // E.g. "10,-20,,30,40" --> {10, 20, -1, 30,40} - // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1} - std::vector result; - if (str.empty()) - return result; - - const char* p = str.c_str(); - int value = -1; - int pos; - while (*p) { - if (*p == ',') { - result.push_back(value); - value = -1; - ++p; - continue; - } - RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1) - << "Unexpected non-number value."; - p += pos; - } - result.push_back(value); - return result; -} - -// Static. 
-VideoStream VideoQualityTest::DefaultVideoStream(const Params& params, - size_t video_idx) { - VideoStream stream; - stream.width = params.video[video_idx].width; - stream.height = params.video[video_idx].height; - stream.max_framerate = params.video[video_idx].fps; - stream.min_bitrate_bps = params.video[video_idx].min_bitrate_bps; - stream.target_bitrate_bps = params.video[video_idx].target_bitrate_bps; - stream.max_bitrate_bps = params.video[video_idx].max_bitrate_bps; - stream.max_qp = kDefaultMaxQp; - stream.num_temporal_layers = params.video[video_idx].num_temporal_layers; - stream.active = true; - return stream; -} - -// Static. -VideoStream VideoQualityTest::DefaultThumbnailStream() { - VideoStream stream; - stream.width = 320; - stream.height = 180; - stream.max_framerate = 7; - stream.min_bitrate_bps = 7500; - stream.target_bitrate_bps = 37500; - stream.max_bitrate_bps = 50000; - stream.max_qp = kDefaultMaxQp; - return stream; -} - -// Static. -void VideoQualityTest::FillScalabilitySettings( - Params* params, - size_t video_idx, - const std::vector& stream_descriptors, - int num_streams, - size_t selected_stream, - int num_spatial_layers, - int selected_sl, - InterLayerPredMode inter_layer_pred, - const std::vector& sl_descriptors) { - if (params->ss[video_idx].streams.empty() && - params->ss[video_idx].infer_streams) { - webrtc::VideoEncoderConfig encoder_config; - encoder_config.codec_type = - PayloadStringToCodecType(params->video[video_idx].codec); - encoder_config.content_type = - params->screenshare[video_idx].enabled - ? webrtc::VideoEncoderConfig::ContentType::kScreen - : webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo; - encoder_config.max_bitrate_bps = params->video[video_idx].max_bitrate_bps; - encoder_config.min_transmit_bitrate_bps = - params->video[video_idx].min_transmit_bps; - encoder_config.number_of_streams = num_streams; - encoder_config.spatial_layers = params->ss[video_idx].spatial_layers; - encoder_config.simulcast_layers = std::vector(num_streams); - encoder_config.video_stream_factory = - rtc::make_ref_counted( - params->video[video_idx].codec, kDefaultMaxQp, - params->screenshare[video_idx].enabled, true); - params->ss[video_idx].streams = - encoder_config.video_stream_factory->CreateEncoderStreams( - static_cast(params->video[video_idx].width), - static_cast(params->video[video_idx].height), encoder_config); - } else { - // Read VideoStream and SpatialLayer elements from a list of comma separated - // lists. To use a default value for an element, use -1 or leave empty. - // Validity checks performed in CheckParamsAndInjectionComponents. - RTC_CHECK(params->ss[video_idx].streams.empty()); - for (const auto& descriptor : stream_descriptors) { - if (descriptor.empty()) - continue; - VideoStream stream = - VideoQualityTest::DefaultVideoStream(*params, video_idx); - std::vector v = VideoQualityTest::ParseCSV(descriptor); - if (v[0] != -1) - stream.width = static_cast(v[0]); - if (v[1] != -1) - stream.height = static_cast(v[1]); - if (v[2] != -1) - stream.max_framerate = v[2]; - if (v[3] != -1) - stream.min_bitrate_bps = v[3]; - if (v[4] != -1) - stream.target_bitrate_bps = v[4]; - if (v[5] != -1) - stream.max_bitrate_bps = v[5]; - if (v.size() > 6 && v[6] != -1) - stream.max_qp = v[6]; - if (v.size() > 7 && v[7] != -1) { - stream.num_temporal_layers = v[7]; - } else { - // Automatic TL thresholds for more than two layers not supported. 
- RTC_CHECK_LE(params->video[video_idx].num_temporal_layers, 2); - } - params->ss[video_idx].streams.push_back(stream); - } - } - - params->ss[video_idx].num_spatial_layers = std::max(1, num_spatial_layers); - params->ss[video_idx].selected_stream = selected_stream; - - params->ss[video_idx].selected_sl = selected_sl; - params->ss[video_idx].inter_layer_pred = inter_layer_pred; - RTC_CHECK(params->ss[video_idx].spatial_layers.empty()); - for (const auto& descriptor : sl_descriptors) { - if (descriptor.empty()) - continue; - std::vector v = VideoQualityTest::ParseCSV(descriptor); - RTC_CHECK_EQ(v.size(), 8); - - SpatialLayer layer = {0}; - layer.width = v[0]; - layer.height = v[1]; - layer.maxFramerate = v[2]; - layer.numberOfTemporalLayers = v[3]; - layer.maxBitrate = v[4]; - layer.minBitrate = v[5]; - layer.targetBitrate = v[6]; - layer.qpMax = v[7]; - layer.active = true; - - params->ss[video_idx].spatial_layers.push_back(layer); - } -} - -void VideoQualityTest::SetupVideo(Transport* send_transport, - Transport* recv_transport) { - size_t total_streams_used = 0; - video_receive_configs_.clear(); - video_send_configs_.clear(); - video_encoder_configs_.clear(); - bool decode_all_receive_streams = true; - size_t num_video_substreams = params_.ss[0].streams.size(); - RTC_CHECK(num_video_streams_ > 0); - video_encoder_configs_.resize(num_video_streams_); - std::string generic_codec_name; - for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { - video_send_configs_.push_back(VideoSendStream::Config(send_transport)); - video_encoder_configs_.push_back(VideoEncoderConfig()); - num_video_substreams = params_.ss[video_idx].streams.size(); - RTC_CHECK_GT(num_video_substreams, 0); - for (size_t i = 0; i < num_video_substreams; ++i) - video_send_configs_[video_idx].rtp.ssrcs.push_back( - kVideoSendSsrcs[total_streams_used + i]); - - int payload_type; - if (params_.video[video_idx].codec == "H264") { - payload_type = kPayloadTypeH264; - } else if (params_.video[video_idx].codec == "VP8") { - payload_type = kPayloadTypeVP8; - } else if (params_.video[video_idx].codec == "VP9") { - payload_type = kPayloadTypeVP9; - } else if (params_.video[video_idx].codec == "multiplex") { - payload_type = kPayloadTypeVP9; - } else if (params_.video[video_idx].codec == "FakeCodec") { - payload_type = kFakeVideoSendPayloadType; - } else { - RTC_CHECK(generic_codec_name.empty() || - generic_codec_name == params_.video[video_idx].codec) - << "Supplying multiple generic codecs is unsupported."; - RTC_LOG(LS_INFO) << "Treating codec " << params_.video[video_idx].codec - << " as generic."; - payload_type = kPayloadTypeGeneric; - generic_codec_name = params_.video[video_idx].codec; - } - video_send_configs_[video_idx].encoder_settings.encoder_factory = - (video_idx == 0) ? 
&video_encoder_factory_with_analyzer_ - : &video_encoder_factory_; - video_send_configs_[video_idx].encoder_settings.bitrate_allocator_factory = - video_bitrate_allocator_factory_.get(); - - video_send_configs_[video_idx].rtp.payload_name = - params_.video[video_idx].codec; - video_send_configs_[video_idx].rtp.payload_type = payload_type; - video_send_configs_[video_idx].rtp.nack.rtp_history_ms = kNackRtpHistoryMs; - video_send_configs_[video_idx].rtp.rtx.payload_type = kSendRtxPayloadType; - for (size_t i = 0; i < num_video_substreams; ++i) { - video_send_configs_[video_idx].rtp.rtx.ssrcs.push_back( - kSendRtxSsrcs[i + total_streams_used]); - } - video_send_configs_[video_idx].rtp.extensions.clear(); - if (params_.call.send_side_bwe) { - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberExtensionId); - } else { - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId); - } - - if (params_.call.generic_descriptor) { - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kGenericFrameDescriptorUri00, - kGenericFrameDescriptorExtensionId00); - } - - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId); - video_send_configs_[video_idx].rtp.extensions.emplace_back( - RtpExtension::kVideoTimingUri, kVideoTimingExtensionId); - - video_encoder_configs_[video_idx].video_format.name = - params_.video[video_idx].codec; - - video_encoder_configs_[video_idx].video_format.parameters = - params_.video[video_idx].sdp_params; - - video_encoder_configs_[video_idx].codec_type = - PayloadStringToCodecType(params_.video[video_idx].codec); - - video_encoder_configs_[video_idx].min_transmit_bitrate_bps = - params_.video[video_idx].min_transmit_bps; - - video_send_configs_[video_idx].suspend_below_min_bitrate = - params_.video[video_idx].suspend_below_min_bitrate; - - video_encoder_configs_[video_idx].number_of_streams = - params_.ss[video_idx].streams.size(); - video_encoder_configs_[video_idx].max_bitrate_bps = 0; - for (size_t i = 0; i < params_.ss[video_idx].streams.size(); ++i) { - video_encoder_configs_[video_idx].max_bitrate_bps += - params_.ss[video_idx].streams[i].max_bitrate_bps; - } - video_encoder_configs_[video_idx].simulcast_layers = - std::vector(params_.ss[video_idx].streams.size()); - if (!params_.ss[video_idx].infer_streams) { - video_encoder_configs_[video_idx].simulcast_layers = - params_.ss[video_idx].streams; - } - video_encoder_configs_[video_idx].video_stream_factory = - rtc::make_ref_counted( - params_.video[video_idx].codec, - params_.ss[video_idx].streams[0].max_qp, - params_.screenshare[video_idx].enabled, true); - - video_encoder_configs_[video_idx].spatial_layers = - params_.ss[video_idx].spatial_layers; - decode_all_receive_streams = params_.ss[video_idx].selected_stream == - params_.ss[video_idx].streams.size(); - absl::optional decode_sub_stream; - if (!decode_all_receive_streams) - decode_sub_stream = params_.ss[video_idx].selected_stream; - CreateMatchingVideoReceiveConfigs( - video_send_configs_[video_idx], recv_transport, - params_.call.send_side_bwe, &video_decoder_factory_, decode_sub_stream, - true, kNackRtpHistoryMs); - - if (params_.screenshare[video_idx].enabled) { - // Fill out codec settings. 
- video_encoder_configs_[video_idx].content_type = - VideoEncoderConfig::ContentType::kScreen; - degradation_preference_ = DegradationPreference::MAINTAIN_RESOLUTION; - if (params_.video[video_idx].codec == "VP8") { - VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); - vp8_settings.denoisingOn = false; - vp8_settings.numberOfTemporalLayers = static_cast( - params_.video[video_idx].num_temporal_layers); - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); - } else if (params_.video[video_idx].codec == "VP9") { - VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); - vp9_settings.denoisingOn = false; - vp9_settings.automaticResizeOn = false; - vp9_settings.numberOfTemporalLayers = static_cast( - params_.video[video_idx].num_temporal_layers); - vp9_settings.numberOfSpatialLayers = static_cast( - params_.ss[video_idx].num_spatial_layers); - vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; - // High FPS vp9 screenshare requires flexible mode. - if (params_.ss[video_idx].num_spatial_layers > 1) { - vp9_settings.flexibleMode = true; - } - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); - } - } else if (params_.ss[video_idx].num_spatial_layers > 1) { - // If SVC mode without screenshare, still need to set codec specifics. - RTC_CHECK(params_.video[video_idx].codec == "VP9"); - VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); - vp9_settings.numberOfTemporalLayers = static_cast( - params_.video[video_idx].num_temporal_layers); - vp9_settings.numberOfSpatialLayers = - static_cast(params_.ss[video_idx].num_spatial_layers); - vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred; - vp9_settings.automaticResizeOn = false; - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted( - vp9_settings); - RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(), - 1); - // Min bitrate will be enforced by spatial layer config instead. - video_encoder_configs_[video_idx].simulcast_layers[0].min_bitrate_bps = 0; - } else if (params_.video[video_idx].automatic_scaling) { - if (params_.video[video_idx].codec == "VP8") { - VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); - vp8_settings.automaticResizeOn = true; - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); - } else if (params_.video[video_idx].codec == "VP9") { - VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); - // Only enable quality scaler for single spatial layer. - vp9_settings.automaticResizeOn = - params_.ss[video_idx].num_spatial_layers == 1; - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); - } else if (params_.video[video_idx].codec == "H264") { - // Quality scaling is always on for H.264. - } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) { - // TODO(bugs.webrtc.org/11404): Propagate the flag to - // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper. - // Until then do nothing, specially do not crash. 
- } else { - RTC_DCHECK_NOTREACHED() - << "Automatic scaling not supported for codec " - << params_.video[video_idx].codec << ", stream " << video_idx; - } - } else { - // Default mode. Single SL, no automatic_scaling, - if (params_.video[video_idx].codec == "VP8") { - VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings(); - vp8_settings.automaticResizeOn = false; - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings); - } else if (params_.video[video_idx].codec == "VP9") { - VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings(); - vp9_settings.automaticResizeOn = false; - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings); - } else if (params_.video[video_idx].codec == "H264") { - VideoCodecH264 h264_settings = VideoEncoder::GetDefaultH264Settings(); - video_encoder_configs_[video_idx].encoder_specific_settings = - rtc::make_ref_counted< - VideoEncoderConfig::H264EncoderSpecificSettings>(h264_settings); - } - } - total_streams_used += num_video_substreams; - } - - // FEC supported only for single video stream mode yet. - if (params_.video[0].flexfec) { - if (decode_all_receive_streams) { - SetSendFecConfig(GetVideoSendConfig()->rtp.ssrcs); - } else { - SetSendFecConfig({kVideoSendSsrcs[params_.ss[0].selected_stream]}); - } - - CreateMatchingFecConfig(recv_transport, *GetVideoSendConfig()); - GetFlexFecConfig()->rtp.transport_cc = params_.call.send_side_bwe; - if (params_.call.send_side_bwe) { - GetFlexFecConfig()->rtp.extensions.push_back( - RtpExtension(RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberExtensionId)); - } else { - GetFlexFecConfig()->rtp.extensions.push_back( - RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId)); - } - } - - if (params_.video[0].ulpfec) { - SetSendUlpFecConfig(GetVideoSendConfig()); - if (decode_all_receive_streams) { - for (auto& receive_config : video_receive_configs_) { - SetReceiveUlpFecConfig(&receive_config); - } - } else { - SetReceiveUlpFecConfig( - &video_receive_configs_[params_.ss[0].selected_stream]); - } - } -} - -void VideoQualityTest::SetupThumbnails(Transport* send_transport, - Transport* recv_transport) { - for (int i = 0; i < params_.call.num_thumbnails; ++i) { - // Thumbnails will be send in the other way: from receiver_call to - // sender_call. - VideoSendStream::Config thumbnail_send_config(recv_transport); - thumbnail_send_config.rtp.ssrcs.push_back(kThumbnailSendSsrcStart + i); - // TODO(nisse): Could use a simpler VP8-only encoder factory. 
- thumbnail_send_config.encoder_settings.encoder_factory = - &video_encoder_factory_; - thumbnail_send_config.encoder_settings.bitrate_allocator_factory = - video_bitrate_allocator_factory_.get(); - thumbnail_send_config.rtp.payload_name = params_.video[0].codec; - thumbnail_send_config.rtp.payload_type = kPayloadTypeVP8; - thumbnail_send_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs; - thumbnail_send_config.rtp.rtx.payload_type = kSendRtxPayloadType; - thumbnail_send_config.rtp.rtx.ssrcs.push_back(kThumbnailRtxSsrcStart + i); - thumbnail_send_config.rtp.extensions.clear(); - if (params_.call.send_side_bwe) { - thumbnail_send_config.rtp.extensions.push_back( - RtpExtension(RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberExtensionId)); - } else { - thumbnail_send_config.rtp.extensions.push_back( - RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId)); - } - - VideoEncoderConfig thumbnail_encoder_config; - thumbnail_encoder_config.codec_type = kVideoCodecVP8; - thumbnail_encoder_config.video_format.name = "VP8"; - thumbnail_encoder_config.min_transmit_bitrate_bps = 7500; - thumbnail_send_config.suspend_below_min_bitrate = - params_.video[0].suspend_below_min_bitrate; - thumbnail_encoder_config.number_of_streams = 1; - thumbnail_encoder_config.max_bitrate_bps = 50000; - std::vector streams{params_.ss[0].streams[0]}; - thumbnail_encoder_config.video_stream_factory = - rtc::make_ref_counted(streams); - thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers; - - thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy()); - thumbnail_send_configs_.push_back(thumbnail_send_config.Copy()); - - AddMatchingVideoReceiveConfigs( - &thumbnail_receive_configs_, thumbnail_send_config, send_transport, - params_.call.send_side_bwe, &video_decoder_factory_, absl::nullopt, - false, kNackRtpHistoryMs); - } - for (size_t i = 0; i < thumbnail_send_configs_.size(); ++i) { - thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream( - thumbnail_send_configs_[i].Copy(), - thumbnail_encoder_configs_[i].Copy())); - } - for (size_t i = 0; i < thumbnail_receive_configs_.size(); ++i) { - thumbnail_receive_streams_.push_back(sender_call_->CreateVideoReceiveStream( - thumbnail_receive_configs_[i].Copy())); - } -} - -void VideoQualityTest::DestroyThumbnailStreams() { - for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_) { - receiver_call_->DestroyVideoSendStream(thumbnail_send_stream); - } - thumbnail_send_streams_.clear(); - for (VideoReceiveStream* thumbnail_receive_stream : - thumbnail_receive_streams_) { - sender_call_->DestroyVideoReceiveStream(thumbnail_receive_stream); - } - thumbnail_send_streams_.clear(); - thumbnail_receive_streams_.clear(); - for (std::unique_ptr>& video_capturer : - thumbnail_capturers_) { - video_capturer.reset(); - } -} - -void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) { - VideoStream thumbnail = DefaultThumbnailStream(); - for (size_t i = 0; i < num_thumbnail_streams; ++i) { - auto frame_generator_capturer = - std::make_unique( - clock_, - test::CreateSquareFrameGenerator(static_cast(thumbnail.width), - static_cast(thumbnail.height), - absl::nullopt, absl::nullopt), - thumbnail.max_framerate, *task_queue_factory_); - EXPECT_TRUE(frame_generator_capturer->Init()); - thumbnail_capturers_.push_back(std::move(frame_generator_capturer)); - } -} - -std::unique_ptr -VideoQualityTest::CreateFrameGenerator(size_t video_idx) { - // Setup frame generator. 
- const size_t kWidth = 1850; - const size_t kHeight = 1110; - std::unique_ptr frame_generator; - if (params_.screenshare[video_idx].generate_slides) { - frame_generator = test::CreateSlideFrameGenerator( - kWidth, kHeight, - params_.screenshare[video_idx].slide_change_interval * - params_.video[video_idx].fps); - } else { - std::vector slides = params_.screenshare[video_idx].slides; - if (slides.empty()) { - slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("photo_1850_1110", "yuv")); - slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv")); - } - if (params_.screenshare[video_idx].scroll_duration == 0) { - // Cycle image every slide_change_interval seconds. - frame_generator = test::CreateFromYuvFileFrameGenerator( - slides, kWidth, kHeight, - params_.screenshare[video_idx].slide_change_interval * - params_.video[video_idx].fps); - } else { - RTC_CHECK_LE(params_.video[video_idx].width, kWidth); - RTC_CHECK_LE(params_.video[video_idx].height, kHeight); - RTC_CHECK_GT(params_.screenshare[video_idx].slide_change_interval, 0); - const int kPauseDurationMs = - (params_.screenshare[video_idx].slide_change_interval - - params_.screenshare[video_idx].scroll_duration) * - 1000; - RTC_CHECK_LE(params_.screenshare[video_idx].scroll_duration, - params_.screenshare[video_idx].slide_change_interval); - - frame_generator = test::CreateScrollingInputFromYuvFilesFrameGenerator( - clock_, slides, kWidth, kHeight, params_.video[video_idx].width, - params_.video[video_idx].height, - params_.screenshare[video_idx].scroll_duration * 1000, - kPauseDurationMs); - } - } - return frame_generator; -} - -void VideoQualityTest::CreateCapturers() { - RTC_DCHECK(video_sources_.empty()); - video_sources_.resize(num_video_streams_); - for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { - std::unique_ptr frame_generator; - if (params_.screenshare[video_idx].enabled) { - frame_generator = CreateFrameGenerator(video_idx); - } else if (params_.video[video_idx].clip_path == "Generator") { - frame_generator = test::CreateSquareFrameGenerator( - static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), absl::nullopt, - absl::nullopt); - } else if (params_.video[video_idx].clip_path == "GeneratorI420A") { - frame_generator = test::CreateSquareFrameGenerator( - static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kI420A, absl::nullopt); - } else if (params_.video[video_idx].clip_path == "GeneratorI010") { - frame_generator = test::CreateSquareFrameGenerator( - static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt); - } else if (params_.video[video_idx].clip_path == "GeneratorNV12") { - frame_generator = test::CreateSquareFrameGenerator( - static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), - test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt); - } else if (params_.video[video_idx].clip_path.empty()) { - video_sources_[video_idx] = test::CreateVideoCapturer( - params_.video[video_idx].width, params_.video[video_idx].height, - params_.video[video_idx].fps, - params_.video[video_idx].capture_device_index); - if (video_sources_[video_idx]) { - continue; - } else { - // Failed to get 
actual camera, use chroma generator as backup. - frame_generator = test::CreateSquareFrameGenerator( - static_cast(params_.video[video_idx].width), - static_cast(params_.video[video_idx].height), absl::nullopt, - absl::nullopt); - } - } else { - frame_generator = test::CreateFromYuvFileFrameGenerator( - {params_.video[video_idx].clip_path}, params_.video[video_idx].width, - params_.video[video_idx].height, 1); - ASSERT_TRUE(frame_generator) << "Could not create capturer for " - << params_.video[video_idx].clip_path - << ".yuv. Is this file present?"; - } - ASSERT_TRUE(frame_generator); - auto frame_generator_capturer = - std::make_unique( - clock_, std::move(frame_generator), params_.video[video_idx].fps, - *task_queue_factory_); - EXPECT_TRUE(frame_generator_capturer->Init()); - video_sources_[video_idx] = std::move(frame_generator_capturer); - } -} - -void VideoQualityTest::StartAudioStreams() { - audio_send_stream_->Start(); - for (AudioReceiveStream* audio_recv_stream : audio_receive_streams_) - audio_recv_stream->Start(); -} - -void VideoQualityTest::StartThumbnails() { - for (VideoSendStream* send_stream : thumbnail_send_streams_) - send_stream->Start(); - for (VideoReceiveStream* receive_stream : thumbnail_receive_streams_) - receive_stream->Start(); -} - -void VideoQualityTest::StopThumbnails() { - for (VideoReceiveStream* receive_stream : thumbnail_receive_streams_) - receive_stream->Stop(); - for (VideoSendStream* send_stream : thumbnail_send_streams_) - send_stream->Stop(); -} - -std::unique_ptr -VideoQualityTest::CreateSendTransport() { - std::unique_ptr network_behavior = nullptr; - if (injection_components_->sender_network == nullptr) { - network_behavior = std::make_unique(*params_.config); - } else { - network_behavior = std::move(injection_components_->sender_network); - } - return std::make_unique( - task_queue(), - std::make_unique(clock_, std::move(network_behavior)), - sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9, - params_.video[0].selected_tl, params_.ss[0].selected_sl, - payload_type_map_, kVideoSendSsrcs[0], - static_cast(kVideoSendSsrcs[0] + params_.ss[0].streams.size() - - 1)); -} - -std::unique_ptr -VideoQualityTest::CreateReceiveTransport() { - std::unique_ptr network_behavior = nullptr; - if (injection_components_->receiver_network == nullptr) { - network_behavior = std::make_unique(*params_.config); - } else { - network_behavior = std::move(injection_components_->receiver_network); - } - return std::make_unique( - task_queue(), - std::make_unique(clock_, std::move(network_behavior)), - receiver_call_.get(), payload_type_map_); -} - -void VideoQualityTest::RunWithAnalyzer(const Params& params) { - num_video_streams_ = params.call.dual_video ? 2 : 1; - std::unique_ptr send_transport; - std::unique_ptr recv_transport; - FILE* graph_data_output_file = nullptr; - - params_ = params; - // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to - // differentiate between the analyzer and the renderer case. 
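[Editorial note, not part of the diff] CreateSendTransport and CreateReceiveTransport above prefer an injected NetworkBehaviorInterface from the injection components and only fall back to a SimulatedNetwork built from the test config. A minimal stand-alone sketch of that inject-or-default pattern; the types here are simplified stand-ins, not the WebRTC interfaces:

```cpp
// Simplified stand-ins for the inject-or-default network selection used by
// CreateSendTransport()/CreateReceiveTransport() above.
#include <memory>

struct NetworkBehavior {  // Stand-in for NetworkBehaviorInterface.
  virtual ~NetworkBehavior() = default;
};

struct SimulatedNetworkSketch : NetworkBehavior {
  explicit SimulatedNetworkSketch(int queue_delay_ms)
      : queue_delay_ms(queue_delay_ms) {}
  int queue_delay_ms;
};

// Returns the injected behaviour when one was provided, otherwise builds the
// default simulation from the test configuration.
std::unique_ptr<NetworkBehavior> SelectNetwork(
    std::unique_ptr<NetworkBehavior> injected, int default_queue_delay_ms) {
  if (injected)
    return injected;
  return std::make_unique<SimulatedNetworkSketch>(default_queue_delay_ms);
}

int main() {
  auto from_default = SelectNetwork(nullptr, /*default_queue_delay_ms=*/50);
  auto from_injection =
      SelectNetwork(std::make_unique<SimulatedNetworkSketch>(0), 50);
  return (from_default && from_injection) ? 0 : 1;
}
```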
- CheckParamsAndInjectionComponents(); - - if (!params_.analyzer.graph_data_output_filename.empty()) { - graph_data_output_file = - fopen(params_.analyzer.graph_data_output_filename.c_str(), "w"); - RTC_CHECK(graph_data_output_file) - << "Can't open the file " << params_.analyzer.graph_data_output_filename - << "!"; - } - - if (!params.logging.rtc_event_log_name.empty()) { - send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); - recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); - std::unique_ptr send_output( - std::make_unique( - params.logging.rtc_event_log_name + "_send", - RtcEventLog::kUnlimitedOutput)); - std::unique_ptr recv_output( - std::make_unique( - params.logging.rtc_event_log_name + "_recv", - RtcEventLog::kUnlimitedOutput)); - bool event_log_started = - send_event_log_->StartLogging(std::move(send_output), - RtcEventLog::kImmediateOutput) && - recv_event_log_->StartLogging(std::move(recv_output), - RtcEventLog::kImmediateOutput); - RTC_DCHECK(event_log_started); - } else { - send_event_log_ = std::make_unique(); - recv_event_log_ = std::make_unique(); - } - - SendTask(RTC_FROM_HERE, task_queue(), - [this, ¶ms, &send_transport, &recv_transport]() { - Call::Config send_call_config(send_event_log_.get()); - Call::Config recv_call_config(recv_event_log_.get()); - send_call_config.bitrate_config = params.call.call_bitrate_config; - recv_call_config.bitrate_config = params.call.call_bitrate_config; - if (params_.audio.enabled) - InitializeAudioDevice(&send_call_config, &recv_call_config, - params_.audio.use_real_adm); - - CreateCalls(send_call_config, recv_call_config); - send_transport = CreateSendTransport(); - recv_transport = CreateReceiveTransport(); - }); - - std::string graph_title = params_.analyzer.graph_title; - if (graph_title.empty()) - graph_title = VideoQualityTest::GenerateGraphTitle(); - bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest"); - analyzer_ = std::make_unique( - send_transport.get(), params_.analyzer.test_label, - params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold, - is_quick_test_enabled - ? kFramesSentInQuickTest - : params_.analyzer.test_durations_secs * params_.video[0].fps, - is_quick_test_enabled - ? 
TimeDelta::Millis(1) - : TimeDelta::Seconds(params_.analyzer.test_durations_secs), - graph_data_output_file, graph_title, - kVideoSendSsrcs[params_.ss[0].selected_stream], - kSendRtxSsrcs[params_.ss[0].selected_stream], - static_cast(params_.ss[0].selected_stream), - params.ss[0].selected_sl, params_.video[0].selected_tl, - is_quick_test_enabled, clock_, params_.logging.rtp_dump_name, - task_queue()); - - SendTask(RTC_FROM_HERE, task_queue(), [&]() { - analyzer_->SetCall(sender_call_.get()); - analyzer_->SetReceiver(receiver_call_->Receiver()); - send_transport->SetReceiver(analyzer_.get()); - recv_transport->SetReceiver(sender_call_->Receiver()); - - SetupVideo(analyzer_.get(), recv_transport.get()); - SetupThumbnails(analyzer_.get(), recv_transport.get()); - video_receive_configs_[params_.ss[0].selected_stream].renderer = - analyzer_.get(); - - CreateFlexfecStreams(); - CreateVideoStreams(); - analyzer_->SetSendStream(video_send_streams_[0]); - analyzer_->SetReceiveStream( - video_receive_streams_[params_.ss[0].selected_stream]); - - GetVideoSendStream()->SetSource(analyzer_->OutputInterface(), - degradation_preference_); - SetupThumbnailCapturers(params_.call.num_thumbnails); - for (size_t i = 0; i < thumbnail_send_streams_.size(); ++i) { - thumbnail_send_streams_[i]->SetSource(thumbnail_capturers_[i].get(), - degradation_preference_); - } - - CreateCapturers(); - - analyzer_->SetSource(video_sources_[0].get(), true); - - for (size_t video_idx = 1; video_idx < num_video_streams_; ++video_idx) { - video_send_streams_[video_idx]->SetSource(video_sources_[video_idx].get(), - degradation_preference_); - } - - if (params_.audio.enabled) { - SetupAudio(send_transport.get()); - StartAudioStreams(); - analyzer_->SetAudioReceiveStream(audio_receive_streams_[0]); - } - StartVideoStreams(); - StartThumbnails(); - analyzer_->StartMeasuringCpuProcessTime(); - }); - - analyzer_->Wait(); - - SendTask(RTC_FROM_HERE, task_queue(), [&]() { - StopThumbnails(); - Stop(); - - DestroyStreams(); - DestroyThumbnailStreams(); - - if (graph_data_output_file) - fclose(graph_data_output_file); - - send_transport.reset(); - recv_transport.reset(); - - DestroyCalls(); - }); - analyzer_ = nullptr; -} - -rtc::scoped_refptr VideoQualityTest::CreateAudioDevice() { -#ifdef WEBRTC_WIN - RTC_LOG(LS_INFO) << "Using latest version of ADM on Windows"; - // We must initialize the COM library on a thread before we calling any of - // the library functions. All COM functions in the ADM will return - // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal - // COM initialization but the new ADM requires COM to be initialized - // externally. - com_initializer_ = - std::make_unique(ScopedCOMInitializer::kMTA); - RTC_CHECK(com_initializer_->Succeeded()); - RTC_CHECK(webrtc_win::core_audio_utility::IsSupported()); - RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported()); - return CreateWindowsCoreAudioAudioDeviceModule(task_queue_factory_.get()); -#else - // Use legacy factory method on all platforms except Windows. - return AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio, - task_queue_factory_.get()); -#endif -} - -void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config, - Call::Config* recv_call_config, - bool use_real_adm) { - rtc::scoped_refptr audio_device; - if (use_real_adm) { - // Run test with real ADM (using default audio devices) if user has - // explicitly set the --audio and --use_real_adm command-line flags. 
- audio_device = CreateAudioDevice(); - } else { - // By default, create a test ADM which fakes audio. - audio_device = TestAudioDeviceModule::Create( - task_queue_factory_.get(), - TestAudioDeviceModule::CreatePulsedNoiseCapturer(32000, 48000), - TestAudioDeviceModule::CreateDiscardRenderer(48000), 1.f); - } - RTC_CHECK(audio_device); - - AudioState::Config audio_state_config; - audio_state_config.audio_mixer = AudioMixerImpl::Create(); - audio_state_config.audio_processing = AudioProcessingBuilder().Create(); - audio_state_config.audio_device_module = audio_device; - send_call_config->audio_state = AudioState::Create(audio_state_config); - recv_call_config->audio_state = AudioState::Create(audio_state_config); - if (use_real_adm) { - // The real ADM requires extra initialization: setting default devices, - // setting up number of channels etc. Helper class also calls - // AudioDeviceModule::Init(). - webrtc::adm_helpers::Init(audio_device.get()); - } else { - audio_device->Init(); - } - // Always initialize the ADM before injecting a valid audio transport. - RTC_CHECK(audio_device->RegisterAudioCallback( - send_call_config->audio_state->audio_transport()) == 0); -} - -void VideoQualityTest::SetupAudio(Transport* transport) { - AudioSendStream::Config audio_send_config(transport); - audio_send_config.rtp.ssrc = kAudioSendSsrc; - - // Add extension to enable audio send side BWE, and allow audio bit rate - // adaptation. - audio_send_config.rtp.extensions.clear(); - audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec( - kAudioSendPayloadType, - {"OPUS", - 48000, - 2, - {{"usedtx", (params_.audio.dtx ? "1" : "0")}, {"stereo", "1"}}}); - - if (params_.call.send_side_bwe) { - audio_send_config.rtp.extensions.push_back( - webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri, - kTransportSequenceNumberExtensionId)); - audio_send_config.min_bitrate_bps = kOpusMinBitrateBps; - audio_send_config.max_bitrate_bps = kOpusBitrateFbBps; - audio_send_config.send_codec_spec->transport_cc_enabled = true; - // Only allow ANA when send-side BWE is enabled. - audio_send_config.audio_network_adaptor_config = params_.audio.ana_config; - } - audio_send_config.encoder_factory = audio_encoder_factory_; - SetAudioConfig(audio_send_config); - - std::string sync_group; - if (params_.video[0].enabled && params_.audio.sync_video) - sync_group = kSyncGroup; - - CreateMatchingAudioConfigs(transport, sync_group); - CreateAudioStreams(); -} - -void VideoQualityTest::RunWithRenderers(const Params& params) { - RTC_LOG(LS_INFO) << __FUNCTION__; - num_video_streams_ = params.call.dual_video ? 
2 : 1; - std::unique_ptr send_transport; - std::unique_ptr recv_transport; - std::unique_ptr local_preview; - std::vector> loopback_renderers; - - if (!params.logging.rtc_event_log_name.empty()) { - send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); - recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog( - RtcEventLog::EncodingType::Legacy); - std::unique_ptr send_output( - std::make_unique( - params.logging.rtc_event_log_name + "_send", - RtcEventLog::kUnlimitedOutput)); - std::unique_ptr recv_output( - std::make_unique( - params.logging.rtc_event_log_name + "_recv", - RtcEventLog::kUnlimitedOutput)); - bool event_log_started = - send_event_log_->StartLogging(std::move(send_output), - /*output_period_ms=*/5000) && - recv_event_log_->StartLogging(std::move(recv_output), - /*output_period_ms=*/5000); - RTC_DCHECK(event_log_started); - } else { - send_event_log_ = std::make_unique(); - recv_event_log_ = std::make_unique(); - } - - SendTask(RTC_FROM_HERE, task_queue(), [&]() { - params_ = params; - CheckParamsAndInjectionComponents(); - - // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to - // match the full stack tests. - Call::Config send_call_config(send_event_log_.get()); - send_call_config.bitrate_config = params_.call.call_bitrate_config; - Call::Config recv_call_config(recv_event_log_.get()); - - if (params_.audio.enabled) - InitializeAudioDevice(&send_call_config, &recv_call_config, - params_.audio.use_real_adm); - - CreateCalls(send_call_config, recv_call_config); - - // TODO(minyue): consider if this is a good transport even for audio only - // calls. - send_transport = CreateSendTransport(); - - recv_transport = CreateReceiveTransport(); - - // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at - // least share as much code as possible. That way this test would also match - // the full stack tests better. - send_transport->SetReceiver(receiver_call_->Receiver()); - recv_transport->SetReceiver(sender_call_->Receiver()); - - if (params_.video[0].enabled) { - // Create video renderers. 
- SetupVideo(send_transport.get(), recv_transport.get()); - size_t num_streams_processed = 0; - for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) { - const size_t selected_stream_id = params_.ss[video_idx].selected_stream; - const size_t num_streams = params_.ss[video_idx].streams.size(); - if (selected_stream_id == num_streams) { - for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) { - rtc::StringBuilder oss; - oss << "Loopback Video #" << video_idx << " - Stream #" - << static_cast(stream_id); - loopback_renderers.emplace_back(test::VideoRenderer::Create( - oss.str().c_str(), - params_.ss[video_idx].streams[stream_id].width, - params_.ss[video_idx].streams[stream_id].height)); - video_receive_configs_[stream_id + num_streams_processed].renderer = - loopback_renderers.back().get(); - if (params_.audio.enabled && params_.audio.sync_video) - video_receive_configs_[stream_id + num_streams_processed] - .sync_group = kSyncGroup; - } - } else { - rtc::StringBuilder oss; - oss << "Loopback Video #" << video_idx; - loopback_renderers.emplace_back(test::VideoRenderer::Create( - oss.str().c_str(), - params_.ss[video_idx].streams[selected_stream_id].width, - params_.ss[video_idx].streams[selected_stream_id].height)); - video_receive_configs_[selected_stream_id + num_streams_processed] - .renderer = loopback_renderers.back().get(); - if (params_.audio.enabled && params_.audio.sync_video) - video_receive_configs_[num_streams_processed + selected_stream_id] - .sync_group = kSyncGroup; - } - num_streams_processed += num_streams; - } - CreateFlexfecStreams(); - CreateVideoStreams(); - - CreateCapturers(); - if (params_.video[0].enabled) { - // Create local preview - local_preview.reset(test::VideoRenderer::Create( - "Local Preview", params_.video[0].width, params_.video[0].height)); - - video_sources_[0]->AddOrUpdateSink(local_preview.get(), - rtc::VideoSinkWants()); - } - ConnectVideoSourcesToStreams(); - } - - if (params_.audio.enabled) { - SetupAudio(send_transport.get()); - } - - Start(); - }); - - PressEnterToContinue(task_queue()); - - SendTask(RTC_FROM_HERE, task_queue(), [&]() { - Stop(); - DestroyStreams(); - - send_transport.reset(); - recv_transport.reset(); - - local_preview.reset(); - loopback_renderers.clear(); - - DestroyCalls(); - }); -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h b/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h deleted file mode 100644 index f49ce385b..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_quality_test.h +++ /dev/null @@ -1,147 +0,0 @@ -/* - * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ -#ifndef VIDEO_VIDEO_QUALITY_TEST_H_ -#define VIDEO_VIDEO_QUALITY_TEST_H_ - -#include -#include -#include -#include - -#include "api/fec_controller.h" -#include "api/rtc_event_log/rtc_event_log_factory.h" -#include "api/task_queue/task_queue_base.h" -#include "api/task_queue/task_queue_factory.h" -#include "api/test/frame_generator_interface.h" -#include "api/test/video_quality_test_fixture.h" -#include "api/video/video_bitrate_allocator_factory.h" -#include "call/fake_network_pipe.h" -#include "media/engine/internal_decoder_factory.h" -#include "media/engine/internal_encoder_factory.h" -#include "test/call_test.h" -#include "test/layer_filtering_transport.h" -#include "video/video_analyzer.h" -#ifdef WEBRTC_WIN -#include "modules/audio_device/win/core_audio_utility_win.h" -#include "rtc_base/win/scoped_com_initializer.h" -#endif - -namespace webrtc { - -class VideoQualityTest : public test::CallTest, - public VideoQualityTestFixtureInterface { - public: - explicit VideoQualityTest( - std::unique_ptr injection_components); - - void RunWithAnalyzer(const Params& params) override; - void RunWithRenderers(const Params& params) override; - - const std::map& payload_type_map() override { - return payload_type_map_; - } - - static void FillScalabilitySettings( - Params* params, - size_t video_idx, - const std::vector& stream_descriptors, - int num_streams, - size_t selected_stream, - int num_spatial_layers, - int selected_sl, - InterLayerPredMode inter_layer_pred, - const std::vector& sl_descriptors); - - // Helper static methods. - static VideoStream DefaultVideoStream(const Params& params, size_t video_idx); - static VideoStream DefaultThumbnailStream(); - static std::vector ParseCSV(const std::string& str); - - protected: - std::map payload_type_map_; - - // No-op implementation to be able to instantiate this class from non-TEST_F - // locations. - void TestBody() override; - - // Helper methods accessing only params_. - std::string GenerateGraphTitle() const; - void CheckParamsAndInjectionComponents(); - - // Helper methods for setting up the call. - void CreateCapturers(); - std::unique_ptr CreateFrameGenerator( - size_t video_idx); - void SetupThumbnailCapturers(size_t num_thumbnail_streams); - std::unique_ptr CreateVideoDecoder( - const SdpVideoFormat& format); - std::unique_ptr CreateVideoEncoder(const SdpVideoFormat& format, - VideoAnalyzer* analyzer); - void SetupVideo(Transport* send_transport, Transport* recv_transport); - void SetupThumbnails(Transport* send_transport, Transport* recv_transport); - void StartAudioStreams(); - void StartThumbnails(); - void StopThumbnails(); - void DestroyThumbnailStreams(); - // Helper method for creating a real ADM (using hardware) for all platforms. 
- rtc::scoped_refptr CreateAudioDevice(); - void InitializeAudioDevice(Call::Config* send_call_config, - Call::Config* recv_call_config, - bool use_real_adm); - void SetupAudio(Transport* transport); - - void StartEncodedFrameLogs(VideoReceiveStream* stream); - - virtual std::unique_ptr CreateSendTransport(); - virtual std::unique_ptr CreateReceiveTransport(); - - std::vector>> - thumbnail_capturers_; - Clock* const clock_; - const std::unique_ptr task_queue_factory_; - RtcEventLogFactory rtc_event_log_factory_; - - test::FunctionVideoDecoderFactory video_decoder_factory_; - std::unique_ptr decoder_factory_; - test::FunctionVideoEncoderFactory video_encoder_factory_; - test::FunctionVideoEncoderFactory video_encoder_factory_with_analyzer_; - std::unique_ptr - video_bitrate_allocator_factory_; - std::unique_ptr encoder_factory_; - std::vector thumbnail_send_configs_; - std::vector thumbnail_encoder_configs_; - std::vector thumbnail_send_streams_; - std::vector thumbnail_receive_configs_; - std::vector thumbnail_receive_streams_; - - int receive_logs_; - int send_logs_; - - Params params_; - std::unique_ptr injection_components_; - - // Set non-null when running with analyzer. - std::unique_ptr analyzer_; - - // Note: not same as similarly named member in CallTest. This is the number of - // separate send streams, the one in CallTest is the number of substreams for - // a single send stream. - size_t num_video_streams_; - -#ifdef WEBRTC_WIN - // Windows Core Audio based ADM needs to run on a COM initialized thread. - // Only referenced in combination with --audio --use_real_adm flags. - std::unique_ptr com_initializer_; -#endif -}; - -} // namespace webrtc - -#endif // VIDEO_VIDEO_QUALITY_TEST_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc new file mode 100644 index 000000000..27f86cfaa --- /dev/null +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream.cc @@ -0,0 +1,766 @@ +/* + * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "video/video_receive_stream.h" + +#include +#include + +#include +#include +#include +#include +#include + +#include "absl/algorithm/container.h" +#include "absl/types/optional.h" +#include "api/array_view.h" +#include "api/crypto/frame_decryptor_interface.h" +#include "api/video/encoded_image.h" +#include "api/video_codecs/h264_profile_level_id.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/video_decoder_factory.h" +#include "api/video_codecs/video_encoder.h" +#include "call/rtp_stream_receiver_controller_interface.h" +#include "call/rtx_receive_stream.h" +#include "common_video/include/incoming_video_stream.h" +#include "modules/utility/include/process_thread.h" +#include "modules/video_coding/include/video_codec_interface.h" +#include "modules/video_coding/include/video_coding_defines.h" +#include "modules/video_coding/include/video_error_codes.h" +#include "modules/video_coding/timing.h" +#include "modules/video_coding/utility/vp8_header_parser.h" +#include "rtc_base/checks.h" +#include "rtc_base/location.h" +#include "rtc_base/logging.h" +#include "rtc_base/strings/string_builder.h" +#include "rtc_base/system/thread_registry.h" +#include "rtc_base/time_utils.h" +#include "rtc_base/trace_event.h" +#include "system_wrappers/include/clock.h" +#include "system_wrappers/include/field_trial.h" +#include "video/call_stats.h" +#include "video/frame_dumping_decoder.h" +#include "video/receive_statistics_proxy.h" + +namespace webrtc { + +namespace internal { +constexpr int VideoReceiveStream::kMaxWaitForKeyFrameMs; +} // namespace internal + +namespace { + +constexpr int kMinBaseMinimumDelayMs = 0; +constexpr int kMaxBaseMinimumDelayMs = 10000; + +constexpr int kMaxWaitForFrameMs = 3000; + +// Concrete instance of RecordableEncodedFrame wrapping needed content +// from EncodedFrame. +class WebRtcRecordableEncodedFrame : public RecordableEncodedFrame { + public: + explicit WebRtcRecordableEncodedFrame(const EncodedFrame& frame) + : buffer_(frame.GetEncodedData()), + render_time_ms_(frame.RenderTime()), + codec_(frame.CodecSpecific()->codecType), + is_key_frame_(frame.FrameType() == VideoFrameType::kVideoFrameKey), + resolution_{frame.EncodedImage()._encodedWidth, + frame.EncodedImage()._encodedHeight} { + if (frame.ColorSpace()) { + color_space_ = *frame.ColorSpace(); + } + } + + // VideoEncodedSinkInterface::FrameBuffer + rtc::scoped_refptr encoded_buffer() + const override { + return buffer_; + } + + absl::optional color_space() const override { + return color_space_; + } + + VideoCodecType codec() const override { return codec_; } + + bool is_key_frame() const override { return is_key_frame_; } + + EncodedResolution resolution() const override { return resolution_; } + + Timestamp render_time() const override { + return Timestamp::Millis(render_time_ms_); + } + + private: + rtc::scoped_refptr buffer_; + int64_t render_time_ms_; + VideoCodecType codec_; + bool is_key_frame_; + EncodedResolution resolution_; + absl::optional color_space_; +}; + +// Video decoder class to be used for unknown codecs. Doesn't support decoding +// but logs messages to LS_ERROR. 
+class NullVideoDecoder : public webrtc::VideoDecoder { + public: + bool Configure(const Settings& settings) override { + RTC_LOG(LS_ERROR) << "Can't initialize NullVideoDecoder."; + return true; + } + + int32_t Decode(const webrtc::EncodedImage& input_image, + bool missing_frames, + int64_t render_time_ms) override { + RTC_LOG(LS_ERROR) << "The NullVideoDecoder doesn't support decoding."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t RegisterDecodeCompleteCallback( + webrtc::DecodedImageCallback* callback) override { + RTC_LOG(LS_ERROR) + << "Can't register decode complete callback on NullVideoDecoder."; + return WEBRTC_VIDEO_CODEC_OK; + } + + int32_t Release() override { return WEBRTC_VIDEO_CODEC_OK; } + + DecoderInfo GetDecoderInfo() const override { + DecoderInfo info; + info.implementation_name = "NullVideoDecoder"; + return info; + } + const char* ImplementationName() const override { return "NullVideoDecoder"; } +}; + +// TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. +// Maximum time between frames before resetting the FrameBuffer to avoid RTP +// timestamps wraparound to affect FrameBuffer. +constexpr int kInactiveStreamThresholdMs = 600000; // 10 minutes. + +} // namespace + +namespace internal { + +VideoReceiveStream::VideoReceiveStream( + TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock, + VCMTiming* timing) + : task_queue_factory_(task_queue_factory), + transport_adapter_(config.rtcp_send_transport), + config_(std::move(config)), + num_cpu_cores_(num_cpu_cores), + process_thread_(process_thread), + clock_(clock), + call_stats_(call_stats), + source_tracker_(clock_), + stats_proxy_(config_.rtp.remote_ssrc, clock_), + rtp_receive_statistics_(ReceiveStatistics::Create(clock_)), + timing_(timing), + video_receiver_(clock_, timing_.get()), + rtp_video_stream_receiver_(clock_, + &transport_adapter_, + call_stats, + packet_router, + &config_, + rtp_receive_statistics_.get(), + &stats_proxy_, + &stats_proxy_, + process_thread_, + this, // NackSender + nullptr, // Use default KeyFrameRequestSender + this, // OnCompleteFrameCallback + config_.frame_decryptor, + config_.frame_transformer), + rtp_stream_sync_(this), + max_wait_for_keyframe_ms_(kMaxWaitForKeyFrameMs), + max_wait_for_frame_ms_(kMaxWaitForFrameMs), + decode_queue_(task_queue_factory_->CreateTaskQueue( + "DecodingQueue", + TaskQueueFactory::Priority::HIGH)) { + RTC_LOG(LS_INFO) << "VideoReceiveStream: " << config_.ToString(); + + RTC_DCHECK(config_.renderer); + RTC_DCHECK(process_thread_); + RTC_DCHECK(call_stats_); + + module_process_sequence_checker_.Detach(); + network_sequence_checker_.Detach(); + + RTC_DCHECK(!config_.decoders.empty()); + RTC_CHECK(config_.decoder_factory); + std::set decoder_payload_types; + for (const Decoder& decoder : config_.decoders) { + RTC_CHECK(decoder_payload_types.find(decoder.payload_type) == + decoder_payload_types.end()) + << "Duplicate payload type (" << decoder.payload_type + << ") for different decoders."; + decoder_payload_types.insert(decoder.payload_type); + } + + timing_->set_render_delay(config_.render_delay_ms); + + frame_buffer_.reset( + new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); + + process_thread_->RegisterModule(&rtp_stream_sync_, RTC_FROM_HERE); + // Register with RtpStreamReceiverController. 
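[Editorial note, not part of the diff] The VideoReceiveStream constructor added above rejects configurations in which two decoders share an RTP payload type, using a std::set to detect duplicates. A stand-alone sketch of that uniqueness check with a simplified decoder struct (not the WebRTC config types):

```cpp
// Stand-alone sketch of the duplicate payload type validation performed in
// the VideoReceiveStream constructor above (simplified decoder struct).
#include <cassert>
#include <set>
#include <vector>

struct DecoderSketch {
  int payload_type;
};

// Returns true only when every decoder uses a distinct payload type.
bool HasUniquePayloadTypes(const std::vector<DecoderSketch>& decoders) {
  std::set<int> seen;
  for (const DecoderSketch& decoder : decoders) {
    if (!seen.insert(decoder.payload_type).second)
      return false;  // Same payload type mapped to two different decoders.
  }
  return true;
}

int main() {
  assert(HasUniquePayloadTypes({{96}, {98}, {100}}));
  assert(!HasUniquePayloadTypes({{96}, {98}, {96}}));
  return 0;
}
```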
+ media_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.remote_ssrc, &rtp_video_stream_receiver_); + if (config_.rtp.rtx_ssrc) { + rtx_receive_stream_ = std::make_unique( + &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, + config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); + rtx_receiver_ = receiver_controller->CreateReceiver( + config_.rtp.rtx_ssrc, rtx_receive_stream_.get()); + } else { + rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, + true); + } +} + +VideoReceiveStream::VideoReceiveStream( + TaskQueueFactory* task_queue_factory, + RtpStreamReceiverControllerInterface* receiver_controller, + int num_cpu_cores, + PacketRouter* packet_router, + VideoReceiveStream::Config config, + ProcessThread* process_thread, + CallStats* call_stats, + Clock* clock) + : VideoReceiveStream(task_queue_factory, + receiver_controller, + num_cpu_cores, + packet_router, + std::move(config), + process_thread, + call_stats, + clock, + new VCMTiming(clock)) {} + +VideoReceiveStream::~VideoReceiveStream() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + RTC_LOG(LS_INFO) << "~VideoReceiveStream: " << config_.ToString(); + Stop(); + process_thread_->DeRegisterModule(&rtp_stream_sync_); +} + +void VideoReceiveStream::SignalNetworkState(NetworkState state) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.SignalNetworkState(state); +} + +bool VideoReceiveStream::DeliverRtcp(const uint8_t* packet, size_t length) { + return rtp_video_stream_receiver_.DeliverRtcp(packet, length); +} + +void VideoReceiveStream::SetSync(Syncable* audio_syncable) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_stream_sync_.ConfigureSync(audio_syncable); +} + +void VideoReceiveStream::Start() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + if (decoder_running_) { + return; + } + + const bool protected_by_fec = config_.rtp.protected_by_flexfec || + rtp_video_stream_receiver_.IsUlpfecEnabled(); + + if (rtp_video_stream_receiver_.IsRetransmissionsEnabled() && + protected_by_fec) { + frame_buffer_->SetProtectionMode(kProtectionNackFEC); + } + + transport_adapter_.Enable(); + rtc::VideoSinkInterface* renderer = nullptr; + if (config_.enable_prerenderer_smoothing) { + incoming_video_stream_.reset(new IncomingVideoStream( + task_queue_factory_, config_.render_delay_ms, this)); + renderer = incoming_video_stream_.get(); + } else { + renderer = this; + } + + for (const Decoder& decoder : config_.decoders) { + std::unique_ptr video_decoder = + config_.decoder_factory->CreateVideoDecoder(decoder.video_format); + // If we still have no valid decoder, we have to create a "Null" decoder + // that ignores all calls. The reason we can get into this state is that the + // old decoder factory interface doesn't have a way to query supported + // codecs. + if (!video_decoder) { + video_decoder = std::make_unique(); + } + + std::string decoded_output_file = + field_trial::FindFullName("WebRTC-DecoderDataDumpDirectory"); + // Because '/' can't be used inside a field trial parameter, we use ';' + // instead. + // This is only relevant to WebRTC-DecoderDataDumpDirectory + // field trial. ';' is chosen arbitrary. Even though it's a legal character + // in some file systems, we can sacrifice ability to use it in the path to + // dumped video, since it's developers-only feature for debugging. 
+ absl::c_replace(decoded_output_file, ';', '/'); + if (!decoded_output_file.empty()) { + char filename_buffer[256]; + rtc::SimpleStringBuilder ssb(filename_buffer); + ssb << decoded_output_file << "/webrtc_receive_stream_" + << this->config_.rtp.remote_ssrc << "-" << rtc::TimeMicros() + << ".ivf"; + video_decoder = CreateFrameDumpingDecoderWrapper( + std::move(video_decoder), FileWrapper::OpenWriteOnly(ssb.str())); + } + + video_decoders_.push_back(std::move(video_decoder)); + + video_receiver_.RegisterExternalDecoder(video_decoders_.back().get(), + decoder.payload_type); + VideoDecoder::Settings settings; + settings.set_codec_type( + PayloadStringToCodecType(decoder.video_format.name)); + settings.set_max_render_resolution({320, 180}); + settings.set_number_of_cores(num_cpu_cores_); + + const bool raw_payload = + config_.rtp.raw_payload_types.count(decoder.payload_type) > 0; + rtp_video_stream_receiver_.AddReceiveCodec( + decoder.payload_type, settings.codec_type(), + decoder.video_format.parameters, raw_payload); + video_receiver_.RegisterReceiveCodec(decoder.payload_type, settings); + } + + RTC_DCHECK(renderer != nullptr); + video_stream_decoder_.reset( + new VideoStreamDecoder(&video_receiver_, &stats_proxy_, renderer)); + + // Make sure we register as a stats observer *after* we've prepared the + // `video_stream_decoder_`. + call_stats_->RegisterStatsObserver(this); + + // Start decoding on task queue. + video_receiver_.DecoderThreadStarting(); + stats_proxy_.DecoderThreadStarting(); + decode_queue_.PostTask([this] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = false; + StartNextDecode(); + }); + decoder_running_ = true; + rtp_video_stream_receiver_.StartReceive(); +} + +void VideoReceiveStream::Stop() { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtp_video_stream_receiver_.StopReceive(); + + stats_proxy_.OnUniqueFramesCounted( + rtp_video_stream_receiver_.GetUniqueFramesSeen()); + + decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); + + call_stats_->DeregisterStatsObserver(this); + + if (decoder_running_) { + rtc::Event done; + decode_queue_.PostTask([this, &done] { + RTC_DCHECK_RUN_ON(&decode_queue_); + decoder_stopped_ = true; + done.Set(); + }); + done.Wait(rtc::Event::kForever); + + decoder_running_ = false; + video_receiver_.DecoderThreadStopped(); + stats_proxy_.DecoderThreadStopped(); + // Deregister external decoders so they are no longer running during + // destruction. This effectively stops the VCM since the decoder thread is + // stopped, the VCM is deregistered and no asynchronous decoder threads are + // running. 
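[Editorial note, not part of the diff] Start() above derives the decoder dump path from the WebRTC-DecoderDataDumpDirectory field trial, substituting '/' for ';' because '/' cannot appear in a field trial parameter, and names the file webrtc_receive_stream_<ssrc>-<time>.ivf. A stand-alone sketch of that filename construction using plain std::string instead of rtc::SimpleStringBuilder; SSRC and timestamp values are made up:

```cpp
// Stand-alone sketch of the dump-file naming used in Start() above.
// ';' in the field trial value stands in for '/', so it is replaced first.
#include <algorithm>
#include <cstdint>
#include <cstdio>
#include <string>

std::string BuildDumpPath(std::string dir_from_field_trial,
                          uint32_t remote_ssrc,
                          int64_t time_micros) {
  std::replace(dir_from_field_trial.begin(), dir_from_field_trial.end(), ';',
               '/');
  return dir_from_field_trial + "/webrtc_receive_stream_" +
         std::to_string(remote_ssrc) + "-" + std::to_string(time_micros) +
         ".ivf";
}

int main() {
  // ";tmp;dumps" decodes to "/tmp/dumps"; the real code skips dumping when
  // the field trial value is empty.
  std::printf("%s\n",
              BuildDumpPath(";tmp;dumps", 123456u, 1650000000000000LL).c_str());
  // Prints: /tmp/dumps/webrtc_receive_stream_123456-1650000000000000.ivf
  return 0;
}
```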
+ for (const Decoder& decoder : config_.decoders) + video_receiver_.RegisterExternalDecoder(nullptr, decoder.payload_type); + + UpdateHistograms(); + } + + video_stream_decoder_.reset(); + incoming_video_stream_.reset(); + transport_adapter_.Disable(); +} + +VideoReceiveStream::Stats VideoReceiveStream::GetStats() const { + VideoReceiveStream::Stats stats = stats_proxy_.GetStats(); + stats.total_bitrate_bps = 0; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(stats.ssrc); + if (statistician) { + stats.rtp_stats = statistician->GetStats(); + stats.total_bitrate_bps = statistician->BitrateReceived(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) + stats.total_bitrate_bps += rtx_statistician->BitrateReceived(); + } + return stats; +} + +void VideoReceiveStream::UpdateHistograms() { + absl::optional fraction_lost; + StreamDataCounters rtp_stats; + StreamStatistician* statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.remote_ssrc); + if (statistician) { + fraction_lost = statistician->GetFractionLostInPercent(); + rtp_stats = statistician->GetReceiveStreamDataCounters(); + } + if (config_.rtp.rtx_ssrc) { + StreamStatistician* rtx_statistician = + rtp_receive_statistics_->GetStatistician(config_.rtp.rtx_ssrc); + if (rtx_statistician) { + StreamDataCounters rtx_stats = + rtx_statistician->GetReceiveStreamDataCounters(); + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, &rtx_stats); + return; + } + } + stats_proxy_.UpdateHistograms(fraction_lost, rtp_stats, nullptr); +} + +void VideoReceiveStream::AddSecondarySink(RtpPacketSinkInterface* sink) { + rtp_video_stream_receiver_.AddSecondarySink(sink); +} + +void VideoReceiveStream::RemoveSecondarySink( + const RtpPacketSinkInterface* sink) { + rtp_video_stream_receiver_.RemoveSecondarySink(sink); +} + +void VideoReceiveStream::SetRtpExtensions( + std::vector extensions) { + // VideoReceiveStream is deprecated and this function not supported. + RTC_DCHECK_NOTREACHED(); +} + +bool VideoReceiveStream::SetBaseMinimumPlayoutDelayMs(int delay_ms) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + if (delay_ms < kMinBaseMinimumDelayMs || delay_ms > kMaxBaseMinimumDelayMs) { + return false; + } + + MutexLock lock(&playout_delay_lock_); + base_minimum_playout_delay_ms_ = delay_ms; + UpdatePlayoutDelays(); + return true; +} + +int VideoReceiveStream::GetBaseMinimumPlayoutDelayMs() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + + MutexLock lock(&playout_delay_lock_); + return base_minimum_playout_delay_ms_; +} + +// TODO(tommi): This method grabs a lock 6 times. +void VideoReceiveStream::OnFrame(const VideoFrame& video_frame) { + int64_t video_playout_ntp_ms; + int64_t sync_offset_ms; + double estimated_freq_khz; + + // TODO(bugs.webrtc.org/10739): we should set local capture clock offset for + // `video_frame.packet_infos`. But VideoFrame is const qualified here. + + // TODO(tommi): GetStreamSyncOffsetInMs grabs three locks. One inside the + // function itself, another in GetChannel() and a third in + // GetPlayoutTimestamp. Seems excessive. Anyhow, I'm assuming the function + // succeeds most of the time, which leads to grabbing a fourth lock. + if (rtp_stream_sync_.GetStreamSyncOffsetInMs( + video_frame.timestamp(), video_frame.render_time_ms(), + &video_playout_ntp_ms, &sync_offset_ms, &estimated_freq_khz)) { + // TODO(tommi): OnSyncOffsetUpdated grabs a lock. 
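[Editorial note, not part of the diff] SetBaseMinimumPlayoutDelayMs above accepts only delays inside [kMinBaseMinimumDelayMs, kMaxBaseMinimumDelayMs], i.e. 0 to 10000 ms, stores the value under the playout delay lock, and reports rejection through its return value. A stand-alone sketch of just the bounds check and store:

```cpp
// Stand-alone sketch of the bounds check in SetBaseMinimumPlayoutDelayMs
// above; the constants mirror kMinBaseMinimumDelayMs / kMaxBaseMinimumDelayMs.
#include <cassert>

constexpr int kMinBaseMinimumDelayMsSketch = 0;
constexpr int kMaxBaseMinimumDelayMsSketch = 10000;

// Returns false (and leaves the stored delay untouched) for out-of-range
// values, mirroring the behaviour of the method above.
bool SetBaseMinimumPlayoutDelaySketch(int delay_ms, int* stored_delay_ms) {
  if (delay_ms < kMinBaseMinimumDelayMsSketch ||
      delay_ms > kMaxBaseMinimumDelayMsSketch) {
    return false;
  }
  *stored_delay_ms = delay_ms;
  return true;
}

int main() {
  int stored = 0;
  assert(SetBaseMinimumPlayoutDelaySketch(200, &stored) && stored == 200);
  assert(!SetBaseMinimumPlayoutDelaySketch(-1, &stored));
  assert(!SetBaseMinimumPlayoutDelaySketch(20000, &stored));
  return 0;
}
```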
+ stats_proxy_.OnSyncOffsetUpdated(video_playout_ntp_ms, sync_offset_ms, + estimated_freq_khz); + } + source_tracker_.OnFrameDelivered(video_frame.packet_infos()); + + config_.renderer->OnFrame(video_frame); + + // TODO(tommi): OnRenderFrame grabs a lock too. + stats_proxy_.OnRenderedFrame(video_frame); +} + +void VideoReceiveStream::SetFrameDecryptor( + rtc::scoped_refptr frame_decryptor) { + rtp_video_stream_receiver_.SetFrameDecryptor(std::move(frame_decryptor)); +} + +void VideoReceiveStream::SetDepacketizerToDecoderFrameTransformer( + rtc::scoped_refptr frame_transformer) { + rtp_video_stream_receiver_.SetDepacketizerToDecoderFrameTransformer( + std::move(frame_transformer)); +} + +void VideoReceiveStream::SendNack(const std::vector& sequence_numbers, + bool buffering_allowed) { + RTC_DCHECK(buffering_allowed); + rtp_video_stream_receiver_.RequestPacketRetransmit(sequence_numbers); +} + +void VideoReceiveStream::RequestKeyFrame(int64_t timestamp_ms) { + rtp_video_stream_receiver_.RequestKeyFrame(); + last_keyframe_request_ms_ = timestamp_ms; +} + +void VideoReceiveStream::OnCompleteFrame(std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&network_sequence_checker_); + // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. + int64_t time_now_ms = clock_->TimeInMilliseconds(); + if (last_complete_frame_time_ms_ > 0 && + time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { + frame_buffer_->Clear(); + } + last_complete_frame_time_ms_ = time_now_ms; + + const VideoPlayoutDelay& playout_delay = frame->EncodedImage().playout_delay_; + if (playout_delay.min_ms >= 0) { + MutexLock lock(&playout_delay_lock_); + frame_minimum_playout_delay_ms_ = playout_delay.min_ms; + UpdatePlayoutDelays(); + } + + if (playout_delay.max_ms >= 0) { + MutexLock lock(&playout_delay_lock_); + frame_maximum_playout_delay_ms_ = playout_delay.max_ms; + UpdatePlayoutDelays(); + } + + int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); + if (last_continuous_pid != -1) + rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); +} + +void VideoReceiveStream::OnRttUpdate(int64_t avg_rtt_ms, int64_t max_rtt_ms) { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + frame_buffer_->UpdateRtt(max_rtt_ms); + rtp_video_stream_receiver_.UpdateRtt(max_rtt_ms); +} + +uint32_t VideoReceiveStream::id() const { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + return config_.rtp.remote_ssrc; +} + +absl::optional VideoReceiveStream::GetInfo() const { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + absl::optional info = + rtp_video_stream_receiver_.GetSyncInfo(); + + if (!info) + return absl::nullopt; + + info->current_delay_ms = timing_->TargetVideoDelay(); + return info; +} + +bool VideoReceiveStream::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, + int64_t* time_ms) const { + RTC_DCHECK_NOTREACHED(); + return 0; +} + +void VideoReceiveStream::SetEstimatedPlayoutNtpTimestampMs( + int64_t ntp_timestamp_ms, + int64_t time_ms) { + RTC_DCHECK_NOTREACHED(); +} + +bool VideoReceiveStream::SetMinimumPlayoutDelay(int delay_ms) { + RTC_DCHECK_RUN_ON(&module_process_sequence_checker_); + MutexLock lock(&playout_delay_lock_); + syncable_minimum_playout_delay_ms_ = delay_ms; + UpdatePlayoutDelays(); + return true; +} + +int64_t VideoReceiveStream::GetWaitMs() const { + return keyframe_required_ ? 
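OnCompleteFrame() above clears the frame buffer when the gap since the previous complete frame exceeds kInactiveStreamThresholdMs, so a stream resuming after a long pause does not try to catch up from stale frames, and then applies any min/max playout delay signalled by the frame. A standalone sketch of the inactivity check alone; the threshold value below is an assumption for the sketch, the real constant is defined elsewhere in the file:

#include <cstdint>

constexpr int64_t kInactiveStreamThresholdMs = 600000;  // Assumed value (10 minutes).

struct InactivityGate {
  int64_t last_complete_frame_time_ms = -1;

  // Returns true when the buffer should be cleared before inserting the new
  // frame, then records the arrival time for the next check.
  bool ShouldClearBuffer(int64_t now_ms) {
    const bool clear =
        last_complete_frame_time_ms > 0 &&
        now_ms - last_complete_frame_time_ms > kInactiveStreamThresholdMs;
    last_complete_frame_time_ms = now_ms;
    return clear;
  }
};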
max_wait_for_keyframe_ms_ + : max_wait_for_frame_ms_; +} + +void VideoReceiveStream::StartNextDecode() { + TRACE_EVENT0("webrtc", "VideoReceiveStream::StartNextDecode"); + frame_buffer_->NextFrame(GetWaitMs(), keyframe_required_, &decode_queue_, + /* encoded frame handler */ + [this](std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + HandleFrameBufferTimeout(); + } + StartNextDecode(); + }); +} + +void VideoReceiveStream::HandleEncodedFrame( + std::unique_ptr frame) { + int64_t now_ms = clock_->TimeInMilliseconds(); + + // Current OnPreDecode only cares about QP for VP8. + int qp = -1; + if (frame->CodecSpecific()->codecType == kVideoCodecVP8) { + if (!vp8::GetQp(frame->data(), frame->size(), &qp)) { + RTC_LOG(LS_WARNING) << "Failed to extract QP from VP8 video frame"; + } + } + stats_proxy_.OnPreDecode(frame->CodecSpecific()->codecType, qp); + HandleKeyFrameGeneration(frame->FrameType() == VideoFrameType::kVideoFrameKey, + now_ms); + int decode_result = video_receiver_.Decode(frame.get()); + if (decode_result == WEBRTC_VIDEO_CODEC_OK || + decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) { + keyframe_required_ = false; + frame_decoded_ = true; + rtp_video_stream_receiver_.FrameDecoded(frame->Id()); + + if (decode_result == WEBRTC_VIDEO_CODEC_OK_REQUEST_KEYFRAME) + RequestKeyFrame(now_ms); + } else if (!frame_decoded_ || !keyframe_required_ || + (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ < now_ms)) { + keyframe_required_ = true; + // TODO(philipel): Remove this keyframe request when downstream project + // has been fixed. + RequestKeyFrame(now_ms); + } + + if (encoded_frame_buffer_function_) { + encoded_frame_buffer_function_(WebRtcRecordableEncodedFrame(*frame)); + } +} + +void VideoReceiveStream::HandleKeyFrameGeneration( + bool received_frame_is_keyframe, + int64_t now_ms) { + // Repeat sending keyframe requests if we've requested a keyframe. + if (!keyframe_generation_requested_) { + return; + } + if (received_frame_is_keyframe) { + keyframe_generation_requested_ = false; + } else if (last_keyframe_request_ms_ + max_wait_for_keyframe_ms_ <= now_ms) { + if (!IsReceivingKeyFrame(now_ms)) { + RequestKeyFrame(now_ms); + } + } else { + // It hasn't been long enough since the last keyframe request, do nothing. + } +} + +void VideoReceiveStream::HandleFrameBufferTimeout() { + int64_t now_ms = clock_->TimeInMilliseconds(); + absl::optional last_packet_ms = + rtp_video_stream_receiver_.LastReceivedPacketMs(); + + // To avoid spamming keyframe requests for a stream that is not active we + // check if we have received a packet within the last 5 seconds. + bool stream_is_active = last_packet_ms && now_ms - *last_packet_ms < 5000; + if (!stream_is_active) + stats_proxy_.OnStreamInactive(); + + if (stream_is_active && !IsReceivingKeyFrame(now_ms) && + (!config_.crypto_options.sframe.require_frame_encryption || + rtp_video_stream_receiver_.IsDecryptable())) { + RTC_LOG(LS_WARNING) << "No decodable frame in " << GetWaitMs() + << " ms, requesting keyframe."; + RequestKeyFrame(now_ms); + } +} + +bool VideoReceiveStream::IsReceivingKeyFrame(int64_t timestamp_ms) const { + absl::optional last_keyframe_packet_ms = + rtp_video_stream_receiver_.LastReceivedKeyframePacketMs(); + + // If we recently have been receiving packets belonging to a keyframe then + // we assume a keyframe is currently being received. 
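HandleFrameBufferTimeout() above only requests a keyframe when the stream is still active (a packet arrived within the last 5 seconds), no keyframe already appears to be in flight, and frames are decryptable when frame encryption is required. A standalone sketch of that decision with plain parameters in place of the receiver and config objects (names are illustrative):

#include <cstdint>
#include <optional>

constexpr int64_t kStreamActivityWindowMs = 5000;

bool ShouldRequestKeyframeOnTimeout(int64_t now_ms,
                                    std::optional<int64_t> last_packet_ms,
                                    std::optional<int64_t> last_keyframe_packet_ms,
                                    int64_t max_wait_for_keyframe_ms,
                                    bool require_frame_encryption,
                                    bool decryptable) {
  const bool stream_is_active =
      last_packet_ms && now_ms - *last_packet_ms < kStreamActivityWindowMs;
  if (!stream_is_active)
    return false;  // Inactive stream: report it, but do not spam requests.

  const bool receiving_keyframe =
      last_keyframe_packet_ms &&
      now_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms;
  if (receiving_keyframe)
    return false;  // A keyframe already appears to be on its way.

  // With frame encryption required, only ask once frames are decryptable.
  return !require_frame_encryption || decryptable;
}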
+ bool receiving_keyframe = + last_keyframe_packet_ms && + timestamp_ms - *last_keyframe_packet_ms < max_wait_for_keyframe_ms_; + return receiving_keyframe; +} + +void VideoReceiveStream::UpdatePlayoutDelays() const { + const int minimum_delay_ms = + std::max({frame_minimum_playout_delay_ms_, base_minimum_playout_delay_ms_, + syncable_minimum_playout_delay_ms_}); + if (minimum_delay_ms >= 0) { + timing_->set_min_playout_delay(minimum_delay_ms); + } + + const int maximum_delay_ms = frame_maximum_playout_delay_ms_; + if (maximum_delay_ms >= 0) { + timing_->set_max_playout_delay(maximum_delay_ms); + } +} + +std::vector VideoReceiveStream::GetSources() const { + return source_tracker_.GetSources(); +} + +VideoReceiveStream::RecordingState VideoReceiveStream::SetAndGetRecordingState( + RecordingState state, + bool generate_key_frame) { + RTC_DCHECK_RUN_ON(&worker_sequence_checker_); + rtc::Event event; + RecordingState old_state; + decode_queue_.PostTask([this, &event, &old_state, generate_key_frame, + state = std::move(state)] { + RTC_DCHECK_RUN_ON(&decode_queue_); + // Save old state. + old_state.callback = std::move(encoded_frame_buffer_function_); + old_state.last_keyframe_request_ms = last_keyframe_request_ms_; + + // Set new state. + encoded_frame_buffer_function_ = std::move(state.callback); + if (generate_key_frame) { + RequestKeyFrame(clock_->TimeInMilliseconds()); + keyframe_generation_requested_ = true; + } else { + keyframe_generation_requested_ = false; + last_keyframe_request_ms_ = state.last_keyframe_request_ms.value_or(0); + } + event.Set(); + }); + event.Wait(rtc::Event::kForever); + return old_state; +} + +void VideoReceiveStream::GenerateKeyFrame() { + decode_queue_.PostTask([this]() { + RTC_DCHECK_RUN_ON(&decode_queue_); + RequestKeyFrame(clock_->TimeInMilliseconds()); + keyframe_generation_requested_ = true; + }); +} + +} // namespace internal +} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc index 5436c0156..528b2998e 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.cc @@ -20,15 +20,9 @@ #include #include "absl/algorithm/container.h" -#include "absl/container/inlined_vector.h" -#include "absl/functional/bind_front.h" #include "absl/types/optional.h" #include "api/array_view.h" #include "api/crypto/frame_decryptor_interface.h" -#include "api/scoped_refptr.h" -#include "api/sequence_checker.h" -#include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" #include "api/video/encoded_image.h" #include "api/video_codecs/h264_profile_level_id.h" #include "api/video_codecs/sdp_video_format.h" @@ -38,36 +32,24 @@ #include "call/rtp_stream_receiver_controller_interface.h" #include "call/rtx_receive_stream.h" #include "common_video/include/incoming_video_stream.h" -#include "modules/video_coding/frame_buffer2.h" -#include "modules/video_coding/frame_buffer3.h" -#include "modules/video_coding/frame_helpers.h" #include "modules/video_coding/include/video_codec_interface.h" #include "modules/video_coding/include/video_coding_defines.h" #include "modules/video_coding/include/video_error_codes.h" -#include "modules/video_coding/inter_frame_delay.h" -#include "modules/video_coding/jitter_estimator.h" #include "modules/video_coding/timing.h" #include "modules/video_coding/utility/vp8_header_parser.h" #include "rtc_base/checks.h" -#include 
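UpdatePlayoutDelays() above picks the effective minimum playout delay as the largest of the frame-signalled, application-base and A/V-sync minimums, using -1 as an "unset" sentinel so an unset value is simply not pushed to VCMTiming. A standalone sketch of that resolution; the defaults and the output struct are illustrative, the real code calls the timing module setters directly:

#include <algorithm>
#include <optional>

struct PlayoutDelays {
  int frame_minimum_ms = -1;     // From the encoded frame's playout_delay.
  int base_minimum_ms = 0;       // From SetBaseMinimumPlayoutDelayMs().
  int syncable_minimum_ms = -1;  // From audio/video sync.
  int frame_maximum_ms = -1;     // From the encoded frame's playout_delay.
};

struct ResolvedDelays {
  std::optional<int> min_ms;
  std::optional<int> max_ms;
};

ResolvedDelays Resolve(const PlayoutDelays& d) {
  ResolvedDelays out;
  const int minimum = std::max(
      {d.frame_minimum_ms, d.base_minimum_ms, d.syncable_minimum_ms});
  if (minimum >= 0)
    out.min_ms = minimum;            // timing_->set_min_playout_delay(minimum);
  if (d.frame_maximum_ms >= 0)
    out.max_ms = d.frame_maximum_ms; // timing_->set_max_playout_delay(...);
  return out;
}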
"rtc_base/experiments/rtt_mult_experiment.h" #include "rtc_base/location.h" #include "rtc_base/logging.h" #include "rtc_base/strings/string_builder.h" #include "rtc_base/synchronization/mutex.h" #include "rtc_base/system/thread_registry.h" -#include "rtc_base/task_queue.h" -#include "rtc_base/task_utils/pending_task_safety_flag.h" -#include "rtc_base/task_utils/to_queued_task.h" -#include "rtc_base/thread_annotations.h" #include "rtc_base/time_utils.h" #include "rtc_base/trace_event.h" #include "system_wrappers/include/clock.h" #include "system_wrappers/include/field_trial.h" #include "video/call_stats2.h" -#include "video/frame_decode_scheduler.h" #include "video/frame_dumping_decoder.h" #include "video/receive_statistics_proxy2.h" -#include "video/video_receive_stream_timeout_tracker.h" namespace webrtc { @@ -211,8 +193,7 @@ VideoReceiveStream2::VideoReceiveStream2( CallStats* call_stats, Clock* clock, VCMTiming* timing, - NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync) + NackPeriodicProcessor* nack_periodic_processor) : task_queue_factory_(task_queue_factory), transport_adapter_(config.rtcp_send_transport), config_(std::move(config)), @@ -241,13 +222,12 @@ VideoReceiveStream2::VideoReceiveStream2( std::move(config_.frame_decryptor), std::move(config_.frame_transformer)), rtp_stream_sync_(call->worker_thread(), this), - max_wait_for_keyframe_ms_(DetermineMaxWaitForFrame(config_, true)), - max_wait_for_frame_ms_(DetermineMaxWaitForFrame(config_, false)), + max_wait_for_keyframe_ms_(DetermineMaxWaitForFrame(config, true)), + max_wait_for_frame_ms_(DetermineMaxWaitForFrame(config, false)), low_latency_renderer_enabled_("enabled", true), low_latency_renderer_include_predecode_buffer_("include_predecode_buffer", true), maximum_pre_stream_decoders_("max", kDefaultMaximumPreStreamDecoders), - decode_sync_(decode_sync), decode_queue_(task_queue_factory_->CreateTaskQueue( "DecodingQueue", TaskQueueFactory::Priority::HIGH)) { @@ -271,17 +251,15 @@ VideoReceiveStream2::VideoReceiveStream2( timing_->set_render_delay(config_.render_delay_ms); - frame_buffer_ = FrameBufferProxy::CreateFromFieldTrial( - clock_, call_->worker_thread(), timing_.get(), &stats_proxy_, - &decode_queue_, this, TimeDelta::Millis(max_wait_for_keyframe_ms_), - TimeDelta::Millis(max_wait_for_frame_ms_), decode_sync_); + frame_buffer_.reset( + new video_coding::FrameBuffer(clock_, timing_.get(), &stats_proxy_)); if (config_.rtp.rtx_ssrc) { rtx_receive_stream_ = std::make_unique( - &rtp_video_stream_receiver_, config_.rtp.rtx_associated_payload_types, + &rtp_video_stream_receiver_, config.rtp.rtx_associated_payload_types, config_.rtp.remote_ssrc, rtp_receive_statistics_.get()); } else { - rtp_receive_statistics_->EnableRetransmitDetection(config_.rtp.remote_ssrc, + rtp_receive_statistics_->EnableRetransmitDetection(config.rtp.remote_ssrc, true); } @@ -417,8 +395,8 @@ void VideoReceiveStream2::Start() { decode_queue_.PostTask([this] { RTC_DCHECK_RUN_ON(&decode_queue_); decoder_stopped_ = false; + StartNextDecode(); }); - frame_buffer_->StartNextDecode(true); decoder_running_ = true; { @@ -441,8 +419,10 @@ void VideoReceiveStream2::Stop() { stats_proxy_.OnUniqueFramesCounted( rtp_video_stream_receiver_.GetUniqueFramesSeen()); - frame_buffer_->StopOnWorker(); + decode_queue_.PostTask([this] { frame_buffer_->Stop(); }); + call_stats_->DeregisterStatsObserver(this); + if (decoder_running_) { rtc::Event done; decode_queue_.PostTask([this, &done] { @@ -659,9 +639,6 @@ void 
VideoReceiveStream2::OnCompleteFrame(std::unique_ptr frame) { RTC_DCHECK_RUN_ON(&worker_sequence_checker_); // TODO(https://bugs.webrtc.org/9974): Consider removing this workaround. - // TODO(https://bugs.webrtc.org/13343): Remove this check when FrameBuffer3 is - // deployed. With FrameBuffer3, this case is properly handled and tested in - // the FrameBufferProxyTest.PausedStream unit test. int64_t time_now_ms = clock_->TimeInMilliseconds(); if (last_complete_frame_time_ms_ > 0 && time_now_ms - last_complete_frame_time_ms_ > kInactiveStreamThresholdMs) { @@ -680,12 +657,12 @@ void VideoReceiveStream2::OnCompleteFrame(std::unique_ptr frame) { UpdatePlayoutDelays(); } - auto last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); - if (last_continuous_pid.has_value()) { + int64_t last_continuous_pid = frame_buffer_->InsertFrame(std::move(frame)); + if (last_continuous_pid != -1) { { // TODO(bugs.webrtc.org/11993): Call on the network thread. RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - rtp_video_stream_receiver_.FrameContinuous(*last_continuous_pid); + rtp_video_stream_receiver_.FrameContinuous(last_continuous_pid); } } } @@ -717,7 +694,7 @@ absl::optional VideoReceiveStream2::GetInfo() const { bool VideoReceiveStream2::GetPlayoutRtpTimestamp(uint32_t* rtp_timestamp, int64_t* time_ms) const { RTC_DCHECK_NOTREACHED(); - return false; + return 0; } void VideoReceiveStream2::SetEstimatedPlayoutNtpTimestampMs( @@ -738,37 +715,29 @@ int64_t VideoReceiveStream2::GetMaxWaitMs() const { : max_wait_for_frame_ms_; } -void VideoReceiveStream2::OnEncodedFrame(std::unique_ptr frame) { - if (!decode_queue_.IsCurrent()) { - decode_queue_.PostTask([this, frame = std::move(frame)]() mutable { - OnEncodedFrame(std::move(frame)); - }); - return; - } - RTC_DCHECK_RUN_ON(&decode_queue_); - if (decoder_stopped_) - return; - HandleEncodedFrame(std::move(frame)); - frame_buffer_->StartNextDecode(keyframe_required_); -} - -void VideoReceiveStream2::OnDecodableFrameTimeout(TimeDelta wait_time) { - if (!call_->worker_thread()->IsCurrent()) { - call_->worker_thread()->PostTask(ToQueuedTask( - task_safety_, - [this, wait_time] { OnDecodableFrameTimeout(wait_time); })); - return; - } - - // TODO(bugs.webrtc.org/11993): PostTask to the network thread. - RTC_DCHECK_RUN_ON(&packet_sequence_checker_); - int64_t now_ms = clock_->TimeInMilliseconds(); - HandleFrameBufferTimeout(now_ms, wait_time.ms()); - - decode_queue_.PostTask([this] { - RTC_DCHECK_RUN_ON(&decode_queue_); - frame_buffer_->StartNextDecode(keyframe_required_); - }); +void VideoReceiveStream2::StartNextDecode() { + // Running on the decode thread. + TRACE_EVENT0("webrtc", "VideoReceiveStream2::StartNextDecode"); + frame_buffer_->NextFrame( + GetMaxWaitMs(), keyframe_required_, &decode_queue_, + /* encoded frame handler */ + [this](std::unique_ptr frame) { + RTC_DCHECK_RUN_ON(&decode_queue_); + if (decoder_stopped_) + return; + if (frame) { + HandleEncodedFrame(std::move(frame)); + } else { + int64_t now_ms = clock_->TimeInMilliseconds(); + // TODO(bugs.webrtc.org/11993): PostTask to the network thread. 
+ call_->worker_thread()->PostTask(ToQueuedTask( + task_safety_, [this, now_ms, wait_ms = GetMaxWaitMs()]() { + RTC_DCHECK_RUN_ON(&packet_sequence_checker_); + HandleFrameBufferTimeout(now_ms, wait_ms); + })); + } + StartNextDecode(); + }); } void VideoReceiveStream2::HandleEncodedFrame( diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h index 6a7795da6..cf637f8c0 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream2.h @@ -25,6 +25,7 @@ #include "call/video_receive_stream.h" #include "modules/rtp_rtcp/include/flexfec_receiver.h" #include "modules/rtp_rtcp/source/source_tracker.h" +#include "modules/video_coding/frame_buffer2.h" #include "modules/video_coding/nack_requester.h" #include "modules/video_coding/video_receiver2.h" #include "rtc_base/system/no_unique_address.h" @@ -32,7 +33,6 @@ #include "rtc_base/task_utils/pending_task_safety_flag.h" #include "rtc_base/thread_annotations.h" #include "system_wrappers/include/clock.h" -#include "video/frame_buffer_proxy.h" #include "video/receive_statistics_proxy2.h" #include "video/rtp_streams_synchronizer2.h" #include "video/rtp_video_stream_receiver2.h" @@ -83,8 +83,7 @@ class VideoReceiveStream2 public NackSender, public RtpVideoStreamReceiver2::OnCompleteFrameCallback, public Syncable, - public CallStatsObserver, - public FrameSchedulingReceiver { + public CallStatsObserver { public: // The default number of milliseconds to pass before re-requesting a key frame // to be sent. @@ -101,8 +100,7 @@ class VideoReceiveStream2 CallStats* call_stats, Clock* clock, VCMTiming* timing, - NackPeriodicProcessor* nack_periodic_processor, - DecodeSynchronizer* decode_sync); + NackPeriodicProcessor* nack_periodic_processor); // Destruction happens on the worker thread. Prior to destruction the caller // must ensure that a registration with the transport has been cleared. See // `RegisterWithTransport` for details. @@ -186,10 +184,9 @@ class VideoReceiveStream2 void GenerateKeyFrame() override; private: - void OnEncodedFrame(std::unique_ptr frame) override; - void OnDecodableFrameTimeout(TimeDelta wait_time) override; void CreateAndRegisterExternalDecoder(const Decoder& decoder); int64_t GetMaxWaitMs() const RTC_RUN_ON(decode_queue_); + void StartNextDecode() RTC_RUN_ON(decode_queue_); void HandleEncodedFrame(std::unique_ptr frame) RTC_RUN_ON(decode_queue_); void HandleFrameBufferTimeout(int64_t now_ms, int64_t wait_ms) @@ -250,7 +247,8 @@ class VideoReceiveStream2 // moved to the new VideoStreamDecoder. std::vector> video_decoders_; - std::unique_ptr frame_buffer_; + // Members for the new jitter buffer experiment. + std::unique_ptr frame_buffer_; std::unique_ptr media_receiver_ RTC_GUARDED_BY(packet_sequence_checker_); @@ -324,8 +322,6 @@ class VideoReceiveStream2 // any video frame has been received. FieldTrialParameter maximum_pre_stream_decoders_; - DecodeSynchronizer* decode_sync_; - // Defined last so they are destroyed before all other members. rtc::TaskQueue decode_queue_; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc deleted file mode 100644 index 79e0e2cfc..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.cc +++ /dev/null @@ -1,81 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. 
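The StartNextDecode() restored above is a self-rescheduling pull: it asks the frame buffer for the next frame with a timeout, and the completion callback, which runs on the decode queue, either decodes the frame or handles the timeout and then immediately re-arms itself. A minimal standalone sketch of that re-arming shape with a toy synchronous buffer standing in for video_coding::FrameBuffer (all names below are stand-ins):

#include <functional>
#include <iostream>
#include <memory>
#include <queue>

struct Frame { int id; };

// Toy stand-in for FrameBuffer::NextFrame(): synchronously invokes the handler
// with the next frame, or with nullptr on "timeout".
class ToyFrameBuffer {
 public:
  void Insert(Frame f) { frames_.push(f); }
  void NextFrame(std::function<void(std::unique_ptr<Frame>)> handler) {
    if (frames_.empty()) {
      handler(nullptr);  // Timeout path.
      return;
    }
    auto frame = std::make_unique<Frame>(frames_.front());
    frames_.pop();
    handler(std::move(frame));
  }

 private:
  std::queue<Frame> frames_;
};

class Receiver {
 public:
  explicit Receiver(ToyFrameBuffer* buffer) : buffer_(buffer) {}
  void StartNextDecode() {
    if (stopped_) return;
    buffer_->NextFrame([this](std::unique_ptr<Frame> frame) {
      if (frame) {
        std::cout << "decode frame " << frame->id << "\n";
      } else {
        std::cout << "timeout, maybe request a keyframe\n";
        stopped_ = true;  // Stop the toy loop so the example terminates.
      }
      StartNextDecode();  // Re-arm, exactly like the restored code above.
    });
  }

 private:
  ToyFrameBuffer* buffer_;
  bool stopped_ = false;
};

int main() {
  ToyFrameBuffer buffer;
  buffer.Insert({1});
  buffer.Insert({2});
  Receiver receiver(&buffer);
  receiver.StartNextDecode();
}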
- * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#include "video/video_receive_stream_timeout_tracker.h" - -#include - -namespace webrtc { - -VideoReceiveStreamTimeoutTracker::VideoReceiveStreamTimeoutTracker( - Clock* clock, - TaskQueueBase* const bookkeeping_queue, - const Timeouts& timeouts, - TimeoutCallback callback) - : clock_(clock), - bookkeeping_queue_(bookkeeping_queue), - timeouts_(timeouts), - callback_(std::move(callback)) {} - -VideoReceiveStreamTimeoutTracker::~VideoReceiveStreamTimeoutTracker() { - RTC_DCHECK(!timeout_task_.Running()); -} - -bool VideoReceiveStreamTimeoutTracker::Running() const { - return timeout_task_.Running(); -} - -void VideoReceiveStreamTimeoutTracker::Start(bool waiting_for_keyframe) { - RTC_DCHECK(!timeout_task_.Running()); - waiting_for_keyframe_ = waiting_for_keyframe; - TimeDelta timeout_delay = TimeoutForNextFrame(); - timeout_ = clock_->CurrentTime() + timeout_delay; - timeout_task_ = RepeatingTaskHandle::DelayedStart( - bookkeeping_queue_, timeout_delay, [this] { - RTC_DCHECK_RUN_ON(bookkeeping_queue_); - return HandleTimeoutTask(); - }); -} - -void VideoReceiveStreamTimeoutTracker::Stop() { - timeout_task_.Stop(); -} - -void VideoReceiveStreamTimeoutTracker::SetWaitingForKeyframe() { - waiting_for_keyframe_ = true; - TimeDelta timeout_delay = TimeoutForNextFrame(); - if (clock_->CurrentTime() + timeout_delay < timeout_) { - Stop(); - Start(waiting_for_keyframe_); - } -} - -void VideoReceiveStreamTimeoutTracker::OnEncodedFrameReleased() { - // If we were waiting for a keyframe, then it has just been released. - waiting_for_keyframe_ = false; - timeout_ = clock_->CurrentTime() + TimeoutForNextFrame(); -} - -TimeDelta VideoReceiveStreamTimeoutTracker::HandleTimeoutTask() { - Timestamp now = clock_->CurrentTime(); - // `timeout_` is hit and we have timed out. Schedule the next timeout at - // the timeout delay. - if (now >= timeout_) { - TimeDelta timeout_delay = TimeoutForNextFrame(); - timeout_ = now + timeout_delay; - callback_(); - return timeout_delay; - } - // Otherwise, `timeout_` changed since we scheduled a timeout. Reschedule - // a timeout check. - return timeout_ - now; -} - -} // namespace webrtc diff --git a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h b/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h deleted file mode 100644 index 6993e13ad..000000000 --- a/TMessagesProj/jni/voip/webrtc/video/video_receive_stream_timeout_tracker.h +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2022 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
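The VideoReceiveStreamTimeoutTracker deleted above fires its callback whenever no frame was released before a deadline, and its repeating task reschedules itself either a full timeout ahead (deadline hit) or at exactly the remaining time (deadline pushed back by a released frame). A standalone sketch of that rescheduling rule, with plain millisecond timestamps in place of webrtc::Timestamp and RepeatingTaskHandle:

#include <cstdint>
#include <functional>

class ToyTimeoutTracker {
 public:
  ToyTimeoutTracker(int64_t keyframe_timeout_ms, int64_t frame_timeout_ms,
                    std::function<void()> on_timeout)
      : keyframe_timeout_ms_(keyframe_timeout_ms),
        frame_timeout_ms_(frame_timeout_ms),
        on_timeout_(std::move(on_timeout)) {}

  void Start(int64_t now_ms, bool waiting_for_keyframe) {
    waiting_for_keyframe_ = waiting_for_keyframe;
    deadline_ms_ = now_ms + TimeoutForNextFrame();
  }

  void OnEncodedFrameReleased(int64_t now_ms) {
    waiting_for_keyframe_ = false;                  // The awaited frame arrived.
    deadline_ms_ = now_ms + TimeoutForNextFrame();  // Push the deadline forward.
  }

  // Body of the repeating task; returns the delay until the next check.
  int64_t HandleTimeoutTask(int64_t now_ms) {
    if (now_ms >= deadline_ms_) {
      const int64_t delay = TimeoutForNextFrame();
      deadline_ms_ = now_ms + delay;
      on_timeout_();
      return delay;                // Timed out: check again a full timeout later.
    }
    return deadline_ms_ - now_ms;  // Deadline moved: wake up exactly then.
  }

 private:
  int64_t TimeoutForNextFrame() const {
    return waiting_for_keyframe_ ? keyframe_timeout_ms_ : frame_timeout_ms_;
  }

  const int64_t keyframe_timeout_ms_;
  const int64_t frame_timeout_ms_;
  const std::function<void()> on_timeout_;
  int64_t deadline_ms_ = 0;
  bool waiting_for_keyframe_ = false;
};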
- */ - -#ifndef VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ -#define VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ - -#include - -#include "api/task_queue/task_queue_base.h" -#include "api/units/time_delta.h" -#include "rtc_base/task_utils/repeating_task.h" -#include "system_wrappers/include/clock.h" - -namespace webrtc { - -class VideoReceiveStreamTimeoutTracker { - public: - struct Timeouts { - TimeDelta max_wait_for_keyframe; - TimeDelta max_wait_for_frame; - }; - - using TimeoutCallback = std::function; - VideoReceiveStreamTimeoutTracker(Clock* clock, - TaskQueueBase* const bookkeeping_queue, - const Timeouts& timeouts, - TimeoutCallback callback); - ~VideoReceiveStreamTimeoutTracker(); - VideoReceiveStreamTimeoutTracker(const VideoReceiveStreamTimeoutTracker&) = - delete; - VideoReceiveStreamTimeoutTracker& operator=( - const VideoReceiveStreamTimeoutTracker&) = delete; - - bool Running() const; - void Start(bool waiting_for_keyframe); - void Stop(); - void SetWaitingForKeyframe(); - void OnEncodedFrameReleased(); - - private: - TimeDelta TimeoutForNextFrame() const { - return waiting_for_keyframe_ ? timeouts_.max_wait_for_keyframe - : timeouts_.max_wait_for_frame; - } - TimeDelta HandleTimeoutTask(); - - Clock* const clock_; - TaskQueueBase* const bookkeeping_queue_; - const Timeouts timeouts_; - const TimeoutCallback callback_; - RepeatingTaskHandle timeout_task_; - - Timestamp timeout_ = Timestamp::MinusInfinity(); - bool waiting_for_keyframe_; -}; -} // namespace webrtc - -#endif // VIDEO_VIDEO_RECEIVE_STREAM_TIMEOUT_TRACKER_H_ diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h index d1afefa78..58a0f989b 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream.h @@ -75,7 +75,8 @@ class VideoSendStream : public webrtc::VideoSendStream { void DeliverRtcp(const uint8_t* packet, size_t length); // webrtc::VideoSendStream implementation. - void UpdateActiveSimulcastLayers(std::vector active_layers) override; + void UpdateActiveSimulcastLayers( + const std::vector active_layers) override; void Start() override; void Stop() override; bool started() override; diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h index 74f3a8631..a29f186af 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_impl.h @@ -77,7 +77,7 @@ class VideoSendStreamImpl : public webrtc::BitrateAllocatorObserver, ~VideoSendStreamImpl() override; void DeliverRtcp(const uint8_t* packet, size_t length); - void UpdateActiveSimulcastLayers(std::vector active_layers); + void UpdateActiveSimulcastLayers(const std::vector active_layers); void Start(); void Stop(); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc index 01669a50d..23f9faa1f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_send_stream_tests.cc @@ -947,7 +947,12 @@ void VideoSendStreamTest::TestNackRetransmission( ++send_count_; // NACK packets at arbitrary points. 
- if (send_count_ % 25 == 0) { + if (send_count_ == 5 || send_count_ == 25) { + nacked_sequence_numbers_.insert( + nacked_sequence_numbers_.end(), + non_padding_sequence_numbers_.end() - kNackedPacketsAtOnceCount, + non_padding_sequence_numbers_.end()); + RTCPSender::Configuration config; config.clock = Clock::GetRealTimeClock(); config.outgoing_transport = transport_adapter_.get(); @@ -959,19 +964,11 @@ void VideoSendStreamTest::TestNackRetransmission( rtcp_sender.SetRemoteSSRC(kVideoSendSsrcs[0]); RTCPSender::FeedbackState feedback_state; - uint16_t nack_sequence_numbers[kNackedPacketsAtOnceCount]; - int nack_count = 0; - for (uint16_t sequence_number : - sequence_numbers_pending_retransmission_) { - if (nack_count < kNackedPacketsAtOnceCount) { - nack_sequence_numbers[nack_count++] = sequence_number; - } else { - break; - } - } - EXPECT_EQ(0, rtcp_sender.SendRTCP(feedback_state, kRtcpNack, nack_count, - nack_sequence_numbers)); + EXPECT_EQ(0, rtcp_sender.SendRTCP( + feedback_state, kRtcpNack, + static_cast(nacked_sequence_numbers_.size()), + &nacked_sequence_numbers_.front())); } uint16_t sequence_number = rtp_packet.SequenceNumber(); @@ -983,25 +980,17 @@ void VideoSendStreamTest::TestNackRetransmission( sequence_number = (rtx_header[0] << 8) + rtx_header[1]; } - auto it = sequence_numbers_pending_retransmission_.find(sequence_number); - if (it == sequence_numbers_pending_retransmission_.end()) { - // Not currently pending retransmission. Add it to retransmission queue - // if media and limit not reached. - if (rtp_packet.Ssrc() == kVideoSendSsrcs[0] && - rtp_packet.payload_size() > 0 && - retransmit_count_ + - sequence_numbers_pending_retransmission_.size() < - kRetransmitTarget) { - sequence_numbers_pending_retransmission_.insert(sequence_number); - } - } else { - // Packet is a retransmission, remove it from queue and check if done. - sequence_numbers_pending_retransmission_.erase(it); + auto found = absl::c_find(nacked_sequence_numbers_, sequence_number); + if (found != nacked_sequence_numbers_.end()) { + nacked_sequence_numbers_.erase(found); + if (++retransmit_count_ == kRetransmitTarget) { EXPECT_EQ(retransmit_ssrc_, rtp_packet.Ssrc()); EXPECT_EQ(retransmit_payload_type_, rtp_packet.PayloadType()); observation_complete_.Set(); } + } else { + non_padding_sequence_numbers_.push_back(sequence_number); } return SEND_PACKET; @@ -1029,7 +1018,8 @@ void VideoSendStreamTest::TestNackRetransmission( int retransmit_count_; const uint32_t retransmit_ssrc_; const uint8_t retransmit_payload_type_; - std::set sequence_numbers_pending_retransmission_; + std::vector nacked_sequence_numbers_; + std::vector non_padding_sequence_numbers_; } test(retransmit_ssrc, retransmit_payload_type); RunBaseTest(&test); @@ -1675,8 +1665,7 @@ TEST_F(VideoSendStreamTest, ChangingNetworkRoute) { // Test that if specified, relay cap is lifted on transition to direct // connection. -// TODO(https://bugs.webrtc.org/13353): Test disabled due to flakiness. -TEST_F(VideoSendStreamTest, DISABLED_RelayToDirectRoute) { +TEST_F(VideoSendStreamTest, RelayToDirectRoute) { static const int kStartBitrateBps = 300000; static const int kRelayBandwidthCapBps = 800000; static const int kMinPacketsToSend = 100; @@ -3099,20 +3088,20 @@ class Vp9HeaderObserver : public test::SendTest { void VerifyTemporalLayerStructure0(const RTPVideoHeaderVP9& vp9) const { EXPECT_EQ(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_EQ(kNoTemporalIdx, vp9.temporal_idx); // no tid - // Technically true, but layer indices not available. 
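The reverted test above NACKs the last kNackedPacketsAtOnceCount non-padding packets at two fixed points in the send sequence and then waits for kRetransmitTarget of them to be retransmitted. A standalone sketch of the "queue the tail of the observed sequence numbers" step (the size guard is an addition for the sketch; the test relies on the fixed send counts instead):

#include <cstddef>
#include <cstdint>
#include <vector>

// Appends the last `count` observed non-padding sequence numbers to `nacked`,
// mirroring nacked_sequence_numbers_.insert(end, non_padding.end() - count,
// non_padding.end()) in the test above.
void QueueTailForNack(const std::vector<uint16_t>& non_padding,
                      size_t count,
                      std::vector<uint16_t>* nacked) {
  if (non_padding.size() < count)
    return;  // Guard added for the sketch only.
  nacked->insert(nacked->end(), non_padding.end() - count, non_padding.end());
}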
EXPECT_FALSE(vp9.temporal_up_switch); } void VerifyTemporalLayerStructure1(const RTPVideoHeaderVP9& vp9) const { EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_EQ(0, vp9.temporal_idx); // 0,0,0,... + EXPECT_FALSE(vp9.temporal_up_switch); } void VerifyTemporalLayerStructure2(const RTPVideoHeaderVP9& vp9) const { EXPECT_NE(kNoTl0PicIdx, vp9.tl0_pic_idx); EXPECT_GE(vp9.temporal_idx, 0); // 0,1,0,1,... (tid reset on I-frames). EXPECT_LE(vp9.temporal_idx, 1); - EXPECT_TRUE(vp9.temporal_up_switch); + EXPECT_EQ(vp9.temporal_idx > 0, vp9.temporal_up_switch); if (IsNewPictureId(vp9)) { uint8_t expected_tid = (!vp9.inter_pic_predicted || last_vp9_.temporal_idx == 1) ? 0 : 1; @@ -3126,16 +3115,18 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_LE(vp9.temporal_idx, 2); if (IsNewPictureId(vp9) && vp9.inter_pic_predicted) { EXPECT_NE(vp9.temporal_idx, last_vp9_.temporal_idx); - EXPECT_TRUE(vp9.temporal_up_switch); switch (vp9.temporal_idx) { case 0: - EXPECT_EQ(last_vp9_.temporal_idx, 2); + EXPECT_EQ(2, last_vp9_.temporal_idx); + EXPECT_FALSE(vp9.temporal_up_switch); break; case 1: - EXPECT_EQ(last_vp9_.temporal_idx, 2); + EXPECT_EQ(2, last_vp9_.temporal_idx); + EXPECT_TRUE(vp9.temporal_up_switch); break; case 2: EXPECT_LT(last_vp9_.temporal_idx, 2); + EXPECT_TRUE(vp9.temporal_up_switch); break; } } @@ -3200,12 +3191,8 @@ class Vp9HeaderObserver : public test::SendTest { EXPECT_FALSE(vp9.inter_pic_predicted); // P if (!vp9.inter_pic_predicted) { - if (vp9.temporal_idx == kNoTemporalIdx) { - EXPECT_FALSE(vp9.temporal_up_switch); - } else { - EXPECT_EQ(vp9.temporal_idx, 0); - EXPECT_TRUE(vp9.temporal_up_switch); - } + EXPECT_TRUE(vp9.temporal_idx == 0 || vp9.temporal_idx == kNoTemporalIdx); + EXPECT_FALSE(vp9.temporal_up_switch); } } diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc index cf3b649f7..810a4ff1f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.cc @@ -75,12 +75,6 @@ bool VideoSourceSinkController::HasSource() const { return source_ != nullptr; } -void VideoSourceSinkController::RequestRefreshFrame() { - RTC_DCHECK_RUN_ON(&sequence_checker_); - if (source_) - source_->RequestRefreshFrame(); -} - void VideoSourceSinkController::PushSourceSinkSettings() { RTC_DCHECK_RUN_ON(&sequence_checker_); if (!source_) diff --git a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h index e2a7eb7c7..d2e3267a8 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_source_sink_controller.h @@ -38,9 +38,6 @@ class VideoSourceSinkController { void SetSource(rtc::VideoSourceInterface* source); bool HasSource() const; - // Requests a refresh frame from the current source, if set. - void RequestRefreshFrame(); - // Must be called in order for changes to settings to have an effect. This // allows you to modify multiple properties in a single push to the sink. 
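The Vp9HeaderObserver changes above stop expecting temporal_up_switch unconditionally and instead tie it to the layer transition: only a drop back to temporal layer 0 is expected to clear the flag. A tiny standalone sketch of the expectation table the three-layer (0,2,1,2) structure now encodes; the struct is illustrative, the test uses EXPECT_* macros directly:

// Mirrors the reverted VerifyTemporalLayerStructure3 expectations for a
// 0,2,1,2,... pattern: which layer the previous frame must have used, and
// whether the up-switch flag should be set for the current temporal_idx.
struct TemporalExpectation {
  bool up_switch;
  bool previous_layer_is_top;  // true: last temporal_idx == 2; false: < 2.
};

TemporalExpectation ExpectationFor(int temporal_idx) {
  switch (temporal_idx) {
    case 0:  return {false, true};   // 2 -> 0: up-switch cleared.
    case 1:  return {true, true};    // 2 -> 1: up-switch set.
    default: return {true, false};   // 0/1 -> 2: up-switch set.
  }
}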
void PushSourceSinkSettings(); diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc index dc82f8c86..4f1d8c260 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.cc @@ -28,7 +28,6 @@ #include "api/video/video_bitrate_allocator_factory.h" #include "api/video/video_codec_constants.h" #include "api/video/video_layers_allocation.h" -#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/video_encoder.h" #include "call/adaptation/resource_adaptation_processor.h" #include "call/adaptation/video_stream_adapter.h" @@ -36,6 +35,7 @@ #include "modules/video_coding/svc/svc_rate_allocator.h" #include "rtc_base/arraysize.h" #include "rtc_base/checks.h" +#include "rtc_base/constructor_magic.h" #include "rtc_base/event.h" #include "rtc_base/experiments/alr_experiment.h" #include "rtc_base/experiments/encoder_info_settings.h" @@ -51,7 +51,6 @@ #include "system_wrappers/include/metrics.h" #include "video/adaptation/video_stream_encoder_resource_manager.h" #include "video/alignment_adjuster.h" -#include "video/frame_cadence_adapter.h" namespace webrtc { @@ -65,11 +64,6 @@ const int64_t kPendingFrameTimeoutMs = 1000; constexpr char kFrameDropperFieldTrial[] = "WebRTC-FrameDropper"; -// TODO(bugs.webrtc.org/13572): Remove this kill switch after deploying the -// feature. -constexpr char kSwitchEncoderOnInitializationFailuresFieldTrial[] = - "WebRTC-SwitchEncoderOnInitializationFailures"; - const size_t kDefaultPayloadSize = 1440; const int64_t kParameterUpdateIntervalMs = 1000; @@ -653,6 +647,9 @@ VideoStreamEncoder::VideoStreamEncoder( video_stream_adapter_( std::make_unique(&input_state_provider_, encoder_stats_observer)), + resource_adaptation_processor_( + std::make_unique( + video_stream_adapter_.get())), degradation_preference_manager_( std::make_unique( video_stream_adapter_.get())), @@ -669,8 +666,6 @@ VideoStreamEncoder::VideoStreamEncoder( !field_trial::IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")), qp_parsing_allowed_( !field_trial::IsEnabled("WebRTC-QpParsingKillSwitch")), - switch_encoder_on_init_failures_(!field_trial::IsDisabled( - kSwitchEncoderOnInitializationFailuresFieldTrial)), encoder_queue_(std::move(encoder_queue)) { TRACE_EVENT0("webrtc", "VideoStreamEncoder::VideoStreamEncoder"); RTC_DCHECK_RUN_ON(worker_queue_); @@ -680,13 +675,10 @@ VideoStreamEncoder::VideoStreamEncoder( frame_cadence_adapter_->Initialize(&cadence_callback_); stream_resource_manager_.Initialize(&encoder_queue_); - encoder_queue_.PostTask([this] { + rtc::Event initialize_processor_event; + encoder_queue_.PostTask([this, &initialize_processor_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); - - resource_adaptation_processor_ = - std::make_unique( - video_stream_adapter_.get()); - + resource_adaptation_processor_->SetTaskQueue(encoder_queue_.Get()); stream_resource_manager_.SetAdaptationProcessor( resource_adaptation_processor_.get(), video_stream_adapter_.get()); resource_adaptation_processor_->AddResourceLimitationsListener( @@ -700,7 +692,9 @@ VideoStreamEncoder::VideoStreamEncoder( for (auto* constraint : adaptation_constraints_) { video_stream_adapter_->AddAdaptationConstraint(constraint); } + initialize_processor_event.Set(); }); + initialize_processor_event.Wait(rtc::Event::kForever); } VideoStreamEncoder::~VideoStreamEncoder() { @@ -764,31 +758,22 @@ void VideoStreamEncoder::AddAdaptationResource( // of this 
MapResourceToReason() call. TRACE_EVENT_ASYNC_BEGIN0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); - encoder_queue_.PostTask([this, resource = std::move(resource)] { + rtc::Event map_resource_event; + encoder_queue_.PostTask([this, resource, &map_resource_event] { TRACE_EVENT_ASYNC_END0( "webrtc", "VideoStreamEncoder::AddAdaptationResource(latency)", this); RTC_DCHECK_RUN_ON(&encoder_queue_); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu); + map_resource_event.Set(); }); + map_resource_event.Wait(rtc::Event::kForever); } std::vector> VideoStreamEncoder::GetAdaptationResources() { RTC_DCHECK_RUN_ON(worker_queue_); - // In practice, this method is only called by tests to verify operations that - // run on the encoder queue. So rather than force PostTask() operations to - // be accompanied by an event and a `Wait()`, we'll use PostTask + Wait() - // here. - rtc::Event event; - std::vector> resources; - encoder_queue_.PostTask([&] { - RTC_DCHECK_RUN_ON(&encoder_queue_); - resources = resource_adaptation_processor_->GetResources(); - event.Set(); - }); - event.Wait(rtc::Event::kForever); - return resources; + return resource_adaptation_processor_->GetResources(); } void VideoStreamEncoder::SetSource( @@ -845,20 +830,8 @@ void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config, RTC_DCHECK(sink_); RTC_LOG(LS_INFO) << "ConfigureEncoder requested."; - // Set up the frame cadence adapter according to if we're going to do - // screencast. The final number of spatial layers is based on info - // in `send_codec_`, which is computed based on incoming frame - // dimensions which can only be determined later. - // - // Note: zero-hertz mode isn't enabled by this alone. Constraints also - // have to be set up with min_fps = 0 and max_fps > 0. - if (config.content_type == VideoEncoderConfig::ContentType::kScreen) { - frame_cadence_adapter_->SetZeroHertzModeEnabled( - FrameCadenceAdapterInterface::ZeroHertzModeParams{}); - } else { - frame_cadence_adapter_->SetZeroHertzModeEnabled(absl::nullopt); - } - + frame_cadence_adapter_->SetZeroHertzModeEnabled( + config.content_type == VideoEncoderConfig::ContentType::kScreen); pending_encoder_creation_ = (!encoder_ || encoder_config_.video_format != config.video_format || max_data_payload_length_ != max_data_payload_length); @@ -1154,6 +1127,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Encoder creation block is split in two since EncoderInfo needed to start // CPU adaptation with the correct settings should be polled after // encoder_->InitEncode(). + bool success = true; if (encoder_reset_required) { ReleaseEncoder(); const size_t max_data_payload_length = max_data_payload_length_ > 0 @@ -1168,6 +1142,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { << CodecTypeToPayloadString(send_codec_.codecType) << " (" << send_codec_.codecType << ")"; ReleaseEncoder(); + success = false; } else { encoder_initialized_ = true; encoder_->RegisterEncodeCompleteCallback(this); @@ -1186,7 +1161,7 @@ void VideoStreamEncoder::ReconfigureEncoder() { // Inform dependents of updated encoder settings. 
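AddAdaptationResource above goes back to capturing a stack-allocated rtc::Event by reference inside the posted lambda, which is only safe because the caller blocks on the event before returning, keeping the captured objects alive for the task's lifetime. A minimal standalone sketch of that guarantee using a plain thread and std::promise as stand-ins for the WebRTC queue and event types:

#include <future>
#include <iostream>
#include <thread>
#include <vector>

int main() {
  std::vector<int> resources;          // Stands in for additional_resources_.
  std::promise<void> map_resource_done;

  // The "encoder queue": a task that captures locals of this frame by reference.
  std::thread encoder_queue([&resources, &map_resource_done] {
    resources.push_back(42);           // AddResource(resource, reason);
    map_resource_done.set_value();     // map_resource_event.Set();
  });

  // Blocking here is what keeps `resources` and `map_resource_done` alive for
  // the task's whole lifetime; returning earlier would leave dangling captures.
  map_resource_done.get_future().wait();
  std::cout << "resources mapped: " << resources.size() << "\n";

  encoder_queue.join();
}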
OnEncoderSettingsChanged(); - if (encoder_initialized_) { + if (success) { RTC_LOG(LS_VERBOSE) << " max bitrate " << codec.maxBitrate << " start bitrate " << codec.startBitrate << " max frame rate " << codec.maxFramerate @@ -1272,49 +1247,6 @@ void VideoStreamEncoder::ReconfigureEncoder() { stream_resource_manager_.ConfigureQualityScaler(info); stream_resource_manager_.ConfigureBandwidthQualityScaler(info); - - if (!encoder_initialized_) { - RTC_LOG(LS_WARNING) << "Failed to initialize " - << CodecTypeToPayloadString(codec.codecType) - << " encoder." - << "switch_encoder_on_init_failures: " - << switch_encoder_on_init_failures_; - - if (switch_encoder_on_init_failures_) { - RequestEncoderSwitch(); - } - } -} - -void VideoStreamEncoder::RequestEncoderSwitch() { - bool is_encoder_switching_supported = - settings_.encoder_switch_request_callback != nullptr; - bool is_encoder_selector_available = encoder_selector_ != nullptr; - - RTC_LOG(LS_INFO) << "RequestEncoderSwitch." - << " is_encoder_selector_available: " - << is_encoder_selector_available - << " is_encoder_switching_supported: " - << is_encoder_switching_supported; - - if (!is_encoder_switching_supported) { - return; - } - - // If encoder selector is available, switch to the encoder it prefers. - // Otherwise try switching to VP8 (default WebRTC codec). - absl::optional preferred_fallback_encoder; - if (is_encoder_selector_available) { - preferred_fallback_encoder = encoder_selector_->OnEncoderBroken(); - } - - if (!preferred_fallback_encoder) { - preferred_fallback_encoder = - SdpVideoFormat(CodecTypeToPayloadString(kVideoCodecVP8)); - } - - settings_.encoder_switch_request_callback->RequestEncoderSwitch( - *preferred_fallback_encoder, /*allow_default_fallback=*/true); } void VideoStreamEncoder::OnEncoderSettingsChanged() { @@ -1327,11 +1259,6 @@ void VideoStreamEncoder::OnEncoderSettingsChanged() { bool is_screenshare = encoder_settings.encoder_config().content_type == VideoEncoderConfig::ContentType::kScreen; degradation_preference_manager_->SetIsScreenshare(is_screenshare); - if (is_screenshare) { - frame_cadence_adapter_->SetZeroHertzModeEnabled( - FrameCadenceAdapterInterface::ZeroHertzModeParams{ - send_codec_.numberOfSimulcastStreams}); - } } void VideoStreamEncoder::OnFrame(Timestamp post_time, @@ -1529,16 +1456,8 @@ void VideoStreamEncoder::SetEncoderRates( last_encoder_rate_settings_ = rate_settings; } - if (!encoder_) + if (!encoder_) { return; - - // Make the cadence adapter know if streams were disabled. - for (int spatial_index = 0; - spatial_index != send_codec_.numberOfSimulcastStreams; ++spatial_index) { - frame_cadence_adapter_->UpdateLayerStatus( - spatial_index, - /*enabled=*/rate_settings.rate_control.target_bitrate - .GetSpatialLayerSum(spatial_index) > 0); } // `bitrate_allocation` is 0 it means that the network is down or the send @@ -1547,8 +1466,9 @@ void VideoStreamEncoder::SetEncoderRates( // bitrate. // TODO(perkj): Make sure all known encoder implementations handle zero // target bitrate and remove this check. - if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) + if (rate_settings.rate_control.bitrate.get_sum_bps() == 0) { return; + } if (rate_control_changed) { encoder_->SetRates(rate_settings.rate_control); @@ -1713,7 +1633,7 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, // If the encoder fail we can't continue to encode frames. When this happens // the WebrtcVideoSender is notified and the whole VideoSendStream is // recreated. 
- if (encoder_failed_ || !encoder_initialized_) + if (encoder_failed_) return; // It's possible that EncodeVideoFrame can be called after we've completed @@ -1851,7 +1771,21 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " << encoder_config_.video_format.ToString(); - RequestEncoderSwitch(); + + if (settings_.encoder_switch_request_callback) { + if (encoder_selector_) { + if (auto encoder = encoder_selector_->OnEncoderBroken()) { + settings_.encoder_switch_request_callback->RequestEncoderSwitch( + *encoder); + } + } else { + encoder_failed_ = true; + settings_.encoder_switch_request_callback->RequestEncoderFallback(); + } + } else { + RTC_LOG(LS_ERROR) + << "Encoder failed but no encoder fallback callback is registered"; + } } else { RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " << encode_status; @@ -1865,13 +1799,6 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, } } -void VideoStreamEncoder::RequestRefreshFrame() { - worker_queue_->PostTask(ToQueuedTask(task_safety_, [this] { - RTC_DCHECK_RUN_ON(worker_queue_); - video_source_sink_controller_.RequestRefreshFrame(); - })); -} - void VideoStreamEncoder::SendKeyFrame() { if (!encoder_queue_.IsCurrent()) { encoder_queue_.PostTask([this] { SendKeyFrame(); }); @@ -1881,13 +1808,8 @@ void VideoStreamEncoder::SendKeyFrame() { TRACE_EVENT0("webrtc", "OnKeyFrameRequest"); RTC_DCHECK(!next_frame_types_.empty()); - if (frame_cadence_adapter_) - frame_cadence_adapter_->ProcessKeyFrameRequest(); - - if (!encoder_) { - RTC_DLOG(LS_INFO) << __func__ << " no encoder."; - return; // Shutting down, or not configured yet. - } + if (!encoder_) + return; // Shutting down. // TODO(webrtc:10615): Map keyframe request to spatial layer. std::fill(next_frame_types_.begin(), next_frame_types_.end(), @@ -1952,24 +1874,14 @@ EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage( RTC_CHECK(videocontenttypehelpers::SetSimulcastId( &image_copy.content_type_, static_cast(spatial_idx + 1))); - // Post a task because `send_codec_` requires `encoder_queue_` lock and we - // need to update on quality convergence. + // Currently internal quality scaler is used for VP9 instead of webrtc qp + // scaler (in no-svc case or if only a single spatial layer is encoded). + // It has to be explicitly detected and reported to adaptation metrics. + // Post a task because `send_codec_` requires `encoder_queue_` lock. unsigned int image_width = image_copy._encodedWidth; unsigned int image_height = image_copy._encodedHeight; - encoder_queue_.PostTask([this, codec_type, image_width, image_height, - spatial_idx, - at_target_quality = image_copy.IsAtTargetQuality()] { + encoder_queue_.PostTask([this, codec_type, image_width, image_height] { RTC_DCHECK_RUN_ON(&encoder_queue_); - - // Let the frame cadence adapter know about quality convergence. - if (frame_cadence_adapter_) - frame_cadence_adapter_->UpdateLayerQualityConvergence(spatial_idx, - at_target_quality); - - // Currently, the internal quality scaler is used for VP9 instead of the - // webrtc qp scaler (in the no-svc case or if only a single spatial layer is - // encoded). It has to be explicitly detected and reported to adaptation - // metrics. 
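The revert above restores the older encoder-failure path: if an encoder selector is available, ask it for a replacement format and request a switch; otherwise mark the encoder failed and request a generic fallback; and only log when no callback is registered at all. A standalone sketch of that decision tree with hypothetical callback and selector interfaces standing in for the WebRTC ones:

#include <functional>
#include <iostream>
#include <optional>
#include <string>

struct SdpFormat { std::string name; };  // Stand-in for SdpVideoFormat.

struct SwitchCallback {                  // Stand-in for the switch request callback.
  std::function<void(const SdpFormat&)> request_encoder_switch;
  std::function<void()> request_encoder_fallback;
};

struct EncoderSelector {                 // Stand-in for the encoder selector.
  std::function<std::optional<SdpFormat>()> on_encoder_broken;
};

// Returns true when the stream should mark its encoder as failed (the generic
// fallback path); mirrors the restored branch structure above.
bool HandleEncoderFailure(SwitchCallback* callback, EncoderSelector* selector) {
  if (!callback) {
    std::cerr << "Encoder failed but no encoder fallback callback is registered\n";
    return false;
  }
  if (selector) {
    if (auto format = selector->on_encoder_broken())
      callback->request_encoder_switch(*format);  // Prefer a concrete switch.
    return false;
  }
  callback->request_encoder_fallback();           // No selector: generic fallback.
  return true;                                    // encoder_failed_ = true above.
}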
if (codec_type == VideoCodecType::kVideoCodecVP9 && send_codec_.VP9()->automaticResizeOn) { unsigned int expected_width = send_codec_.width; @@ -2107,8 +2019,7 @@ void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate, if (!video_is_suspended && settings_.encoder_switch_request_callback && encoder_selector_) { if (auto encoder = encoder_selector_->OnAvailableBitrate(link_allocation)) { - settings_.encoder_switch_request_callback->RequestEncoderSwitch( - *encoder, /*allow_default_fallback=*/false); + settings_.encoder_switch_request_callback->RequestEncoderSwitch(*encoder); } } @@ -2362,11 +2273,14 @@ void VideoStreamEncoder::CheckForAnimatedContent( void VideoStreamEncoder::InjectAdaptationResource( rtc::scoped_refptr resource, VideoAdaptationReason reason) { - encoder_queue_.PostTask([this, resource = std::move(resource), reason] { + rtc::Event map_resource_event; + encoder_queue_.PostTask([this, resource, reason, &map_resource_event] { RTC_DCHECK_RUN_ON(&encoder_queue_); additional_resources_.push_back(resource); stream_resource_manager_.AddResource(resource, reason); + map_resource_event.Set(); }); + map_resource_event.Wait(rtc::Event::kForever); } void VideoStreamEncoder::InjectAdaptationConstraint( diff --git a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h index c667f049f..cd181fc6f 100644 --- a/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h +++ b/TMessagesProj/jni/voip/webrtc/video/video_stream_encoder.h @@ -82,9 +82,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, BitrateAllocationCallbackType allocation_cb_type); ~VideoStreamEncoder() override; - VideoStreamEncoder(const VideoStreamEncoder&) = delete; - VideoStreamEncoder& operator=(const VideoStreamEncoder&) = delete; - void AddAdaptationResource(rtc::scoped_refptr resource) override; std::vector> GetAdaptationResources() override; @@ -158,9 +155,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, void OnDiscardedFrame() override { video_stream_encoder_.OnDiscardedFrame(); } - void RequestRefreshFrame() override { - video_stream_encoder_.RequestRefreshFrame(); - } private: VideoStreamEncoder& video_stream_encoder_; @@ -205,7 +199,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int frames_scheduled_for_processing, const VideoFrame& video_frame); void OnDiscardedFrame(); - void RequestRefreshFrame(); void MaybeEncodeVideoFrame(const VideoFrame& frame, int64_t time_when_posted_in_ms); @@ -248,8 +241,6 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, int64_t time_when_posted_in_ms) RTC_RUN_ON(&encoder_queue_); - void RequestEncoderSwitch() RTC_RUN_ON(&encoder_queue_); - TaskQueueBase* const worker_queue_; const uint32_t number_of_cores_; @@ -397,13 +388,13 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, // Provides video stream input states: current resolution and frame rate. VideoStreamInputStateProvider input_state_provider_; - const std::unique_ptr video_stream_adapter_ + std::unique_ptr video_stream_adapter_ RTC_GUARDED_BY(&encoder_queue_); // Responsible for adapting input resolution or frame rate to ensure resources // (e.g. CPU or bandwidth) are not overused. Adding resources can occur on any // thread. 
std::unique_ptr - resource_adaptation_processor_ RTC_GUARDED_BY(&encoder_queue_); + resource_adaptation_processor_; std::unique_ptr degradation_preference_manager_ RTC_GUARDED_BY(&encoder_queue_); std::vector adaptation_constraints_ @@ -434,15 +425,14 @@ class VideoStreamEncoder : public VideoStreamEncoderInterface, QpParser qp_parser_; const bool qp_parsing_allowed_; - // Enables encoder switching on initialization failures. - bool switch_encoder_on_init_failures_; - // Public methods are proxied to the task queues. The queues must be destroyed // first to make sure no tasks run that use other members. rtc::TaskQueue encoder_queue_; // Used to cancel any potentially pending tasks to the worker thread. ScopedTaskSafety task_safety_; + + RTC_DISALLOW_COPY_AND_ASSIGN(VideoStreamEncoder); }; } // namespace webrtc diff --git a/TMessagesProj/src/main/assets/darkblue.attheme b/TMessagesProj/src/main/assets/darkblue.attheme index 0aca296d0..53855489c 100644 --- a/TMessagesProj/src/main/assets/darkblue.attheme +++ b/TMessagesProj/src/main/assets/darkblue.attheme @@ -432,7 +432,7 @@ windowBackgroundWhiteGrayText4=-931296359 chat_inTimeText=-645885536 dialogRadioBackground=-11245959 statisticChartRipple=748994002 -chat_BlurAlpha=-16777216 +chat_BlurAlpha=-520093696 chat_outReplyMessageText=-1 chat_recordedVoiceDot=-1221292 chat_messagePanelBackground=-14602949 diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java index b2d4edc64..264a3f411 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/AndroidUtilities.java @@ -1903,7 +1903,7 @@ public class AndroidUtilities { public static boolean isSmallScreen() { if (isSmallScreen == null) { - isSmallScreen = (Math.max(displaySize.x, displaySize.y) - statusBarHeight - navigationBarHeight) / density <= 610; + isSmallScreen = (Math.max(displaySize.x, displaySize.y) - statusBarHeight - navigationBarHeight) / density <= 650; } return isSmallScreen; } @@ -4166,4 +4166,13 @@ public class AndroidUtilities { } return null; } + + public static boolean isNumeric(String str) { + try { + Double.parseDouble(str); + return true; + } catch (NumberFormatException e) { + return false; + } + } } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java index 70551934d..817b5082e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java @@ -20,8 +20,8 @@ public class BuildVars { public static boolean USE_CLOUD_STRINGS = true; public static boolean CHECK_UPDATES = true; public static boolean NO_SCOPED_STORAGE = Build.VERSION.SDK_INT <= 29; - public static int BUILD_VERSION = 2587; - public static String BUILD_VERSION_STRING = "8.6.0"; + public static int BUILD_VERSION = 2594; + public static String BUILD_VERSION_STRING = "8.6.1"; public static int APP_ID = 4; public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java index cbb2bf5df..d356db13e 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/ImageLoader.java @@ -938,20 +938,7 @@ public class ImageLoader { } } 
if (firstFrameBitmap) { - Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); - Canvas canvas = new Canvas(bitmap); - canvas.scale(2f, 2f, w / 2f, h / 2f); - - BitmapDrawable bitmapDrawable = null; - lottieDrawable.setCurrentFrame(0, false, true); - if (lottieDrawable.hasBitmap()) { - lottieDrawable.draw(canvas); - bitmapDrawable = new BitmapDrawable(bitmap); - } - AndroidUtilities.runOnUIThread(() -> { - lottieDrawable.recycle(); - }); - onPostExecute(bitmapDrawable); + loadFirstFrame(lottieDrawable, h, w); } else { lottieDrawable.setAutoRepeat(autoRepeat); onPostExecute(lottieDrawable); @@ -1468,6 +1455,27 @@ public class ImageLoader { } } + private void loadFirstFrame(RLottieDrawable lottieDrawable, int w, int h) { + Bitmap bitmap = Bitmap.createBitmap(w, h, Bitmap.Config.ARGB_8888); + Canvas canvas = new Canvas(bitmap); + canvas.scale(2f, 2f, w / 2f, h / 2f); + + AndroidUtilities.runOnUIThread(() -> { + lottieDrawable.setOnFrameReadyRunnable(() -> { + lottieDrawable.setOnFrameReadyRunnable(null); + BitmapDrawable bitmapDrawable = null; + if (lottieDrawable.getBackgroundBitmap() != null || lottieDrawable.getRenderingBitmap() != null) { + Bitmap currentBitmap = lottieDrawable.getBackgroundBitmap() != null ? lottieDrawable.getBackgroundBitmap() : lottieDrawable.getRenderingBitmap(); + canvas.drawBitmap(currentBitmap, 0, 0, null); + bitmapDrawable = new BitmapDrawable(bitmap); + } + onPostExecute(bitmapDrawable); + lottieDrawable.recycle(); + }); + lottieDrawable.setCurrentFrame(lottieDrawable.getFramesCount() - 1, false, true); + }); + } + private void onPostExecute(final Drawable drawable) { AndroidUtilities.runOnUIThread(() -> { Drawable toSet = null; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java index 84c7955e2..c9f6c5213 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/LocaleController.java @@ -40,7 +40,6 @@ import java.util.Currency; import java.util.Date; import java.util.HashMap; import java.util.Locale; -import java.util.Map; import java.util.TimeZone; public class LocaleController { @@ -876,10 +875,12 @@ public class LocaleController { saveOtherLanguages(); } } + boolean isLoadingRemote = false; if ((localeInfo.isRemote() || localeInfo.isUnofficial()) && (force || !pathToFile.exists() || hasBase && !pathToBaseFile.exists())) { if (BuildVars.LOGS_ENABLED) { FileLog.d("reload locale because one of file doesn't exist" + pathToFile + " " + pathToBaseFile); } + isLoadingRemote = true; if (init) { AndroidUtilities.runOnUIThread(() -> applyRemoteLanguage(localeInfo, null, true, currentAccount)); } else { @@ -946,6 +947,9 @@ public class LocaleController { } reloadLastFile = false; } + if (!isLoadingRemote) { + NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.reloadInterface); + } } catch (Exception e) { FileLog.e(e); changingConfiguration = false; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java index dbdea518d..756c74f81 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java @@ -331,6 +331,7 @@ public class MessagesController extends BaseController implements NotificationCe public HashMap diceSuccess = new 
HashMap<>(); public HashMap emojiSounds = new HashMap<>(); public HashMap> emojiInteractions = new HashMap<>(); + public boolean remoteConfigLoaded; private SharedPreferences notificationsPreferences; private SharedPreferences mainPreferences; @@ -781,6 +782,7 @@ public class MessagesController extends BaseController implements NotificationCe } enableJoined = notificationsPreferences.getBoolean("EnableContactJoined", true); + remoteConfigLoaded = mainPreferences.getBoolean("remoteConfigLoaded", false); secretWebpagePreview = mainPreferences.getInt("secretWebpage2", 2); maxGroupCount = mainPreferences.getInt("maxGroupCount", 200); maxMegagroupCount = mainPreferences.getInt("maxMegagroupCount", 10000); @@ -1943,6 +1945,7 @@ public class MessagesController extends BaseController implements NotificationCe AndroidUtilities.runOnUIThread(() -> { getDownloadController().loadAutoDownloadConfig(false); loadAppConfig(); + remoteConfigLoaded = true; maxMegagroupCount = config.megagroup_size_max; maxGroupCount = config.chat_size_max; maxEditTime = config.edit_time_limit; @@ -1983,9 +1986,12 @@ public class MessagesController extends BaseController implements NotificationCe blockedCountry = config.blocked_mode; dcDomainName = config.dc_txt_domain_name; webFileDatacenterId = config.webfile_dc_id; - if (config.suggested_lang_code != null && (suggestedLangCode == null || !suggestedLangCode.equals(config.suggested_lang_code))) { + if (config.suggested_lang_code != null) { + boolean loadRemote = suggestedLangCode == null || !suggestedLangCode.equals(config.suggested_lang_code); suggestedLangCode = config.suggested_lang_code; - LocaleController.getInstance().loadRemoteLanguages(currentAccount); + if (loadRemote) { + LocaleController.getInstance().loadRemoteLanguages(currentAccount); + } } Theme.loadRemoteThemes(currentAccount, false); Theme.checkCurrentRemoteTheme(false); @@ -2034,6 +2040,7 @@ public class MessagesController extends BaseController implements NotificationCe } SharedPreferences.Editor editor = mainPreferences.edit(); + editor.putBoolean("remoteConfigLoaded", remoteConfigLoaded); editor.putInt("maxGroupCount", maxGroupCount); editor.putInt("maxMegagroupCount", maxMegagroupCount); editor.putInt("maxEditTime", maxEditTime); @@ -4855,6 +4862,17 @@ public class MessagesController extends BaseController implements NotificationCe return dialogs; } + public int getAllFoldersDialogsCount() { + int count = 0; + for (int i = 0; i < dialogsByFolder.size(); i++) { + List dialogs = dialogsByFolder.get(dialogsByFolder.keyAt(i)); + if (dialogs != null) { + count += dialogs.size(); + } + } + return count; + } + public int getTotalDialogsCount() { int count = 0; ArrayList dialogs = dialogsByFolder.get(0); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java index 3a5fff9ed..3280f8cf0 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/NativeLoader.java @@ -22,7 +22,7 @@ import java.util.zip.ZipFile; public class NativeLoader { - private final static int LIB_VERSION = 41; + private final static int LIB_VERSION = 42; private final static String LIB_NAME = "tmessages." 
+ LIB_VERSION; private final static String LIB_SO_NAME = "lib" + LIB_NAME + ".so"; private final static String LOCALE_LIB_SO_NAME = "lib" + LIB_NAME + "loc.so"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java index f70aed750..a578de919 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/SendMessagesHelper.java @@ -2663,7 +2663,7 @@ public class SendMessagesHelper extends BaseController implements NotificationCe return; } TLRPC.TL_messages_sendReaction req = new TLRPC.TL_messages_sendReaction(); - if (messageObject.messageOwner.isThreadMessage) { + if (messageObject.messageOwner.isThreadMessage && messageObject.messageOwner.fwd_from != null) { req.peer = getMessagesController().getInputPeer(messageObject.getFromChatId()); req.msg_id = messageObject.messageOwner.fwd_from.saved_from_msg_id; } else { diff --git a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java index 89642b8ae..49bb8bc2b 100644 --- a/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java +++ b/TMessagesProj/src/main/java/org/telegram/tgnet/TLRPC.java @@ -22223,6 +22223,24 @@ public class TLRPC { } } + public static class TL_inputReportReasonIllegalDrugs extends ReportReason { + public static int constructor = 0xa8eb2be; + + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + + public static class TL_inputReportReasonPersonalDetails extends ReportReason { + public static int constructor = 0x9ec7863d; + + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + } + } + public static class TL_messages_archivedStickers extends TLObject { public static int constructor = 0x4fcba9c8; @@ -45747,6 +45765,21 @@ public class TLRPC { } } + public static class TL_contacts_resolvePhone extends TLObject { + public static int constructor = 0x8af94344; + + public String phone; + + public TLObject deserializeResponse(AbstractSerializedData stream, int constructor, boolean exception) { + return TL_contacts_resolvedPeer.TLdeserialize(stream, constructor, exception); + } + + public void serializeToStream(AbstractSerializedData stream) { + stream.writeInt32(constructor); + stream.writeString(phone); + } + } + public static class TL_messages_getMessages extends TLObject { public static int constructor = 0x4222fa74; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java index ebf9e291f..52bca41d2 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ActionBar/BaseFragment.java @@ -767,6 +767,9 @@ public abstract class BaseFragment { } public boolean isLightStatusBar() { + if (hasForceLightStatusBar() && !Theme.getCurrentTheme().isDark()) { + return true; + } Theme.ResourcesProvider resourcesProvider = getResourceProvider(); int color; if (resourcesProvider != null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java index 7219a9a8c..b59690039 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/ActionIntroActivity.java @@ -765,6 +765,11 @@ public class ActionIntroActivity extends BaseFragment implements LocationControl return fragmentView; } + @Override + public boolean hasForceLightStatusBar() { + return true; + } + @Override public void onLocationAddressAvailable(String address, String displayAddress, Location location) { if (subtitleTextView == null) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java index 395a5d84e..d1095b389 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Adapters/DialogsAdapter.java @@ -194,7 +194,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { boolean hasContacts = false; if (hasHints) { count += 2 + messagesController.hintDialogs.size(); - } else if (dialogsType == 0 && dialogsCount <= 10 && folderId == 0 && messagesController.isDialogsEndReached(folderId)) { + } else if (dialogsType == 0 && folderId == 0 && messagesController.isDialogsEndReached(folderId)) { if (ContactsController.getInstance(currentAccount).contacts.isEmpty() && !ContactsController.getInstance(currentAccount).doneLoadingContacts && !forceUpdatingContacts) { onlineContacts = null; if (BuildVars.LOGS_ENABLED) { @@ -203,7 +203,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { return (currentCount = 0); } - if (ContactsController.getInstance(currentAccount).doneLoadingContacts && !ContactsController.getInstance(currentAccount).contacts.isEmpty()) { + if (messagesController.getAllFoldersDialogsCount() <= 10 && ContactsController.getInstance(currentAccount).doneLoadingContacts && !ContactsController.getInstance(currentAccount).contacts.isEmpty()) { if (onlineContacts == null || prevDialogsCount != dialogsCount || prevContactsCount != ContactsController.getInstance(currentAccount).contacts.size()) { onlineContacts = new ArrayList<>(ContactsController.getInstance(currentAccount).contacts); prevContactsCount = onlineContacts.size(); @@ -240,7 +240,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { } } } - if (folderId == 0 && !hasContacts && forceUpdatingContacts) { + if (folderId == 0 && !hasContacts && dialogsCount == 0 && forceUpdatingContacts) { count += 3; } if (folderId == 0 && onlineContacts != null) { @@ -627,7 +627,7 @@ public class DialogsAdapter extends RecyclerListView.SelectionAdapter { cell.setText(LocaleController.getString("ImportHeaderContacts", R.string.ImportHeaderContacts)); } } else { - cell.setText(LocaleController.getString(forceUpdatingContacts ? R.string.ConnectingYourContacts : R.string.YourContacts)); + cell.setText(LocaleController.getString(dialogsCount == 0 && forceUpdatingContacts ? 
R.string.ConnectingYourContacts : R.string.YourContacts)); } break; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java index cd4379d96..48cd3f1a8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java @@ -2464,14 +2464,18 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not avatarContainer.setOccupyStatusBar(false); } if (reportType >= 0) { - if (reportType == 0) { + if (reportType == AlertsCreator.REPORT_TYPE_SPAM) { actionBar.setTitle(LocaleController.getString("ReportChatSpam", R.string.ReportChatSpam)); - } else if (reportType == 2) { + } else if (reportType == AlertsCreator.REPORT_TYPE_VIOLENCE) { actionBar.setTitle(LocaleController.getString("ReportChatViolence", R.string.ReportChatViolence)); - } else if (reportType == 3) { + } else if (reportType == AlertsCreator.REPORT_TYPE_CHILD_ABUSE) { actionBar.setTitle(LocaleController.getString("ReportChatChild", R.string.ReportChatChild)); - } else if (reportType == 4) { + } else if (reportType == AlertsCreator.REPORT_TYPE_PORNOGRAPHY) { actionBar.setTitle(LocaleController.getString("ReportChatPornography", R.string.ReportChatPornography)); + } else if (reportType == AlertsCreator.REPORT_TYPE_ILLEGAL_DRUGS) { + actionBar.setTitle(LocaleController.getString("ReportChatIllegalDrugs", R.string.ReportChatIllegalDrugs)); + } else if (reportType == AlertsCreator.REPORT_TYPE_PERSONAL_DETAILS) { + actionBar.setTitle(LocaleController.getString("ReportChatPersonalDetails", R.string.ReportChatPersonalDetails)); } actionBar.setSubtitle(LocaleController.getString("ReportSelectMessages", R.string.ReportSelectMessages)); } else if (startLoadFromDate != 0) { @@ -5522,7 +5526,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not if (!foundTopView) { scrolled = super.scrollVerticallyBy(dy, recycler, state); } - if (dy > 0 && scrolled == 0 && ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING && !chatListView.isFastScrollAnimationRunning() && !chatListView.isMultiselect()) { + if (dy > 0 && scrolled == 0 && ChatObject.isChannel(currentChat) && !currentChat.megagroup && chatListView.getScrollState() == RecyclerView.SCROLL_STATE_DRAGGING && !chatListView.isFastScrollAnimationRunning() && !chatListView.isMultiselect() && reportType < 0) { if (pullingDownOffset == 0 && pullingDownDrawable != null) { pullingDownDrawable.updateDialog(); } @@ -27752,4 +27756,19 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } return super.hideKeyboardOnShow(); } + + @Override + public boolean isLightStatusBar() { + if (reportType >= 0) { + Theme.ResourcesProvider resourcesProvider = getResourceProvider(); + int color; + if (resourcesProvider != null) { + color = resourcesProvider.getColorOrDefault(Theme.key_actionBarActionModeDefault); + } else { + color = Theme.getColor(Theme.key_actionBarActionModeDefault, null, true); + } + return ColorUtils.calculateLuminance(color) > 0.7f; + } + return super.isLightStatusBar(); + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java index dcb48e2cc..0e9bd8f5a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java +++ 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/AlertsCreator.java @@ -42,6 +42,7 @@ import android.text.TextUtils; import android.text.TextWatcher; import android.text.style.URLSpan; import android.util.Base64; +import android.util.Log; import android.util.SparseArray; import android.util.TypedValue; import android.view.Gravity; @@ -2990,17 +2991,21 @@ public class AlertsCreator { request.peer = peer; request.id.addAll(messages); request.message = message; - if (type == 0) { + if (type == AlertsCreator.REPORT_TYPE_SPAM) { request.reason = new TLRPC.TL_inputReportReasonSpam(); - } else if (type == 1) { + } else if (type == AlertsCreator.REPORT_TYPE_FAKE_ACCOUNT) { request.reason = new TLRPC.TL_inputReportReasonFake(); - } else if (type == 2) { + } else if (type == AlertsCreator.REPORT_TYPE_VIOLENCE) { request.reason = new TLRPC.TL_inputReportReasonViolence(); - } else if (type == 3) { + } else if (type == AlertsCreator.REPORT_TYPE_CHILD_ABUSE) { request.reason = new TLRPC.TL_inputReportReasonChildAbuse(); - } else if (type == 4) { + } else if (type == AlertsCreator.REPORT_TYPE_PORNOGRAPHY) { request.reason = new TLRPC.TL_inputReportReasonPornography(); - } else if (type == 5) { + } else if (type == AlertsCreator.REPORT_TYPE_ILLEGAL_DRUGS) { + request.reason = new TLRPC.TL_inputReportReasonIllegalDrugs(); + } else if (type == AlertsCreator.REPORT_TYPE_PERSONAL_DETAILS) { + request.reason = new TLRPC.TL_inputReportReasonPersonalDetails(); + } else if (type == AlertsCreator.REPORT_TYPE_OTHER) { request.reason = new TLRPC.TL_inputReportReasonOther(); } ConnectionsManager.getInstance(UserConfig.selectedAccount).sendRequest(request, (response, error) -> { @@ -3012,6 +3017,15 @@ public class AlertsCreator { createReportAlert(context, dialog_id, messageId, parentFragment, null, hideDim); } + public final static int REPORT_TYPE_SPAM = 0; + public final static int REPORT_TYPE_VIOLENCE = 1; + public final static int REPORT_TYPE_CHILD_ABUSE = 2; + public final static int REPORT_TYPE_ILLEGAL_DRUGS = 3; + public final static int REPORT_TYPE_PERSONAL_DETAILS = 4; + public final static int REPORT_TYPE_PORNOGRAPHY = 5; + public final static int REPORT_TYPE_FAKE_ACCOUNT = 6; + public final static int REPORT_TYPE_OTHER = 100; + public static void createReportAlert(final Context context, final long dialog_id, final int messageId, final BaseFragment parentFragment, Theme.ResourcesProvider resourcesProvider, Runnable hideDim) { if (context == null || parentFragment == null) { return; @@ -3027,11 +3041,15 @@ public class AlertsCreator { builder.setTitle(LocaleController.getString("ReportChat", R.string.ReportChat), true); CharSequence[] items; int[] icons; + int[] types; if (messageId != 0) { + items = new CharSequence[]{ LocaleController.getString("ReportChatSpam", R.string.ReportChatSpam), LocaleController.getString("ReportChatViolence", R.string.ReportChatViolence), LocaleController.getString("ReportChatChild", R.string.ReportChatChild), + LocaleController.getString("ReportChatIllegalDrugs", R.string.ReportChatIllegalDrugs), + LocaleController.getString("ReportChatPersonalDetails", R.string.ReportChatPersonalDetails), LocaleController.getString("ReportChatPornography", R.string.ReportChatPornography), LocaleController.getString("ReportChatOther", R.string.ReportChatOther) }; @@ -3039,15 +3057,28 @@ public class AlertsCreator { R.drawable.msg_report_spam, R.drawable.msg_report_violence, R.drawable.msg_report_abuse, + R.drawable.msg_report_drugs, + R.drawable.msg_report_personal, 
R.drawable.msg_report_xxx, R.drawable.msg_report_other }; + types = new int[] { + REPORT_TYPE_SPAM, + REPORT_TYPE_VIOLENCE, + REPORT_TYPE_CHILD_ABUSE, + REPORT_TYPE_ILLEGAL_DRUGS, + REPORT_TYPE_PERSONAL_DETAILS, + REPORT_TYPE_PORNOGRAPHY, + REPORT_TYPE_OTHER + }; } else { items = new CharSequence[]{ LocaleController.getString("ReportChatSpam", R.string.ReportChatSpam), LocaleController.getString("ReportChatFakeAccount", R.string.ReportChatFakeAccount), LocaleController.getString("ReportChatViolence", R.string.ReportChatViolence), LocaleController.getString("ReportChatChild", R.string.ReportChatChild), + LocaleController.getString("ReportChatIllegalDrugs", R.string.ReportChatIllegalDrugs), + LocaleController.getString("ReportChatPersonalDetails", R.string.ReportChatPersonalDetails), LocaleController.getString("ReportChatPornography", R.string.ReportChatPornography), LocaleController.getString("ReportChatOther", R.string.ReportChatOther) }; @@ -3056,19 +3087,32 @@ public class AlertsCreator { R.drawable.msg_report_fake, R.drawable.msg_report_violence, R.drawable.msg_report_abuse, + R.drawable.msg_report_drugs, + R.drawable.msg_report_personal, R.drawable.msg_report_xxx, R.drawable.msg_report_other }; + types = new int[] { + REPORT_TYPE_SPAM, + REPORT_TYPE_FAKE_ACCOUNT, + REPORT_TYPE_VIOLENCE, + REPORT_TYPE_CHILD_ABUSE, + REPORT_TYPE_ILLEGAL_DRUGS, + REPORT_TYPE_PERSONAL_DETAILS, + REPORT_TYPE_PORNOGRAPHY, + REPORT_TYPE_OTHER + }; } builder.setItems(items, icons, (dialogInterface, i) -> { - if (messageId == 0 && (i == 0 || i == 2 || i == 3 || i == 4) && parentFragment instanceof ChatActivity) { - ((ChatActivity) parentFragment).openReportChat(i); + int type = types[i]; + if (messageId == 0 && (type == REPORT_TYPE_SPAM || type == REPORT_TYPE_VIOLENCE || type == REPORT_TYPE_CHILD_ABUSE || type == REPORT_TYPE_PORNOGRAPHY || type == REPORT_TYPE_ILLEGAL_DRUGS || type == REPORT_TYPE_PERSONAL_DETAILS) && parentFragment instanceof ChatActivity) { + ((ChatActivity) parentFragment).openReportChat(type); return; - } else if (messageId == 0 && (i == 5 || i == 1) || messageId != 0 && i == 4) { + } else if (messageId == 0 && (type == REPORT_TYPE_OTHER || type == REPORT_TYPE_FAKE_ACCOUNT) || messageId != 0 && type == REPORT_TYPE_OTHER) { if (parentFragment instanceof ChatActivity) { AndroidUtilities.requestAdjustNothing(parentFragment.getParentActivity(), parentFragment.getClassGuid()); } - parentFragment.showDialog(new ReportAlert(context, i == 4 ? 
5 : i) { + parentFragment.showDialog(new ReportAlert(context, type) { @Override public void dismissInternal() { @@ -3100,30 +3144,38 @@ public class AlertsCreator { request.peer = peer; request.id.add(messageId); request.message = ""; - if (i == 0) { + if (type == REPORT_TYPE_SPAM) { request.reason = new TLRPC.TL_inputReportReasonSpam(); - } else if (i == 1) { + } else if (type == REPORT_TYPE_VIOLENCE) { request.reason = new TLRPC.TL_inputReportReasonViolence(); - } else if (i == 2) { + } else if (type == REPORT_TYPE_CHILD_ABUSE) { request.reason = new TLRPC.TL_inputReportReasonChildAbuse(); - } else if (i == 3) { + } else if (type == REPORT_TYPE_PORNOGRAPHY) { request.reason = new TLRPC.TL_inputReportReasonPornography(); + } else if (type == REPORT_TYPE_ILLEGAL_DRUGS) { + request.reason = new TLRPC.TL_inputReportReasonIllegalDrugs(); + } else if (type == REPORT_TYPE_PERSONAL_DETAILS) { + request.reason = new TLRPC.TL_inputReportReasonPersonalDetails(); } req = request; } else { TLRPC.TL_account_reportPeer request = new TLRPC.TL_account_reportPeer(); request.peer = peer; request.message = ""; - if (i == 0) { + if (type == REPORT_TYPE_SPAM) { request.reason = new TLRPC.TL_inputReportReasonSpam(); - } else if (i == 1) { + } else if (type == REPORT_TYPE_FAKE_ACCOUNT) { request.reason = new TLRPC.TL_inputReportReasonFake(); - } else if (i == 2) { + } else if (type == REPORT_TYPE_VIOLENCE) { request.reason = new TLRPC.TL_inputReportReasonViolence(); - } else if (i == 3) { + } else if (type == REPORT_TYPE_CHILD_ABUSE) { request.reason = new TLRPC.TL_inputReportReasonChildAbuse(); - } else if (i == 4) { + } else if (type == REPORT_TYPE_PORNOGRAPHY) { request.reason = new TLRPC.TL_inputReportReasonPornography(); + } else if (type == REPORT_TYPE_ILLEGAL_DRUGS) { + request.reason = new TLRPC.TL_inputReportReasonIllegalDrugs(); + } else if (type == REPORT_TYPE_PERSONAL_DETAILS) { + request.reason = new TLRPC.TL_inputReportReasonPersonalDetails(); } req = request; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java index 6fa5fc20e..8383bc2ce 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/FragmentContextView.java @@ -663,7 +663,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent if (call == null) { return; } - VoIPHelper.startCall(chatActivity.getMessagesController().getChat(call.chatId), null, null, false, fragment.getParentActivity(), fragment, fragment.getAccountInstance()); + VoIPHelper.startCall(chatActivity.getMessagesController().getChat(call.chatId), null, null, false, call.call != null && !call.call.rtmp_stream, fragment.getParentActivity(), fragment, fragment.getAccountInstance()); } else if (currentStyle == 5) { SendMessagesHelper.ImportingHistory importingHistory = parentFragment.getSendMessagesHelper().getImportingHistory(((ChatActivity) parentFragment).getDialogId()); if (importingHistory == null) { @@ -913,7 +913,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent boolean isRtmpStream = false; if (fragment instanceof ChatActivity) { ChatActivity chatActivity = (ChatActivity) fragment; - isRtmpStream = chatActivity.getGroupCall().call != null && chatActivity.getGroupCall().call.rtmp_stream; + isRtmpStream = chatActivity.getGroupCall() != null && chatActivity.getGroupCall().call != null && 
chatActivity.getGroupCall().call.rtmp_stream; } avatars.setVisibility(!isRtmpStream ? VISIBLE : GONE); @@ -933,7 +933,13 @@ public class FragmentContextView extends FrameLayout implements NotificationCent selector.setBackground(null); updateCallTitle(); - avatars.setVisibility(!VoIPService.hasRtmpStream() ? VISIBLE : GONE); + boolean isRtmpStream = false; + if (fragment instanceof ChatActivity) { + ChatActivity chatActivity = (ChatActivity) fragment; + isRtmpStream = chatActivity.getGroupCall() != null && chatActivity.getGroupCall().call != null && chatActivity.getGroupCall().call.rtmp_stream; + } + + avatars.setVisibility(!isRtmpStream ? VISIBLE : GONE); if (style == 3) { if (VoIPService.getSharedInstance() != null) { VoIPService.getSharedInstance().registerStateListener(this); @@ -945,7 +951,7 @@ public class FragmentContextView extends FrameLayout implements NotificationCent titleTextView.setTranslationX(0); subtitleTextView.setTranslationX(0); } - muteButton.setVisibility(!VoIPService.hasRtmpStream() ? VISIBLE : GONE); + muteButton.setVisibility(!isRtmpStream ? VISIBLE : GONE); isMuted = VoIPService.getSharedInstance() != null && VoIPService.getSharedInstance().isMicMute(); muteDrawable.setCustomEndFrame(isMuted ? 15 : 29); muteDrawable.setCurrentFrame(muteDrawable.getCustomEndFrame() - 1, false, true); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java index 5ea456877..67771c9f8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/RLottieDrawable.java @@ -122,12 +122,16 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { private static ThreadPoolExecutor lottieCacheGenerateQueue; private Runnable onAnimationEndListener; + private Runnable onFrameReadyRunnable; protected Runnable uiRunnableNoFrame = new Runnable() { @Override public void run() { loadFrameTask = null; decodeFrameFinishedInternal(); + if (onFrameReadyRunnable != null) { + onFrameReadyRunnable.run(); + } } }; @@ -145,6 +149,9 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { singleFrameDecoded = true; invalidateInternal(); decodeFrameFinishedInternal(); + if (onFrameReadyRunnable != null) { + onFrameReadyRunnable.run(); + } } }; @@ -1087,4 +1094,8 @@ public class RLottieDrawable extends BitmapDrawable implements Animatable { public boolean isGeneratingCache() { return cacheGenerateTask != null; } + + public void setOnFrameReadyRunnable(Runnable onFrameReadyRunnable) { + this.onFrameReadyRunnable = onFrameReadyRunnable; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java index e46d227d7..8ce6825ab 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/Reactions/ReactionsLayoutInBubble.java @@ -437,8 +437,9 @@ public class ReactionsLayoutInBubble { TLRPC.TL_availableReaction r = MediaDataController.getInstance(currentAccount).getReactionsMap().get(reaction); if (r != null) { SvgHelper.SvgDrawable svgThumb = DocumentObject.getSvgThumb(r.static_icon, Theme.key_windowBackgroundGray, 1.0f); - imageReceiver.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_firstframe", svgThumb, "webp", r, 
1); - imageReceiver.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_firstframe", svgThumb, "webp", r, 1); + imageReceiver.setImage(ImageLocation.getForDocument(r.static_icon), "40_40", svgThumb, "webp", r, 1); + //TODO uncomment when fixed ImageLoader + //imageReceiver.setImage(ImageLocation.getForDocument(r.center_icon), "40_40_firstframe", svgThumb, "webp", r, 1); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java index 6c8027424..2229a669a 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/ReportAlert.java @@ -87,17 +87,17 @@ public class ReportAlert extends BottomSheet { percentTextView.setTypeface(AndroidUtilities.getTypeface("fonts/rmedium.ttf")); percentTextView.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 24); percentTextView.setTextColor(Theme.getColor(Theme.key_dialogTextBlack)); - if (type == 0) { + if (type == AlertsCreator.REPORT_TYPE_SPAM) { percentTextView.setText(LocaleController.getString("ReportTitleSpam", R.string.ReportTitleSpam)); - } else if (type == 1) { + } else if (type == AlertsCreator.REPORT_TYPE_FAKE_ACCOUNT) { percentTextView.setText(LocaleController.getString("ReportTitleFake", R.string.ReportTitleFake)); - } else if (type == 2) { + } else if (type == AlertsCreator.REPORT_TYPE_VIOLENCE) { percentTextView.setText(LocaleController.getString("ReportTitleViolence", R.string.ReportTitleViolence)); - } else if (type == 3) { + } else if (type == AlertsCreator.REPORT_TYPE_CHILD_ABUSE) { percentTextView.setText(LocaleController.getString("ReportTitleChild", R.string.ReportTitleChild)); - } else if (type == 4) { + } else if (type == AlertsCreator.REPORT_TYPE_PORNOGRAPHY) { percentTextView.setText(LocaleController.getString("ReportTitlePornography", R.string.ReportTitlePornography)); - } else if (type == 5) { + } else if (type == AlertsCreator.REPORT_TYPE_OTHER) { percentTextView.setText(LocaleController.getString("ReportChat", R.string.ReportChat)); } frameLayout.addView(percentTextView, LayoutHelper.createFrame(LayoutHelper.WRAP_CONTENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 17, 197, 17, 0)); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java index ae11b0dde..5dc969048 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/SizeNotifierFrameLayout.java @@ -562,6 +562,13 @@ public class SizeNotifierFrameLayout extends FrameLayout { } AndroidUtilities.runOnUIThread(() -> { + if (!blurIsRunning) { + if (finalBitmap != null) { + finalBitmap.recycle(); + } + blurGeneratingTuskIsRunning = false; + return; + } prevBitmap = currentBitmap; BlurBitmap oldBitmap = currentBitmap; blurPaintTop2.setShader(blurPaintTop.getShader()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java index 16fc212f8..8e1a92164 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallMiniTextureView.java @@ -475,6 +475,9 @@ public class GroupCallMiniTextureView extends 
FrameLayout implements GroupCallSt } } + if (renderer.rotatedFrameHeight != 0 && renderer.rotatedFrameWidth != 0 && participant != null) { + participant.setAspectRatio(renderer.rotatedFrameWidth, renderer.rotatedFrameHeight, call); + } } }; textureView.renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java index c90922380..6aa2c6758 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/GroupCallRenderersContainer.java @@ -476,7 +476,7 @@ public class GroupCallRenderersContainer extends FrameLayout { canvas.save(); } boolean swipeToBack = swipeToBackGesture || swipeToBackAnimator != null; - if (swipeToBack) { + if (swipeToBack && !isRtmpStream()) { canvas.clipRect(0, 0, getMeasuredWidth(), getMeasuredHeight() - ((isLandscapeMode || GroupCallActivity.isTabletMode) ? 0 : AndroidUtilities.dp(90))); } canvas.translate(fullscreenTextureView.getX(), fullscreenTextureView.getY()); diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java index 836e8cf78..ab5480b2f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPHelper.java @@ -108,16 +108,20 @@ public class VoIPHelper { permissions.add(Manifest.permission.CAMERA); } if (permissions.isEmpty()) { - initiateCall(user, null, null, videoCall, canVideoCall, false, activity, null, accountInstance); + initiateCall(user, null, null, videoCall, canVideoCall, false, null, activity, null, accountInstance); } else { activity.requestPermissions(permissions.toArray(new String[0]), videoCall ? 
102 : 101); } } else { - initiateCall(user, null, null, videoCall, canVideoCall, false, activity, null, accountInstance); + initiateCall(user, null, null, videoCall, canVideoCall, false, null, activity, null, accountInstance); } } public static void startCall(TLRPC.Chat chat, TLRPC.InputPeer peer, String hash, boolean createCall, Activity activity, BaseFragment fragment, AccountInstance accountInstance) { + startCall(chat, peer, hash, createCall, null, activity, fragment, accountInstance); + } + + public static void startCall(TLRPC.Chat chat, TLRPC.InputPeer peer, String hash, boolean createCall, Boolean checkJoiner, Activity activity, BaseFragment fragment, AccountInstance accountInstance) { if (activity == null) { return; } @@ -147,16 +151,16 @@ public class VoIPHelper { permissions.add(Manifest.permission.RECORD_AUDIO); } if (permissions.isEmpty()) { - initiateCall(null, chat, hash, false, false, createCall, activity, fragment, accountInstance); + initiateCall(null, chat, hash, false, false, createCall, checkJoiner, activity, fragment, accountInstance); } else { activity.requestPermissions(permissions.toArray(new String[0]), 103); } } else { - initiateCall(null, chat, hash, false, false, createCall, activity, fragment, accountInstance); + initiateCall(null, chat, hash, false, false, createCall, checkJoiner, activity, fragment, accountInstance); } } - private static void initiateCall(TLRPC.User user, TLRPC.Chat chat, String hash, boolean videoCall, boolean canVideoCall, boolean createCall, final Activity activity, BaseFragment fragment, AccountInstance accountInstance) { + private static void initiateCall(TLRPC.User user, TLRPC.Chat chat, String hash, boolean videoCall, boolean canVideoCall, boolean createCall, Boolean checkJoiner, final Activity activity, BaseFragment fragment, AccountInstance accountInstance) { if (activity == null || user == null && chat == null) { return; } @@ -222,7 +226,7 @@ public class VoIPHelper { } } } else if (VoIPService.callIShouldHavePutIntoIntent == null) { - doInitiateCall(user, chat, hash, null, false, videoCall, canVideoCall, createCall, activity, fragment, accountInstance, true, true); + doInitiateCall(user, chat, hash, null, false, videoCall, canVideoCall, createCall, activity, fragment, accountInstance, checkJoiner != null ? checkJoiner : true, true); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java index 81a817820..f8e5c961f 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/DialogsActivity.java @@ -2656,7 +2656,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. } } }; -// viewPage.listView.setItemAnimator(viewPage.dialogsItemAnimator); + viewPage.listView.setItemAnimator(viewPage.dialogsItemAnimator); viewPage.listView.setVerticalScrollBarEnabled(true); viewPage.listView.setInstantClick(true); viewPage.layoutManager = new LinearLayoutManager(context) { @@ -6753,7 +6753,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. 
boolean updateVisibleRows = false; for (int a = 0; a < viewPages.length; a++) { - if (viewPages[a].isDefaultDialogType() && getMessagesController().getDialogs(folderId).size() <= 10) { + if (viewPages[a].isDefaultDialogType() && getMessagesController().getAllFoldersDialogsCount() <= 10) { viewPages[a].dialogsAdapter.notifyDataSetChanged(); } else { updateVisibleRows = true; @@ -8226,7 +8226,7 @@ public class DialogsActivity extends BaseFragment implements NotificationCenter. @Override public boolean isLightStatusBar() { - int color = searching ? Theme.getColor(Theme.key_windowBackgroundWhite) : Theme.getColor(folderId == 0 ? Theme.key_actionBarDefault : Theme.key_actionBarDefaultArchived); + int color = (searching && whiteActionBar) ? Theme.getColor(Theme.key_windowBackgroundWhite) : Theme.getColor(folderId == 0 ? Theme.key_actionBarDefault : Theme.key_actionBarDefaultArchived); if (actionBar.isActionModeShowed()) { color = Theme.getColor(Theme.key_actionBarActionModeDefault); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java index 237150756..c82269a00 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/GroupCallActivity.java @@ -357,7 +357,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter ImageUpdater currentAvatarUpdater; AvatarUpdaterDelegate avatarUpdaterDelegate; - private Boolean wasNotInLayoutFullscreen = false; + private Boolean wasNotInLayoutFullscreen; private Boolean wasExpandBigSize = true; private int scheduleStartAt; @@ -4200,12 +4200,10 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter if (isLandscapeMode == isRtmpLandscapeMode()) { fullscreenFor(participant); } - if (participant.aspectRatioFromWidth != 0 && participant.aspectRatioFromHeight != 0) { - if (isRtmpLandscapeMode()) { - parentActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE); - } else { - parentActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); - } + if (isRtmpLandscapeMode()) { + parentActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_SENSOR_LANDSCAPE); + } else { + parentActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT); } } } @@ -8601,7 +8599,7 @@ public class GroupCallActivity extends BottomSheet implements NotificationCenter } public boolean isRtmpLandscapeMode() { - return isRtmpStream() && !call.visibleVideoParticipants.isEmpty() && call.visibleVideoParticipants.get(0).aspectRatio != 0 && call.visibleVideoParticipants.get(0).aspectRatio >= 1; + return isRtmpStream() && !call.visibleVideoParticipants.isEmpty() && (call.visibleVideoParticipants.get(0).aspectRatio == 0 || call.visibleVideoParticipants.get(0).aspectRatio >= 1); } public boolean isRtmpStream() { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java index b29b98d36..3f1a53904 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/IntroActivity.java @@ -42,6 +42,7 @@ import android.widget.ScrollView; import android.widget.TextView; import androidx.annotation.NonNull; +import androidx.core.graphics.ColorUtils; import androidx.viewpager.widget.PagerAdapter; import androidx.viewpager.widget.ViewPager; @@ -60,6 +61,7 @@ import org.telegram.messenger.R; 
import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; +import org.telegram.ui.ActionBar.AlertDialog; import org.telegram.ui.ActionBar.BaseFragment; import org.telegram.ui.ActionBar.Theme; import org.telegram.ui.ActionBar.ThemeDescription; @@ -362,20 +364,39 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No if (startPressed || localeInfo == null) { return; } - LocaleController.getInstance().applyLanguage(localeInfo, true, false, currentAccount); startPressed = true; - presentFragment(new LoginActivity().setIntroView(frameContainerView, startMessagingButton), true); - destroyed = true; + + AlertDialog loaderDialog = new AlertDialog(v.getContext(), 3); + loaderDialog.setCanCancel(false); + loaderDialog.showDelayed(1000); + + NotificationCenter.getGlobalInstance().addObserver(new NotificationCenter.NotificationCenterDelegate() { + @Override + public void didReceivedNotification(int id, int account, Object... args) { + if (id == NotificationCenter.reloadInterface) { + loaderDialog.dismiss(); + + NotificationCenter.getGlobalInstance().removeObserver(this, id); + AndroidUtilities.runOnUIThread(()->{ + presentFragment(new LoginActivity().setIntroView(frameContainerView, startMessagingButton), true); + destroyed = true; + }, 100); + } + } + }, NotificationCenter.reloadInterface); + LocaleController.getInstance().applyLanguage(localeInfo, true, false, currentAccount); }); frameContainerView.addView(themeFrameLayout, LayoutHelper.createFrame(64, 64, Gravity.TOP | Gravity.RIGHT, 0, themeMargin, themeMargin, 0)); fragmentView = scrollView; + NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.suggestedLangpack); + NotificationCenter.getInstance(currentAccount).addObserver(this, NotificationCenter.configLoaded); + ConnectionsManager.getInstance(currentAccount).updateDcSettings(); LocaleController.getInstance().loadRemoteLanguages(currentAccount); checkContinueText(); justCreated = true; - NotificationCenter.getGlobalInstance().addObserver(this, NotificationCenter.suggestedLangpack); updateColors(false); @@ -426,6 +447,7 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No super.onFragmentDestroy(); destroyed = true; NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.suggestedLangpack); + NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.configLoaded); MessagesController.getGlobalMainSettings().edit().putLong("intro_crashed_time", 0).apply(); } @@ -433,7 +455,14 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No LocaleController.LocaleInfo englishInfo = null; LocaleController.LocaleInfo systemInfo = null; LocaleController.LocaleInfo currentLocaleInfo = LocaleController.getInstance().getCurrentLocaleInfo(); - final String systemLang = MessagesController.getInstance(currentAccount).suggestedLangCode; + String systemLang = MessagesController.getInstance(currentAccount).suggestedLangCode; + if (systemLang == null || systemLang.equals("en") && LocaleController.getInstance().getSystemDefaultLocale().getLanguage() != null && !LocaleController.getInstance().getSystemDefaultLocale().getLanguage().equals("en")) { + systemLang = LocaleController.getInstance().getSystemDefaultLocale().getLanguage(); + if (systemLang == null) { + systemLang = "en"; + } + } + String arg = systemLang.contains("-") ? 
systemLang.split("-")[0] : systemLang; String alias = LocaleController.getLocaleAlias(arg); for (int a = 0; a < LocaleController.getInstance().languages.size(); a++) { @@ -460,6 +489,7 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No localeInfo = englishInfo; } req.keys.add("ContinueOnThisLanguage"); + String finalSystemLang = systemLang; ConnectionsManager.getInstance(currentAccount).sendRequest(req, (response, error) -> { if (response != null) { TLRPC.Vector vector = (TLRPC.Vector) response; @@ -472,7 +502,7 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No if (!destroyed) { switchLanguageTextView.setText(string.value); SharedPreferences preferences = MessagesController.getGlobalMainSettings(); - preferences.edit().putString("language_showed2", systemLang.toLowerCase()).apply(); + preferences.edit().putString("language_showed2", finalSystemLang.toLowerCase()).apply(); } }); } @@ -482,7 +512,7 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No @Override public void didReceivedNotification(int id, int account, Object... args) { - if (id == NotificationCenter.suggestedLangpack) { + if (id == NotificationCenter.suggestedLangpack || id == NotificationCenter.configLoaded) { checkContinueText(); } } @@ -935,4 +965,10 @@ public class IntroActivity extends BaseFragment implements NotificationCenter.No } } else Intro.setBackgroundColor(Theme.getColor(Theme.key_windowBackgroundWhite)); } + + @Override + public boolean isLightStatusBar() { + int color = Theme.getColor(Theme.key_windowBackgroundWhite, null, true); + return ColorUtils.calculateLuminance(color) > 0.7f; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index c72bdda87..8db66d4d8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -1725,6 +1725,10 @@ public class LaunchActivity extends BasePermissionsActivity implements ActionBar group = path.replace("joinchat/", ""); } else if (path.startsWith("+")) { group = path.replace("+", ""); + if (AndroidUtilities.isNumeric(group)) { + username = group; + group = null; + } } else if (path.startsWith("addstickers/")) { sticker = path.replace("addstickers/", ""); } else if (path.startsWith("msg/") || path.startsWith("share/")) { @@ -2877,8 +2881,16 @@ public class LaunchActivity extends BasePermissionsActivity implements ActionBar Runnable cancelRunnable = null; if (username != null) { - TLRPC.TL_contacts_resolveUsername req = new TLRPC.TL_contacts_resolveUsername(); - req.username = username; + TLObject req; + if (AndroidUtilities.isNumeric(username)) { + TLRPC.TL_contacts_resolvePhone resolvePhone = new TLRPC.TL_contacts_resolvePhone(); + resolvePhone.phone = username; + req = resolvePhone; + } else { + TLRPC.TL_contacts_resolveUsername resolveUsername = new TLRPC.TL_contacts_resolveUsername(); + resolveUsername.username = username; + req = resolveUsername; + } requestId[0] = ConnectionsManager.getInstance(intentAccount).sendRequest(req, (response, error) -> AndroidUtilities.runOnUIThread(() -> { if (!LaunchActivity.this.isFinishing()) { boolean hideProgressDialog = true; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java index 1f239d4bc..0f5c6ea94 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LoginActivity.java @@ -280,6 +280,13 @@ public class LoginActivity extends BaseFragment { private Runnable[] editDoneCallback = new Runnable[2]; private boolean[] postedEditDoneCallback = new boolean[2]; + private static Map phoneNumberExclusionRules = new HashMap<>(); + + static { + phoneNumberExclusionRules.put("60", hintLengthFrom -> --hintLengthFrom); + phoneNumberExclusionRules.put("372", hintLengthFrom -> --hintLengthFrom); + } + private static class ProgressView extends View { private final Path path = new Path(); @@ -1447,29 +1454,28 @@ public class LoginActivity extends BaseFragment { } private void onAuthSuccess(TLRPC.TL_auth_authorization res, boolean afterSignup) { - Utilities.cacheClearQueue.postRunnable(()->{ - ConnectionsManager.getInstance(currentAccount).setUserId(res.user.id); - UserConfig.getInstance(currentAccount).clearConfig(); - MessagesController.getInstance(currentAccount).cleanup(); - UserConfig.getInstance(currentAccount).syncContacts = syncContacts; - UserConfig.getInstance(currentAccount).setCurrentUser(res.user); - UserConfig.getInstance(currentAccount).saveConfig(true); - MessagesStorage.getInstance(currentAccount).cleanup(true); - ArrayList users = new ArrayList<>(); - users.add(res.user); - MessagesStorage.getInstance(currentAccount).putUsersAndChats(users, null, true, true); - MessagesController.getInstance(currentAccount).putUser(res.user, false); - ContactsController.getInstance(currentAccount).checkAppAccount(); - MessagesController.getInstance(currentAccount).checkPromoInfo(true); - ConnectionsManager.getInstance(currentAccount).updateDcSettings(); + MessagesController.getInstance(currentAccount).cleanup(); + ConnectionsManager.getInstance(currentAccount).setUserId(res.user.id); + UserConfig.getInstance(currentAccount).clearConfig(); + MessagesController.getInstance(currentAccount).cleanup(); + UserConfig.getInstance(currentAccount).syncContacts = syncContacts; + UserConfig.getInstance(currentAccount).setCurrentUser(res.user); + UserConfig.getInstance(currentAccount).saveConfig(true); + MessagesStorage.getInstance(currentAccount).cleanup(true); + ArrayList users = new ArrayList<>(); + users.add(res.user); + MessagesStorage.getInstance(currentAccount).putUsersAndChats(users, null, true, true); + MessagesController.getInstance(currentAccount).putUser(res.user, false); + ContactsController.getInstance(currentAccount).checkAppAccount(); + MessagesController.getInstance(currentAccount).checkPromoInfo(true); + ConnectionsManager.getInstance(currentAccount).updateDcSettings(); - if (afterSignup) { - MessagesController.getInstance(currentAccount).putDialogsEndReachedAfterRegistration(); - } - MediaDataController.getInstance(currentAccount).loadStickersByEmojiOrName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME, false, true); + if (afterSignup) { + MessagesController.getInstance(currentAccount).putDialogsEndReachedAfterRegistration(); + } + MediaDataController.getInstance(currentAccount).loadStickersByEmojiOrName(AndroidUtilities.STICKERS_PLACEHOLDER_PACK_NAME, false, true); - AndroidUtilities.runOnUIThread(()-> needFinishActivity(afterSignup, res.setup_password_required, res.otherwise_relogin_days)); - }); + needFinishActivity(afterSignup, res.setup_password_required, res.otherwise_relogin_days); } private void fillNextCodeParams(Bundle params, TLRPC.TL_auth_sentCode res) { @@ -2177,8 +2183,9 @@ public class LoginActivity extends BaseFragment { 
String phoneNumber = "+" + codeField.getText() + " " + phoneField.getText(); String hintText = phoneField.getHintText(); int hintLength = hintText != null ? hintText.length() : 0; - if (codeField.getText().toString().equals("60")) { // Malaysia's numbers can contain less symbols - hintLength--; + PhoneNumberExclusionRule exclusionRule = phoneNumberExclusionRules.get(codeField.getText().toString()); + if (exclusionRule != null) { + hintLength = exclusionRule.modifyHintLengthRequirement(hintLength); } if (hintText != null && phoneField.length() < hintLength) { new AlertDialog.Builder(getParentActivity()) @@ -6118,4 +6125,14 @@ public class LoginActivity extends BaseFragment { void onDismiss(PhoneNumberConfirmView confirmView); } } + + private interface PhoneNumberExclusionRule { + int modifyHintLengthRequirement(int lengthFrom); + } + + @Override + public boolean isLightStatusBar() { + int color = Theme.getColor(Theme.key_windowBackgroundWhite, null, true); + return ColorUtils.calculateLuminance(color) > 0.7f; + } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java index 5628140f6..7f98b16cc 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PasscodeActivity.java @@ -693,6 +693,11 @@ public class PasscodeActivity extends BaseFragment implements NotificationCenter return fragmentView; } + @Override + public boolean hasForceLightStatusBar() { + return type != TYPE_MANAGE_CODE_SETTINGS; + } + /** * Sets custom keyboard visibility * diff --git a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java index 40a2de213..9f3a58f86 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/PrivacyControlActivity.java @@ -16,21 +16,32 @@ import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.ColorDrawable; import android.graphics.drawable.Drawable; import android.graphics.drawable.GradientDrawable; +import android.os.Build; import android.os.Bundle; +import android.text.SpannableString; +import android.text.SpannableStringBuilder; +import android.text.Spanned; +import android.text.style.ClickableSpan; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.widget.FrameLayout; +import androidx.annotation.NonNull; +import androidx.recyclerview.widget.DefaultItemAnimator; +import androidx.recyclerview.widget.LinearLayoutManager; +import androidx.recyclerview.widget.RecyclerView; + import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.ContactsController; import org.telegram.messenger.DialogObject; +import org.telegram.messenger.FileLog; import org.telegram.messenger.LocaleController; import org.telegram.messenger.MessageObject; import org.telegram.messenger.MessagesController; import org.telegram.messenger.NotificationCenter; -import org.telegram.messenger.FileLog; import org.telegram.messenger.R; import org.telegram.messenger.UserConfig; import org.telegram.tgnet.ConnectionsManager; @@ -48,6 +59,7 @@ import org.telegram.ui.Cells.ShadowSectionCell; import org.telegram.ui.Cells.TextInfoPrivacyCell; import org.telegram.ui.Cells.TextSettingsCell; import 
org.telegram.ui.Components.BackgroundGradientDrawable; +import org.telegram.ui.Components.BulletinFactory; import org.telegram.ui.Components.CombinedDrawable; import org.telegram.ui.Components.HintView; import org.telegram.ui.Components.LayoutHelper; @@ -56,10 +68,7 @@ import org.telegram.ui.Components.RecyclerListView; import java.util.ArrayList; import java.util.Collections; - -import androidx.recyclerview.widget.DefaultItemAnimator; -import androidx.recyclerview.widget.LinearLayoutManager; -import androidx.recyclerview.widget.RecyclerView; +import java.util.Locale; public class PrivacyControlActivity extends BaseFragment implements NotificationCenter.NotificationCenterDelegate { @@ -1000,7 +1009,29 @@ public class PrivacyControlActivity extends BaseFragment implements Notification if (position == detailRow) { if (rulesType == PRIVACY_RULES_TYPE_PHONE) { if (prevSubtypeContacts = (currentType == TYPE_NOBODY && currentSubType == 1)) { - privacyCell.setText(LocaleController.getString("PrivacyPhoneInfo3", R.string.PrivacyPhoneInfo3)); + SpannableStringBuilder spannableStringBuilder = new SpannableStringBuilder(); + + String phoneLinkStr = String.format(Locale.ENGLISH, "https://t.me/+%s", getUserConfig().getClientPhone()); + SpannableString phoneLink = new SpannableString(phoneLinkStr); + phoneLink.setSpan(new ClickableSpan() { + @Override + public void onClick(@NonNull View view) { + android.content.ClipboardManager clipboard = (android.content.ClipboardManager) ApplicationLoader.applicationContext.getSystemService(Context.CLIPBOARD_SERVICE); + android.content.ClipData clip = android.content.ClipData.newPlainText("label", phoneLinkStr); + clipboard.setPrimaryClip(clip); + if (Build.VERSION.SDK_INT < Build.VERSION_CODES.S) { + BulletinFactory.of(PrivacyControlActivity.this).createCopyBulletin(LocaleController.getString("PhoneCopied", R.string.PhoneCopied)).show(); + } + } + }, 0, phoneLinkStr.length(), Spanned.SPAN_EXCLUSIVE_EXCLUSIVE); + + spannableStringBuilder.append(LocaleController.getString("PrivacyPhoneInfo3", R.string.PrivacyPhoneInfo3)) + .append("\n\n") + .append(LocaleController.getString("PrivacyPhoneInfo4", R.string.PrivacyPhoneInfo4)) + .append("\n") + .append(phoneLink); + + privacyCell.setText(spannableStringBuilder); } else { privacyCell.setText(LocaleController.getString("PrivacyPhoneInfo", R.string.PrivacyPhoneInfo)); } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java index f8db27f65..ece4f9d3b 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/TwoStepVerificationSetupActivity.java @@ -717,7 +717,7 @@ public class TwoStepVerificationSetupActivity extends BaseFragment { super.onMeasure(widthMeasureSpec, heightMeasureSpec); MarginLayoutParams params = (MarginLayoutParams) titleTextView.getLayoutParams(); - params.topMargin = (imageView.getVisibility() == GONE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP ? AndroidUtilities.statusBarHeight : 0) + AndroidUtilities.dp(8); + params.topMargin = (imageView.getVisibility() == GONE && Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP ? AndroidUtilities.statusBarHeight : 0) + AndroidUtilities.dp(8) + (currentType == TYPE_ENTER_HINT && AndroidUtilities.isSmallScreen() && !isLandscape() ?
AndroidUtilities.dp(32) : 0); } }; scrollViewLinearLayout.setOrientation(LinearLayout.VERTICAL); @@ -839,7 +839,7 @@ public class TwoStepVerificationSetupActivity extends BaseFragment { }); outlineTextFirstRow.addView(firstRowLinearLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT)); - scrollViewLinearLayout.addView(outlineTextFirstRow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 24, 32, 24, 0)); + scrollViewLinearLayout.addView(outlineTextFirstRow, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.TOP | Gravity.CENTER_HORIZONTAL, 24, 32, 24, 32)); outlineTextSecondRow = new OutlineTextContainerView(context); @@ -1279,6 +1279,11 @@ public class TwoStepVerificationSetupActivity extends BaseFragment { } } + @Override + public boolean hasForceLightStatusBar() { + return true; + } + private boolean isCustomKeyboardVisible() { return (currentType == TYPE_EMAIL_CONFIRM || currentType == TYPE_EMAIL_RECOVERY) && !AndroidUtilities.isTablet() && AndroidUtilities.displaySize.x < AndroidUtilities.displaySize.y && !AndroidUtilities.isAccessibilityTouchExplorationEnabled(); diff --git a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java index 51be90aa0..ebbf2655f 100644 --- a/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/TMessagesProj/src/main/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -17,11 +17,15 @@ import android.media.AudioFormat; import android.media.AudioManager; import android.media.AudioRecord; import android.media.AudioRecordingConfiguration; +import android.media.AudioTimestamp; import android.media.MediaRecorder.AudioSource; import android.os.Build; import android.os.Process; import androidx.annotation.Nullable; import androidx.annotation.RequiresApi; + +import com.google.android.exoplayer2.util.Log; + import java.lang.System; import java.nio.ByteBuffer; import java.util.Arrays; diff --git a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java index 7956038ac..5ea67a8d7 100644 --- a/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java +++ b/TMessagesProj/src/main/java/org/webrtc/voiceengine/WebRtcAudioRecord.java @@ -197,7 +197,12 @@ public class WebRtcAudioRecord { // failed to join this thread. To be a bit safer, try to avoid calling any native methods // in case they've been unregistered after stopRecording() returned. if (keepAlive) { - nativeDataIsRecorded(bytesRead, nativeAudioRecord); + try { + nativeDataIsRecorded(bytesRead, nativeAudioRecord); + } catch (UnsatisfiedLinkError unsatisfiedLinkError) { + FileLog.e(unsatisfiedLinkError); + keepAlive = false; + } } if (audioSamplesReadyCallback != null) { // Copy the entire byte buffer array. 
Assume that the start of the byteBuffer is diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_report_drugs.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_report_drugs.png new file mode 100644 index 000000000..75542dd0c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_report_drugs.png differ diff --git a/TMessagesProj/src/main/res/drawable-hdpi/msg_report_personal.png b/TMessagesProj/src/main/res/drawable-hdpi/msg_report_personal.png new file mode 100644 index 000000000..390ac8466 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-hdpi/msg_report_personal.png differ diff --git a/TMessagesProj/src/main/res/drawable-mdpi/msg_report_drugs.png b/TMessagesProj/src/main/res/drawable-mdpi/msg_report_drugs.png new file mode 100644 index 000000000..951808635 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-mdpi/msg_report_drugs.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_drugs.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_drugs.png new file mode 100644 index 000000000..240c3df7b Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_drugs.png differ diff --git a/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_personal.png b/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_personal.png new file mode 100644 index 000000000..7f3098c0c Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xhdpi/msg_report_personal.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_drugs.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_drugs.png new file mode 100644 index 000000000..e5505506f Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_drugs.png differ diff --git a/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_personal.png b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_personal.png new file mode 100644 index 000000000..680356cb7 Binary files /dev/null and b/TMessagesProj/src/main/res/drawable-xxhdpi/msg_report_personal.png differ diff --git a/TMessagesProj/src/main/res/values/strings.xml b/TMessagesProj/src/main/res/values/strings.xml index c84ec77a1..5e3120a34 100644 --- a/TMessagesProj/src/main/res/values/strings.xml +++ b/TMessagesProj/src/main/res/values/strings.xml @@ -5099,4 +5099,7 @@ Please enter the password you will use to unlock Telegram. If you forget your passcode, you\'ll need to **reinstall** the app. All secret chats will be lost. Do you want to edit your number?\n\n**%1$s**\n\nIf the number above is correct, please wait for the verification code. + <string name="ReportChatIllegalDrugs">Illegal Drugs</string> + <string name="ReportChatPersonalDetails">Personal Details</string> + <string name="PrivacyPhoneInfo4">This public link opens a chat with you:</string>
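A couple of the Java changes above lend themselves to small standalone illustrations. First, a minimal sketch of the new t.me/+ link handling added in LaunchActivity: a numeric suffix is treated as a phone number and resolved through the new TL_contacts_resolvePhone request, while anything else remains an invite hash. The class name PlusLinkSketch and the classify() helper are illustrative only and not part of the patch; the numeric test mirrors the AndroidUtilities.isNumeric() helper introduced above.

// Standalone sketch (not part of the patch) of the t.me/+ disambiguation logic.
public class PlusLinkSketch {

    // Same check as AndroidUtilities.isNumeric() in the patch above.
    static boolean isNumeric(String str) {
        try {
            Double.parseDouble(str);
            return true;
        } catch (NumberFormatException e) {
            return false;
        }
    }

    // Classifies a "+..." path: numeric values resolve as phone numbers, others as invite hashes.
    static String classify(String path) {
        String value = path.replace("+", "");
        return isNumeric(value) ? "resolve phone: " + value : "join chat: " + value;
    }

    public static void main(String[] args) {
        System.out.println(classify("+79991234567")); // resolve phone: 79991234567 (TL_contacts_resolvePhone)
        System.out.println(classify("+AbCdEf0123"));  // join chat: AbCdEf0123 (invite hash, as before)
    }
}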
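Second, a sketch of the per-country hint-length relaxation that LoginActivity now keeps in its phoneNumberExclusionRules map. The patch uses a private PhoneNumberExclusionRule interface; this sketch substitutes java.util.function.IntUnaryOperator, and the class and method names are illustrative. Country code 60 is Malaysia and 372 is Estonia; both rules accept numbers one digit shorter than the hint.

import java.util.HashMap;
import java.util.Map;
import java.util.function.IntUnaryOperator;

// Illustrative sketch of LoginActivity's phoneNumberExclusionRules table.
public class HintLengthRulesSketch {

    private static final Map<String, IntUnaryOperator> RULES = new HashMap<>();

    static {
        RULES.put("60", len -> len - 1);  // Malaysia: numbers may be one digit shorter than the hint
        RULES.put("372", len -> len - 1); // Estonia: same relaxation
    }

    // Returns the minimum accepted length for the given country code and hint length.
    static int requiredLength(String countryCode, int hintLength) {
        IntUnaryOperator rule = RULES.get(countryCode);
        return rule != null ? rule.applyAsInt(hintLength) : hintLength;
    }

    public static void main(String[] args) {
        System.out.println(requiredLength("60", 10)); // 9
        System.out.println(requiredLength("49", 10)); // 10
    }
}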