From 4c5f32babf6cf9d0869f1732280684d974841653 Mon Sep 17 00:00:00 2001 From: DrKLO Date: Sun, 16 Aug 2020 00:06:36 +0300 Subject: [PATCH] Update to 7.0.0 (2064) --- TMessagesProj/build.gradle | 2 +- .../org_telegram_messenger_voip_Instance.cpp | 41 +- .../jni/tgcalls/CodecSelectHelper.cpp | 33 +- TMessagesProj/jni/tgcalls/CodecSelectHelper.h | 3 +- TMessagesProj/jni/tgcalls/CryptoHelper.cpp | 2 + .../jni/tgcalls/EncryptedConnection.cpp | 18 +- TMessagesProj/jni/tgcalls/Instance.cpp | 14 +- TMessagesProj/jni/tgcalls/Instance.h | 19 +- TMessagesProj/jni/tgcalls/InstanceImpl.cpp | 88 +- TMessagesProj/jni/tgcalls/InstanceImpl.h | 4 +- TMessagesProj/jni/tgcalls/LogSinkImpl.cpp | 4 +- TMessagesProj/jni/tgcalls/Manager.cpp | 82 +- TMessagesProj/jni/tgcalls/Manager.h | 9 + TMessagesProj/jni/tgcalls/MediaManager.cpp | 56 +- TMessagesProj/jni/tgcalls/MediaManager.h | 10 +- TMessagesProj/jni/tgcalls/Message.cpp | 14 + TMessagesProj/jni/tgcalls/Message.h | 10 +- TMessagesProj/jni/tgcalls/NetworkManager.cpp | 141 +- TMessagesProj/jni/tgcalls/NetworkManager.h | 20 +- .../jni/tgcalls/VideoCaptureInterfaceImpl.cpp | 58 +- .../jni/tgcalls/VideoCaptureInterfaceImpl.h | 1 + .../jni/tgcalls/legacy/InstanceImplLegacy.cpp | 14 +- .../jni/tgcalls/legacy/InstanceImplLegacy.h | 4 +- .../jni/tgcalls/platform/PlatformInterface.h | 3 +- .../platform/android/AndroidInterface.cpp | 14 +- .../platform/android/AndroidInterface.h | 5 +- .../tgcalls/platform/darwin/DarwinInterface.h | 3 +- .../platform/darwin/DarwinInterface.mm | 20 +- .../jni/tgcalls/platform/darwin/GLVideoView.h | 13 +- .../tgcalls/platform/darwin/GLVideoViewMac.h | 50 + .../tgcalls/platform/darwin/GLVideoViewMac.mm | 491 +++++ .../platform/darwin/VideoCameraCapturer.mm | 30 +- .../platform/darwin/VideoCameraCapturerMac.h | 2 +- .../platform/darwin/VideoCameraCapturerMac.mm | 60 +- .../darwin/VideoCapturerInterfaceImpl.h | 2 +- .../darwin/VideoCapturerInterfaceImpl.mm | 217 ++- .../platform/darwin/VideoMetalViewMac.h | 2 +- 
.../platform/darwin/VideoMetalViewMac.mm | 75 +- .../DesktopInterface.cpp} | 18 +- .../DesktopInterface.h} | 8 +- .../reference/InstanceImplReference.cpp | 25 +- .../tgcalls/reference/InstanceImplReference.h | 4 +- .../org/telegram/messenger/BuildVars.java | 2 +- .../messenger/MessagesController.java | 8 +- .../messenger/voip/NativeInstance.java | 25 +- .../messenger/voip/VideoCameraCapturer.java | 3 - .../messenger/voip/VoIPBaseService.java | 17 +- .../telegram/messenger/voip/VoIPService.java | 3 +- .../ui/Charts/view_data/ChartHeaderView.java | 2 + .../java/org/telegram/ui/ChatActivity.java | 2 +- .../Components/voip/VoIPFloatingLayout.java | 2 - .../ui/Components/voip/VoIPPiPView.java | 3 +- .../ui/Components/voip/VoIPTextureView.java | 24 +- .../ui/Components/voip/VoIPWindowView.java | 6 +- .../java/org/telegram/ui/LaunchActivity.java | 1623 +++++++++-------- .../org/telegram/ui/StatisticActivity.java | 3 + .../java/org/telegram/ui/VoIPFragment.java | 9 +- .../main/java/org/webrtc/Camera1Session.java | 13 +- .../main/java/org/webrtc/Camera2Session.java | 14 +- .../main/java/org/webrtc/CameraSession.java | 18 - .../src/main/java/org/webrtc/EglRenderer.java | 17 +- .../webrtc/HardwareVideoEncoderFactory.java | 96 +- .../java/org/webrtc/OrientationHelper.java | 68 + .../main/java/org/webrtc/RendererCommon.java | 4 +- .../java/org/webrtc/SurfaceViewRenderer.java | 2 +- .../java/org/webrtc/TextureViewRenderer.java | 69 +- .../java/org/webrtc/VideoFrameDrawer.java | 8 +- 67 files changed, 2447 insertions(+), 1283 deletions(-) create mode 100644 TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h create mode 100644 TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm rename TMessagesProj/jni/tgcalls/platform/{windows/WindowsInterface.cpp => tdesktop/DesktopInterface.cpp} (67%) rename TMessagesProj/jni/tgcalls/platform/{windows/WindowsInterface.h => tdesktop/DesktopInterface.h} (82%) create mode 100644 
TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java diff --git a/TMessagesProj/build.gradle b/TMessagesProj/build.gradle index a6f389388..afff641c3 100644 --- a/TMessagesProj/build.gradle +++ b/TMessagesProj/build.gradle @@ -280,7 +280,7 @@ android { } } - defaultConfig.versionCode = 2061 + defaultConfig.versionCode = 2064 applicationVariants.all { variant -> variant.outputs.all { output -> diff --git a/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp b/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp index 54a65a4d8..18c1b77be 100644 --- a/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp +++ b/TMessagesProj/jni/libtgvoip/client/android/org_telegram_messenger_voip_Instance.cpp @@ -71,6 +71,10 @@ struct InstanceHolder { std::shared_ptr _videoCapture; }; +jclass TrafficStatsClass; +jclass FinalStateClass; +jmethodID FinalStateInitMethod; + jlong getInstanceHolderId(JNIEnv *env, jobject obj) { return env->GetLongField(obj, env->GetFieldID(env->GetObjectClass(obj), "nativePtr", "J")); } @@ -200,9 +204,8 @@ jint asJavaState(const State &state) { } jobject asJavaTrafficStats(JNIEnv *env, const TrafficStats &trafficStats) { - jclass clazz = env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats"); - jmethodID initMethodId = env->GetMethodID(clazz, "", "(JJJJ)V"); - return env->NewObject(clazz, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile); + jmethodID initMethodId = env->GetMethodID(TrafficStatsClass, "", "(JJJJ)V"); + return env->NewObject(TrafficStatsClass, initMethodId, (jlong) trafficStats.bytesSentWifi, (jlong) trafficStats.bytesReceivedWifi, (jlong) trafficStats.bytesSentMobile, (jlong) trafficStats.bytesReceivedMobile); } jobject asJavaFinalState(JNIEnv *env, const FinalState &finalState) { @@ -210,9 +213,7 @@ jobject 
asJavaFinalState(JNIEnv *env, const FinalState &finalState) { jstring debugLog = env->NewStringUTF(finalState.debugLog.c_str()); jobject trafficStats = asJavaTrafficStats(env, finalState.trafficStats); auto isRatingSuggested = static_cast(finalState.isRatingSuggested); - jclass finalStateClass = env->FindClass("org/telegram/messenger/voip/Instance$FinalState"); - jmethodID finalStateInitMethodId = env->GetMethodID(finalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); - return env->NewObject(finalStateClass, finalStateInitMethodId, persistentState, debugLog, trafficStats, isRatingSuggested); + return env->NewObject(FinalStateClass, FinalStateInitMethod, persistentState, debugLog, trafficStats, isRatingSuggested); } extern "C" { @@ -229,6 +230,10 @@ void initWebRTC(JNIEnv *env) { webrtc::JVM::Initialize(vm); rtc::InitializeSSL(); webrtcLoaded = true; + + TrafficStatsClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats"))); + FinalStateClass = static_cast(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState"))); + FinalStateInitMethod = env->GetMethodID(FinalStateClass, "", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V"); } JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNativeInstance(JNIEnv *env, jclass clazz, jstring version, jobject instanceObj, jobject config, jstring persistentStateFilePath, jobjectArray endpoints, jobject proxyClass, jint networkType, jobject encryptionKey, jobject remoteSink, jlong videoCapturer, jfloat aspectRatio) { @@ -259,7 +264,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati .enableVolumeControl = true, .logPath = tgvoip::jni::JavaStringToStdString(env, configObject.getStringField("logPath")), .maxApiLayer = configObject.getIntField("maxApiLayer"), - /*.preferredAspectRatio = aspectRatio*/ + .preferredAspectRatio = 
aspectRatio }, .encryptionKey = EncryptionKey( std::move(encryptionKeyValue), @@ -332,6 +337,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati holder->javaInstance = globalRef; holder->_videoCapture = videoCapture; holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink)); + holder->nativeInstance->setNetworkType(parseNetworkType(networkType)); return reinterpret_cast(holder); } @@ -384,19 +390,16 @@ JNIEXPORT jbyteArray JNICALL Java_org_telegram_messenger_voip_NativeInstance_get return copyVectorToJavaByteArray(env, getInstance(env, obj)->getPersistentState().value); } -JNIEXPORT jobject JNICALL Java_org_telegram_messenger_voip_NativeInstance_stop(JNIEnv *env, jobject obj) { +JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNative(JNIEnv *env, jobject obj) { InstanceHolder *instance = getInstanceHolder(env, obj); - FinalState finalState = instance->nativeInstance->stop(); - - // saving persistent state - const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, obj).getStringField("persistentStateFilePath")); - savePersistentState(path.c_str(), finalState.persistentState); - - // clean - env->DeleteGlobalRef(instance->javaInstance); - delete instance; - - return asJavaFinalState(env, finalState); + instance->nativeInstance->stop([instance](FinalState finalState) { + JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); + const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, instance->javaInstance).getStringField("persistentStateFilePath")); + savePersistentState(path.c_str(), finalState.persistentState); + env->CallVoidMethod(instance->javaInstance, env->GetMethodID(env->GetObjectClass(instance->javaInstance), "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState)); + env->DeleteGlobalRef(instance->javaInstance); + delete instance; + }); } JNIEXPORT long JNICALL 
Java_org_telegram_messenger_voip_NativeInstance_createVideoCapturer(JNIEnv *env, jclass clazz, jobject localSink) { diff --git a/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp b/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp index 4adcb51d7..5c18b6d45 100644 --- a/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp +++ b/TMessagesProj/jni/tgcalls/CodecSelectHelper.cpp @@ -23,10 +23,10 @@ bool CompareFormats(const VideoFormat &a, const VideoFormat &b) { } } -int FormatPriority(const VideoFormat &format) { +int FormatPriority(const VideoFormat &format, const std::vector &preferredCodecs) { static const auto kCodecs = { std::string(cricket::kAv1CodecName), - std::string(cricket::kVp9CodecName), + std::string(cricket::kVp9CodecName), std::string(cricket::kH265CodecName), std::string(cricket::kH264CodecName), std::string(cricket::kVp8CodecName), @@ -43,8 +43,16 @@ int FormatPriority(const VideoFormat &format) { } return result; }(); + + for (int i = 0; i < preferredCodecs.size(); i++) { + for (const auto &name : kSupported) { + if (absl::EqualsIgnoreCase(format.name, preferredCodecs[i]) && absl::EqualsIgnoreCase(format.name, name)) { + return i; + } + } + } - auto result = 0; + auto result = (int)preferredCodecs.size(); for (const auto &name : kSupported) { if (absl::EqualsIgnoreCase(format.name, name)) { return result; @@ -54,17 +62,19 @@ int FormatPriority(const VideoFormat &format) { return -1; } -bool ComparePriorities(const VideoFormat &a, const VideoFormat &b) { - return FormatPriority(a) < FormatPriority(b); +bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector &preferredCodecs) { + return FormatPriority(a, preferredCodecs) < FormatPriority(b, preferredCodecs); } -std::vector FilterAndSortEncoders(std::vector list) { +std::vector FilterAndSortEncoders(std::vector list, const std::vector &preferredCodecs) { const auto listBegin = begin(list); const auto listEnd = end(list); - std::sort(listBegin, listEnd, ComparePriorities); + 
std::sort(listBegin, listEnd, [&preferredCodecs](const VideoFormat &lhs, const VideoFormat &rhs) { + return ComparePriorities(lhs, rhs, preferredCodecs); + }); auto eraseFrom = listBegin; auto eraseTill = eraseFrom; - while (eraseTill != listEnd && FormatPriority(*eraseTill) == -1) { + while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs) == -1) { ++eraseTill; } if (eraseTill != eraseFrom) { @@ -131,11 +141,12 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) { VideoFormatsMessage ComposeSupportedFormats( std::vector encoders, - std::vector decoders) { - encoders = FilterAndSortEncoders(std::move(encoders)); + std::vector decoders, + const std::vector &preferredCodecs) { + encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs); auto result = VideoFormatsMessage(); - result.encodersCount = encoders.size(); + result.encodersCount = (int)encoders.size(); result.formats = AppendUnique(std::move(encoders), std::move(decoders)); for (const auto &format : result.formats) { RTC_LOG(LS_INFO) << "Format: " << format.ToString(); diff --git a/TMessagesProj/jni/tgcalls/CodecSelectHelper.h b/TMessagesProj/jni/tgcalls/CodecSelectHelper.h index 1be000738..d50ffc329 100644 --- a/TMessagesProj/jni/tgcalls/CodecSelectHelper.h +++ b/TMessagesProj/jni/tgcalls/CodecSelectHelper.h @@ -18,7 +18,8 @@ struct CommonCodecs { VideoFormatsMessage ComposeSupportedFormats( std::vector encoders, - std::vector decoders); + std::vector decoders, + const std::vector &preferredCodecs); CommonFormats ComputeCommonFormats( const VideoFormatsMessage &my, diff --git a/TMessagesProj/jni/tgcalls/CryptoHelper.cpp b/TMessagesProj/jni/tgcalls/CryptoHelper.cpp index 9e21b5180..67603734e 100644 --- a/TMessagesProj/jni/tgcalls/CryptoHelper.cpp +++ b/TMessagesProj/jni/tgcalls/CryptoHelper.cpp @@ -1,5 +1,7 @@ #include "CryptoHelper.h" +#include + namespace tgcalls { AesKeyIv PrepareAesKeyIv(const uint8_t *key, const uint8_t *msgKey, int x) { diff --git 
a/TMessagesProj/jni/tgcalls/EncryptedConnection.cpp b/TMessagesProj/jni/tgcalls/EncryptedConnection.cpp index fcec05565..9b801caa1 100644 --- a/TMessagesProj/jni/tgcalls/EncryptedConnection.cpp +++ b/TMessagesProj/jni/tgcalls/EncryptedConnection.cpp @@ -58,6 +58,16 @@ absl::nullopt_t LogError( return absl::nullopt; } +bool ConstTimeIsDifferent(const void *a, const void *b, size_t size) { + auto ca = reinterpret_cast(a); + auto cb = reinterpret_cast(b); + volatile auto different = false; + for (const auto ce = ca + size; ca != ce; ++ca, ++cb) { + different |= (*ca != *cb); + } + return different; +} + } // namespace EncryptedConnection::EncryptedConnection( @@ -326,7 +336,7 @@ auto EncryptedConnection::handleIncomingPacket(const char *bytes, size_t size) const auto msgKeyLarge = ConcatSHA256( MemorySpan{ key + 88 + x, 32 }, MemorySpan{ decryptionBuffer.data(), decryptionBuffer.size() }); - if (memcmp(msgKeyLarge.data() + 8, msgKey, 16)) { + if (ConstTimeIsDifferent(msgKeyLarge.data() + 8, msgKey, 16)) { return LogError("Bad incoming data hash."); } @@ -364,10 +374,16 @@ auto EncryptedConnection::processPacket( } if (type == kEmptyId) { + if (additionalMessage) { + return LogError("Empty message should be only the first one in the packet."); + } RTC_LOG(LS_INFO) << logHeader() << "Got RECV:empty" << "#" << currentCounter; reader.Consume(1); } else if (type == kAckId) { + if (!additionalMessage) { + return LogError("Ack message must not be the first one in the packet."); + } ackMyMessage(currentSeq); reader.Consume(1); } else if (auto message = DeserializeMessage(reader, singleMessagePacket)) { diff --git a/TMessagesProj/jni/tgcalls/Instance.cpp b/TMessagesProj/jni/tgcalls/Instance.cpp index ebd7247d4..38d7e707f 100644 --- a/TMessagesProj/jni/tgcalls/Instance.cpp +++ b/TMessagesProj/jni/tgcalls/Instance.cpp @@ -1,7 +1,5 @@ #include "Instance.h" -#include "VideoCaptureInterfaceImpl.h" - #include #include @@ -10,8 +8,8 @@ namespace { std::function globalLoggingFunction; 
-std::map> &MetaMap() { - static auto result = std::map>(); +std::map> &MetaMap() { + static auto result = std::map>(); return result; } @@ -44,10 +42,12 @@ std::unique_ptr Meta::Create( : nullptr; } -void Meta::RegisterOne(std::unique_ptr meta) { +void Meta::RegisterOne(std::shared_ptr meta) { if (meta) { - const auto version = meta->version(); - MetaMap().emplace(version, std::move(meta)); + const auto versions = meta->versions(); + for (auto &it : versions) { + MetaMap().emplace(it, meta); + } } } diff --git a/TMessagesProj/jni/tgcalls/Instance.h b/TMessagesProj/jni/tgcalls/Instance.h index b58990720..38520d817 100644 --- a/TMessagesProj/jni/tgcalls/Instance.h +++ b/TMessagesProj/jni/tgcalls/Instance.h @@ -55,6 +55,11 @@ struct Endpoint { unsigned char peerTag[16] = { 0 }; }; +enum class ProtocolVersion { + V0, + V1 // Low-cost network negotiation +}; + enum class NetworkType { Unknown, Gprs, @@ -98,6 +103,8 @@ struct Config { int maxApiLayer = 0; float preferredAspectRatio; bool enableHighBitrateVideo = false; + std::vector preferredVideoCodecs; + ProtocolVersion protocolVersion = ProtocolVersion::V0; }; struct EncryptionKey { @@ -174,7 +181,7 @@ public: virtual void receiveSignalingData(const std::vector &data) = 0; virtual void setVideoCapture(std::shared_ptr videoCapture) = 0; - virtual FinalState stop() = 0; + virtual void stop(std::function completion) = 0; }; @@ -204,7 +211,7 @@ public: virtual std::unique_ptr construct(Descriptor &&descriptor) = 0; virtual int connectionMaxLayer() = 0; - virtual std::string version() = 0; + virtual std::vector versions() = 0; static std::unique_ptr Create( const std::string &version, @@ -218,7 +225,7 @@ private: template static bool RegisterOne(); - static void RegisterOne(std::unique_ptr meta); + static void RegisterOne(std::shared_ptr meta); }; @@ -229,14 +236,14 @@ bool Meta::RegisterOne() { int connectionMaxLayer() override { return Implementation::GetConnectionMaxLayer(); } - std::string version() override { - 
return Implementation::GetVersion(); + std::vector versions() override { + return Implementation::GetVersions(); } std::unique_ptr construct(Descriptor &&descriptor) override { return std::make_unique(std::move(descriptor)); } }; - RegisterOne(std::make_unique()); + RegisterOne(std::make_shared()); return true; } diff --git a/TMessagesProj/jni/tgcalls/InstanceImpl.cpp b/TMessagesProj/jni/tgcalls/InstanceImpl.cpp index 8b328a413..7f7108df7 100644 --- a/TMessagesProj/jni/tgcalls/InstanceImpl.cpp +++ b/TMessagesProj/jni/tgcalls/InstanceImpl.cpp @@ -29,6 +29,8 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor) rtc::LogMessage::LogToDebug(rtc::LS_INFO); rtc::LogMessage::SetLogToStderr(false); rtc::LogMessage::AddLogToStream(_logSink.get(), rtc::LS_INFO); + + auto networkType = descriptor.initialNetworkType; _manager.reset(new ThreadLocalObject(getManagerThread(), [descriptor = std::move(descriptor)]() mutable { return new Manager(getManagerThread(), std::move(descriptor)); @@ -36,6 +38,8 @@ InstanceImpl::InstanceImpl(Descriptor &&descriptor) _manager->perform(RTC_FROM_HERE, [](Manager *manager) { manager->start(); }); + + setNetworkType(networkType); } InstanceImpl::~InstanceImpl() { @@ -55,51 +59,19 @@ void InstanceImpl::setVideoCapture(std::shared_ptr videoC } void InstanceImpl::setNetworkType(NetworkType networkType) { - /*message::NetworkType mappedType; - - switch (networkType) { - case NetworkType::Unknown: - mappedType = message::NetworkType::nUnknown; - break; - case NetworkType::Gprs: - mappedType = message::NetworkType::nGprs; - break; - case NetworkType::Edge: - mappedType = message::NetworkType::nEdge; - break; - case NetworkType::ThirdGeneration: - mappedType = message::NetworkType::n3gOrAbove; - break; - case NetworkType::Hspa: - mappedType = message::NetworkType::n3gOrAbove; - break; - case NetworkType::Lte: - mappedType = message::NetworkType::n3gOrAbove; - break; - case NetworkType::WiFi: - mappedType = message::NetworkType::nHighSpeed; - break; - 
case NetworkType::Ethernet: - mappedType = message::NetworkType::nHighSpeed; - break; - case NetworkType::OtherHighSpeed: - mappedType = message::NetworkType::nHighSpeed; - break; - case NetworkType::OtherLowSpeed: - mappedType = message::NetworkType::nEdge; - break; - case NetworkType::OtherMobile: - mappedType = message::NetworkType::n3gOrAbove; - break; - case NetworkType::Dialup: - mappedType = message::NetworkType::nGprs; - break; - default: - mappedType = message::NetworkType::nUnknown; - break; - } - - controller_->SetNetworkType(mappedType);*/ + bool isLowCostNetwork = false; + switch (networkType) { + case NetworkType::WiFi: + case NetworkType::Ethernet: + isLowCostNetwork = true; + break; + default: + break; + } + + _manager->perform(RTC_FROM_HERE, [isLowCostNetwork](Manager *manager) { + manager->setIsLocalNetworkLowCost(isLowCostNetwork); + }); } void InstanceImpl::setMuteMicrophone(bool muteMicrophone) { @@ -166,12 +138,19 @@ PersistentState InstanceImpl::getPersistentState() { return PersistentState{}; // we dont't have such information } -FinalState InstanceImpl::stop() { - FinalState finalState; - finalState.debugLog = _logSink->result(); - finalState.isRatingSuggested = false; - - return finalState; +void InstanceImpl::stop(std::function completion) { + std::string debugLog = _logSink->result(); + + _manager->perform(RTC_FROM_HERE, [completion, debugLog = std::move(debugLog)](Manager *manager) { + manager->getNetworkStats([completion, debugLog = std::move(debugLog)](TrafficStats stats) { + FinalState finalState; + finalState.debugLog = debugLog; + finalState.isRatingSuggested = false; + finalState.trafficStats = stats; + + completion(finalState); + }); + }); } /*void InstanceImpl::controllerStateCallback(Controller::State state) { @@ -201,8 +180,11 @@ int InstanceImpl::GetConnectionMaxLayer() { return 92; // TODO: retrieve from LayerBase } -std::string InstanceImpl::GetVersion() { - return "2.7.7"; // TODO: version not known while not released 
+std::vector InstanceImpl::GetVersions() { + std::vector result; + result.push_back("2.7.7"); + result.push_back("3.0.0"); + return result; } template <> diff --git a/TMessagesProj/jni/tgcalls/InstanceImpl.h b/TMessagesProj/jni/tgcalls/InstanceImpl.h index 7ddf46e0a..b0a6f2559 100644 --- a/TMessagesProj/jni/tgcalls/InstanceImpl.h +++ b/TMessagesProj/jni/tgcalls/InstanceImpl.h @@ -17,7 +17,7 @@ public: ~InstanceImpl() override; static int GetConnectionMaxLayer(); - static std::string GetVersion(); + static std::vector GetVersions(); void receiveSignalingData(const std::vector &data) override; void setVideoCapture(std::shared_ptr videoCapture) override; @@ -37,7 +37,7 @@ public: int64_t getPreferredRelayId() override; TrafficStats getTrafficStats() override; PersistentState getPersistentState() override; - FinalState stop() override; + void stop(std::function completion) override; //void controllerStateCallback(Controller::State state); private: diff --git a/TMessagesProj/jni/tgcalls/LogSinkImpl.cpp b/TMessagesProj/jni/tgcalls/LogSinkImpl.cpp index c9dbbe5d1..77755c319 100644 --- a/TMessagesProj/jni/tgcalls/LogSinkImpl.cpp +++ b/TMessagesProj/jni/tgcalls/LogSinkImpl.cpp @@ -5,9 +5,9 @@ #ifdef WEBRTC_WIN #include "windows.h" #include -#elif defined(WEBRTC_IOS) || defined(WEBRTC_MAC) +#else // WEBRTC_WIN #include -#endif //WEBRTC_IOS || WEBRTC_MAC +#endif // WEBRTC_WIN namespace tgcalls { diff --git a/TMessagesProj/jni/tgcalls/Manager.cpp b/TMessagesProj/jni/tgcalls/Manager.cpp index 4ec6e8bd5..de3f869da 100644 --- a/TMessagesProj/jni/tgcalls/Manager.cpp +++ b/TMessagesProj/jni/tgcalls/Manager.cpp @@ -39,6 +39,7 @@ _signaling( _encryptionKey, [=](int delayMs, int cause) { sendSignalingAsync(delayMs, cause); }), _enableP2P(descriptor.config.enableP2P), +_protocolVersion(descriptor.config.protocolVersion), _rtcServers(std::move(descriptor.rtcServers)), _videoCapture(std::move(descriptor.videoCapture)), _stateUpdated(std::move(descriptor.stateUpdated)), @@ -52,6 +53,8 @@ 
_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo) { assert(_thread->IsCurrent()); assert(_stateUpdated != nullptr); assert(_signalingDataEmitted != nullptr); + + _preferredCodecs = descriptor.config.preferredVideoCodecs; _sendSignalingMessage = [=](const Message &message) { if (const auto prepared = _signaling.prepareForSending(message)) { @@ -112,12 +115,19 @@ void Manager::start() { if (!strong) { return; } - const auto mappedState = state.isReadyToSendData - ? State::Established - : State::Reconnecting; + State mappedState; + if (state.isFailed) { + mappedState = State::Failed; + } else { + mappedState = state.isReadyToSendData + ? State::Established + : State::Reconnecting; + } + bool isFirstConnection = false; if (state.isReadyToSendData) { if (!strong->_didConnectOnce) { strong->_didConnectOnce = true; + isFirstConnection = true; } } strong->_state = mappedState; @@ -126,6 +136,10 @@ void Manager::start() { strong->_mediaManager->perform(RTC_FROM_HERE, [=](MediaManager *mediaManager) { mediaManager->setIsConnected(state.isReadyToSendData); }); + + if (isFirstConnection) { + strong->sendInitialSignalingMessages(); + } }); }, [=](DecryptedMessage &&message) { @@ -152,7 +166,7 @@ void Manager::start() { }); })); bool isOutgoing = _encryptionKey.isOutgoing; - _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated]() { + _mediaManager.reset(new ThreadLocalObject(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs]() { return new MediaManager( getMediaThread(), isOutgoing, @@ -169,8 
+183,12 @@ void Manager::start() { }, signalBarsUpdated, localPreferredVideoAspectRatio, - enableHighBitrateVideo); + enableHighBitrateVideo, + preferredCodecs); })); + _networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) { + networkManager->start(); + }); _mediaManager->perform(RTC_FROM_HERE, [](MediaManager *mediaManager) { mediaManager->start(); }); @@ -208,6 +226,10 @@ void Manager::receiveMessage(DecryptedMessage &&message) { if (_remoteBatteryLevelIsLowUpdated) { _remoteBatteryLevelIsLowUpdated(remoteBatteryLevelIsLow->batteryLow); } + } else if (const auto remoteNetworkType = absl::get_if(data)) { + bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost(); + _remoteNetworkIsLowCost = remoteNetworkType->isLowCost; + updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost); } else { if (const auto videoParameters = absl::get_if(data)) { float value = ((float)videoParameters->aspectRatio) / 1000.0; @@ -249,4 +271,54 @@ void Manager::setIsLowBatteryLevel(bool isLowBatteryLevel) { _sendTransportMessage({ RemoteBatteryLevelIsLowMessage{ isLowBatteryLevel } }); } +void Manager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) { + if (isLocalNetworkLowCost != _localNetworkIsLowCost) { + _networkManager->perform(RTC_FROM_HERE, [isLocalNetworkLowCost](NetworkManager *networkManager) { + networkManager->setIsLocalNetworkLowCost(isLocalNetworkLowCost); + }); + + bool wasCurrentNetworkLowCost = calculateIsCurrentNetworkLowCost(); + _localNetworkIsLowCost = isLocalNetworkLowCost; + updateIsCurrentNetworkLowCost(wasCurrentNetworkLowCost); + + switch (_protocolVersion) { + case ProtocolVersion::V1: + if (_didConnectOnce) { + _sendTransportMessage({ RemoteNetworkTypeMessage{ isLocalNetworkLowCost } }); + } + break; + default: + break; + } + } +} + +void Manager::getNetworkStats(std::function completion) { + _networkManager->perform(RTC_FROM_HERE, [completion = std::move(completion)](NetworkManager *networkManager) { + 
completion(networkManager->getNetworkStats()); + }); +} + +bool Manager::calculateIsCurrentNetworkLowCost() const { + return _localNetworkIsLowCost && _remoteNetworkIsLowCost; +} +void Manager::updateIsCurrentNetworkLowCost(bool wasLowCost) { + bool isLowCost = calculateIsCurrentNetworkLowCost(); + if (isLowCost != wasLowCost) { + _mediaManager->perform(RTC_FROM_HERE, [isLowCost](MediaManager *mediaManager) { + mediaManager->setIsCurrentNetworkLowCost(isLowCost); + }); + } +} + +void Manager::sendInitialSignalingMessages() { + switch (_protocolVersion) { + case ProtocolVersion::V1: + _sendTransportMessage({ RemoteNetworkTypeMessage{ _localNetworkIsLowCost } }); + break; + default: + break; + } +} + } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/Manager.h b/TMessagesProj/jni/tgcalls/Manager.h index afc3b4970..3368231d2 100644 --- a/TMessagesProj/jni/tgcalls/Manager.h +++ b/TMessagesProj/jni/tgcalls/Manager.h @@ -22,15 +22,21 @@ public: void setMuteOutgoingAudio(bool mute); void setIncomingVideoOutput(std::shared_ptr> sink); void setIsLowBatteryLevel(bool isLowBatteryLevel); + void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost); + void getNetworkStats(std::function completion); private: void sendSignalingAsync(int delayMs, int cause); void receiveMessage(DecryptedMessage &&message); + bool calculateIsCurrentNetworkLowCost() const; + void updateIsCurrentNetworkLowCost(bool wasLowCost); + void sendInitialSignalingMessages(); rtc::Thread *_thread; EncryptionKey _encryptionKey; EncryptedConnection _signaling; bool _enableP2P = false; + ProtocolVersion _protocolVersion = ProtocolVersion::V0; std::vector _rtcServers; std::shared_ptr _videoCapture; std::function _stateUpdated; @@ -47,6 +53,9 @@ private: bool _didConnectOnce = false; float _localPreferredVideoAspectRatio = 0.0f; bool _enableHighBitrateVideo = false; + std::vector _preferredCodecs; + bool _localNetworkIsLowCost = false; + bool _remoteNetworkIsLowCost = false; }; diff --git 
a/TMessagesProj/jni/tgcalls/MediaManager.cpp b/TMessagesProj/jni/tgcalls/MediaManager.cpp index a345c4e7e..7cecc060a 100644 --- a/TMessagesProj/jni/tgcalls/MediaManager.cpp +++ b/TMessagesProj/jni/tgcalls/MediaManager.cpp @@ -58,7 +58,8 @@ MediaManager::MediaManager( std::function sendTransportMessage, std::function signalBarsUpdated, float localPreferredVideoAspectRatio, - bool enableHighBitrateVideo) : + bool enableHighBitrateVideo, + std::vector preferredCodecs) : _thread(thread), _eventLog(std::make_unique()), _taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()), @@ -103,7 +104,8 @@ _enableHighBitrateVideo(enableHighBitrateVideo) { _myVideoFormats = ComposeSupportedFormats( mediaDeps.video_encoder_factory->GetSupportedFormats(), - mediaDeps.video_decoder_factory->GetSupportedFormats()); + mediaDeps.video_decoder_factory->GetSupportedFormats(), + preferredCodecs); mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create(); _mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps)); @@ -169,7 +171,7 @@ _enableHighBitrateVideo(enableHighBitrateVideo) { _videoChannel->SetInterface(_videoNetworkInterface.get()); - adjustBitratePreferences(); + adjustBitratePreferences(true); } void MediaManager::start() { @@ -379,7 +381,7 @@ void MediaManager::configureSendingVideoIfNeeded() { codec.SetParam(cricket::kCodecParamMinBitrate, 64); codec.SetParam(cricket::kCodecParamStartBitrate, 400); - codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 1600 : 800); + codec.SetParam(cricket::kCodecParamMaxBitrate, _enableHighBitrateVideo ? 
2000 : 800); cricket::VideoSendParameters videoSendParameters; videoSendParameters.codecs.push_back(codec); @@ -408,7 +410,7 @@ void MediaManager::configureSendingVideoIfNeeded() { _videoChannel->AddSendStream(cricket::StreamParams::CreateLegacy(_ssrcVideo.outgoing)); } - adjustBitratePreferences(); + adjustBitratePreferences(true); } void MediaManager::checkIsSendingVideoChanged(bool wasSending) { @@ -432,46 +434,42 @@ void MediaManager::checkIsSendingVideoChanged(bool wasSending) { _videoChannel->SetVideoSend(_ssrcVideo.fecOutgoing, NULL, nullptr); } - adjustBitratePreferences(); + adjustBitratePreferences(true); } -void MediaManager::adjustBitratePreferences() { +int MediaManager::getMaxVideoBitrate() const { + return (_enableHighBitrateVideo && _isLowCostNetwork) ? 2000000 : 800000; +} + +void MediaManager::adjustBitratePreferences(bool resetStartBitrate) { if (computeIsSendingVideo()) { webrtc::BitrateConstraints preferences; preferences.min_bitrate_bps = 64000; - preferences.start_bitrate_bps = 400000; - preferences.max_bitrate_bps = _enableHighBitrateVideo ? 1600000 : 800000; + if (resetStartBitrate) { + preferences.start_bitrate_bps = 400000; + } + preferences.max_bitrate_bps = getMaxVideoBitrate(); _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences); - - webrtc::BitrateSettings settings; - settings.min_bitrate_bps = 64000; - settings.start_bitrate_bps = 400000; - settings.max_bitrate_bps = _enableHighBitrateVideo ? 
1600000 : 800000; - - _call->GetTransportControllerSend()->SetClientBitratePreferences(settings); } else { webrtc::BitrateConstraints preferences; if (_didConfigureVideo) { // After we have configured outgoing video, RTCP stops working for outgoing audio // TODO: investigate preferences.min_bitrate_bps = 16000; - preferences.start_bitrate_bps = 16000; + if (resetStartBitrate) { + preferences.start_bitrate_bps = 16000; + } preferences.max_bitrate_bps = 32000; } else { preferences.min_bitrate_bps = 8000; - preferences.start_bitrate_bps = 16000; + if (resetStartBitrate) { + preferences.start_bitrate_bps = 16000; + } preferences.max_bitrate_bps = 32000; } _call->GetTransportControllerSend()->SetSdpBitrateParameters(preferences); - - webrtc::BitrateSettings settings; - settings.min_bitrate_bps = preferences.min_bitrate_bps; - settings.start_bitrate_bps = preferences.start_bitrate_bps; - settings.max_bitrate_bps = preferences.max_bitrate_bps; - - _call->GetTransportControllerSend()->SetClientBitratePreferences(settings); } } @@ -586,6 +584,14 @@ void MediaManager::remoteVideoStateUpdated(VideoState videoState) { } } +void MediaManager::setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost) { + if (_isLowCostNetwork != isCurrentNetworkLowCost) { + _isLowCostNetwork = isCurrentNetworkLowCost; + RTC_LOG(LS_INFO) << "MediaManager isLowCostNetwork updated: " << (isCurrentNetworkLowCost ?
1 : 0); + adjustBitratePreferences(false); + } +} + MediaManager::NetworkInterfaceImpl::NetworkInterfaceImpl(MediaManager *mediaManager, bool isVideo) : _mediaManager(mediaManager), _isVideo(isVideo) { diff --git a/TMessagesProj/jni/tgcalls/MediaManager.h b/TMessagesProj/jni/tgcalls/MediaManager.h index 32c99e359..82a035681 100644 --- a/TMessagesProj/jni/tgcalls/MediaManager.h +++ b/TMessagesProj/jni/tgcalls/MediaManager.h @@ -42,7 +42,8 @@ public: std::function sendTransportMessage, std::function signalBarsUpdated, float localPreferredVideoAspectRatio, - bool enableHighBitrateVideo); + bool enableHighBitrateVideo, + std::vector preferredCodecs); ~MediaManager(); void start(); @@ -53,6 +54,7 @@ public: void setIncomingVideoOutput(std::shared_ptr> sink); void receiveMessage(DecryptedMessage &&message); void remoteVideoStateUpdated(VideoState videoState); + void setIsCurrentNetworkLowCost(bool isCurrentNetworkLowCost); private: struct SSRC { @@ -85,8 +87,9 @@ private: void configureSendingVideoIfNeeded(); void checkIsSendingVideoChanged(bool wasSending); bool videoCodecsNegotiated() const; - - void adjustBitratePreferences(); + + int getMaxVideoBitrate() const; + void adjustBitratePreferences(bool resetStartBitrate); bool computeIsReceivingVideo() const; void checkIsReceivingVideoChanged(bool wasReceiving); @@ -133,6 +136,7 @@ private: float _localPreferredVideoAspectRatio = 0.0f; float _preferredAspectRatio = 0.0f; bool _enableHighBitrateVideo = false; + bool _isLowCostNetwork = false; std::unique_ptr _audioNetworkInterface; std::unique_ptr _videoNetworkInterface; diff --git a/TMessagesProj/jni/tgcalls/Message.cpp b/TMessagesProj/jni/tgcalls/Message.cpp index aba80ca36..07fce427b 100644 --- a/TMessagesProj/jni/tgcalls/Message.cpp +++ b/TMessagesProj/jni/tgcalls/Message.cpp @@ -264,6 +264,20 @@ bool Deserialize(RemoteBatteryLevelIsLowMessage &to, rtc::ByteBufferReader &read return true; } +void Serialize(rtc::ByteBufferWriter &to, const RemoteNetworkTypeMessage &from,
bool singleMessagePacket) { + to.WriteUInt8(from.isLowCost ? 1 : 0); +} + +bool Deserialize(RemoteNetworkTypeMessage &to, rtc::ByteBufferReader &reader, bool singleMessagePacket) { + uint8_t value = 0; + if (!reader.ReadUInt8(&value)) { + RTC_LOG(LS_ERROR) << "Could not read isLowCost."; + return false; + } + to.isLowCost = (value != 0); + return true; +} + enum class TryResult : uint8_t { Success, TryNext, diff --git a/TMessagesProj/jni/tgcalls/Message.h b/TMessagesProj/jni/tgcalls/Message.h index c9906d4f9..de6b1f329 100644 --- a/TMessagesProj/jni/tgcalls/Message.h +++ b/TMessagesProj/jni/tgcalls/Message.h @@ -97,6 +97,13 @@ struct RemoteBatteryLevelIsLowMessage { bool batteryLow = false; }; +struct RemoteNetworkTypeMessage { + static constexpr uint8_t kId = 10; + static constexpr bool kRequiresAck = true; + + bool isLowCost = false; +}; + // To add a new message you should: // 1. Add the message struct. // 2. Add the message to the variant in Message struct. @@ -112,7 +119,8 @@ struct Message { VideoDataMessage, UnstructuredDataMessage, VideoParametersMessage, - RemoteBatteryLevelIsLowMessage> data; + RemoteBatteryLevelIsLowMessage, + RemoteNetworkTypeMessage> data; }; rtc::CopyOnWriteBuffer SerializeMessageWithSeq( diff --git a/TMessagesProj/jni/tgcalls/NetworkManager.cpp b/TMessagesProj/jni/tgcalls/NetworkManager.cpp index 10cac49d3..a731ead6a 100644 --- a/TMessagesProj/jni/tgcalls/NetworkManager.cpp +++ b/TMessagesProj/jni/tgcalls/NetworkManager.cpp @@ -33,6 +33,8 @@ NetworkManager::NetworkManager( std::function sendSignalingMessage, std::function sendTransportServiceAsync) : _thread(thread), +_enableP2P(enableP2P), +_rtcServers(rtcServers), _transport( EncryptedConnection::Type::Transport, encryptionKey, @@ -43,24 +45,38 @@ _transportMessageReceived(std::move(transportMessageReceived)), _sendSignalingMessage(std::move(sendSignalingMessage)), _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), 
rtc::CreateRandomString(cricket::ICE_PWD_LENGTH)) { assert(_thread->IsCurrent()); +} - _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread)); +NetworkManager::~NetworkManager() { + assert(_thread->IsCurrent()); + + RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()"; - _networkManager = std::make_unique(); - _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr)); + _transportChannel.reset(); + _asyncResolverFactory.reset(); + _portAllocator.reset(); + _networkManager.reset(); + _socketFactory.reset(); +} - uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP; - if (!enableP2P) { - flags |= cricket::PORTALLOCATOR_DISABLE_UDP; - flags |= cricket::PORTALLOCATOR_DISABLE_STUN; - } - _portAllocator->set_flags(_portAllocator->flags() | flags); - _portAllocator->Initialize(); +void NetworkManager::start() { + _socketFactory.reset(new rtc::BasicPacketSocketFactory(_thread)); - cricket::ServerAddresses stunServers; - std::vector turnServers; + _networkManager = std::make_unique(); + _portAllocator.reset(new cricket::BasicPortAllocator(_networkManager.get(), _socketFactory.get(), nullptr, nullptr)); - for (auto &server : rtcServers) { + uint32_t flags = cricket::PORTALLOCATOR_DISABLE_TCP; + if (!_enableP2P) { + flags |= cricket::PORTALLOCATOR_DISABLE_UDP; + flags |= cricket::PORTALLOCATOR_DISABLE_STUN; + } + _portAllocator->set_flags(_portAllocator->flags() | flags); + _portAllocator->Initialize(); + + cricket::ServerAddresses stunServers; + std::vector turnServers; + + for (auto &server : _rtcServers) { if (server.isTurn) { turnServers.push_back(cricket::RelayServerConfig( rtc::SocketAddress(server.host, server.port), @@ -74,16 +90,16 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre } } - _portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE); + _portAllocator->SetConfiguration(stunServers, turnServers, 2, webrtc::NO_PRUNE); - 
_asyncResolverFactory = std::make_unique(); - _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr)); + _asyncResolverFactory = std::make_unique(); + _transportChannel.reset(new cricket::P2PTransportChannel("transport", 0, _portAllocator.get(), _asyncResolverFactory.get(), nullptr)); - cricket::IceConfig iceConfig; - iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; + cricket::IceConfig iceConfig; + iceConfig.continual_gathering_policy = cricket::GATHER_CONTINUALLY; iceConfig.prioritize_most_likely_candidate_pairs = true; iceConfig.regather_on_failed_networks_interval = 8000; - _transportChannel->SetIceConfig(iceConfig); + _transportChannel->SetIceConfig(iceConfig); cricket::IceParameters localIceParameters( _localIceParameters.ufrag, @@ -91,30 +107,22 @@ _localIceParameters(rtc::CreateRandomString(cricket::ICE_UFRAG_LENGTH), rtc::Cre false ); - _transportChannel->SetIceParameters(localIceParameters); - _transportChannel->SetIceRole(_isOutgoing ? cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED); + _transportChannel->SetIceParameters(localIceParameters); + _transportChannel->SetIceRole(_isOutgoing ? 
cricket::ICEROLE_CONTROLLING : cricket::ICEROLE_CONTROLLED); - _transportChannel->SignalCandidateGathered.connect(this, &NetworkManager::candidateGathered); - _transportChannel->SignalGatheringState.connect(this, &NetworkManager::candidateGatheringState); - _transportChannel->SignalIceTransportStateChanged.connect(this, &NetworkManager::transportStateChanged); - _transportChannel->SignalReadPacket.connect(this, &NetworkManager::transportPacketReceived); + _transportChannel->SignalCandidateGathered.connect(this, &NetworkManager::candidateGathered); + _transportChannel->SignalGatheringState.connect(this, &NetworkManager::candidateGatheringState); + _transportChannel->SignalIceTransportStateChanged.connect(this, &NetworkManager::transportStateChanged); + _transportChannel->SignalReadPacket.connect(this, &NetworkManager::transportPacketReceived); _transportChannel->SignalNetworkRouteChanged.connect(this, &NetworkManager::transportRouteChanged); - _transportChannel->MaybeStartGathering(); + _transportChannel->MaybeStartGathering(); - _transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL); -} - -NetworkManager::~NetworkManager() { - assert(_thread->IsCurrent()); + _transportChannel->SetRemoteIceMode(cricket::ICEMODE_FULL); - RTC_LOG(LS_INFO) << "NetworkManager::~NetworkManager()"; - - _transportChannel.reset(); - _asyncResolverFactory.reset(); - _portAllocator.reset(); - _networkManager.reset(); - _socketFactory.reset(); + _lastNetworkActivityMs = rtc::TimeMillis(); + + checkConnectionTimeout(); } void NetworkManager::receiveSignalingMessage(DecryptedMessage &&message) { @@ -143,6 +151,7 @@ uint32_t NetworkManager::sendMessage(const Message &message) { if (const auto prepared = _transport.prepareForSending(message)) { rtc::PacketOptions packetOptions; _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0); + addTrafficStats(prepared->bytes.size(), false); return prepared->counter; } return 0; @@ -152,9 +161,45 @@ 
void NetworkManager::sendTransportService(int cause) { if (const auto prepared = _transport.prepareForSendingService(cause)) { rtc::PacketOptions packetOptions; _transportChannel->SendPacket((const char *)prepared->bytes.data(), prepared->bytes.size(), packetOptions, 0); + addTrafficStats(prepared->bytes.size(), false); } } +void NetworkManager::setIsLocalNetworkLowCost(bool isLocalNetworkLowCost) { + _isLocalNetworkLowCost = isLocalNetworkLowCost; +} + +TrafficStats NetworkManager::getNetworkStats() { + TrafficStats stats; + stats.bytesSentWifi = _trafficStatsWifi.outgoing; + stats.bytesReceivedWifi = _trafficStatsWifi.incoming; + stats.bytesSentMobile = _trafficStatsCellular.outgoing; + stats.bytesReceivedMobile = _trafficStatsCellular.incoming; + return stats; +} + +void NetworkManager::checkConnectionTimeout() { + const auto weak = std::weak_ptr(shared_from_this()); + _thread->PostDelayedTask(RTC_FROM_HERE, [weak]() { + auto strong = weak.lock(); + if (!strong) { + return; + } + + int64_t currentTimestamp = rtc::TimeMillis(); + const int64_t maxTimeout = 20000; + + if (strong->_lastNetworkActivityMs + maxTimeout < currentTimestamp) { + NetworkManager::State emitState; + emitState.isReadyToSendData = false; + emitState.isFailed = true; + strong->_stateUpdated(emitState); + } + + strong->checkConnectionTimeout(); + }, 1000); +} + void NetworkManager::candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate) { assert(_thread->IsCurrent()); _sendSignalingMessage({ CandidatesListMessage{ { 1, candidate }, _localIceParameters } }); @@ -188,6 +233,10 @@ void NetworkManager::transportReadyToSend(cricket::IceTransportInternal *transpo void NetworkManager::transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, size_t size, const int64_t ×tamp, int unused) { assert(_thread->IsCurrent()); + + _lastNetworkActivityMs = rtc::TimeMillis(); + + addTrafficStats(size, true); if (auto decrypted = 
_transport.handleIncomingPacket(bytes, size)) { if (_transportMessageReceived) { @@ -212,4 +261,20 @@ void NetworkManager::transportRouteChanged(absl::optional rou } } +void NetworkManager::addTrafficStats(int64_t byteCount, bool isIncoming) { + if (_isLocalNetworkLowCost) { + if (isIncoming) { + _trafficStatsWifi.incoming += byteCount; + } else { + _trafficStatsWifi.outgoing += byteCount; + } + } else { + if (isIncoming) { + _trafficStatsCellular.incoming += byteCount; + } else { + _trafficStatsCellular.outgoing += byteCount; + } + } +} + } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/NetworkManager.h b/TMessagesProj/jni/tgcalls/NetworkManager.h index e34c645bc..a1eda7d13 100644 --- a/TMessagesProj/jni/tgcalls/NetworkManager.h +++ b/TMessagesProj/jni/tgcalls/NetworkManager.h @@ -34,11 +34,17 @@ namespace tgcalls { struct Message; -class NetworkManager : public sigslot::has_slots<> { +class NetworkManager : public sigslot::has_slots<>, public std::enable_shared_from_this { public: struct State { bool isReadyToSendData = false; + bool isFailed = false; }; + + struct InterfaceTrafficStats { + int64_t incoming = 0; + int64_t outgoing = 0; + }; NetworkManager( rtc::Thread *thread, @@ -51,19 +57,26 @@ public: std::function sendTransportServiceAsync); ~NetworkManager(); + void start(); void receiveSignalingMessage(DecryptedMessage &&message); uint32_t sendMessage(const Message &message); void sendTransportService(int cause); + void setIsLocalNetworkLowCost(bool isLocalNetworkLowCost); + TrafficStats getNetworkStats(); private: + void checkConnectionTimeout(); void candidateGathered(cricket::IceTransportInternal *transport, const cricket::Candidate &candidate); void candidateGatheringState(cricket::IceTransportInternal *transport); void transportStateChanged(cricket::IceTransportInternal *transport); void transportReadyToSend(cricket::IceTransportInternal *transport); void transportPacketReceived(rtc::PacketTransportInternal *transport, const char *bytes, 
size_t size, const int64_t ×tamp, int unused); void transportRouteChanged(absl::optional route); + void addTrafficStats(int64_t byteCount, bool isIncoming); rtc::Thread *_thread = nullptr; + bool _enableP2P = false; + std::vector _rtcServers; EncryptedConnection _transport; bool _isOutgoing = false; std::function _stateUpdated; @@ -78,6 +91,11 @@ private: PeerIceParameters _localIceParameters; absl::optional _remoteIceParameters; + + bool _isLocalNetworkLowCost = false; + int64_t _lastNetworkActivityMs = 0; + InterfaceTrafficStats _trafficStatsWifi; + InterfaceTrafficStats _trafficStatsCellular; }; } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp index 19edd537c..96455d735 100644 --- a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp +++ b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.cpp @@ -11,15 +11,17 @@ VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::shared_ptrmakeVideoSource(Manager::getMediaThread(), MediaManager::getWorkerThread()); _platformContext = platformContext; //this should outlive the capturer - _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) { - if (this->_stateUpdated) { - this->_stateUpdated(state); - } - }, platformContext); + if (_videoSource) { + _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) { + if (this->_stateUpdated) { + this->_stateUpdated(state); + } + }, platformContext, _videoCapturerResolution); + } } VideoCaptureInterfaceObject::~VideoCaptureInterfaceObject() { - if (_currentUncroppedSink != nullptr) { + if (_videoCapturer && _currentUncroppedSink != nullptr) { //_videoSource->RemoveSink(_currentSink.get()); _videoCapturer->setUncroppedOutput(nullptr); } @@ -30,30 +32,52 @@ void VideoCaptureInterfaceObject::switchCamera() { if (_videoCapturer && 
_currentUncroppedSink) { _videoCapturer->setUncroppedOutput(nullptr); } - _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) { - if (this->_stateUpdated) { - this->_stateUpdated(state); + if (_videoSource) { + _videoCapturer = PlatformInterface::SharedInstance()->makeVideoCapturer(_videoSource, _useFrontCamera, [this](VideoState state) { + if (this->_stateUpdated) { + this->_stateUpdated(state); + } + }, _platformContext, _videoCapturerResolution); + } + if (_videoCapturer) { + if (_currentUncroppedSink) { + _videoCapturer->setUncroppedOutput(_currentUncroppedSink); } - }, _platformContext); - if (_currentUncroppedSink) { - _videoCapturer->setUncroppedOutput(_currentUncroppedSink); - } - _videoCapturer->setState(_state); + _videoCapturer->setState(_state); + } } void VideoCaptureInterfaceObject::setState(VideoState state) { if (_state != state) { _state = state; - _videoCapturer->setState(state); + if (_videoCapturer) { + _videoCapturer->setState(state); + } } } void VideoCaptureInterfaceObject::setPreferredAspectRatio(float aspectRatio) { - _videoCapturer->setPreferredCaptureAspectRatio(aspectRatio); + if (_videoCapturer) { + if (aspectRatio > 0.01 && _videoCapturerResolution.first != 0 && _videoCapturerResolution.second != 0) { + float originalWidth = (float)_videoCapturerResolution.first; + float originalHeight = (float)_videoCapturerResolution.second; + + float width = (originalWidth > aspectRatio * originalHeight) + ? int(std::round(aspectRatio * originalHeight)) + : originalWidth; + float height = (originalWidth > aspectRatio * originalHeight) + ? 
originalHeight + : int(std::round(originalHeight / aspectRatio)); + + PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30); + } + } } void VideoCaptureInterfaceObject::setOutput(std::shared_ptr> sink) { - _videoCapturer->setUncroppedOutput(sink); + if (_videoCapturer) { + _videoCapturer->setUncroppedOutput(sink); + } _currentUncroppedSink = sink; } diff --git a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h index 3cda39520..65293f3cc 100644 --- a/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h +++ b/TMessagesProj/jni/tgcalls/VideoCaptureInterfaceImpl.h @@ -28,6 +28,7 @@ public: private: std::shared_ptr> _currentUncroppedSink; std::shared_ptr _platformContext; + std::pair _videoCapturerResolution; std::unique_ptr _videoCapturer; std::function _stateUpdated; bool _useFrontCamera = true; diff --git a/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.cpp b/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.cpp index af1a68942..e088693ad 100644 --- a/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.cpp +++ b/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.cpp @@ -155,8 +155,8 @@ onSignalBarsUpdated_(std::move(descriptor.signalBarsUpdated)) { InstanceImplLegacy::~InstanceImplLegacy() { if (controller_) { - stop(); - } + stop([](FinalState state){}); + } } void InstanceImplLegacy::setNetworkType(NetworkType networkType) { @@ -273,7 +273,7 @@ PersistentState InstanceImplLegacy::getPersistentState() { return {controller_->GetPersistentState()}; } -FinalState InstanceImplLegacy::stop() { +void InstanceImplLegacy::stop(std::function completion) { controller_->Stop(); auto result = FinalState(); @@ -285,7 +285,7 @@ FinalState InstanceImplLegacy::stop() { delete controller_; controller_ = nullptr; - return result; + completion(result); } void InstanceImplLegacy::ControllerStateCallback(tgvoip::VoIPController *controller, int state) { @@ -323,8 +323,10 @@ 
int InstanceImplLegacy::GetConnectionMaxLayer() { return tgvoip::VoIPController::GetConnectionMaxLayer(); } -std::string InstanceImplLegacy::GetVersion() { - return tgvoip::VoIPController::GetVersion(); +std::vector InstanceImplLegacy::GetVersions() { + std::vector result; + result.push_back("2.4.4"); + return result; } template <> diff --git a/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.h b/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.h index 500a9a483..6dc7e7c1b 100644 --- a/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.h +++ b/TMessagesProj/jni/tgcalls/legacy/InstanceImplLegacy.h @@ -14,7 +14,7 @@ public: ~InstanceImplLegacy(); static int GetConnectionMaxLayer(); - static std::string GetVersion(); + static std::vector GetVersions(); void receiveSignalingData(const std::vector &data) override; void setNetworkType(NetworkType networkType) override; @@ -35,7 +35,7 @@ public: int64_t getPreferredRelayId() override; TrafficStats getTrafficStats() override; PersistentState getPersistentState() override; - FinalState stop() override; + void stop(std::function completion) override; private: tgvoip::VoIPController *controller_; diff --git a/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h b/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h index ba4f54e0e..d92096919 100644 --- a/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/PlatformInterface.h @@ -27,7 +27,8 @@ public: virtual std::unique_ptr makeVideoDecoderFactory() = 0; virtual bool supportsEncoding(const std::string &codecName) = 0; virtual rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0; - virtual std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) = 0; + virtual void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) = 0; + virtual std::unique_ptr 
makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) = 0; }; diff --git a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp index a8e5c47b3..0b4e1b510 100644 --- a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp +++ b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.cpp @@ -20,6 +20,14 @@ namespace tgcalls { +void AndroidInterface::configurePlatformAudio() { + +} + +float AndroidInterface::getDisplayAspectRatio() { + return 0; +} + std::unique_ptr AndroidInterface::makeVideoEncoderFactory() { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); webrtc::ScopedJavaLocalRef factory_class = @@ -46,6 +54,10 @@ std::unique_ptr AndroidInterface::makeVideoDecoderF return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj()); } +void AndroidInterface::adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) { + +} + rtc::scoped_refptr AndroidInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) { JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded(); _source = webrtc::CreateJavaVideoSource(env, signalingThread, false, false); @@ -75,7 +87,7 @@ bool AndroidInterface::supportsEncoding(const std::string &codecName) { return codecName == cricket::kVp8CodecName; } -std::unique_ptr AndroidInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) { +std::unique_ptr AndroidInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) { return std::make_unique(_source, useFrontCamera, stateUpdated, platformContext); } diff --git a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h 
b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h index 430f80f14..63fc9d7e7 100644 --- a/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/android/AndroidInterface.h @@ -9,11 +9,14 @@ namespace tgcalls { class AndroidInterface : public PlatformInterface { public: + void configurePlatformAudio() override; + float getDisplayAspectRatio() override; std::unique_ptr makeVideoEncoderFactory() override; std::unique_ptr makeVideoDecoderFactory() override; bool supportsEncoding(const std::string &codecName) override; rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override; - std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) override; + void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) override; + std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) override; private: rtc::scoped_refptr _source; diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.h b/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.h index 5d91ea776..6fbcbd9fd 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.h @@ -13,7 +13,8 @@ public: std::unique_ptr makeVideoDecoderFactory() override; bool supportsEncoding(const std::string &codecName) override; rtc::scoped_refptr makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override; - std::unique_ptr makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) override; + virtual void adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) override; + std::unique_ptr 
makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) override; }; diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.mm b/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.mm index f58c71be9..c9aa5c195 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.mm +++ b/TMessagesProj/jni/tgcalls/platform/darwin/DarwinInterface.mm @@ -18,6 +18,12 @@ namespace tgcalls { +static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr nativeSource) { + webrtc::VideoTrackSourceProxy *proxy_source = + static_cast(nativeSource.get()); + return static_cast(proxy_source->internal()); +} + void DarwinInterface::configurePlatformAudio() { #ifdef WEBRTC_IOS [RTCAudioSession sharedInstance].useManualAudio = true; @@ -45,7 +51,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) { return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality]; } #elif defined WEBRTC_MAC // WEBRTC_IOS - if (@available(macOS 10.13, *)) { + if (@available(macOS 10.14, *)) { return [[AVAssetExportSession allExportPresets] containsObject:AVAssetExportPresetHEVCHighestQuality]; } #endif // WEBRTC_IOS || WEBRTC_MAC @@ -54,11 +60,7 @@ bool DarwinInterface::supportsEncoding(const std::string &codecName) { } else if (codecName == cricket::kVp8CodecName) { return true; } else if (codecName == cricket::kVp9CodecName) { - #ifndef WEBRTC_IOS return true; - #else - return false; - #endif } return false; } @@ -68,8 +70,12 @@ rtc::scoped_refptr DarwinInterface::makeVideo return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, objCVideoTrackSource); } -std::unique_ptr DarwinInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) { - return std::make_unique(source, useFrontCamera, stateUpdated); +void 
DarwinInterface::adaptVideoSource(rtc::scoped_refptr videoSource, int width, int height, int fps) { + getObjCVideoSource(videoSource)->OnOutputFormatRequest(width, height, fps); +} + +std::unique_ptr DarwinInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext, std::pair &outResolution) { + return std::make_unique(source, useFrontCamera, stateUpdated, outResolution); } std::unique_ptr CreatePlatformInterface() { diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoView.h b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoView.h index e88c2f0fb..dadec48cf 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoView.h +++ b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoView.h @@ -9,7 +9,11 @@ */ #import +#ifdef WEBRTC_IOS #import +#else +#import +#endif #import "RTCMacros.h" #import "RTCVideoRenderer.h" @@ -28,8 +32,13 @@ NS_ASSUME_NONNULL_BEGIN * bounds using OpenGLES 2.0 or OpenGLES 3.0. */ RTC_OBJC_EXPORT -NS_EXTENSION_UNAVAILABLE_IOS("Rendering not available in app extensions.") -@interface GLVideoView : UIView +@interface GLVideoView : +#ifdef WEBRTC_IOS +UIView +#else +NSView +#endif + @property(nonatomic, weak) id delegate; diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h new file mode 100644 index 000000000..cded56339 --- /dev/null +++ b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.h @@ -0,0 +1,50 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#if !TARGET_OS_IPHONE + +#import +#import "api/media_stream_interface.h" +#import "RTCVideoRenderer.h" +#import "RTCVideoViewShading.h" + +NS_ASSUME_NONNULL_BEGIN + +@class GLVideoView; + +@protocol GLVideoViewDelegate @end + +@interface GLVideoView : NSView + +@property(nonatomic, weak) id delegate; + +- (instancetype)initWithFrame:(NSRect)frameRect +pixelFormat:(NSOpenGLPixelFormat *)format +shader:(id)shader +NS_DESIGNATED_INITIALIZER; + + +@property(nonatomic, nullable) NSValue *rotationOverride; + +@property (nonatomic, readwrite) int internalOrientation; + +- (std::shared_ptr>)getSink; +- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived; +- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated; +- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated; +- (void)setVideoContentMode:(CALayerContentsGravity)mode; +- (void)setIsForceMirrored:(BOOL)forceMirrored; +@end + +NS_ASSUME_NONNULL_END + +#endif diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm new file mode 100644 index 000000000..e252db9a3 --- /dev/null +++ b/TMessagesProj/jni/tgcalls/platform/darwin/GLVideoViewMac.mm @@ -0,0 +1,491 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "GLVideoViewMac.h" + +#import "TGRTCCVPixelBuffer.h" + +#import + +#import "RTCDefaultShader.h" +#import "RTCDisplayLinkTimer.h" +#import "RTCI420TextureCache.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" +#import "base/RTCVideoFrameBuffer.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" +#include "sdk/objc/native/api/video_frame.h" +#import "rtc_base/time_utils.h" +#include "sdk/objc/native/src/objc_frame_buffer.h" + +namespace { + + static RTCVideoFrame *customToObjCVideoFrame(const webrtc::VideoFrame &frame, RTCVideoRotation &rotation) { + rotation = RTCVideoRotation(frame.rotation()); + RTCVideoFrame *videoFrame = + [[RTCVideoFrame alloc] initWithBuffer:webrtc::ToObjCVideoFrameBuffer(frame.video_frame_buffer()) + rotation:rotation + timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; + videoFrame.timeStamp = frame.timestamp(); + + return videoFrame; + } + + class VideoRendererAdapterImpl : public rtc::VideoSinkInterface { + public: + VideoRendererAdapterImpl(void (^frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation)) { + _frameReceived = [frameReceived copy]; + } + + void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { + RTCVideoRotation rotation = RTCVideoRotation_0; + RTCVideoFrame* videoFrame = customToObjCVideoFrame(nativeVideoFrame, rotation); + + CGSize currentSize = (videoFrame.rotation % 180 == 0) ? 
CGSizeMake(videoFrame.width, videoFrame.height) : CGSizeMake(videoFrame.height, videoFrame.width); + + if (_frameReceived) { + _frameReceived(currentSize, videoFrame, rotation); + } + } + + private: + void (^_frameReceived)(CGSize, RTCVideoFrame *, RTCVideoRotation); + }; + +} + + + +static CGSize scaleToFillSize(CGSize size, CGSize maxSize) { + if (size.width < 1.0f) { + size.width = 1.0f; + } + if (size.height < 1.0f) { + size.height = 1.0f; + } + if (size.width < maxSize.width) { + size.height = floor(maxSize.width * size.height / MAX(1.0f, size.width)); + size.width = maxSize.width; + } + if (size.height < maxSize.height) { + size.width = floor(maxSize.height * size.width / MAX(1.0f, size.height)); + size.height = maxSize.height; + } + return size; +} + +static CGSize aspectFilled(CGSize from, CGSize to) { + CGFloat scale = MAX(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height)); + return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale)); +} +static CGSize aspectFitted(CGSize from, CGSize to) { + CGFloat scale = MAX(from.width / MAX(1.0, to.width), from.height / MAX(1.0, to.height)); + return NSMakeSize(ceil(to.width * scale), ceil(to.height * scale)); +} + +/* + + func aspectFilled(_ size: CGSize) -> CGSize { + let scale = max(size.width / max(1.0, self.width), size.height / max(1.0, self.height)) + return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale)) + } + func fittedToWidthOrSmaller(_ width: CGFloat) -> CGSize { + let scale = min(1.0, width / max(1.0, self.width)) + return CGSize(width: floor(self.width * scale), height: floor(self.height * scale)) + } + + func aspectFitted(_ size: CGSize) -> CGSize { + let scale = min(size.width / max(1.0, self.width), size.height / max(1.0, self.height)) + return CGSize(width: ceil(self.width * scale), height: ceil(self.height * scale)) + } + */ + + +#if !TARGET_OS_IPHONE + +@interface OpenGLVideoView : NSOpenGLView +@property(atomic, strong) RTCVideoFrame *videoFrame; 
+@property(atomic, strong) RTCI420TextureCache *i420TextureCache; + +- (void)drawFrame; +- (instancetype)initWithFrame:(NSRect)frame + pixelFormat:(NSOpenGLPixelFormat *)format + shader:(id)shader; +@end + +static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, + const CVTimeStamp *now, + const CVTimeStamp *outputTime, + CVOptionFlags flagsIn, + CVOptionFlags *flagsOut, + void *displayLinkContext) { + OpenGLVideoView *view = + (__bridge OpenGLVideoView *)displayLinkContext; + [view drawFrame]; + return kCVReturnSuccess; +} + + +@implementation OpenGLVideoView { + CVDisplayLinkRef _displayLink; + RTCVideoFrame * _lastDrawnFrame; + id _shader; + + int64_t _lastDrawnFrameTimeStampNs; + void (^_onFirstFrameReceived)(float); + bool _firstFrameReceivedReported; +} + +@synthesize videoFrame = _videoFrame; +@synthesize i420TextureCache = _i420TextureCache; + +- (instancetype)initWithFrame:(NSRect)frame + pixelFormat:(NSOpenGLPixelFormat *)format + shader:(id)shader { + if (self = [super initWithFrame:frame pixelFormat:format]) { + self->_shader = shader; + } + return self; +} + +- (void)reshape { + [super reshape]; + NSRect frame = [self frame]; + [self ensureGLContext]; + CGLLockContext([[self openGLContext] CGLContextObj]); + glViewport(0, 0, frame.size.width, frame.size.height); + CGLUnlockContext([[self openGLContext] CGLContextObj]); +} + +- (void)lockFocus { + NSOpenGLContext *context = [self openGLContext]; + [super lockFocus]; + if ([context view] != self) { + [context setView:self]; + } + [context makeCurrentContext]; +} + +- (void)prepareOpenGL { + [super prepareOpenGL]; + [self ensureGLContext]; + glDisable(GL_DITHER); + [self setupDisplayLink]; +} + +- (void)clearGLContext { + [self ensureGLContext]; + self.i420TextureCache = nil; + [super clearGLContext]; +} + +- (void)drawRect:(NSRect)rect { + [self drawFrame]; +} + +- (void)drawFrame { + RTCVideoFrame *frame = self.videoFrame; + if (!frame || frame == _lastDrawnFrame) { + return; + } + // This 
method may be called from CVDisplayLink callback which isn't on the + // main thread so we have to lock the GL context before drawing. + NSOpenGLContext *context = [self openGLContext]; + CGLLockContext([context CGLContextObj]); + + [self ensureGLContext]; + glClear(GL_COLOR_BUFFER_BIT); + + + // Rendering native CVPixelBuffer is not supported on OS X. + // TODO(magjed): Add support for NV12 texture cache on OS X. + frame = [frame newI420VideoFrame]; + if (!self.i420TextureCache) { + self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context]; + } + RTCVideoRotation rotation = frame.rotation; + + RTCI420TextureCache *i420TextureCache = self.i420TextureCache; + if (i420TextureCache) { + [i420TextureCache uploadFrameToTextures:frame]; + [_shader applyShadingForFrameWithWidth:frame.width + height:frame.height + rotation:rotation + yPlane:i420TextureCache.yTexture + uPlane:i420TextureCache.uTexture + vPlane:i420TextureCache.vTexture]; + [context flushBuffer]; + _lastDrawnFrame = frame; + } + CGLUnlockContext([context CGLContextObj]); + + if (!_firstFrameReceivedReported && _onFirstFrameReceived) { + _firstFrameReceivedReported = true; + float aspectRatio = (float)frame.width / (float)frame.height; + dispatch_async(dispatch_get_main_queue(), ^{ + self->_onFirstFrameReceived(aspectRatio); + }); + } + +} + + +- (void)setupDisplayLink { + if (_displayLink) { + return; + } + // Synchronize buffer swaps with vertical refresh rate. + GLint swapInt = 1; + [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval]; + + // Create display link. + CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink); + CVDisplayLinkSetOutputCallback(_displayLink, + &OnDisplayLinkFired, + (__bridge void *)self); + // Set the display link for the current renderer. 
+ CGLContextObj cglContext = [[self openGLContext] CGLContextObj]; + CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj]; + CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext( + _displayLink, cglContext, cglPixelFormat); + CVDisplayLinkStart(_displayLink); +} + +-(void)setFrameOrigin:(NSPoint)newOrigin { + [super setFrameOrigin:newOrigin]; +} + +- (void)teardownDisplayLink { + if (!_displayLink) { + return; + } + CVDisplayLinkRelease(_displayLink); + _displayLink = NULL; +} + +- (void)ensureGLContext { + NSOpenGLContext* context = [self openGLContext]; + NSAssert(context, @"context shouldn't be nil"); + if ([NSOpenGLContext currentContext] != context) { + [context makeCurrentContext]; + } +} + +- (void)dealloc { + [self teardownDisplayLink]; +} + +- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived { + _onFirstFrameReceived = [onFirstFrameReceived copy]; + _firstFrameReceivedReported = false; +} + + +@end + + + + +@interface GLVideoView () +@property(nonatomic, strong) OpenGLVideoView *glView; +@end + +@implementation GLVideoView { + + CGSize _currentSize; + + std::shared_ptr _sink; + + void (^_onOrientationUpdated)(int); + void (^_onIsMirroredUpdated)(bool); + + bool _didSetShouldBeMirrored; + bool _shouldBeMirrored; + bool _forceMirrored; + +} + +@synthesize delegate = _delegate; + +-(instancetype)initWithFrame:(NSRect)frameRect { + NSOpenGLPixelFormatAttribute attributes[] = { + NSOpenGLPFADoubleBuffer, + NSOpenGLPFADepthSize, 24, + NSOpenGLPFAOpenGLProfile, + NSOpenGLProfileVersion3_2Core, + 0 + }; + NSOpenGLPixelFormat* pixelFormat = + [[NSOpenGLPixelFormat alloc] initWithAttributes:attributes]; + return [self initWithFrame:frameRect pixelFormat: pixelFormat]; +} + +- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { + return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]]; +} + +- (instancetype)initWithFrame:(NSRect)frame + 
pixelFormat:(NSOpenGLPixelFormat *)format + shader:(id)shader { + if (self = [super initWithFrame:frame]) { + + _glView = [[OpenGLVideoView alloc] initWithFrame:frame pixelFormat:format shader:shader]; + _glView.wantsLayer = YES; + self.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize; + _glView.layerContentsRedrawPolicy = NSViewLayerContentsRedrawDuringViewResize; + + [self addSubview:_glView]; + + __weak GLVideoView *weakSelf = self; + + self.wantsLayer = YES; + + _sink.reset(new VideoRendererAdapterImpl(^(CGSize size, RTCVideoFrame *videoFrame, RTCVideoRotation rotation) { + dispatch_async(dispatch_get_main_queue(), ^{ + __strong GLVideoView *strongSelf = weakSelf; + if (strongSelf == nil) { + return; + } + if (!CGSizeEqualToSize(size, strongSelf->_currentSize)) { + strongSelf->_currentSize = size; + [strongSelf setSize:size]; + } + + int mappedValue = 0; + switch (rotation) { + case RTCVideoRotation_90: + mappedValue = 0; + break; + case RTCVideoRotation_180: + mappedValue = 1; + break; + case RTCVideoRotation_270: + mappedValue = 2; + break; + default: + mappedValue = 0; + break; + } + [strongSelf setInternalOrientation:mappedValue]; + + [strongSelf renderFrame:videoFrame]; + }); + })); + } + return self; +} + + + +- (CALayerContentsGravity)videoContentMode { + return self.glView.layer.contentsGravity; +} + +- (void)setVideoContentMode:(CALayerContentsGravity)mode { + self.glView.layer.contentsGravity = mode; + [self setNeedsLayout:YES]; +} + +-(void)layout { + [super layout]; + + if (self.bounds.size.width > 0.0f && _currentSize.width > 0) { + + NSSize size = _currentSize; + NSSize frameSize = self.frame.size; + if ( self.glView.layer.contentsGravity == kCAGravityResizeAspectFill) { + size = aspectFitted(frameSize, _currentSize); + } else { + size = aspectFilled(frameSize, _currentSize); + } + _glView.frame = CGRectMake(floor((self.bounds.size.width - size.width) / 2.0), floor((self.bounds.size.height - size.height) / 2.0), size.width, 
size.height); + } + + if (_shouldBeMirrored || _forceMirrored) { + self.glView.layer.anchorPoint = NSMakePoint(1, 0); + self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1); + } else { + self.glView.layer.anchorPoint = NSMakePoint(0, 0); + self.glView.layer.affineTransform = CGAffineTransformIdentity; + } +} + +- (void)setSize:(CGSize)size { + [self.delegate videoView:self didChangeVideoSize:size]; + [self setNeedsLayout:YES]; +} + +- (void)renderFrame:(RTCVideoFrame *)videoFrame { + self.glView.videoFrame = videoFrame; + + if ([videoFrame.buffer isKindOfClass:[RTCCVPixelBuffer class]]) { + RTCCVPixelBuffer *buffer = (RTCCVPixelBuffer*)videoFrame.buffer; + if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) { + bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored; + if (shouldBeMirrored != _shouldBeMirrored) { + _shouldBeMirrored = shouldBeMirrored; + if (shouldBeMirrored || _forceMirrored) { + self.glView.layer.anchorPoint = NSMakePoint(1, 0); + self.glView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1); + } else { + self.glView.layer.anchorPoint = NSMakePoint(0, 0); + self.glView.layer.affineTransform = CGAffineTransformIdentity; + } + } + + if (shouldBeMirrored != _shouldBeMirrored) { + if (_didSetShouldBeMirrored) { + if (_onIsMirroredUpdated) { + _onIsMirroredUpdated(_shouldBeMirrored); + } + } else { + _didSetShouldBeMirrored = true; + } + } + } + } +} + +#pragma mark - Private + + + +- (std::shared_ptr>)getSink { + assert([NSThread isMainThread]); + + return _sink; +} + +- (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived { + [self.glView setOnFirstFrameReceived:onFirstFrameReceived]; +} + +- (void)setInternalOrientation:(int)internalOrientation { + _internalOrientation = internalOrientation; + if (_onOrientationUpdated) { + _onOrientationUpdated(internalOrientation); + } +} + +- (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated { + 
_onOrientationUpdated = [onOrientationUpdated copy]; +} + +- (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated { +} + +- (void)setIsForceMirrored:(BOOL)forceMirrored { + _forceMirrored = forceMirrored; + [self setNeedsLayout:YES]; +} + +@end + +#endif // !TARGET_OS_IPHONE diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturer.mm b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturer.mm index 5f90112d5..5c71452d7 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturer.mm +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturer.mm @@ -154,13 +154,18 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr @interface VideoCameraCapturer () { rtc::scoped_refptr _source; + // Live on main thread. bool _isFrontCamera; dispatch_queue_t _frameQueue; + + // Live on RTCDispatcherTypeCaptureSession. AVCaptureDevice *_currentDevice; BOOL _hasRetriedOnFatalError; BOOL _isRunning; - BOOL _willBeRunning; + + // Live on RTCDispatcherTypeCaptureSession and main thread. + std::atomic _willBeRunning; AVCaptureVideoDataOutput *_videoDataOutput; AVCaptureSession *_captureSession; @@ -170,16 +175,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr UIDeviceOrientation _orientation; bool _rotationLock; + // Live on mainThread. void (^_isActiveUpdated)(bool); bool _isActiveValue; bool _inForegroundValue; - bool _isPaused; + // Live on frameQueue and main thread. + std::atomic _isPaused; + + // Live on frameQueue. float _aspectRatio; std::vector _croppingBuffer; std::shared_ptr> _uncroppedSink; - int _warmupFrameCount; + // Live on frameQueue and RTCDispatcherTypeCaptureSession. 
+ std::atomic _warmupFrameCount; } @end @@ -292,18 +302,22 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr } - (void)setUncroppedSink:(std::shared_ptr>)sink { - _uncroppedSink = sink; + dispatch_async(self.frameQueue, ^{ + _uncroppedSink = sink; + }); } - (void)setPreferredCaptureAspectRatio:(float)aspectRatio { - _aspectRatio = aspectRatio; + dispatch_async(self.frameQueue, ^{ + _aspectRatio = aspectRatio; + }); } - (void)startCaptureWithDevice:(AVCaptureDevice *)device format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps completionHandler:(nullable void (^)(NSError *))completionHandler { - _willBeRunning = YES; + _willBeRunning = true; [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ @@ -323,7 +337,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr if (completionHandler) { completionHandler(error); } - _willBeRunning = NO; + _willBeRunning = false; return; } [self reconfigureCaptureSessionInput]; @@ -340,7 +354,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr } - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { - _willBeRunning = NO; + _willBeRunning = false; [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.h b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.h index d18117e9d..806ec124b 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.h +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.h @@ -20,7 +20,7 @@ - (void)setIsEnabled:(bool)isEnabled; - (void)setPreferredCaptureAspectRatio:(float)aspectRatio; - (void)setUncroppedSink:(std::shared_ptr>)sink; - +- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device; @end #endif //WEBRTC_MAC diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.mm 
b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.mm index 9ead99be8..fdd9b0343 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.mm +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCameraCapturerMac.mm @@ -157,9 +157,13 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr dispatch_queue_t _frameQueue; AVCaptureDevice *_currentDevice; + + // Live on RTCDispatcherTypeCaptureSession. BOOL _hasRetriedOnFatalError; BOOL _isRunning; - BOOL _willBeRunning; + + // Live on RTCDispatcherTypeCaptureSession and main thread. + std::atomic _willBeRunning; AVCaptureVideoDataOutput *_videoDataOutput; AVCaptureSession *_captureSession; @@ -171,15 +175,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; + // Live on mainThread. void (^_isActiveUpdated)(bool); bool _isActiveValue; bool _inForegroundValue; - bool _isPaused; - int _skippedFrame; + + // Live on frameQueue and main thread. 
+ std::atomic _isPaused; + std::atomic _skippedFrame; + // Live on frameQueue; float _aspectRatio; std::vector _croppingBuffer; std::shared_ptr> _uncroppedSink; + + int _warmupFrameCount; } @@ -197,6 +207,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr _isPaused = false; _skippedFrame = 0; _rotation = RTCVideoRotation_0; + + _warmupFrameCount = 100; if (![self setupCaptureSession:[[AVCaptureSession alloc] init]]) { return nil; @@ -211,7 +223,21 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr } + (NSArray *)captureDevices { - return [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]; + AVCaptureDevice * defaultDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]; + NSMutableArray * devices = [[AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo] mutableCopy]; + + [devices insertObject:defaultDevice atIndex:0]; + + return devices; +} + +- (BOOL)deviceIsCaptureCompitable:(AVCaptureDevice *)device { + if (![device isConnected] || [device isSuspended]) { + return NO; + } + AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:nil]; + + return [_captureSession canAddInput:input]; } + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device { @@ -264,11 +290,15 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr - (void)setUncroppedSink:(std::shared_ptr>)sink { - _uncroppedSink = sink; + dispatch_async(self.frameQueue, ^{ + _uncroppedSink = sink; + }); } - (void)setPreferredCaptureAspectRatio:(float)aspectRatio { - _aspectRatio = aspectRatio; + dispatch_async(self.frameQueue, ^{ + _aspectRatio = MAX(0.7, aspectRatio); + }); } - (void)updateIsActiveValue { @@ -286,7 +316,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr format:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps completionHandler:(nullable void (^)(NSError *))completionHandler { - _willBeRunning = YES; + 
_willBeRunning = true; [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ @@ -302,7 +332,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr if (completionHandler) { completionHandler(error); } - self->_willBeRunning = NO; + self->_willBeRunning = false; return; } [self reconfigureCaptureSessionInput]; @@ -318,7 +348,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr } - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { - _willBeRunning = NO; + _willBeRunning = false; [RTCDispatcher dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ @@ -344,6 +374,12 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr fromConnection:(AVCaptureConnection *)connection { NSParameterAssert(captureOutput == _videoDataOutput); + int minWarmupFrameCount = 12; + _warmupFrameCount++; + if (_warmupFrameCount < minWarmupFrameCount) { + return; + } + if (CMSampleBufferGetNumSamples(sampleBuffer) != 1 || !CMSampleBufferIsValid(sampleBuffer) || !CMSampleBufferDataIsReady(sampleBuffer)) { return; @@ -374,6 +410,8 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:pixelBuffer adaptedWidth:width adaptedHeight:height cropWidth:width cropHeight:height cropX:left cropY:top]; + rtcPixelBuffer.shouldBeMirrored = YES; + CVPixelBufferRef outputPixelBufferRef = NULL; OSType pixelFormat = CVPixelBufferGetPixelFormatType(rtcPixelBuffer.pixelBuffer); CVPixelBufferCreate(NULL, width, height, pixelFormat, NULL, &outputPixelBufferRef); @@ -384,6 +422,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr } if ([rtcPixelBuffer cropAndScaleTo:outputPixelBufferRef withTempBuffer:_croppingBuffer.data()]) { rtcPixelBuffer = [[TGRTCCVPixelBuffer alloc] initWithPixelBuffer:outputPixelBufferRef]; + 
rtcPixelBuffer.shouldBeMirrored = YES; } CVPixelBufferRelease(outputPixelBufferRef); } @@ -480,6 +519,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr if (!self->_hasRetriedOnFatalError) { RTCLogWarning(@"Attempting to recover from fatal capture error."); [self handleNonFatalError]; + self->_warmupFrameCount = 0; self->_hasRetriedOnFatalError = YES; } else { RTCLogError(@"Previous fatal error recovery failed."); @@ -492,6 +532,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr block:^{ RTCLog(@"Restarting capture session after error."); if (self->_isRunning) { + self->_warmupFrameCount = 0; [self->_captureSession startRunning]; } }]; @@ -504,6 +545,7 @@ static webrtc::ObjCVideoTrackSource *getObjCVideoSource(const rtc::scoped_refptr block:^{ if (self->_isRunning && !self->_captureSession.isRunning) { RTCLog(@"Restarting capture session on active."); + self->_warmupFrameCount = 0; [self->_captureSession startRunning]; } }]; diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.h b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.h index 1797dbcf4..8789b3ef4 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.h +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.h @@ -16,7 +16,7 @@ namespace tgcalls { class VideoCapturerInterfaceImpl : public VideoCapturerInterface { public: - VideoCapturerInterfaceImpl(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated); + VideoCapturerInterfaceImpl(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::pair &outResolution); ~VideoCapturerInterfaceImpl() override; void setState(VideoState state) override; diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.mm b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.mm index bb331fe5f..dc3446ef4 100644 --- 
a/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.mm +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoCapturerInterfaceImpl.mm @@ -32,6 +32,28 @@ #import "VideoCaptureInterface.h" +@interface VideoCapturerInterfaceImplSourceDescription : NSObject + +@property (nonatomic, readonly) bool isFrontCamera; +@property (nonatomic, strong, readonly, nonnull) AVCaptureDevice *device; +@property (nonatomic, strong, readonly, nonnull) AVCaptureDeviceFormat *format; + +@end + +@implementation VideoCapturerInterfaceImplSourceDescription + +- (instancetype)initWithIsFrontCamera:(bool)isFrontCamera device:(AVCaptureDevice * _Nonnull)device format:(AVCaptureDeviceFormat * _Nonnull)format { + self = [super init]; + if (self != nil) { + _isFrontCamera = isFrontCamera; + _device = device; + _format = format; + } + return self; +} + +@end + @interface VideoCapturerInterfaceImplReference : NSObject { VideoCameraCapturer *_videoCapturer; } @@ -40,90 +62,118 @@ @implementation VideoCapturerInterfaceImplReference -- (instancetype)initWithSource:(rtc::scoped_refptr)source useFrontCamera:(bool)useFrontCamera isActiveUpdated:(void (^)(bool))isActiveUpdated { ++ (AVCaptureDevice *)selectCaptureDevice:(bool)useFrontCamera { + AVCaptureDevice *selectedCamera = nil; + +#ifdef WEBRTC_IOS + AVCaptureDevice *frontCamera = nil; + AVCaptureDevice *backCamera = nil; + for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) { + if (device.position == AVCaptureDevicePositionFront) { + frontCamera = device; + } else if (device.position == AVCaptureDevicePositionBack) { + backCamera = device; + } + } + if (useFrontCamera && frontCamera != nil) { + selectedCamera = frontCamera; + } else { + selectedCamera = backCamera; + } +#else + NSArray *devices = [VideoCameraCapturer captureDevices]; + for (int i = 0; i < devices.count; i++) { + if ([_videoCapturer deviceIsCaptureCompitable:devices[i]]) { + selectedCamera = devices[i]; + break; + } + } +#endif + + return 
selectedCamera; +} + ++ (AVCaptureDeviceFormat *)selectCaptureDeviceFormatForDevice:(AVCaptureDevice *)selectedCamera { + NSArray *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:selectedCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, AVCaptureDeviceFormat *rhs) { + int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width; + int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width; + return width1 < width2 ? NSOrderedAscending : NSOrderedDescending; + }]; + + AVCaptureDeviceFormat *bestFormat = sortedFormats.firstObject; + + bool didSelectPreferredFormat = false; + #ifdef WEBRTC_IOS + for (AVCaptureDeviceFormat *format in sortedFormats) { + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + if (dimensions.width == 1280 && dimensions.height == 720) { + if (format.videoFieldOfView > 60.0f && format.videoSupportedFrameRateRanges.lastObject.maxFrameRate == 30) { + didSelectPreferredFormat = true; + bestFormat = format; + break; + } + } + } + #endif + if (!didSelectPreferredFormat) { + for (AVCaptureDeviceFormat *format in sortedFormats) { + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); + if (dimensions.width >= 1000 || dimensions.height >= 1000) { + bestFormat = format; + break; + } + } + } + + if (bestFormat == nil) { + assert(false); + return nil; + } + + AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) { + if (lhs.maxFrameRate < rhs.maxFrameRate) { + return NSOrderedAscending; + } else { + return NSOrderedDescending; + } + }] lastObject]; + + if (frameRateRange == nil) { + assert(false); + return nil; + } + + return bestFormat; +} + ++ (VideoCapturerInterfaceImplSourceDescription *)selectCapturerDescription:(bool)useFrontCamera { + AVCaptureDevice 
*selectedCamera = [VideoCapturerInterfaceImplReference selectCaptureDevice:useFrontCamera]; + + if (selectedCamera == nil) { + return nil; + } + + AVCaptureDeviceFormat *bestFormat = [VideoCapturerInterfaceImplReference selectCaptureDeviceFormatForDevice:selectedCamera]; + + if (bestFormat == nil) { + return nil; + } + + return [[VideoCapturerInterfaceImplSourceDescription alloc] initWithIsFrontCamera:useFrontCamera device:selectedCamera format:bestFormat]; +} + +- (instancetype)initWithSource:(rtc::scoped_refptr)source sourceDescription:(VideoCapturerInterfaceImplSourceDescription *)sourceDescription isActiveUpdated:(void (^)(bool))isActiveUpdated { self = [super init]; if (self != nil) { assert([NSThread isMainThread]); + #ifdef WEBRTC_IOS - _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:isActiveUpdated]; + _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source useFrontCamera:sourceDescription.isFrontCamera isActiveUpdated:isActiveUpdated]; #else _videoCapturer = [[VideoCameraCapturer alloc] initWithSource:source isActiveUpdated:isActiveUpdated]; #endif - AVCaptureDevice *selectedCamera = nil; -#ifdef WEBRTC_IOS - AVCaptureDevice *frontCamera = nil; - AVCaptureDevice *backCamera = nil; - for (AVCaptureDevice *device in [VideoCameraCapturer captureDevices]) { - if (device.position == AVCaptureDevicePositionFront) { - frontCamera = device; - } else if (device.position == AVCaptureDevicePositionBack) { - backCamera = device; - } - } - if (useFrontCamera && frontCamera != nil) { - selectedCamera = frontCamera; - } else { - selectedCamera = backCamera; - } -#else - selectedCamera = [VideoCameraCapturer captureDevices].firstObject; -#endif - // NSLog(@"%@", selectedCamera); - if (selectedCamera == nil) { - return nil; - } - - NSArray *sortedFormats = [[VideoCameraCapturer supportedFormatsForDevice:selectedCamera] sortedArrayUsingComparator:^NSComparisonResult(AVCaptureDeviceFormat* lhs, 
AVCaptureDeviceFormat *rhs) { - int32_t width1 = CMVideoFormatDescriptionGetDimensions(lhs.formatDescription).width; - int32_t width2 = CMVideoFormatDescriptionGetDimensions(rhs.formatDescription).width; - return width1 < width2 ? NSOrderedAscending : NSOrderedDescending; - }]; - - AVCaptureDeviceFormat *bestFormat = sortedFormats.firstObject; - - bool didSelectPreferredFormat = false; - #ifdef WEBRTC_IOS - for (AVCaptureDeviceFormat *format in sortedFormats) { - CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - if (dimensions.width == 1280 && dimensions.height == 720) { - if (format.videoFieldOfView > 60.0f && format.videoSupportedFrameRateRanges.lastObject.maxFrameRate == 30) { - didSelectPreferredFormat = true; - bestFormat = format; - break; - } - } - } - #endif - if (!didSelectPreferredFormat) { - for (AVCaptureDeviceFormat *format in sortedFormats) { - CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(format.formatDescription); - if (dimensions.width >= 1000 || dimensions.height >= 1000) { - bestFormat = format; - break; - } - } - } - - if (bestFormat == nil) { - assert(false); - return nil; - } - - AVFrameRateRange *frameRateRange = [[bestFormat.videoSupportedFrameRateRanges sortedArrayUsingComparator:^NSComparisonResult(AVFrameRateRange *lhs, AVFrameRateRange *rhs) { - if (lhs.maxFrameRate < rhs.maxFrameRate) { - return NSOrderedAscending; - } else { - return NSOrderedDescending; - } - }] lastObject]; - - if (frameRateRange == nil) { - assert(false); - return nil; - } - - [_videoCapturer startCaptureWithDevice:selectedCamera format:bestFormat fps:30]; + [_videoCapturer startCaptureWithDevice:sourceDescription.device format:sourceDescription.format fps:30]; } return self; } @@ -154,12 +204,23 @@ namespace tgcalls { -VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated) : 
+VideoCapturerInterfaceImpl::VideoCapturerInterfaceImpl(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::pair &outResolution) : _source(source) { + VideoCapturerInterfaceImplSourceDescription *sourceDescription = [VideoCapturerInterfaceImplReference selectCapturerDescription:useFrontCamera]; + + CMVideoDimensions dimensions = CMVideoFormatDescriptionGetDimensions(sourceDescription.format.formatDescription); + #ifdef WEBRTC_IOS + outResolution.first = dimensions.height; + outResolution.second = dimensions.width; + #else + outResolution.first = dimensions.width; + outResolution.second = dimensions.height; + #endif + _implReference = [[VideoCapturerInterfaceImplHolder alloc] init]; VideoCapturerInterfaceImplHolder *implReference = _implReference; dispatch_async(dispatch_get_main_queue(), ^{ - VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source useFrontCamera:useFrontCamera isActiveUpdated:^(bool isActive) { + VideoCapturerInterfaceImplReference *value = [[VideoCapturerInterfaceImplReference alloc] initWithSource:source sourceDescription:sourceDescription isActiveUpdated:^(bool isActive) { stateUpdated(isActive ? 
VideoState::Active : VideoState::Paused); }]; if (value != nil) { diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.h b/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.h index 30e981922..65e5c9ed1 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.h +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.h @@ -29,7 +29,7 @@ - (void)setOnFirstFrameReceived:(void (^ _Nullable)(float))onFirstFrameReceived; - (void)internalSetOnOrientationUpdated:(void (^ _Nullable)(int))onOrientationUpdated; - (void)internalSetOnIsMirroredUpdated:(void (^ _Nullable)(bool))onIsMirroredUpdated; - +- (void)setIsForceMirrored:(BOOL)forceMirrored; @end #endif // WEBRTC_MAC diff --git a/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.mm b/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.mm index 1d2d9a8b0..3ba0db37c 100644 --- a/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.mm +++ b/TMessagesProj/jni/tgcalls/platform/darwin/VideoMetalViewMac.mm @@ -19,13 +19,9 @@ #import "api/media_stream_interface.h" #import "RTCMTLI420Renderer.h" -#import "RTCMTLNV12Renderer.h" -#import "RTCMTLRGBRenderer.h" #define MTKViewClass NSClassFromString(@"MTKView") -#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") -#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") namespace { @@ -67,7 +63,7 @@ private: @interface VideoMetalView () { RTCMTLI420Renderer *_rendererI420; - RTCMTLNV12Renderer *_rendererNV12; + MTKView *_metalView; RTCVideoFrame *_videoFrame; CGSize _videoFrameSize; @@ -83,7 +79,7 @@ private: bool _didSetShouldBeMirrored; bool _shouldBeMirrored; - + bool _forceMirrored; } @end @@ -91,7 +87,7 @@ private: @implementation VideoMetalView + (bool)isSupported { - return YES; + return [VideoMetalView isMetalAvailable]; } - (instancetype)initWithFrame:(CGRect)frameRect { @@ -164,10 +160,6 @@ 
private: return [[MTKViewClass alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer *)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; -} - + (RTCMTLI420Renderer *)createI420Renderer { return [[RTCMTLI420RendererClass alloc] init]; } @@ -192,7 +184,7 @@ private: - (void)layout { [super layout]; - if (_shouldBeMirrored) { + if (_shouldBeMirrored || _forceMirrored) { _metalView.layer.anchorPoint = NSMakePoint(1, 0); _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1); // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1); @@ -232,20 +224,19 @@ private: if ([buffer isKindOfClass:[TGRTCCVPixelBuffer class]]) { bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored; - - if (shouldBeMirrored) { - _metalView.layer.anchorPoint = NSMakePoint(1, 0); - _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1); - // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1); - } else { - _metalView.layer.anchorPoint = NSMakePoint(0, 0); - _metalView.layer.affineTransform = CGAffineTransformIdentity; - //_metalView.layer.transform = CATransform3DIdentity; - } - if (shouldBeMirrored != _shouldBeMirrored) { _shouldBeMirrored = shouldBeMirrored; + bool shouldBeMirrored = ((TGRTCCVPixelBuffer *)buffer).shouldBeMirrored; + if (shouldBeMirrored || _forceMirrored) { + _metalView.layer.anchorPoint = NSMakePoint(1, 0); + _metalView.layer.affineTransform = CGAffineTransformMakeScale(-1, 1); + // _metalView.layer.transform = CATransform3DMakeScale(-1, 1, 1); + } else { + _metalView.layer.anchorPoint = NSMakePoint(0, 0); + _metalView.layer.affineTransform = CGAffineTransformIdentity; + //_metalView.layer.transform = CATransform3DIdentity; + } if (_didSetShouldBeMirrored) { if (_onIsMirroredUpdated) { @@ -256,31 +247,25 @@ private: } } } - - if (!_rendererI420) { - _rendererI420 = [VideoMetalView createI420Renderer]; - if (![_rendererI420 addRenderingDestination:_metalView]) { - _rendererI420 = nil; - 
RTCLogError(@"Failed to create I420 renderer"); - return; - } + } + if (!_rendererI420) { + _rendererI420 = [VideoMetalView createI420Renderer]; + if (![_rendererI420 addRenderingDestination:_metalView]) { + _rendererI420 = nil; + RTCLogError(@"Failed to create I420 renderer"); + return; } - renderer = _rendererI420; } - - if (!_firstFrameReceivedReported && _onFirstFrameReceived) { - _firstFrameReceivedReported = true; - _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height); - } - - renderer = _rendererI420; renderer.rotationOverride = _rotationOverride; [renderer drawFrame:videoFrame]; _lastFrameTimeNs = videoFrame.timeStampNs; - + if (!_firstFrameReceivedReported && _onFirstFrameReceived) { + _firstFrameReceivedReported = true; + _onFirstFrameReceived((float)videoFrame.width / (float)videoFrame.height); + } } - (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { @@ -346,11 +331,15 @@ private: return; } + + if (frame == nil) { RTCLogInfo(@"Incoming frame is nil. 
Exiting render callback."); return; } _videoFrame = frame; + + } - (std::shared_ptr>)getSink { @@ -379,4 +368,10 @@ private: _onIsMirroredUpdated = [onIsMirroredUpdated copy]; } +- (void)setIsForceMirrored:(BOOL)forceMirrored { + _forceMirrored = forceMirrored; + [self setNeedsLayout:YES]; +} + + @end diff --git a/TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.cpp b/TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.cpp similarity index 67% rename from TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.cpp rename to TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.cpp index 2b3ce79f7..4090d1a32 100644 --- a/TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.cpp +++ b/TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.cpp @@ -1,4 +1,4 @@ -#include "WindowsInterface.h" +#include "DesktopInterface.h" #include "platform/tdesktop/VideoCapturerInterfaceImpl.h" #include "platform/tdesktop/VideoCapturerTrackSource.h" @@ -9,30 +9,32 @@ namespace tgcalls { -std::unique_ptr WindowsInterface::makeVideoEncoderFactory() { +std::unique_ptr DesktopInterface::makeVideoEncoderFactory() { return webrtc::CreateBuiltinVideoEncoderFactory(); } -std::unique_ptr WindowsInterface::makeVideoDecoderFactory() { +std::unique_ptr DesktopInterface::makeVideoDecoderFactory() { return webrtc::CreateBuiltinVideoDecoderFactory(); } -rtc::scoped_refptr WindowsInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) { +rtc::scoped_refptr DesktopInterface::makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) { const auto videoTrackSource = VideoCapturerTrackSource::Create(); - return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource); + return videoTrackSource + ? 
webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, videoTrackSource) + : nullptr; } -bool WindowsInterface::supportsEncoding(const std::string &codecName) { +bool DesktopInterface::supportsEncoding(const std::string &codecName) { return (codecName == cricket::kH264CodecName) || (codecName == cricket::kVp8CodecName); } -std::unique_ptr WindowsInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) { +std::unique_ptr DesktopInterface::makeVideoCapturer(rtc::scoped_refptr source, bool useFrontCamera, std::function stateUpdated, std::shared_ptr platformContext) { return std::make_unique(source, useFrontCamera, stateUpdated); } std::unique_ptr CreatePlatformInterface() { - return std::make_unique(); + return std::make_unique(); } } // namespace tgcalls diff --git a/TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.h b/TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.h similarity index 82% rename from TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.h rename to TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.h index fcca32c9f..fe995f7bc 100644 --- a/TMessagesProj/jni/tgcalls/platform/windows/WindowsInterface.h +++ b/TMessagesProj/jni/tgcalls/platform/tdesktop/DesktopInterface.h @@ -1,12 +1,12 @@ -#ifndef TGCALLS_WINDOWS_INTERFACE_H -#define TGCALLS_WINDOWS_INTERFACE_H +#ifndef TGCALLS_DESKTOP_INTERFACE_H +#define TGCALLS_DESKTOP_INTERFACE_H #include "platform/PlatformInterface.h" #include "VideoCapturerInterface.h" namespace tgcalls { -class WindowsInterface : public PlatformInterface { +class DesktopInterface : public PlatformInterface { public: std::unique_ptr makeVideoEncoderFactory() override; std::unique_ptr makeVideoDecoderFactory() override; @@ -18,4 +18,4 @@ public: } // namespace tgcalls -#endif +#endif // TGCALLS_DESKTOP_INTERFACE_H diff --git a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp 
b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp index cea973761..f594151c2 100644 --- a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp +++ b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.cpp @@ -325,7 +325,6 @@ public: dependencies.event_log_factory = std::make_unique(dependencies.task_queue_factory.get()); dependencies.network_controller_factory = nullptr; - //dependencies.media_transport_factory = nullptr; _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); @@ -1002,8 +1001,10 @@ int InstanceImplReference::GetConnectionMaxLayer() { return 92; } -std::string InstanceImplReference::GetVersion() { - return "2.8.8"; +std::vector InstanceImplReference::GetVersions() { + std::vector result; + result.push_back("2.8.8"); + return result; } std::string InstanceImplReference::getLastError() { @@ -1027,15 +1028,15 @@ PersistentState InstanceImplReference::getPersistentState() { return PersistentState(); } -FinalState InstanceImplReference::stop() { - auto result = FinalState(); - - result.persistentState = getPersistentState(); - result.debugLog = logSink_->result(); - result.trafficStats = getTrafficStats(); - result.isRatingSuggested = false; - - return result; +void InstanceImplReference::stop(std::function completion) { + auto result = FinalState(); + + result.persistentState = getPersistentState(); + result.debugLog = logSink_->result(); + result.trafficStats = getTrafficStats(); + result.isRatingSuggested = false; + + completion(result); } template <> diff --git a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.h b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.h index 4c21bf608..47bc5ff6a 100644 --- a/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.h +++ b/TMessagesProj/jni/tgcalls/reference/InstanceImplReference.h @@ -28,13 +28,13 @@ public: void setAudioOutputDuckingEnabled(bool enabled) override; void setIsLowBatteryLevel(bool isLowBatteryLevel) override; 
static int GetConnectionMaxLayer(); - static std::string GetVersion(); + static std::vector GetVersions(); std::string getLastError() override; std::string getDebugInfo() override; int64_t getPreferredRelayId() override; TrafficStats getTrafficStats() override; PersistentState getPersistentState() override; - FinalState stop() override; + void stop(std::function completion) override; private: std::unique_ptr logSink_; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java index 8c25cd0d0..0011b59be 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/BuildVars.java @@ -19,7 +19,7 @@ public class BuildVars { public static boolean USE_CLOUD_STRINGS = true; public static boolean CHECK_UPDATES = true; public static boolean TON_WALLET_STANDALONE = false; - public static int BUILD_VERSION = 2061; + public static int BUILD_VERSION = 2064; public static String BUILD_VERSION_STRING = "7.0.0"; public static int APP_ID = 4; public static String APP_HASH = "014b35b6184100b085b0d0572f9b5103"; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java index 93d2402fb..983c54618 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/MessagesController.java @@ -288,6 +288,8 @@ public class MessagesController extends BaseController implements NotificationCe private SharedPreferences mainPreferences; private SharedPreferences emojiPreferences; + public volatile boolean ignoreSetOnline; + public static class FaqSearchResult { public String title; @@ -2064,6 +2066,7 @@ public class MessagesController extends BaseController implements NotificationCe suggestedFilters.clear(); gettingAppChangelog = false; 
dialogFiltersLoaded = false; + ignoreSetOnline = false; Utilities.stageQueue.postRunnable(() -> { readTasks.clear(); @@ -4343,7 +4346,7 @@ public class MessagesController extends BaseController implements NotificationCe checkReadTasks(); if (getUserConfig().isClientActivated()) { - if (getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) { + if (!ignoreSetOnline && getConnectionsManager().getPauseTime() == 0 && ApplicationLoader.isScreenOn && !ApplicationLoader.mainInterfacePausedStageQueue) { if (ApplicationLoader.mainInterfacePausedStageQueueTime != 0 && Math.abs(ApplicationLoader.mainInterfacePausedStageQueueTime - System.currentTimeMillis()) > 1000) { if (statusSettingState != 1 && (lastStatusUpdateTime == 0 || Math.abs(System.currentTimeMillis() - lastStatusUpdateTime) >= 55000 || offlineSent)) { statusSettingState = 1; @@ -11785,6 +11788,9 @@ public class MessagesController extends BaseController implements NotificationCe } else { ApplicationLoader.applicationContext.startService(intent); } + if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) { + ignoreSetOnline = true; + } } catch (Throwable e) { FileLog.e(e); } diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java index 3128272c7..f66075cd3 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/NativeInstance.java @@ -5,6 +5,8 @@ import org.telegram.messenger.BuildVars; import org.telegram.messenger.FileLog; import org.webrtc.VideoSink; +import java.util.concurrent.CountDownLatch; + public class NativeInstance { private Instance.OnStateUpdatedListener onStateUpdatedListener; @@ -69,6 +71,27 @@ public class NativeInstance { } } + + private Instance.FinalState finalState; + private CountDownLatch 
stopBarrier; + private void onStop(Instance.FinalState state) { + finalState = state; + if (stopBarrier != null) { + stopBarrier.countDown(); + } + } + + public Instance.FinalState stop() { + stopBarrier = new CountDownLatch(1); + stopNative(); + try { + stopBarrier.await(); + } catch (Exception e) { + FileLog.e(e); + } + return finalState; + } + private static native long makeNativeInstance(String version, NativeInstance instance, Instance.Config config, String persistentStateFilePath, Instance.Endpoint[] endpoints, Instance.Proxy proxy, int networkType, Instance.EncryptionKey encryptionKey, VideoSink remoteSink, long videoCapturer, float aspectRatio); public static native long createVideoCapturer(VideoSink localSink); public static native void setVideoStateCapturer(long videoCapturer, int videoState); @@ -87,7 +110,7 @@ public class NativeInstance { public native long getPreferredRelayId(); public native Instance.TrafficStats getTrafficStats(); public native byte[] getPersistentState(); - public native Instance.FinalState stop(); + private native void stopNative(); public native void setupOutgoingVideo(VideoSink localSink); public native void switchCamera(); public native void setVideoState(int videoState); diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java index 7871c0cee..303171891 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VideoCameraCapturer.java @@ -14,10 +14,7 @@ import org.webrtc.CameraEnumerator; import org.webrtc.CameraVideoCapturer; import org.webrtc.CapturerObserver; import org.webrtc.EglBase; -import org.webrtc.NativeAndroidVideoTrackSource; -import org.webrtc.NativeCapturerObserver; import org.webrtc.SurfaceTextureHelper; -import org.webrtc.VideoSource; @TargetApi(18) public class VideoCameraCapturer { diff --git 
a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java index 5af7e96d1..3c3f76d94 100644 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPBaseService.java @@ -62,6 +62,7 @@ import android.view.View; import android.widget.RemoteViews; import org.telegram.messenger.AndroidUtilities; +import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.BuildConfig; import org.telegram.messenger.BuildVars; import org.telegram.messenger.ContactsController; @@ -631,6 +632,9 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } stopForeground(true); stopRinging(); + if (ApplicationLoader.mainInterfacePaused || !ApplicationLoader.isScreenOn) { + MessagesController.getInstance(currentAccount).ignoreSetOnline = false; + } NotificationCenter.getInstance(currentAccount).removeObserver(this, NotificationCenter.appDidLogout); SensorManager sm = (SensorManager) getSystemService(SENSOR_SERVICE); Sensor proximity = sm.getDefaultSensor(Sensor.TYPE_PROXIMITY); @@ -649,10 +653,11 @@ public abstract class VoIPBaseService extends Service implements SensorEventList sharedInstance = null; AndroidUtilities.runOnUIThread(() -> NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.didEndCall)); if (tgVoip != null) { - updateTrafficStats(); StatsController.getInstance(currentAccount).incrementTotalCallsTime(getStatsNetworkType(), (int) (getCallDuration() / 1000) % 5); onTgVoipPreStop(); - onTgVoipStop(tgVoip.stop()); + Instance.FinalState state = tgVoip.stop(); + updateTrafficStats(state.trafficStats); + onTgVoipStop(state); prevTrafficStats = null; callStartTime = 0; tgVoip = null; @@ -802,8 +807,10 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } } - protected void updateTrafficStats() { - 
final Instance.TrafficStats trafficStats = tgVoip.getTrafficStats(); + protected void updateTrafficStats(Instance.TrafficStats trafficStats) { + if (trafficStats == null) { + trafficStats = tgVoip.getTrafficStats(); + } final long wifiSentDiff = trafficStats.bytesSentWifi - (prevTrafficStats != null ? prevTrafficStats.bytesSentWifi : 0); final long wifiRecvdDiff = trafficStats.bytesReceivedWifi - (prevTrafficStats != null ? prevTrafficStats.bytesReceivedWifi : 0); final long mobileSentDiff = trafficStats.bytesSentMobile - (prevTrafficStats != null ? prevTrafficStats.bytesSentMobile : 0); @@ -1596,7 +1603,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList } } - public class SharedUIParams { + public static class SharedUIParams { public boolean tapToVideoTooltipWasShowed; public boolean cameraAlertWasShowed; public boolean wasVideoCall; diff --git a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java index 68ee9adbb..13d2f290e 100755 --- a/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java +++ b/TMessagesProj/src/main/java/org/telegram/messenger/voip/VoIPService.java @@ -537,6 +537,7 @@ public class VoIPService extends VoIPBaseService { } public void acceptIncomingCall() { + MessagesController.getInstance(currentAccount).ignoreSetOnline = false; stopRinging(); showNotification(); configureDeviceForCall(); @@ -1141,7 +1142,7 @@ public class VoIPService extends VoIPBaseService { @Override public void run() { if (tgVoip != null) { - updateTrafficStats(); + updateTrafficStats(null); AndroidUtilities.runOnUIThread(this, 5000); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java index 7df9b9b2a..c4854bad5 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Charts/view_data/ChartHeaderView.java @@ -151,6 +151,8 @@ public class ChartHeaderView extends FrameLayout { back.setAlpha(1f); back.setTranslationX(0); back.setTranslationY(0); + back.setScaleX(1f); + back.setScaleY(1f); title.setAlpha(0f); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java index 8ffa65ee3..d2504f3a7 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/ChatActivity.java @@ -8798,7 +8798,7 @@ public class ChatActivity extends BaseFragment implements NotificationCenter.Not } updateChatListViewTopPadding(); - if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode) { + if (!firstLoading && !paused && !inPreviewMode && !inScheduleMode && !getMessagesController().ignoreSetOnline) { int scheduledRead = 0; if ((maxPositiveUnreadId != Integer.MIN_VALUE || maxNegativeUnreadId != Integer.MAX_VALUE)) { int counterDecrement = 0; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPFloatingLayout.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPFloatingLayout.java index 7fc0212b0..18cba3cb8 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPFloatingLayout.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPFloatingLayout.java @@ -27,8 +27,6 @@ import androidx.annotation.NonNull; import androidx.core.content.ContextCompat; import androidx.core.graphics.ColorUtils; -import com.google.android.exoplayer2.util.Log; - import org.telegram.messenger.AndroidUtilities; import org.telegram.messenger.R; import org.telegram.ui.Components.CubicBezierInterpolator; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java 
b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java index 4c8437302..2afa97796 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPPiPView.java @@ -201,7 +201,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener { windowLayoutParams.flags = WindowManager.LayoutParams.FLAG_NOT_FOCUSABLE | WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED | WindowManager.LayoutParams.FLAG_LAYOUT_NO_LIMITS | - WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN; + WindowManager.LayoutParams.FLAG_LAYOUT_IN_SCREEN | + WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON; return windowLayoutParams; } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java index 07c06e113..2062842ff 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPTextureView.java @@ -12,6 +12,7 @@ import android.graphics.PorterDuff; import android.graphics.PorterDuffXfermode; import android.graphics.RectF; import android.os.Build; +import android.view.Gravity; import android.view.View; import android.view.ViewOutlineProvider; import android.widget.FrameLayout; @@ -19,11 +20,10 @@ import android.widget.ImageView; import androidx.annotation.NonNull; -import com.google.android.exoplayer2.util.Log; - import org.telegram.messenger.ApplicationLoader; import org.telegram.messenger.Utilities; -import org.telegram.ui.Components.BackupImageView; +import org.telegram.ui.Components.LayoutHelper; +import org.webrtc.RendererCommon; import org.webrtc.TextureViewRenderer; import java.io.File; @@ -41,6 +41,7 @@ public class VoIPTextureView extends FrameLayout { public final TextureViewRenderer renderer; public final ImageView imageView; + public View 
backgroundView; public Bitmap cameraLastBitmap; public float stubVisibleProgress = 1f; @@ -55,10 +56,23 @@ public class VoIPTextureView extends FrameLayout { super.onFirstFrameRendered(); VoIPTextureView.this.invalidate(); } + + @Override + protected void onMeasure(int widthSpec, int heightSpec) { + super.onMeasure(widthSpec, heightSpec); + } }; renderer.setEnableHardwareScaler(true); - - addView(renderer); + renderer.setIsCamera(isCamera); + if (!isCamera) { + backgroundView = new View(context); + backgroundView.setBackgroundColor(0xff1b1f23); + addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + renderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); + addView(renderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); + } else { + addView(renderer); + } addView(imageView); if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { diff --git a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPWindowView.java b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPWindowView.java index cc683e509..ee4877f39 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPWindowView.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/Components/voip/VoIPWindowView.java @@ -3,13 +3,11 @@ package org.telegram.ui.Components.voip; import android.animation.Animator; import android.animation.AnimatorListenerAdapter; import android.app.Activity; -import android.app.KeyguardManager; import android.content.Context; import android.content.pm.ActivityInfo; import android.graphics.PixelFormat; import android.os.Build; import android.view.Gravity; -import android.view.KeyEvent; import android.view.MotionEvent; import android.view.VelocityTracker; import android.view.View; @@ -134,7 +132,7 @@ public class VoIPWindowView extends FrameLayout { try { WindowManager wm = (WindowManager) 
activity.getSystemService(Context.WINDOW_SERVICE); wm.removeView(VoIPWindowView.this); - } catch (Exception e) { + } catch (Exception ignore) { } } else { @@ -150,7 +148,7 @@ public class VoIPWindowView extends FrameLayout { setVisibility(View.GONE); try { wm.removeView(VoIPWindowView.this); - } catch (Exception e) { + } catch (Exception ignore) { } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java index f19d202a4..af235f564 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/LaunchActivity.java @@ -82,6 +82,7 @@ import org.telegram.messenger.Utilities; import org.telegram.messenger.browser.Browser; import org.telegram.messenger.camera.CameraController; import org.telegram.messenger.voip.VoIPPendingCall; +import org.telegram.messenger.voip.VoIPService; import org.telegram.tgnet.ConnectionsManager; import org.telegram.tgnet.TLRPC; import org.telegram.ui.ActionBar.ActionBarLayout; @@ -111,7 +112,6 @@ import org.telegram.ui.Components.StickersAlert; import org.telegram.ui.Components.TermsOfServiceView; import org.telegram.ui.Components.ThemeEditorView; import org.telegram.ui.Components.UpdateAppAlertDialog; -import org.telegram.ui.Components.voip.VoIPPiPView; import java.io.File; import java.util.ArrayList; @@ -1001,231 +1001,88 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa int flags = intent.getFlags(); final int[] intentAccount = new int[]{intent.getIntExtra("currentAccount", UserConfig.selectedAccount)}; switchToAccount(intentAccount[0], true); + boolean isVoipIntent = intent.getAction() != null && intent.getAction().equals("voip"); if (!fromPassword && (AndroidUtilities.needShowPasscode(true) || SharedConfig.isWaitingForPasscodeEnter)) { showPasscodeActivity(); - passcodeSaveIntent = intent; - passcodeSaveIntentIsNew = isNew; - passcodeSaveIntentIsRestore = 
restore; UserConfig.getInstance(currentAccount).saveConfig(false); - } else { - boolean pushOpened = false; + if (!isVoipIntent) { + passcodeSaveIntent = intent; + passcodeSaveIntentIsNew = isNew; + passcodeSaveIntentIsRestore = restore; + return false; + } + } + boolean pushOpened = false; - int push_user_id = 0; - int push_chat_id = 0; - int push_enc_id = 0; - int push_msg_id = 0; - int open_settings = 0; - int open_new_dialog = 0; - long dialogId = 0; - boolean showDialogsList = false; - boolean showPlayer = false; - boolean showLocations = false; - boolean audioCallUser = false; - boolean videoCallUser = false; + int push_user_id = 0; + int push_chat_id = 0; + int push_enc_id = 0; + int push_msg_id = 0; + int open_settings = 0; + int open_new_dialog = 0; + long dialogId = 0; + boolean showDialogsList = false; + boolean showPlayer = false; + boolean showLocations = false; + boolean audioCallUser = false; + boolean videoCallUser = false; - photoPathsArray = null; - videoPath = null; - sendingText = null; - documentsPathsArray = null; - documentsOriginalPathsArray = null; - documentsMimeType = null; - documentsUrisArray = null; - contactsToSend = null; - contactsToSendUri = null; + photoPathsArray = null; + videoPath = null; + sendingText = null; + documentsPathsArray = null; + documentsOriginalPathsArray = null; + documentsMimeType = null; + documentsUrisArray = null; + contactsToSend = null; + contactsToSendUri = null; - if ((flags & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0) { - if (intent != null && intent.getAction() != null && !restore) { - if (Intent.ACTION_SEND.equals(intent.getAction())) { - if (SharedConfig.directShare && intent != null && intent.getExtras() != null) { - dialogId = intent.getExtras().getLong("dialogId", 0); - String hash = null; - if (dialogId == 0) { - try { - String id = intent.getExtras().getString(ShortcutManagerCompat.EXTRA_SHORTCUT_ID); - if (id != null) { - List list = 
ShortcutManagerCompat.getDynamicShortcuts(ApplicationLoader.applicationContext); - for (int a = 0, N = list.size(); a < N; a++) { - ShortcutInfoCompat info = list.get(a); - if (id.equals(info.getId())) { - Bundle extras = info.getIntent().getExtras(); - dialogId = extras.getLong("dialogId", 0); - hash = extras.getString("hash", null); - break; - } + if ((flags & Intent.FLAG_ACTIVITY_LAUNCHED_FROM_HISTORY) == 0) { + if (intent != null && intent.getAction() != null && !restore) { + if (Intent.ACTION_SEND.equals(intent.getAction())) { + if (SharedConfig.directShare && intent != null && intent.getExtras() != null) { + dialogId = intent.getExtras().getLong("dialogId", 0); + String hash = null; + if (dialogId == 0) { + try { + String id = intent.getExtras().getString(ShortcutManagerCompat.EXTRA_SHORTCUT_ID); + if (id != null) { + List list = ShortcutManagerCompat.getDynamicShortcuts(ApplicationLoader.applicationContext); + for (int a = 0, N = list.size(); a < N; a++) { + ShortcutInfoCompat info = list.get(a); + if (id.equals(info.getId())) { + Bundle extras = info.getIntent().getExtras(); + dialogId = extras.getLong("dialogId", 0); + hash = extras.getString("hash", null); + break; } } - } catch (Throwable e) { - FileLog.e(e); } - } else { - hash = intent.getExtras().getString("hash", null); - } - if (SharedConfig.directShareHash == null || !SharedConfig.directShareHash.equals(hash)) { - dialogId = 0; - } - } - - boolean error = false; - String type = intent.getType(); - if (type != null && type.equals(ContactsContract.Contacts.CONTENT_VCARD_TYPE)) { - try { - Uri uri = (Uri) intent.getExtras().get(Intent.EXTRA_STREAM); - if (uri != null) { - contactsToSend = AndroidUtilities.loadVCardFromStream(uri, currentAccount, false, null, null); - if (contactsToSend.size() > 5) { - contactsToSend = null; - documentsUrisArray = new ArrayList<>(); - documentsUrisArray.add(uri); - documentsMimeType = type; - } else { - contactsToSendUri = uri; - } - } else { - error = true; - } - } 
catch (Exception e) { + } catch (Throwable e) { FileLog.e(e); - error = true; } } else { - String text = intent.getStringExtra(Intent.EXTRA_TEXT); - if (text == null) { - CharSequence textSequence = intent.getCharSequenceExtra(Intent.EXTRA_TEXT); - if (textSequence != null) { - text = textSequence.toString(); - } - } - String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT); - - if (!TextUtils.isEmpty(text)) { - if ((text.startsWith("http://") || text.startsWith("https://")) && !TextUtils.isEmpty(subject)) { - text = subject + "\n" + text; - } - sendingText = text; - } else if (!TextUtils.isEmpty(subject)) { - sendingText = subject; - } - - Parcelable parcelable = intent.getParcelableExtra(Intent.EXTRA_STREAM); - if (parcelable != null) { - String path; - if (!(parcelable instanceof Uri)) { - parcelable = Uri.parse(parcelable.toString()); - } - Uri uri = (Uri) parcelable; - if (uri != null) { - if (AndroidUtilities.isInternalUri(uri)) { - error = true; - } - } - if (!error) { - if (uri != null && (type != null && type.startsWith("image/") || uri.toString().toLowerCase().endsWith(".jpg"))) { - if (photoPathsArray == null) { - photoPathsArray = new ArrayList<>(); - } - SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); - info.uri = uri; - photoPathsArray.add(info); - } else { - path = AndroidUtilities.getPath(uri); - if (path != null) { - if (path.startsWith("file:")) { - path = path.replace("file://", ""); - } - if (type != null && type.startsWith("video/")) { - videoPath = path; - } else { - if (documentsPathsArray == null) { - documentsPathsArray = new ArrayList<>(); - documentsOriginalPathsArray = new ArrayList<>(); - } - documentsPathsArray.add(path); - documentsOriginalPathsArray.add(uri.toString()); - } - } else { - if (documentsUrisArray == null) { - documentsUrisArray = new ArrayList<>(); - } - documentsUrisArray.add(uri); - documentsMimeType = type; - } - } - } - } else if (sendingText == null) { - error = true; - } + 
hash = intent.getExtras().getString("hash", null); } - if (error) { - Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show(); + if (SharedConfig.directShareHash == null || !SharedConfig.directShareHash.equals(hash)) { + dialogId = 0; } - } else if (Intent.ACTION_SEND_MULTIPLE.equals(intent.getAction())) { - boolean error = false; + } + + boolean error = false; + String type = intent.getType(); + if (type != null && type.equals(ContactsContract.Contacts.CONTENT_VCARD_TYPE)) { try { - ArrayList uris = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM); - String type = intent.getType(); - if (uris != null) { - for (int a = 0; a < uris.size(); a++) { - Parcelable parcelable = uris.get(a); - if (!(parcelable instanceof Uri)) { - parcelable = Uri.parse(parcelable.toString()); - } - Uri uri = (Uri) parcelable; - if (uri != null) { - if (AndroidUtilities.isInternalUri(uri)) { - uris.remove(a); - a--; - } - } - } - if (uris.isEmpty()) { - uris = null; - } - } - if (uris != null) { - if (type != null && type.startsWith("image/")) { - for (int a = 0; a < uris.size(); a++) { - Parcelable parcelable = uris.get(a); - if (!(parcelable instanceof Uri)) { - parcelable = Uri.parse(parcelable.toString()); - } - Uri uri = (Uri) parcelable; - if (photoPathsArray == null) { - photoPathsArray = new ArrayList<>(); - } - SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); - info.uri = uri; - photoPathsArray.add(info); - } + Uri uri = (Uri) intent.getExtras().get(Intent.EXTRA_STREAM); + if (uri != null) { + contactsToSend = AndroidUtilities.loadVCardFromStream(uri, currentAccount, false, null, null); + if (contactsToSend.size() > 5) { + contactsToSend = null; + documentsUrisArray = new ArrayList<>(); + documentsUrisArray.add(uri); + documentsMimeType = type; } else { - for (int a = 0; a < uris.size(); a++) { - Parcelable parcelable = uris.get(a); - if (!(parcelable instanceof Uri)) { - parcelable = Uri.parse(parcelable.toString()); - } 
- Uri uri = (Uri) parcelable; - String path = AndroidUtilities.getPath(uri); - String originalPath = parcelable.toString(); - if (originalPath == null) { - originalPath = path; - } - if (path != null) { - if (path.startsWith("file:")) { - path = path.replace("file://", ""); - } - if (documentsPathsArray == null) { - documentsPathsArray = new ArrayList<>(); - documentsOriginalPathsArray = new ArrayList<>(); - } - documentsPathsArray.add(path); - documentsOriginalPathsArray.add(originalPath); - } else { - if (documentsUrisArray == null) { - documentsUrisArray = new ArrayList<>(); - } - documentsUrisArray.add(uri); - documentsMimeType = type; - } - } + contactsToSendUri = uri; } } else { error = true; @@ -1234,349 +1091,331 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa FileLog.e(e); error = true; } - if (error) { - Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show(); + } else { + String text = intent.getStringExtra(Intent.EXTRA_TEXT); + if (text == null) { + CharSequence textSequence = intent.getCharSequenceExtra(Intent.EXTRA_TEXT); + if (textSequence != null) { + text = textSequence.toString(); + } } - } else if (Intent.ACTION_VIEW.equals(intent.getAction())) { - Uri data = intent.getData(); - if (data != null) { - String username = null; - String login = null; - String group = null; - String sticker = null; - HashMap auth = null; - String unsupportedUrl = null; - String botUser = null; - String botChat = null; - String message = null; - String phone = null; - String game = null; - String phoneHash = null; - String lang = null; - String theme = null; - String code = null; - TLRPC.TL_wallPaper wallPaper = null; - Integer messageId = null; - Integer channelId = null; - boolean hasUrl = false; - String scheme = data.getScheme(); - if (scheme != null) { - switch (scheme) { - case "http": - case "https": { - String host = data.getHost().toLowerCase(); - if (host.equals("telegram.me") || host.equals("t.me") || 
host.equals("telegram.dog")) { - String path = data.getPath(); - if (path != null && path.length() > 1) { - path = path.substring(1); - if (path.startsWith("bg/")) { - wallPaper = new TLRPC.TL_wallPaper(); - wallPaper.settings = new TLRPC.TL_wallPaperSettings(); - wallPaper.slug = path.replace("bg/", ""); - if (wallPaper.slug != null && wallPaper.slug.length() == 6) { - try { - wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug, 16) | 0xff000000; - } catch (Exception ignore) { + String subject = intent.getStringExtra(Intent.EXTRA_SUBJECT); - } - wallPaper.slug = null; - } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { - try { - wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; - wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; - } catch (Exception ignore) { + if (!TextUtils.isEmpty(text)) { + if ((text.startsWith("http://") || text.startsWith("https://")) && !TextUtils.isEmpty(subject)) { + text = subject + "\n" + text; + } + sendingText = text; + } else if (!TextUtils.isEmpty(subject)) { + sendingText = subject; + } - } - try { - String rotation = data.getQueryParameter("rotation"); - if (!TextUtils.isEmpty(rotation)) { - wallPaper.settings.rotation = Utilities.parseInt(rotation); - } - } catch (Exception ignore) { - - } - wallPaper.slug = null; - } else { - String mode = data.getQueryParameter("mode"); - if (mode != null) { - mode = mode.toLowerCase(); - String[] modes = mode.split(" "); - if (modes != null && modes.length > 0) { - for (int a = 0; a < modes.length; a++) { - if ("blur".equals(modes[a])) { - wallPaper.settings.blur = true; - } else if ("motion".equals(modes[a])) { - wallPaper.settings.motion = true; - } - } - } - } - String intensity = data.getQueryParameter("intensity"); - if (!TextUtils.isEmpty(intensity)) { - 
wallPaper.settings.intensity = Utilities.parseInt(intensity); - } else { - wallPaper.settings.intensity = 50; - } - try { - String bgColor = data.getQueryParameter("bg_color"); - if (!TextUtils.isEmpty(bgColor)) { - wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; - } - } else { - wallPaper.settings.background_color = 0xffffffff; - } - } catch (Exception ignore) { - - } - try { - String rotation = data.getQueryParameter("rotation"); - if (!TextUtils.isEmpty(rotation)) { - wallPaper.settings.rotation = Utilities.parseInt(rotation); - } - } catch (Exception ignore) { - - } - } - } else if (path.startsWith("login/")) { - code = path.replace("login/", ""); - } else if (path.startsWith("joinchat/")) { - group = path.replace("joinchat/", ""); - } else if (path.startsWith("addstickers/")) { - sticker = path.replace("addstickers/", ""); - } else if (path.startsWith("msg/") || path.startsWith("share/")) { - message = data.getQueryParameter("url"); - if (message == null) { - message = ""; - } - if (data.getQueryParameter("text") != null) { - if (message.length() > 0) { - hasUrl = true; - message += "\n"; - } - message += data.getQueryParameter("text"); - } - if (message.length() > 4096 * 4) { - message = message.substring(0, 4096 * 4); - } - while (message.endsWith("\n")) { - message = message.substring(0, message.length() - 1); - } - } else if (path.startsWith("confirmphone")) { - phone = data.getQueryParameter("phone"); - phoneHash = data.getQueryParameter("hash"); - } else if (path.startsWith("setlanguage/")) { - lang = path.substring(12); - } else if (path.startsWith("addtheme/")) { - theme = path.substring(9); - } else if (path.startsWith("c/")) { - List segments = data.getPathSegments(); - if (segments.size() == 3) { - channelId = 
Utilities.parseInt(segments.get(1)); - messageId = Utilities.parseInt(segments.get(2)); - if (messageId == 0 || channelId == 0) { - messageId = null; - channelId = null; - } - } - } else if (path.length() >= 1) { - ArrayList segments = new ArrayList<>(data.getPathSegments()); - if (segments.size() > 0 && segments.get(0).equals("s")) { - segments.remove(0); - } - if (segments.size() > 0) { - username = segments.get(0); - if (segments.size() > 1) { - messageId = Utilities.parseInt(segments.get(1)); - if (messageId == 0) { - messageId = null; - } - } - } - botUser = data.getQueryParameter("start"); - botChat = data.getQueryParameter("startgroup"); - game = data.getQueryParameter("game"); - } - } - } - break; + Parcelable parcelable = intent.getParcelableExtra(Intent.EXTRA_STREAM); + if (parcelable != null) { + String path; + if (!(parcelable instanceof Uri)) { + parcelable = Uri.parse(parcelable.toString()); + } + Uri uri = (Uri) parcelable; + if (uri != null) { + if (AndroidUtilities.isInternalUri(uri)) { + error = true; + } + } + if (!error) { + if (uri != null && (type != null && type.startsWith("image/") || uri.toString().toLowerCase().endsWith(".jpg"))) { + if (photoPathsArray == null) { + photoPathsArray = new ArrayList<>(); } - case "tg": { - String url = data.toString(); - if (url.startsWith("tg:resolve") || url.startsWith("tg://resolve")) { - url = url.replace("tg:resolve", "tg://telegram.org").replace("tg://resolve", "tg://telegram.org"); - data = Uri.parse(url); - username = data.getQueryParameter("domain"); - if ("telegrampassport".equals(username)) { - username = null; - auth = new HashMap<>(); - String scope = data.getQueryParameter("scope"); - if (!TextUtils.isEmpty(scope) && scope.startsWith("{") && scope.endsWith("}")) { - auth.put("nonce", data.getQueryParameter("nonce")); + SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); + info.uri = uri; + photoPathsArray.add(info); + } else { + path = 
AndroidUtilities.getPath(uri); + if (path != null) { + if (path.startsWith("file:")) { + path = path.replace("file://", ""); + } + if (type != null && type.startsWith("video/")) { + videoPath = path; + } else { + if (documentsPathsArray == null) { + documentsPathsArray = new ArrayList<>(); + documentsOriginalPathsArray = new ArrayList<>(); + } + documentsPathsArray.add(path); + documentsOriginalPathsArray.add(uri.toString()); + } + } else { + if (documentsUrisArray == null) { + documentsUrisArray = new ArrayList<>(); + } + documentsUrisArray.add(uri); + documentsMimeType = type; + } + } + } + } else if (sendingText == null) { + error = true; + } + } + if (error) { + Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show(); + } + } else if (Intent.ACTION_SEND_MULTIPLE.equals(intent.getAction())) { + boolean error = false; + try { + ArrayList uris = intent.getParcelableArrayListExtra(Intent.EXTRA_STREAM); + String type = intent.getType(); + if (uris != null) { + for (int a = 0; a < uris.size(); a++) { + Parcelable parcelable = uris.get(a); + if (!(parcelable instanceof Uri)) { + parcelable = Uri.parse(parcelable.toString()); + } + Uri uri = (Uri) parcelable; + if (uri != null) { + if (AndroidUtilities.isInternalUri(uri)) { + uris.remove(a); + a--; + } + } + } + if (uris.isEmpty()) { + uris = null; + } + } + if (uris != null) { + if (type != null && type.startsWith("image/")) { + for (int a = 0; a < uris.size(); a++) { + Parcelable parcelable = uris.get(a); + if (!(parcelable instanceof Uri)) { + parcelable = Uri.parse(parcelable.toString()); + } + Uri uri = (Uri) parcelable; + if (photoPathsArray == null) { + photoPathsArray = new ArrayList<>(); + } + SendMessagesHelper.SendingMediaInfo info = new SendMessagesHelper.SendingMediaInfo(); + info.uri = uri; + photoPathsArray.add(info); + } + } else { + for (int a = 0; a < uris.size(); a++) { + Parcelable parcelable = uris.get(a); + if (!(parcelable instanceof Uri)) { + parcelable = 
Uri.parse(parcelable.toString()); + } + Uri uri = (Uri) parcelable; + String path = AndroidUtilities.getPath(uri); + String originalPath = parcelable.toString(); + if (originalPath == null) { + originalPath = path; + } + if (path != null) { + if (path.startsWith("file:")) { + path = path.replace("file://", ""); + } + if (documentsPathsArray == null) { + documentsPathsArray = new ArrayList<>(); + documentsOriginalPathsArray = new ArrayList<>(); + } + documentsPathsArray.add(path); + documentsOriginalPathsArray.add(originalPath); + } else { + if (documentsUrisArray == null) { + documentsUrisArray = new ArrayList<>(); + } + documentsUrisArray.add(uri); + documentsMimeType = type; + } + } + } + } else { + error = true; + } + } catch (Exception e) { + FileLog.e(e); + error = true; + } + if (error) { + Toast.makeText(this, "Unsupported content", Toast.LENGTH_SHORT).show(); + } + } else if (Intent.ACTION_VIEW.equals(intent.getAction())) { + Uri data = intent.getData(); + if (data != null) { + String username = null; + String login = null; + String group = null; + String sticker = null; + HashMap auth = null; + String unsupportedUrl = null; + String botUser = null; + String botChat = null; + String message = null; + String phone = null; + String game = null; + String phoneHash = null; + String lang = null; + String theme = null; + String code = null; + TLRPC.TL_wallPaper wallPaper = null; + Integer messageId = null; + Integer channelId = null; + boolean hasUrl = false; + String scheme = data.getScheme(); + if (scheme != null) { + switch (scheme) { + case "http": + case "https": { + String host = data.getHost().toLowerCase(); + if (host.equals("telegram.me") || host.equals("t.me") || host.equals("telegram.dog")) { + String path = data.getPath(); + if (path != null && path.length() > 1) { + path = path.substring(1); + if (path.startsWith("bg/")) { + wallPaper = new TLRPC.TL_wallPaper(); + wallPaper.settings = new TLRPC.TL_wallPaperSettings(); + wallPaper.slug = 
path.replace("bg/", ""); + if (wallPaper.slug != null && wallPaper.slug.length() == 6) { + try { + wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug, 16) | 0xff000000; + } catch (Exception ignore) { + + } + wallPaper.slug = null; + } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { + try { + wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; + wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; + wallPaper.settings.rotation = 45; + } catch (Exception ignore) { + + } + try { + String rotation = data.getQueryParameter("rotation"); + if (!TextUtils.isEmpty(rotation)) { + wallPaper.settings.rotation = Utilities.parseInt(rotation); + } + } catch (Exception ignore) { + + } + wallPaper.slug = null; } else { - auth.put("payload", data.getQueryParameter("payload")); + String mode = data.getQueryParameter("mode"); + if (mode != null) { + mode = mode.toLowerCase(); + String[] modes = mode.split(" "); + if (modes != null && modes.length > 0) { + for (int a = 0; a < modes.length; a++) { + if ("blur".equals(modes[a])) { + wallPaper.settings.blur = true; + } else if ("motion".equals(modes[a])) { + wallPaper.settings.motion = true; + } + } + } + } + String intensity = data.getQueryParameter("intensity"); + if (!TextUtils.isEmpty(intensity)) { + wallPaper.settings.intensity = Utilities.parseInt(intensity); + } else { + wallPaper.settings.intensity = 50; + } + try { + String bgColor = data.getQueryParameter("bg_color"); + if (!TextUtils.isEmpty(bgColor)) { + wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; + if (bgColor.length() > 6) { + wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; + wallPaper.settings.rotation = 45; + } + } else { + wallPaper.settings.background_color = 0xffffffff; + } + } catch 
(Exception ignore) { + + } + try { + String rotation = data.getQueryParameter("rotation"); + if (!TextUtils.isEmpty(rotation)) { + wallPaper.settings.rotation = Utilities.parseInt(rotation); + } + } catch (Exception ignore) { + + } + } + } else if (path.startsWith("login/")) { + code = path.replace("login/", ""); + } else if (path.startsWith("joinchat/")) { + group = path.replace("joinchat/", ""); + } else if (path.startsWith("addstickers/")) { + sticker = path.replace("addstickers/", ""); + } else if (path.startsWith("msg/") || path.startsWith("share/")) { + message = data.getQueryParameter("url"); + if (message == null) { + message = ""; + } + if (data.getQueryParameter("text") != null) { + if (message.length() > 0) { + hasUrl = true; + message += "\n"; + } + message += data.getQueryParameter("text"); + } + if (message.length() > 4096 * 4) { + message = message.substring(0, 4096 * 4); + } + while (message.endsWith("\n")) { + message = message.substring(0, message.length() - 1); + } + } else if (path.startsWith("confirmphone")) { + phone = data.getQueryParameter("phone"); + phoneHash = data.getQueryParameter("hash"); + } else if (path.startsWith("setlanguage/")) { + lang = path.substring(12); + } else if (path.startsWith("addtheme/")) { + theme = path.substring(9); + } else if (path.startsWith("c/")) { + List segments = data.getPathSegments(); + if (segments.size() == 3) { + channelId = Utilities.parseInt(segments.get(1)); + messageId = Utilities.parseInt(segments.get(2)); + if (messageId == 0 || channelId == 0) { + messageId = null; + channelId = null; + } + } + } else if (path.length() >= 1) { + ArrayList segments = new ArrayList<>(data.getPathSegments()); + if (segments.size() > 0 && segments.get(0).equals("s")) { + segments.remove(0); + } + if (segments.size() > 0) { + username = segments.get(0); + if (segments.size() > 1) { + messageId = Utilities.parseInt(segments.get(1)); + if (messageId == 0) { + messageId = null; + } + } } - auth.put("bot_id", 
data.getQueryParameter("bot_id")); - auth.put("scope", scope); - auth.put("public_key", data.getQueryParameter("public_key")); - auth.put("callback_url", data.getQueryParameter("callback_url")); - } else { botUser = data.getQueryParameter("start"); botChat = data.getQueryParameter("startgroup"); game = data.getQueryParameter("game"); - messageId = Utilities.parseInt(data.getQueryParameter("post")); - if (messageId == 0) { - messageId = null; - } } - } else if (url.startsWith("tg:privatepost") || url.startsWith("tg://privatepost")) { - url = url.replace("tg:privatepost", "tg://telegram.org").replace("tg://privatepost", "tg://telegram.org"); - data = Uri.parse(url); - messageId = Utilities.parseInt(data.getQueryParameter("post")); - channelId = Utilities.parseInt(data.getQueryParameter("channel")); - if (messageId == 0 || channelId == 0) { - messageId = null; - channelId = null; - } - } else if (url.startsWith("tg:bg") || url.startsWith("tg://bg")) { - url = url.replace("tg:bg", "tg://telegram.org").replace("tg://bg", "tg://telegram.org"); - data = Uri.parse(url); - wallPaper = new TLRPC.TL_wallPaper(); - wallPaper.settings = new TLRPC.TL_wallPaperSettings(); - wallPaper.slug = data.getQueryParameter("slug"); - if (wallPaper.slug == null) { - wallPaper.slug = data.getQueryParameter("color"); - } - if (wallPaper.slug != null && wallPaper.slug.length() == 6) { - try { - wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug, 16) | 0xff000000; - } catch (Exception ignore) { - - } - wallPaper.slug = null; - } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { - try { - wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; - wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; - } catch (Exception ignore) { - - } - try { - String rotation = 
data.getQueryParameter("rotation"); - if (!TextUtils.isEmpty(rotation)) { - wallPaper.settings.rotation = Utilities.parseInt(rotation); - } - } catch (Exception ignore) { - - } - wallPaper.slug = null; - } else { - String mode = data.getQueryParameter("mode"); - if (mode != null) { - mode = mode.toLowerCase(); - String[] modes = mode.split(" "); - if (modes != null && modes.length > 0) { - for (int a = 0; a < modes.length; a++) { - if ("blur".equals(modes[a])) { - wallPaper.settings.blur = true; - } else if ("motion".equals(modes[a])) { - wallPaper.settings.motion = true; - } - } - } - } - wallPaper.settings.intensity = Utilities.parseInt(data.getQueryParameter("intensity")); - try { - String bgColor = data.getQueryParameter("bg_color"); - if (!TextUtils.isEmpty(bgColor)) { - wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; - if (bgColor.length() > 6) { - wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; - wallPaper.settings.rotation = 45; - } - } - } catch (Exception ignore) { - - } - try { - String rotation = data.getQueryParameter("rotation"); - if (!TextUtils.isEmpty(rotation)) { - wallPaper.settings.rotation = Utilities.parseInt(rotation); - } - } catch (Exception ignore) { - - } - } - } else if (url.startsWith("tg:join") || url.startsWith("tg://join")) { - url = url.replace("tg:join", "tg://telegram.org").replace("tg://join", "tg://telegram.org"); - data = Uri.parse(url); - group = data.getQueryParameter("invite"); - } else if (url.startsWith("tg:addstickers") || url.startsWith("tg://addstickers")) { - url = url.replace("tg:addstickers", "tg://telegram.org").replace("tg://addstickers", "tg://telegram.org"); - data = Uri.parse(url); - sticker = data.getQueryParameter("set"); - } else if (url.startsWith("tg:msg") || url.startsWith("tg://msg") || url.startsWith("tg://share") || url.startsWith("tg:share")) { - url = url.replace("tg:msg", 
"tg://telegram.org").replace("tg://msg", "tg://telegram.org").replace("tg://share", "tg://telegram.org").replace("tg:share", "tg://telegram.org"); - data = Uri.parse(url); - message = data.getQueryParameter("url"); - if (message == null) { - message = ""; - } - if (data.getQueryParameter("text") != null) { - if (message.length() > 0) { - hasUrl = true; - message += "\n"; - } - message += data.getQueryParameter("text"); - } - if (message.length() > 4096 * 4) { - message = message.substring(0, 4096 * 4); - } - while (message.endsWith("\n")) { - message = message.substring(0, message.length() - 1); - } - } else if (url.startsWith("tg:confirmphone") || url.startsWith("tg://confirmphone")) { - url = url.replace("tg:confirmphone", "tg://telegram.org").replace("tg://confirmphone", "tg://telegram.org"); - data = Uri.parse(url); - - phone = data.getQueryParameter("phone"); - phoneHash = data.getQueryParameter("hash"); - } else if (url.startsWith("tg:login") || url.startsWith("tg://login")) { - url = url.replace("tg:login", "tg://telegram.org").replace("tg://login", "tg://telegram.org"); - data = Uri.parse(url); - login = data.getQueryParameter("token"); - code = data.getQueryParameter("code"); - } else if (url.startsWith("tg:openmessage") || url.startsWith("tg://openmessage")) { - url = url.replace("tg:openmessage", "tg://telegram.org").replace("tg://openmessage", "tg://telegram.org"); - data = Uri.parse(url); - - String userID = data.getQueryParameter("user_id"); - String chatID = data.getQueryParameter("chat_id"); - String msgID = data.getQueryParameter("message_id"); - if (userID != null) { - try { - push_user_id = Integer.parseInt(userID); - } catch (NumberFormatException ignore) { - } - } else if (chatID != null) { - try { - push_chat_id = Integer.parseInt(chatID); - } catch (NumberFormatException ignore) { - } - } - if (msgID != null) { - try { - push_msg_id = Integer.parseInt(msgID); - } catch (NumberFormatException ignore) { - } - } - } else if 
(url.startsWith("tg:passport") || url.startsWith("tg://passport") || url.startsWith("tg:secureid")) { - url = url.replace("tg:passport", "tg://telegram.org").replace("tg://passport", "tg://telegram.org").replace("tg:secureid", "tg://telegram.org"); - data = Uri.parse(url); + } + } + break; + } + case "tg": { + String url = data.toString(); + if (url.startsWith("tg:resolve") || url.startsWith("tg://resolve")) { + url = url.replace("tg:resolve", "tg://telegram.org").replace("tg://resolve", "tg://telegram.org"); + data = Uri.parse(url); + username = data.getQueryParameter("domain"); + if ("telegrampassport".equals(username)) { + username = null; auth = new HashMap<>(); String scope = data.getQueryParameter("scope"); if (!TextUtils.isEmpty(scope) && scope.startsWith("{") && scope.endsWith("}")) { @@ -1588,313 +1427,473 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa auth.put("scope", scope); auth.put("public_key", data.getQueryParameter("public_key")); auth.put("callback_url", data.getQueryParameter("callback_url")); - } else if (url.startsWith("tg:setlanguage") || url.startsWith("tg://setlanguage")) { - url = url.replace("tg:setlanguage", "tg://telegram.org").replace("tg://setlanguage", "tg://telegram.org"); - data = Uri.parse(url); - lang = data.getQueryParameter("lang"); - } else if (url.startsWith("tg:addtheme") || url.startsWith("tg://addtheme")) { - url = url.replace("tg:addtheme", "tg://telegram.org").replace("tg://addtheme", "tg://telegram.org"); - data = Uri.parse(url); - theme = data.getQueryParameter("slug"); - } else if (url.startsWith("tg:settings") || url.startsWith("tg://settings")) { - if (url.contains("themes")) { - open_settings = 2; - } else if (url.contains("devices")) { - open_settings = 3; - } else if (url.contains("folders")) { - open_settings = 4; - } else if (url.contains("change_number")) { - open_settings = 5; - } } else { - unsupportedUrl = url.replace("tg://", "").replace("tg:", ""); - int index; - if 
((index = unsupportedUrl.indexOf('?')) >= 0) { - unsupportedUrl = unsupportedUrl.substring(0, index); + botUser = data.getQueryParameter("start"); + botChat = data.getQueryParameter("startgroup"); + game = data.getQueryParameter("game"); + messageId = Utilities.parseInt(data.getQueryParameter("post")); + if (messageId == 0) { + messageId = null; } } - break; - } - } - } - if (code != null || UserConfig.getInstance(currentAccount).isClientActivated()) { - if (phone != null || phoneHash != null) { - final Bundle args = new Bundle(); - args.putString("phone", phone); - args.putString("hash", phoneHash); - AndroidUtilities.runOnUIThread(() -> presentFragment(new CancelAccountDeletionActivity(args))); - } else if (username != null || group != null || sticker != null || message != null || game != null || auth != null || unsupportedUrl != null || lang != null || code != null || wallPaper != null || channelId != null || theme != null || login != null) { - if (message != null && message.startsWith("@")) { - message = " " + message; - } - runLinkRequest(intentAccount[0], username, group, sticker, botUser, botChat, message, hasUrl, messageId, channelId, game, auth, lang, unsupportedUrl, code, login, wallPaper, theme, 0); - } else { - try (Cursor cursor = getContentResolver().query(intent.getData(), null, null, null, null)) { - if (cursor != null) { - if (cursor.moveToFirst()) { - int accountId = Utilities.parseInt(cursor.getString(cursor.getColumnIndex(ContactsContract.RawContacts.ACCOUNT_NAME))); - for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { - if (UserConfig.getInstance(a).getClientUserId() == accountId) { - intentAccount[0] = a; - switchToAccount(intentAccount[0], true); - break; + } else if (url.startsWith("tg:privatepost") || url.startsWith("tg://privatepost")) { + url = url.replace("tg:privatepost", "tg://telegram.org").replace("tg://privatepost", "tg://telegram.org"); + data = Uri.parse(url); + messageId = Utilities.parseInt(data.getQueryParameter("post")); 
+ channelId = Utilities.parseInt(data.getQueryParameter("channel")); + if (messageId == 0 || channelId == 0) { + messageId = null; + channelId = null; + } + } else if (url.startsWith("tg:bg") || url.startsWith("tg://bg")) { + url = url.replace("tg:bg", "tg://telegram.org").replace("tg://bg", "tg://telegram.org"); + data = Uri.parse(url); + wallPaper = new TLRPC.TL_wallPaper(); + wallPaper.settings = new TLRPC.TL_wallPaperSettings(); + wallPaper.slug = data.getQueryParameter("slug"); + if (wallPaper.slug == null) { + wallPaper.slug = data.getQueryParameter("color"); + } + if (wallPaper.slug != null && wallPaper.slug.length() == 6) { + try { + wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug, 16) | 0xff000000; + } catch (Exception ignore) { + + } + wallPaper.slug = null; + } else if (wallPaper.slug != null && wallPaper.slug.length() == 13 && wallPaper.slug.charAt(6) == '-') { + try { + wallPaper.settings.background_color = Integer.parseInt(wallPaper.slug.substring(0, 6), 16) | 0xff000000; + wallPaper.settings.second_background_color = Integer.parseInt(wallPaper.slug.substring(7), 16) | 0xff000000; + wallPaper.settings.rotation = 45; + } catch (Exception ignore) { + + } + try { + String rotation = data.getQueryParameter("rotation"); + if (!TextUtils.isEmpty(rotation)) { + wallPaper.settings.rotation = Utilities.parseInt(rotation); + } + } catch (Exception ignore) { + + } + wallPaper.slug = null; + } else { + String mode = data.getQueryParameter("mode"); + if (mode != null) { + mode = mode.toLowerCase(); + String[] modes = mode.split(" "); + if (modes != null && modes.length > 0) { + for (int a = 0; a < modes.length; a++) { + if ("blur".equals(modes[a])) { + wallPaper.settings.blur = true; + } else if ("motion".equals(modes[a])) { + wallPaper.settings.motion = true; + } } } - int userId = cursor.getInt(cursor.getColumnIndex(ContactsContract.Data.DATA4)); - 
NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); - push_user_id = userId; - String mimeType = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); - if (TextUtils.equals(mimeType, "vnd.android.cursor.item/vnd.org.telegram.messenger.android.call")) { - audioCallUser = true; - } else if (TextUtils.equals(mimeType, "vnd.android.cursor.item/vnd.org.telegram.messenger.android.call.video")) { - videoCallUser = true; + } + wallPaper.settings.intensity = Utilities.parseInt(data.getQueryParameter("intensity")); + try { + String bgColor = data.getQueryParameter("bg_color"); + if (!TextUtils.isEmpty(bgColor)) { + wallPaper.settings.background_color = Integer.parseInt(bgColor.substring(0, 6), 16) | 0xff000000; + if (bgColor.length() > 6) { + wallPaper.settings.second_background_color = Integer.parseInt(bgColor.substring(7), 16) | 0xff000000; + wallPaper.settings.rotation = 45; + } } + } catch (Exception ignore) { + + } + try { + String rotation = data.getQueryParameter("rotation"); + if (!TextUtils.isEmpty(rotation)) { + wallPaper.settings.rotation = Utilities.parseInt(rotation); + } + } catch (Exception ignore) { + } } - } catch (Exception e) { - FileLog.e(e); + } else if (url.startsWith("tg:join") || url.startsWith("tg://join")) { + url = url.replace("tg:join", "tg://telegram.org").replace("tg://join", "tg://telegram.org"); + data = Uri.parse(url); + group = data.getQueryParameter("invite"); + } else if (url.startsWith("tg:addstickers") || url.startsWith("tg://addstickers")) { + url = url.replace("tg:addstickers", "tg://telegram.org").replace("tg://addstickers", "tg://telegram.org"); + data = Uri.parse(url); + sticker = data.getQueryParameter("set"); + } else if (url.startsWith("tg:msg") || url.startsWith("tg://msg") || url.startsWith("tg://share") || url.startsWith("tg:share")) { + url = url.replace("tg:msg", "tg://telegram.org").replace("tg://msg", "tg://telegram.org").replace("tg://share", 
"tg://telegram.org").replace("tg:share", "tg://telegram.org"); + data = Uri.parse(url); + message = data.getQueryParameter("url"); + if (message == null) { + message = ""; + } + if (data.getQueryParameter("text") != null) { + if (message.length() > 0) { + hasUrl = true; + message += "\n"; + } + message += data.getQueryParameter("text"); + } + if (message.length() > 4096 * 4) { + message = message.substring(0, 4096 * 4); + } + while (message.endsWith("\n")) { + message = message.substring(0, message.length() - 1); + } + } else if (url.startsWith("tg:confirmphone") || url.startsWith("tg://confirmphone")) { + url = url.replace("tg:confirmphone", "tg://telegram.org").replace("tg://confirmphone", "tg://telegram.org"); + data = Uri.parse(url); + + phone = data.getQueryParameter("phone"); + phoneHash = data.getQueryParameter("hash"); + } else if (url.startsWith("tg:login") || url.startsWith("tg://login")) { + url = url.replace("tg:login", "tg://telegram.org").replace("tg://login", "tg://telegram.org"); + data = Uri.parse(url); + login = data.getQueryParameter("token"); + code = data.getQueryParameter("code"); + } else if (url.startsWith("tg:openmessage") || url.startsWith("tg://openmessage")) { + url = url.replace("tg:openmessage", "tg://telegram.org").replace("tg://openmessage", "tg://telegram.org"); + data = Uri.parse(url); + + String userID = data.getQueryParameter("user_id"); + String chatID = data.getQueryParameter("chat_id"); + String msgID = data.getQueryParameter("message_id"); + if (userID != null) { + try { + push_user_id = Integer.parseInt(userID); + } catch (NumberFormatException ignore) { + } + } else if (chatID != null) { + try { + push_chat_id = Integer.parseInt(chatID); + } catch (NumberFormatException ignore) { + } + } + if (msgID != null) { + try { + push_msg_id = Integer.parseInt(msgID); + } catch (NumberFormatException ignore) { + } + } + } else if (url.startsWith("tg:passport") || url.startsWith("tg://passport") || url.startsWith("tg:secureid")) { + 
url = url.replace("tg:passport", "tg://telegram.org").replace("tg://passport", "tg://telegram.org").replace("tg:secureid", "tg://telegram.org"); + data = Uri.parse(url); + auth = new HashMap<>(); + String scope = data.getQueryParameter("scope"); + if (!TextUtils.isEmpty(scope) && scope.startsWith("{") && scope.endsWith("}")) { + auth.put("nonce", data.getQueryParameter("nonce")); + } else { + auth.put("payload", data.getQueryParameter("payload")); + } + auth.put("bot_id", data.getQueryParameter("bot_id")); + auth.put("scope", scope); + auth.put("public_key", data.getQueryParameter("public_key")); + auth.put("callback_url", data.getQueryParameter("callback_url")); + } else if (url.startsWith("tg:setlanguage") || url.startsWith("tg://setlanguage")) { + url = url.replace("tg:setlanguage", "tg://telegram.org").replace("tg://setlanguage", "tg://telegram.org"); + data = Uri.parse(url); + lang = data.getQueryParameter("lang"); + } else if (url.startsWith("tg:addtheme") || url.startsWith("tg://addtheme")) { + url = url.replace("tg:addtheme", "tg://telegram.org").replace("tg://addtheme", "tg://telegram.org"); + data = Uri.parse(url); + theme = data.getQueryParameter("slug"); + } else if (url.startsWith("tg:settings") || url.startsWith("tg://settings")) { + if (url.contains("themes")) { + open_settings = 2; + } else if (url.contains("devices")) { + open_settings = 3; + } else if (url.contains("folders")) { + open_settings = 4; + } else if (url.contains("change_number")) { + open_settings = 5; + } + } else { + unsupportedUrl = url.replace("tg://", "").replace("tg:", ""); + int index; + if ((index = unsupportedUrl.indexOf('?')) >= 0) { + unsupportedUrl = unsupportedUrl.substring(0, index); + } } + break; } } } - } else if (intent.getAction().equals("org.telegram.messenger.OPEN_ACCOUNT")) { - open_settings = 1; - } else if (intent.getAction().equals("new_dialog")) { - open_new_dialog = 1; - } else if (intent.getAction().startsWith("com.tmessages.openchat")) { - int chatId = 
intent.getIntExtra("chatId", 0); - int userId = intent.getIntExtra("userId", 0); - int encId = intent.getIntExtra("encId", 0); - if (chatId != 0) { - NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); - push_chat_id = chatId; - } else if (userId != 0) { - NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); - push_user_id = userId; - } else if (encId != 0) { - NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); - push_enc_id = encId; - } else { - showDialogsList = true; - } - } else if (intent.getAction().equals("com.tmessages.openplayer")) { - showPlayer = true; - } else if (intent.getAction().equals("org.tmessages.openlocations")) { - showLocations = true; - } - } - } - - if (UserConfig.getInstance(currentAccount).isClientActivated()) { - if (push_user_id != 0) { - if (audioCallUser) { - VoIPPendingCall.startOrSchedule(this, push_user_id, false); - } else if (videoCallUser) { - VoIPPendingCall.startOrSchedule(this, push_user_id, true); - } else { - Bundle args = new Bundle(); - args.putInt("user_id", push_user_id); - if (push_msg_id != 0) { - args.putInt("message_id", push_msg_id); - } - if (mainFragmentsStack.isEmpty() || MessagesController.getInstance(intentAccount[0]).checkCanOpenChat(args, mainFragmentsStack.get(mainFragmentsStack.size() - 1))) { - ChatActivity fragment = new ChatActivity(args); - if (actionBarLayout.presentFragment(fragment, false, true, true, false)) { - pushOpened = true; + if (code != null || UserConfig.getInstance(currentAccount).isClientActivated()) { + if (phone != null || phoneHash != null) { + final Bundle args = new Bundle(); + args.putString("phone", phone); + args.putString("hash", phoneHash); + AndroidUtilities.runOnUIThread(() -> presentFragment(new CancelAccountDeletionActivity(args))); + } else if (username != null || group != null || sticker != null || message != null || game != 
null || auth != null || unsupportedUrl != null || lang != null || code != null || wallPaper != null || channelId != null || theme != null || login != null) { + if (message != null && message.startsWith("@")) { + message = " " + message; + } + runLinkRequest(intentAccount[0], username, group, sticker, botUser, botChat, message, hasUrl, messageId, channelId, game, auth, lang, unsupportedUrl, code, login, wallPaper, theme, 0); + } else { + try (Cursor cursor = getContentResolver().query(intent.getData(), null, null, null, null)) { + if (cursor != null) { + if (cursor.moveToFirst()) { + int accountId = Utilities.parseInt(cursor.getString(cursor.getColumnIndex(ContactsContract.RawContacts.ACCOUNT_NAME))); + for (int a = 0; a < UserConfig.MAX_ACCOUNT_COUNT; a++) { + if (UserConfig.getInstance(a).getClientUserId() == accountId) { + intentAccount[0] = a; + switchToAccount(intentAccount[0], true); + break; + } + } + int userId = cursor.getInt(cursor.getColumnIndex(ContactsContract.Data.DATA4)); + NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); + push_user_id = userId; + String mimeType = cursor.getString(cursor.getColumnIndex(ContactsContract.Data.MIMETYPE)); + if (TextUtils.equals(mimeType, "vnd.android.cursor.item/vnd.org.telegram.messenger.android.call")) { + audioCallUser = true; + } else if (TextUtils.equals(mimeType, "vnd.android.cursor.item/vnd.org.telegram.messenger.android.call.video")) { + videoCallUser = true; + } + } + } + } catch (Exception e) { + FileLog.e(e); + } } } } - } else if (push_chat_id != 0) { + } else if (intent.getAction().equals("org.telegram.messenger.OPEN_ACCOUNT")) { + open_settings = 1; + } else if (intent.getAction().equals("new_dialog")) { + open_new_dialog = 1; + } else if (intent.getAction().startsWith("com.tmessages.openchat")) { + int chatId = intent.getIntExtra("chatId", 0); + int userId = intent.getIntExtra("userId", 0); + int encId = intent.getIntExtra("encId", 0); + if (chatId != 
0) { + NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); + push_chat_id = chatId; + } else if (userId != 0) { + NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); + push_user_id = userId; + } else if (encId != 0) { + NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); + push_enc_id = encId; + } else { + showDialogsList = true; + } + } else if (intent.getAction().equals("com.tmessages.openplayer")) { + showPlayer = true; + } else if (intent.getAction().equals("org.tmessages.openlocations")) { + showLocations = true; + } + } + } + + if (UserConfig.getInstance(currentAccount).isClientActivated()) { + if (push_user_id != 0) { + if (audioCallUser) { + VoIPPendingCall.startOrSchedule(this, push_user_id, false); + } else if (videoCallUser) { + VoIPPendingCall.startOrSchedule(this, push_user_id, true); + } else { Bundle args = new Bundle(); - args.putInt("chat_id", push_chat_id); - if (push_msg_id != 0) + args.putInt("user_id", push_user_id); + if (push_msg_id != 0) { args.putInt("message_id", push_msg_id); + } if (mainFragmentsStack.isEmpty() || MessagesController.getInstance(intentAccount[0]).checkCanOpenChat(args, mainFragmentsStack.get(mainFragmentsStack.size() - 1))) { ChatActivity fragment = new ChatActivity(args); if (actionBarLayout.presentFragment(fragment, false, true, true, false)) { pushOpened = true; } } - } else if (push_enc_id != 0) { - Bundle args = new Bundle(); - args.putInt("enc_id", push_enc_id); + } + } else if (push_chat_id != 0) { + Bundle args = new Bundle(); + args.putInt("chat_id", push_chat_id); + if (push_msg_id != 0) + args.putInt("message_id", push_msg_id); + if (mainFragmentsStack.isEmpty() || MessagesController.getInstance(intentAccount[0]).checkCanOpenChat(args, mainFragmentsStack.get(mainFragmentsStack.size() - 1))) { ChatActivity fragment = new ChatActivity(args); if 
(actionBarLayout.presentFragment(fragment, false, true, true, false)) { pushOpened = true; } - } else if (showDialogsList) { - if (!AndroidUtilities.isTablet()) { - actionBarLayout.removeAllFragments(); - } else { - if (!layersActionBarLayout.fragmentsStack.isEmpty()) { - for (int a = 0; a < layersActionBarLayout.fragmentsStack.size() - 1; a++) { - layersActionBarLayout.removeFragmentFromStack(layersActionBarLayout.fragmentsStack.get(0)); - a--; - } - layersActionBarLayout.closeLastFragment(false); - } - } - pushOpened = false; - isNew = false; - } else if (showPlayer) { - if (!actionBarLayout.fragmentsStack.isEmpty()) { - BaseFragment fragment = actionBarLayout.fragmentsStack.get(0); - fragment.showDialog(new AudioPlayerAlert(this)); - } - pushOpened = false; - } else if (showLocations) { - if (!actionBarLayout.fragmentsStack.isEmpty()) { - BaseFragment fragment = actionBarLayout.fragmentsStack.get(0); - fragment.showDialog(new SharingLocationsAlert(this, info -> { - intentAccount[0] = info.messageObject.currentAccount; - switchToAccount(intentAccount[0], true); - - LocationActivity locationActivity = new LocationActivity(2); - locationActivity.setMessageObject(info.messageObject); - final long dialog_id = info.messageObject.getDialogId(); - locationActivity.setDelegate((location, live, notify, scheduleDate) -> SendMessagesHelper.getInstance(intentAccount[0]).sendMessage(location, dialog_id, null, null, null, notify, scheduleDate)); - presentFragment(locationActivity); - })); - } - pushOpened = false; - } else if (videoPath != null || photoPathsArray != null || sendingText != null || documentsPathsArray != null || contactsToSend != null || documentsUrisArray != null) { - if (!AndroidUtilities.isTablet()) { - NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); - } - if (dialogId == 0) { - Bundle args = new Bundle(); - args.putBoolean("onlySelect", true); - args.putInt("dialogsType", 3); - 
args.putBoolean("allowSwitchAccount", true); - if (contactsToSend != null) { - if (contactsToSend.size() != 1) { - args.putString("selectAlertString", LocaleController.getString("SendContactToText", R.string.SendMessagesToText)); - args.putString("selectAlertStringGroup", LocaleController.getString("SendContactToGroupText", R.string.SendContactToGroupText)); - } - } else { - args.putString("selectAlertString", LocaleController.getString("SendMessagesToText", R.string.SendMessagesToText)); - args.putString("selectAlertStringGroup", LocaleController.getString("SendMessagesToGroupText", R.string.SendMessagesToGroupText)); - } - DialogsActivity fragment = new DialogsActivity(args); - fragment.setDelegate(this); - boolean removeLast; - if (AndroidUtilities.isTablet()) { - removeLast = layersActionBarLayout.fragmentsStack.size() > 0 && layersActionBarLayout.fragmentsStack.get(layersActionBarLayout.fragmentsStack.size() - 1) instanceof DialogsActivity; - } else { - removeLast = actionBarLayout.fragmentsStack.size() > 1 && actionBarLayout.fragmentsStack.get(actionBarLayout.fragmentsStack.size() - 1) instanceof DialogsActivity; - } - actionBarLayout.presentFragment(fragment, removeLast, true, true, false); - pushOpened = true; - if (SecretMediaViewer.hasInstance() && SecretMediaViewer.getInstance().isVisible()) { - SecretMediaViewer.getInstance().closePhoto(false, false); - } else if (PhotoViewer.hasInstance() && PhotoViewer.getInstance().isVisible()) { - PhotoViewer.getInstance().closePhoto(false, true); - } else if (ArticleViewer.hasInstance() && ArticleViewer.getInstance().isVisible()) { - ArticleViewer.getInstance().close(false, true); - } - - drawerLayoutContainer.setAllowOpenDrawer(false, false); - if (AndroidUtilities.isTablet()) { - actionBarLayout.showLastFragment(); - rightActionBarLayout.showLastFragment(); - } else { - drawerLayoutContainer.setAllowOpenDrawer(true, false); - } - } else { - ArrayList dids = new ArrayList<>(); - dids.add(dialogId); - 
didSelectDialogs(null, dids, null, false); - } - } else if (open_settings != 0) { - BaseFragment fragment; - boolean closePrevious = false; - if (open_settings == 1) { - Bundle args = new Bundle(); - args.putInt("user_id", UserConfig.getInstance(currentAccount).clientUserId); - fragment = new ProfileActivity(args); - } else if (open_settings == 2) { - fragment = new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC); - } else if (open_settings == 3) { - fragment = new SessionsActivity(0); - } else if (open_settings == 4) { - fragment = new FiltersSetupActivity(); - } else if (open_settings == 5) { - fragment = new ActionIntroActivity(ActionIntroActivity.ACTION_TYPE_CHANGE_PHONE_NUMBER); - closePrevious = true; - } else { - fragment = null; - } - boolean closePreviousFinal = closePrevious; - AndroidUtilities.runOnUIThread(() -> presentFragment(fragment, closePreviousFinal, false)); - if (AndroidUtilities.isTablet()) { - actionBarLayout.showLastFragment(); - rightActionBarLayout.showLastFragment(); - drawerLayoutContainer.setAllowOpenDrawer(false, false); - } else { - drawerLayoutContainer.setAllowOpenDrawer(true, false); - } - pushOpened = true; - } else if (open_new_dialog != 0) { - Bundle args = new Bundle(); - args.putBoolean("destroyAfterSelect", true); - actionBarLayout.presentFragment(new ContactsActivity(args), false, true, true, false); - if (AndroidUtilities.isTablet()) { - actionBarLayout.showLastFragment(); - rightActionBarLayout.showLastFragment(); - drawerLayoutContainer.setAllowOpenDrawer(false, false); - } else { - drawerLayoutContainer.setAllowOpenDrawer(true, false); - } + } + } else if (push_enc_id != 0) { + Bundle args = new Bundle(); + args.putInt("enc_id", push_enc_id); + ChatActivity fragment = new ChatActivity(args); + if (actionBarLayout.presentFragment(fragment, false, true, true, false)) { pushOpened = true; } - } + } else if (showDialogsList) { + if (!AndroidUtilities.isTablet()) { + actionBarLayout.removeAllFragments(); + } else { + if 
(!layersActionBarLayout.fragmentsStack.isEmpty()) { + for (int a = 0; a < layersActionBarLayout.fragmentsStack.size() - 1; a++) { + layersActionBarLayout.removeFragmentFromStack(layersActionBarLayout.fragmentsStack.get(0)); + a--; + } + layersActionBarLayout.closeLastFragment(false); + } + } + pushOpened = false; + isNew = false; + } else if (showPlayer) { + if (!actionBarLayout.fragmentsStack.isEmpty()) { + BaseFragment fragment = actionBarLayout.fragmentsStack.get(0); + fragment.showDialog(new AudioPlayerAlert(this)); + } + pushOpened = false; + } else if (showLocations) { + if (!actionBarLayout.fragmentsStack.isEmpty()) { + BaseFragment fragment = actionBarLayout.fragmentsStack.get(0); + fragment.showDialog(new SharingLocationsAlert(this, info -> { + intentAccount[0] = info.messageObject.currentAccount; + switchToAccount(intentAccount[0], true); - if (!pushOpened && !isNew) { - if (AndroidUtilities.isTablet()) { - if (!UserConfig.getInstance(currentAccount).isClientActivated()) { - if (layersActionBarLayout.fragmentsStack.isEmpty()) { - layersActionBarLayout.addFragmentToStack(new LoginActivity()); - drawerLayoutContainer.setAllowOpenDrawer(false, false); + LocationActivity locationActivity = new LocationActivity(2); + locationActivity.setMessageObject(info.messageObject); + final long dialog_id = info.messageObject.getDialogId(); + locationActivity.setDelegate((location, live, notify, scheduleDate) -> SendMessagesHelper.getInstance(intentAccount[0]).sendMessage(location, dialog_id, null, null, null, notify, scheduleDate)); + presentFragment(locationActivity); + })); + } + pushOpened = false; + } else if (videoPath != null || photoPathsArray != null || sendingText != null || documentsPathsArray != null || contactsToSend != null || documentsUrisArray != null) { + if (!AndroidUtilities.isTablet()) { + NotificationCenter.getInstance(intentAccount[0]).postNotificationName(NotificationCenter.closeChats); + } + if (dialogId == 0) { + Bundle args = new Bundle(); + 
args.putBoolean("onlySelect", true); + args.putInt("dialogsType", 3); + args.putBoolean("allowSwitchAccount", true); + if (contactsToSend != null) { + if (contactsToSend.size() != 1) { + args.putString("selectAlertString", LocaleController.getString("SendContactToText", R.string.SendMessagesToText)); + args.putString("selectAlertStringGroup", LocaleController.getString("SendContactToGroupText", R.string.SendContactToGroupText)); } } else { - if (actionBarLayout.fragmentsStack.isEmpty()) { - DialogsActivity dialogsActivity = new DialogsActivity(null); - dialogsActivity.setSideMenu(sideMenu); - actionBarLayout.addFragmentToStack(dialogsActivity); - drawerLayoutContainer.setAllowOpenDrawer(true, false); - } + args.putString("selectAlertString", LocaleController.getString("SendMessagesToText", R.string.SendMessagesToText)); + args.putString("selectAlertStringGroup", LocaleController.getString("SendMessagesToGroupText", R.string.SendMessagesToGroupText)); + } + DialogsActivity fragment = new DialogsActivity(args); + fragment.setDelegate(this); + boolean removeLast; + if (AndroidUtilities.isTablet()) { + removeLast = layersActionBarLayout.fragmentsStack.size() > 0 && layersActionBarLayout.fragmentsStack.get(layersActionBarLayout.fragmentsStack.size() - 1) instanceof DialogsActivity; + } else { + removeLast = actionBarLayout.fragmentsStack.size() > 1 && actionBarLayout.fragmentsStack.get(actionBarLayout.fragmentsStack.size() - 1) instanceof DialogsActivity; + } + actionBarLayout.presentFragment(fragment, removeLast, true, true, false); + pushOpened = true; + if (SecretMediaViewer.hasInstance() && SecretMediaViewer.getInstance().isVisible()) { + SecretMediaViewer.getInstance().closePhoto(false, false); + } else if (PhotoViewer.hasInstance() && PhotoViewer.getInstance().isVisible()) { + PhotoViewer.getInstance().closePhoto(false, true); + } else if (ArticleViewer.hasInstance() && ArticleViewer.getInstance().isVisible()) { + ArticleViewer.getInstance().close(false, true); + 
} + + drawerLayoutContainer.setAllowOpenDrawer(false, false); + if (AndroidUtilities.isTablet()) { + actionBarLayout.showLastFragment(); + rightActionBarLayout.showLastFragment(); + } else { + drawerLayoutContainer.setAllowOpenDrawer(true, false); + } + } else { + ArrayList dids = new ArrayList<>(); + dids.add(dialogId); + didSelectDialogs(null, dids, null, false); + } + } else if (open_settings != 0) { + BaseFragment fragment; + boolean closePrevious = false; + if (open_settings == 1) { + Bundle args = new Bundle(); + args.putInt("user_id", UserConfig.getInstance(currentAccount).clientUserId); + fragment = new ProfileActivity(args); + } else if (open_settings == 2) { + fragment = new ThemeActivity(ThemeActivity.THEME_TYPE_BASIC); + } else if (open_settings == 3) { + fragment = new SessionsActivity(0); + } else if (open_settings == 4) { + fragment = new FiltersSetupActivity(); + } else if (open_settings == 5) { + fragment = new ActionIntroActivity(ActionIntroActivity.ACTION_TYPE_CHANGE_PHONE_NUMBER); + closePrevious = true; + } else { + fragment = null; + } + boolean closePreviousFinal = closePrevious; + AndroidUtilities.runOnUIThread(() -> presentFragment(fragment, closePreviousFinal, false)); + if (AndroidUtilities.isTablet()) { + actionBarLayout.showLastFragment(); + rightActionBarLayout.showLastFragment(); + drawerLayoutContainer.setAllowOpenDrawer(false, false); + } else { + drawerLayoutContainer.setAllowOpenDrawer(true, false); + } + pushOpened = true; + } else if (open_new_dialog != 0) { + Bundle args = new Bundle(); + args.putBoolean("destroyAfterSelect", true); + actionBarLayout.presentFragment(new ContactsActivity(args), false, true, true, false); + if (AndroidUtilities.isTablet()) { + actionBarLayout.showLastFragment(); + rightActionBarLayout.showLastFragment(); + drawerLayoutContainer.setAllowOpenDrawer(false, false); + } else { + drawerLayoutContainer.setAllowOpenDrawer(true, false); + } + pushOpened = true; + } + } + + if (!pushOpened && !isNew) { + 
if (AndroidUtilities.isTablet()) { + if (!UserConfig.getInstance(currentAccount).isClientActivated()) { + if (layersActionBarLayout.fragmentsStack.isEmpty()) { + layersActionBarLayout.addFragmentToStack(new LoginActivity()); + drawerLayoutContainer.setAllowOpenDrawer(false, false); } } else { if (actionBarLayout.fragmentsStack.isEmpty()) { - if (!UserConfig.getInstance(currentAccount).isClientActivated()) { - actionBarLayout.addFragmentToStack(new LoginActivity()); - drawerLayoutContainer.setAllowOpenDrawer(false, false); - } else { - DialogsActivity dialogsActivity = new DialogsActivity(null); - dialogsActivity.setSideMenu(sideMenu); - actionBarLayout.addFragmentToStack(dialogsActivity); - drawerLayoutContainer.setAllowOpenDrawer(true, false); - } + DialogsActivity dialogsActivity = new DialogsActivity(null); + dialogsActivity.setSideMenu(sideMenu); + actionBarLayout.addFragmentToStack(dialogsActivity); + drawerLayoutContainer.setAllowOpenDrawer(true, false); } } - actionBarLayout.showLastFragment(); - if (AndroidUtilities.isTablet()) { - layersActionBarLayout.showLastFragment(); - rightActionBarLayout.showLastFragment(); + } else { + if (actionBarLayout.fragmentsStack.isEmpty()) { + if (!UserConfig.getInstance(currentAccount).isClientActivated()) { + actionBarLayout.addFragmentToStack(new LoginActivity()); + drawerLayoutContainer.setAllowOpenDrawer(false, false); + } else { + DialogsActivity dialogsActivity = new DialogsActivity(null); + dialogsActivity.setSideMenu(sideMenu); + actionBarLayout.addFragmentToStack(dialogsActivity); + drawerLayoutContainer.setAllowOpenDrawer(true, false); + } } } - - if (intent.getAction() != null && intent.getAction().equals("voip")) { - VoIPFragment.show(this); - - //Intent i = new Intent(this, VoIPActivity.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK) ; - // startActivity(i); + actionBarLayout.showLastFragment(); + if (AndroidUtilities.isTablet()) { + layersActionBarLayout.showLastFragment(); + 
rightActionBarLayout.showLastFragment(); } - - intent.setAction(null); - return pushOpened; } - return false; + + if (isVoipIntent) { + VoIPFragment.show(this); + } + + intent.setAction(null); + return pushOpened; } private void runLinkRequest(final int intentAccount, @@ -2934,9 +2933,13 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa super.onPause(); NotificationCenter.getGlobalInstance().postNotificationName(NotificationCenter.stopAllHeavyOperations, 4096); ApplicationLoader.mainInterfacePaused = true; + int account = currentAccount; Utilities.stageQueue.postRunnable(() -> { ApplicationLoader.mainInterfacePausedStageQueue = true; ApplicationLoader.mainInterfacePausedStageQueueTime = 0; + if (VoIPService.getSharedInstance() == null) { + MessagesController.getInstance(account).ignoreSetOnline = false; + } }); onPasscodePause(); actionBarLayout.onPause(); @@ -3069,7 +3072,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa checkAppUpdate(false); if (VoIPFragment.getInstance() != null) { - VoIPFragment.getInstance().onResume(); + VoIPFragment.onResume(); } } diff --git a/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java b/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java index dba74bfdd..930a0b270 100644 --- a/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/StatisticActivity.java @@ -1433,6 +1433,9 @@ public class StatisticActivity extends BaseFragment implements NotificationCente params.date = d; int dateIndex = Arrays.binarySearch(data.chartData.x, d); + if (dateIndex < 0) { + dateIndex = data.chartData.x.length - 1; + } params.xPercentage = data.chartData.xPercentage[dateIndex]; diff --git a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java index 1e37cbd9a..5bffdd119 100644 --- 
a/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java +++ b/TMessagesProj/src/main/java/org/telegram/ui/VoIPFragment.java @@ -571,8 +571,13 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification callingUserMiniFloatingLayout.setFloatingMode(true, false); callingUserMiniTextureRenderer = new TextureViewRenderer(context); callingUserMiniTextureRenderer.setEnableHardwareScaler(true); + callingUserMiniTextureRenderer.setIsCamera(false); + callingUserMiniTextureRenderer.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FIT); - callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer); + View backgroundView = new View(context); + backgroundView.setBackgroundColor(0xff1b1f23); + callingUserMiniFloatingLayout.addView(backgroundView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.MATCH_PARENT)); + callingUserMiniFloatingLayout.addView(callingUserMiniTextureRenderer, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.CENTER)); callingUserMiniFloatingLayout.setOnTapListener(view -> { if (cameraForceExpanded && System.currentTimeMillis() - lastContentTapTime > 500) { AndroidUtilities.cancelRunOnUIThread(hideUIRunnable); @@ -622,7 +627,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification }); emojiRationalTextView = new TextView(context); - emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, callingUser.first_name)); + emojiRationalTextView.setText(LocaleController.formatString("CallEmojiKeyTooltip", R.string.CallEmojiKeyTooltip, UserObject.getFirstName(callingUser))); emojiRationalTextView.setTextSize(16); emojiRationalTextView.setTextColor(Color.WHITE); emojiRationalTextView.setGravity(Gravity.CENTER); diff --git a/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java b/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java index 2d821c2ff..ab8fc0a5d 100644 --- 
a/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java +++ b/TMessagesProj/src/main/java/org/webrtc/Camera1Session.java @@ -11,6 +11,7 @@ package org.webrtc; import android.content.Context; +import android.hardware.Camera; import android.os.Handler; import android.os.SystemClock; import java.io.IOException; @@ -45,6 +46,8 @@ class Camera1Session implements CameraSession { // Used only for stats. Only used on the camera thread. private final long constructionTimeNs; // Construction time of this class. + private OrientationHelper orientationHelper; + private SessionState state; private boolean firstFrameReported; @@ -170,6 +173,7 @@ class Camera1Session implements CameraSession { this.info = info; this.captureFormat = captureFormat; this.constructionTimeNs = constructionTimeNs; + this.orientationHelper = new OrientationHelper(); surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height); @@ -218,6 +222,7 @@ class Camera1Session implements CameraSession { } else { listenForBytebufferFrames(); } + orientationHelper.start(); try { camera.startPreview(); } catch (RuntimeException e) { @@ -242,6 +247,9 @@ class Camera1Session implements CameraSession { camera.stopPreview(); camera.release(); events.onCameraClosed(this); + if (orientationHelper != null) { + orientationHelper.stop(); + } Logging.d(TAG, "Stop done"); } @@ -313,10 +321,11 @@ class Camera1Session implements CameraSession { } private int getFrameOrientation() { - int rotation = CameraSession.getDeviceOrientation(applicationContext); - if (info.facing == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK) { + int rotation = orientationHelper.getOrientation(); + if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { rotation = 360 - rotation; } + OrientationHelper.cameraRotation = rotation; return (info.orientation + rotation) % 360; } diff --git a/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java b/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java index 
b0f1d346a..c62a5a47b 100644 --- a/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java +++ b/TMessagesProj/src/main/java/org/webrtc/Camera2Session.java @@ -24,9 +24,11 @@ import android.os.Handler; import androidx.annotation.Nullable; import android.util.Range; import android.view.Surface; + import java.util.Arrays; import java.util.List; import java.util.concurrent.TimeUnit; + import org.webrtc.CameraEnumerationAndroid.CaptureFormat; @TargetApi(21) @@ -53,6 +55,8 @@ class Camera2Session implements CameraSession { private final int height; private final int framerate; + private OrientationHelper orientationHelper; + // Initialized at start private CameraCharacteristics cameraCharacteristics; private int cameraOrientation; @@ -292,6 +296,7 @@ class Camera2Session implements CameraSession { this.width = width; this.height = height; this.framerate = framerate; + this.orientationHelper = new OrientationHelper(); start(); } @@ -306,6 +311,7 @@ class Camera2Session implements CameraSession { reportError("getCameraCharacteristics(): " + e.getMessage()); return; } + orientationHelper.start(); cameraOrientation = cameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); isCameraFrontFacing = cameraCharacteristics.get(CameraCharacteristics.LENS_FACING) == CameraMetadata.LENS_FACING_FRONT; @@ -386,6 +392,9 @@ class Camera2Session implements CameraSession { cameraDevice.close(); cameraDevice = null; } + if (orientationHelper != null) { + orientationHelper.stop(); + } Logging.d(TAG, "Stop done"); } @@ -405,10 +414,11 @@ class Camera2Session implements CameraSession { } private int getFrameOrientation() { - int rotation = CameraSession.getDeviceOrientation(applicationContext); - if (!isCameraFrontFacing) { + int rotation = orientationHelper.getOrientation(); + if (isCameraFrontFacing) { rotation = 360 - rotation; } + OrientationHelper.cameraRotation = rotation; return (cameraOrientation + rotation) % 360; } diff --git 
a/TMessagesProj/src/main/java/org/webrtc/CameraSession.java b/TMessagesProj/src/main/java/org/webrtc/CameraSession.java index 8d137854d..8130f0a4e 100644 --- a/TMessagesProj/src/main/java/org/webrtc/CameraSession.java +++ b/TMessagesProj/src/main/java/org/webrtc/CameraSession.java @@ -10,10 +10,7 @@ package org.webrtc; -import android.content.Context; import android.graphics.Matrix; -import android.view.WindowManager; -import android.view.Surface; interface CameraSession { enum FailureType { ERROR, DISCONNECTED } @@ -39,21 +36,6 @@ interface CameraSession { */ void stop(); - static int getDeviceOrientation(Context context) { - final WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); - switch (wm.getDefaultDisplay().getRotation()) { - case Surface.ROTATION_90: - return 90; - case Surface.ROTATION_180: - return 180; - case Surface.ROTATION_270: - return 270; - case Surface.ROTATION_0: - default: - return 0; - } - } - static VideoFrame.TextureBuffer createTextureBufferWithModifiedTransformMatrix( TextureBufferImpl buffer, boolean mirror, int rotation) { final Matrix transformMatrix = new Matrix(); diff --git a/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java b/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java index bf033d807..678ac65fd 100644 --- a/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/EglRenderer.java @@ -150,6 +150,8 @@ public class EglRenderer implements VideoSink { // If true, mirrors the video stream vertically. private boolean mirrorVertically; + private int rotation; + // These variables are synchronized on |statisticsLock|. private final Object statisticsLock = new Object(); // Total number of video frames received in renderFrame() call. @@ -532,6 +534,12 @@ public class EglRenderer implements VideoSink { } } + public void setRotation(int value) { + synchronized (layoutLock) { + rotation = value; + } + } + /** * Release EGL surface. 
This function will block until the EGL surface is released. */ @@ -637,7 +645,8 @@ public class EglRenderer implements VideoSink { final long startTimeNs = System.nanoTime(); - final float frameAspectRatio = frame.getRotatedWidth() / (float) frame.getRotatedHeight(); + boolean rotate = Math.abs(rotation) == 90 || Math.abs(rotation) == 270; + final float frameAspectRatio = (rotate ? frame.getRotatedHeight() : frame.getRotatedWidth()) / (float) (rotate ? frame.getRotatedWidth() : frame.getRotatedHeight()); final float drawnAspectRatio; synchronized (layoutLock) { drawnAspectRatio = layoutAspectRatio != 0f ? layoutAspectRatio : frameAspectRatio; @@ -656,6 +665,7 @@ public class EglRenderer implements VideoSink { drawMatrix.reset(); drawMatrix.preTranslate(0.5f, 0.5f); + drawMatrix.preRotate(rotation); drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f); drawMatrix.preScale(scaleX, scaleY); drawMatrix.preTranslate(-0.5f, -0.5f); @@ -665,7 +675,7 @@ public class EglRenderer implements VideoSink { GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); frameDrawer.drawFrame(frame, drawer, drawMatrix, 0 /* viewportX */, 0 /* viewportY */, - eglBase.surfaceWidth(), eglBase.surfaceHeight()); + eglBase.surfaceWidth(), eglBase.surfaceHeight(), rotate); final long swapBuffersStartTimeNs = System.nanoTime(); if (usePresentationTimeStamp) { @@ -715,6 +725,7 @@ public class EglRenderer implements VideoSink { drawMatrix.reset(); drawMatrix.preTranslate(0.5f, 0.5f); + drawMatrix.preRotate(rotation); drawMatrix.preScale(mirrorHorizontally ? -1f : 1f, mirrorVertically ? -1f : 1f); drawMatrix.preScale(1f, -1f); // We want the output to be upside down for Bitmap. 
drawMatrix.preTranslate(-0.5f, -0.5f); @@ -744,7 +755,7 @@ public class EglRenderer implements VideoSink { GLES20.glClearColor(0 /* red */, 0 /* green */, 0 /* blue */, 0 /* alpha */); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); frameDrawer.drawFrame(frame, listenerAndParams.drawer, drawMatrix, 0 /* viewportX */, - 0 /* viewportY */, scaledWidth, scaledHeight); + 0 /* viewportY */, scaledWidth, scaledHeight, false); final ByteBuffer bitmapBuffer = ByteBuffer.allocateDirect(scaledWidth * scaledHeight * 4); GLES20.glViewport(0, 0, scaledWidth, scaledHeight); diff --git a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java index 41e667be8..92e100e60 100644 --- a/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java +++ b/TMessagesProj/src/main/java/org/webrtc/HardwareVideoEncoderFactory.java @@ -35,43 +35,66 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_M_MS = 20000; private static final int QCOM_VP8_KEY_FRAME_INTERVAL_ANDROID_N_MS = 15000; - // List of devices with poor H.264 encoder quality. - // HW H.264 encoder on below devices has poor bitrate control - actual - // bitrates deviates a lot from the target value. 
- private static final List H264_HW_EXCEPTION_MODELS = - Arrays.asList("SAMSUNG-SGH-I337", "Nexus 7", "Nexus 4", "Pixel 3 XL", "Pixel 3"); - - private static final List VP8_HW_EXCEPTION_MODELS = - Arrays.asList("Pixel 3 XL", "Pixel 3"); - @Nullable private final EglBase14.Context sharedContext; private final boolean enableIntelVp8Encoder; private final boolean enableH264HighProfile; @Nullable private final Predicate codecAllowedPredicate; + private static final List H264_HW_EXCEPTION_MODELS = + Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3"); + + private static final List VP8_HW_EXCEPTION_MODELS = + Arrays.asList("pixel3xl", "pixel3"); + private static Set HW_EXCEPTION_MODELS = new HashSet() {{ - add("SM-A310F"); - add("SM-A310F/DS"); - add("SM-A310Y"); - add("SM-A310M"); - add("SM-G920F"); - add("SM-G920FD"); - add("SM-G920FQ"); - add("SM-G920I"); - add("SM-G920A"); - add("SM-G920T"); - add("SM-G930F"); - add("SM-G930FD"); - add("SM-G930W8"); - add("SM-G930S"); - add("SM-G930K"); - add("SM-G930L"); - add("SM-G935F"); - add("SM-G935FD"); - add("SM-G935W8"); - add("SM-G935S"); - add("SM-G935K"); - add("SM-G935L"); + add("sm-a310f"); + add("sm-a310f/ds"); + add("sm-a310y"); + add("sm-a310m"); + add("sm-g920f"); + add("sm-g920fd"); + add("sm-g920fq"); + add("sm-g920i"); + add("sm-g920a"); + add("sm-g920t"); + add("sm-g930f"); + add("sm-g930fd"); + add("sm-g930w8"); + add("sm-g930s"); + add("sm-g930k"); + add("sm-g930l"); + add("sm-g935f"); + add("sm-g935fd"); + add("sm-g935w8"); + add("sm-g935s"); + add("sm-g935k"); + add("sm-g935l"); + + add("i537"); + add("sgh-i537"); + add("gt-i9295"); + add("sgh-i337"); + add("gt-i9505g"); + add("gt-i9505"); + add("gt-i9515"); + add("f240"); + add("e980"); + add("ls980"); + add("e988"); + add("e986"); + add("f240l"); + add("f240s"); + add("v9815"); + add("nx403a"); + add("f310l"); + add("f310lr"); + add("onem7"); + add("onemax"); + add("pn071"); + add("htc6500lvw"); + add("butterflys"); + add("mi2s"); + 
add("n1"); }}; /** @@ -226,8 +249,13 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { // Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the // current SDK. + + private static String getModel() { + return Build.MODEL != null ? Build.MODEL.toLowerCase().replace(" ", "") : "nomodel"; + } + private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) { - if (HW_EXCEPTION_MODELS.contains(Build.MODEL)) { + if (HW_EXCEPTION_MODELS.contains(getModel())) { return false; } switch (type) { @@ -244,7 +272,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { } private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) { - if (VP8_HW_EXCEPTION_MODELS.contains(Build.MODEL)) { + if (VP8_HW_EXCEPTION_MODELS.contains(getModel())) { return false; } String name = info.getName(); @@ -268,7 +296,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory { private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) { // First, H264 hardware might perform poorly on this model. 
- if (H264_HW_EXCEPTION_MODELS.contains(Build.MODEL)) { + if (H264_HW_EXCEPTION_MODELS.contains(getModel())) { return false; } String name = info.getName(); diff --git a/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java b/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java new file mode 100644 index 000000000..d3e61497e --- /dev/null +++ b/TMessagesProj/src/main/java/org/webrtc/OrientationHelper.java @@ -0,0 +1,68 @@ +package org.webrtc; + +import android.view.OrientationEventListener; + +import org.telegram.messenger.ApplicationLoader; + +public class OrientationHelper { + + private static final int ORIENTATION_HYSTERESIS = 5; + private OrientationEventListener orientationEventListener; + private int rotation; + + public static volatile int cameraRotation; + + private int roundOrientation(int orientation, int orientationHistory) { + boolean changeOrientation; + if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) { + changeOrientation = true; + } else { + int dist = Math.abs(orientation - orientationHistory); + dist = Math.min(dist, 360 - dist); + changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS); + } + if (changeOrientation) { + return ((orientation + 45) / 90 * 90) % 360; + } + return orientationHistory; + } + + public OrientationHelper() { + orientationEventListener = new OrientationEventListener(ApplicationLoader.applicationContext) { + @Override + public void onOrientationChanged(int orientation) { + if (orientationEventListener == null || orientation == ORIENTATION_UNKNOWN) { + return; + } + int newOrietation = roundOrientation(orientation, rotation); + if (newOrietation != rotation) { + onOrientationUpdate(rotation = newOrietation); + } + } + }; + } + + protected void onOrientationUpdate(int orientation) { + + } + + public void start() { + if (orientationEventListener.canDetectOrientation()) { + orientationEventListener.enable(); + } else { + orientationEventListener.disable(); + orientationEventListener = 
null; + } + } + + public void stop() { + if (orientationEventListener != null) { + orientationEventListener.disable(); + orientationEventListener = null; + } + } + + public int getOrientation() { + return rotation; + } +} diff --git a/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java b/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java index 5865b07ca..7306f95ad 100644 --- a/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java +++ b/TMessagesProj/src/main/java/org/webrtc/RendererCommon.java @@ -88,7 +88,7 @@ public class RendererCommon { this.visibleFractionMismatchOrientation = visibleFractionMismatchOrientation; } - public Point measure(int widthSpec, int heightSpec, int frameWidth, int frameHeight) { + public Point measure(boolean isCamera, int widthSpec, int heightSpec, int frameWidth, int frameHeight) { // Calculate max allowed layout size. final int maxWidth = View.getDefaultSize(Integer.MAX_VALUE, widthSpec); final int maxHeight = View.getDefaultSize(Integer.MAX_VALUE, heightSpec); @@ -108,7 +108,7 @@ public class RendererCommon { if (View.MeasureSpec.getMode(widthSpec) == View.MeasureSpec.EXACTLY) { layoutSize.x = maxWidth; } - if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY) { + if (View.MeasureSpec.getMode(heightSpec) == View.MeasureSpec.EXACTLY || !isCamera && (frameAspect > 1.0f) == (displayAspect > 1.0f)) { layoutSize.y = maxHeight; } return layoutSize; diff --git a/TMessagesProj/src/main/java/org/webrtc/SurfaceViewRenderer.java b/TMessagesProj/src/main/java/org/webrtc/SurfaceViewRenderer.java index f62d27432..44ac9ff43 100644 --- a/TMessagesProj/src/main/java/org/webrtc/SurfaceViewRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/SurfaceViewRenderer.java @@ -190,7 +190,7 @@ public class SurfaceViewRenderer extends SurfaceView protected void onMeasure(int widthSpec, int heightSpec) { ThreadUtils.checkIsOnMainThread(); Point size = - videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, 
rotatedFrameHeight); + videoLayoutMeasure.measure(true, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); setMeasuredDimension(size.x, size.y); logD("onMeasure(). New size: " + size.x + "x" + size.y); } diff --git a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java index 0d9e7ed9c..a70c1da2c 100644 --- a/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java +++ b/TMessagesProj/src/main/java/org/webrtc/TextureViewRenderer.java @@ -6,6 +6,7 @@ import android.graphics.Point; import android.graphics.SurfaceTexture; import android.os.Looper; import android.view.TextureView; +import android.view.View; import org.telegram.messenger.AndroidUtilities; @@ -31,6 +32,9 @@ public class TextureViewRenderer extends TextureView private boolean enableFixedSize; private int surfaceWidth; private int surfaceHeight; + private boolean isCamera; + + private OrientationHelper orientationHelper; public static class TextureEglRenderer extends EglRenderer implements TextureView.SurfaceTextureListener { private static final String TAG = "TextureEglRenderer"; @@ -218,6 +222,9 @@ public class TextureViewRenderer extends TextureView */ public void release() { eglRenderer.release(); + if (orientationHelper != null) { + orientationHelper.stop(); + } } /** @@ -251,6 +258,19 @@ public class TextureViewRenderer extends TextureView eglRenderer.removeFrameListener(listener); } + public void setIsCamera(boolean value) { + isCamera = value; + if (!isCamera) { + orientationHelper = new OrientationHelper() { + @Override + protected void onOrientationUpdate(int orientation) { + updateRotation(); + } + }; + orientationHelper.start(); + } + } + /** * Enables fixed size for the surface. This provides better performance but might be buggy on some * devices. By default this is turned off. 
@@ -261,6 +281,45 @@ public class TextureViewRenderer extends TextureView updateSurfaceSize(); } + private void updateRotation() { + if (orientationHelper == null || rotatedFrameWidth == 0 || rotatedFrameHeight == 0) { + return; + } + View parentView = (View) getParent(); + if (parentView == null) { + return; + } + int orientation = orientationHelper.getOrientation(); + float viewWidth = getMeasuredWidth(); + float viewHeight = getMeasuredHeight(); + float w; + float h; + float targetWidth = parentView.getMeasuredWidth(); + float targetHeight = parentView.getMeasuredHeight(); + if (orientation == 90 || orientation == 270) { + w = viewHeight; + h = viewWidth; + } else { + w = viewWidth; + h = viewHeight; + } + float scale; + if (w < h) { + scale = Math.max(w / viewWidth, h / viewHeight); + } else { + scale = Math.min(w / viewWidth, h / viewHeight); + } + w *= scale; + h *= scale; + if (Math.abs(w / h - targetWidth / targetHeight) < 0.1f) { + scale *= Math.max(targetWidth / w, targetHeight / h); + } + if (orientation == 270) { + orientation = -90; + } + animate().scaleX(scale).scaleY(scale).rotation(-orientation).setDuration(180).start(); + } + /** * Set if the video stream should be mirrored or not. */ @@ -312,8 +371,11 @@ public class TextureViewRenderer extends TextureView @Override protected void onMeasure(int widthSpec, int heightSpec) { ThreadUtils.checkIsOnMainThread(); - Point size = videoLayoutMeasure.measure(widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); + Point size = videoLayoutMeasure.measure(isCamera, widthSpec, heightSpec, rotatedFrameWidth, rotatedFrameHeight); setMeasuredDimension(size.x, size.y); + if (!isCamera) { + updateRotation(); + } logD("onMeasure(). 
New size: " + size.x + "x" + size.y); } @@ -337,7 +399,7 @@ public class TextureViewRenderer extends TextureView drawnFrameHeight = rotatedFrameHeight; } else { drawnFrameWidth = rotatedFrameWidth; - drawnFrameHeight = (int) (rotatedFrameWidth / layoutAspectRatio); + drawnFrameHeight = (int) (rotatedFrameHeight / layoutAspectRatio); } // Aspect ratio of the drawn frame and the view is the same. final int width = Math.min(getWidth(), drawnFrameWidth); @@ -413,6 +475,9 @@ public class TextureViewRenderer extends TextureView if (rendererEvents != null) { rendererEvents.onFrameResolutionChanged(videoWidth, videoHeight, rotation); } + if (isCamera) { + eglRenderer.setRotation(-OrientationHelper.cameraRotation); + } int rotatedWidth = rotation == 0 || rotation == 180 ? videoWidth : videoHeight; int rotatedHeight = rotation == 0 || rotation == 180 ? videoHeight : videoWidth; // run immediately if possible for ui thread tests diff --git a/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java b/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java index 4c01ad5d7..1563c88cc 100644 --- a/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java +++ b/TMessagesProj/src/main/java/org/webrtc/VideoFrameDrawer.java @@ -182,14 +182,14 @@ public class VideoFrameDrawer { public void drawFrame( VideoFrame frame, RendererCommon.GlDrawer drawer, Matrix additionalRenderMatrix) { drawFrame(frame, drawer, additionalRenderMatrix, 0 /* viewportX */, 0 /* viewportY */, - frame.getRotatedWidth(), frame.getRotatedHeight()); + frame.getRotatedWidth(), frame.getRotatedHeight(), false); } public void drawFrame(VideoFrame frame, RendererCommon.GlDrawer drawer, @Nullable Matrix additionalRenderMatrix, int viewportX, int viewportY, int viewportWidth, - int viewportHeight) { - final int width = frame.getRotatedWidth(); - final int height = frame.getRotatedHeight(); + int viewportHeight, boolean rotate) { + final int width = rotate ? 
frame.getRotatedHeight() : frame.getRotatedWidth(); + final int height = rotate ? frame.getRotatedWidth() : frame.getRotatedHeight(); calculateTransformedRenderSize(width, height, additionalRenderMatrix); if (renderWidth <= 0 || renderHeight <= 0) { Logging.w(TAG, "Illegal frame size: " + renderWidth + "x" + renderHeight);