Merge remote-tracking branch 'upstream/master'

This commit is contained in:
世界 2020-08-22 00:08:39 +00:00
commit 8cd55728b1
No known key found for this signature in database
GPG Key ID: CD109927C34A63C4
46 changed files with 553 additions and 247 deletions

View File

@ -2,7 +2,7 @@ FROM gradle:6.1.1-jdk8
ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip
ENV ANDROID_API_LEVEL android-30
ENV ANDROID_BUILD_TOOLS_VERSION 30.0.1
ENV ANDROID_BUILD_TOOLS_VERSION 30.0.2
ENV ANDROID_HOME /usr/local/android-sdk-linux
ENV ANDROID_NDK_VERSION 21.1.6352462
ENV ANDROID_VERSION 30
@ -24,4 +24,4 @@ RUN $ANDROID_HOME/tools/bin/sdkmanager "build-tools;${ANDROID_BUILD_TOOLS_VERSIO
ENV PATH ${ANDROID_NDK_HOME}:$PATH
ENV PATH ${ANDROID_NDK_HOME}/prebuilt/linux-x86_64/bin/:$PATH
CMD mkdir -p /home/source/TMessagesProj/build/outputs/apk && cp -R /home/source/. /home/gradle && cd /home/gradle && gradle assembleRelease && cp -R /home/gradle/TMessagesProj/build/outputs/apk/. /home/source/TMessagesProj/build/outputs/apk
CMD mkdir -p /home/source/TMessagesProj/build/outputs/apk && mkdir -p /home/source/TMessagesProj/build/intermediates/ndkBuild && cp -R /home/source/. /home/gradle && cd /home/gradle && gradle assembleRelease && cp -R /home/gradle/TMessagesProj/build/outputs/apk/. /home/source/TMessagesProj/build/outputs/apk && cp -R /home/gradle/TMessagesProj/build/intermediates/ndkBuild/. /home/source/TMessagesProj/build/intermediates/ndkBuild

View File

@ -753,6 +753,7 @@ LOCAL_SRC_FILES := \
./tgcalls/ThreadLocalObject.cpp \
./tgcalls/VideoCaptureInterface.cpp \
./tgcalls/VideoCaptureInterfaceImpl.cpp \
./tgcalls/JsonConfig.cpp \
./tgcalls/reference/InstanceImplReference.cpp \
./tgcalls/legacy/InstanceImplLegacy.cpp \
./tgcalls/platform/android/AndroidInterface.cpp \

View File

@ -69,14 +69,16 @@ struct InstanceHolder {
std::unique_ptr<Instance> nativeInstance;
jobject javaInstance;
std::shared_ptr<tgcalls::VideoCaptureInterface> _videoCapture;
std::shared_ptr<PlatformContext> _platformContext;
};
jclass TrafficStatsClass;
jclass FinalStateClass;
jclass NativeInstanceClass;
jmethodID FinalStateInitMethod;
jlong getInstanceHolderId(JNIEnv *env, jobject obj) {
return env->GetLongField(obj, env->GetFieldID(env->GetObjectClass(obj), "nativePtr", "J"));
return env->GetLongField(obj, env->GetFieldID(NativeInstanceClass, "nativePtr", "J"));
}
InstanceHolder *getInstanceHolder(JNIEnv *env, jobject obj) {
@ -231,6 +233,7 @@ void initWebRTC(JNIEnv *env) {
rtc::InitializeSSL();
webrtcLoaded = true;
NativeInstanceClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/NativeInstance")));
TrafficStatsClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$TrafficStats")));
FinalStateClass = static_cast<jclass>(env->NewGlobalRef(env->FindClass("org/telegram/messenger/voip/Instance$FinalState")));
FinalStateInitMethod = env->GetMethodID(FinalStateClass, "<init>", "([BLjava/lang/String;Lorg/telegram/messenger/voip/Instance$TrafficStats;Z)V");
@ -252,6 +255,8 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
jobject globalRef = env->NewGlobalRef(instanceObj);
std::shared_ptr<VideoCaptureInterface> videoCapture = videoCapturer ? std::shared_ptr<VideoCaptureInterface>(reinterpret_cast<VideoCaptureInterface *>(videoCapturer)) : nullptr;
std::shared_ptr<PlatformContext> platformContext = videoCapture ? videoCapture->getPlatformContext() : std::make_shared<AndroidContext>(env);
Descriptor descriptor = {
.config = Config{
.initializationTimeout = configObject.getDoubleField("initializationTimeout"),
@ -273,23 +278,25 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
.stateUpdated = [globalRef](State state) {
jint javaState = asJavaState(state);
tgvoip::jni::DoWithJNI([globalRef, javaState](JNIEnv *env) {
env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onStateUpdated", "(I)V"), javaState);
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onStateUpdated", "(I)V"), javaState);
});
},
.platformContext = platformContext,
.signalBarsUpdated = [globalRef](int count) {
tgvoip::jni::DoWithJNI([globalRef, count](JNIEnv *env) {
env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onSignalBarsUpdated", "(I)V"), count);
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalBarsUpdated", "(I)V"), count);
});
},
.remoteMediaStateUpdated = [globalRef](AudioState audioState, VideoState videoState) {
tgvoip::jni::DoWithJNI([globalRef, audioState, videoState](JNIEnv *env) {
env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onRemoteMediaStateUpdated", "(II)V"), audioState, videoState);
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onRemoteMediaStateUpdated", "(II)V"), (jint) audioState, (jint )videoState);
});
},
.signalingDataEmitted = [globalRef](const std::vector<uint8_t> &data) {
tgvoip::jni::DoWithJNI([globalRef, data](JNIEnv *env) {
jbyteArray arr = copyVectorToJavaByteArray(env, data);
env->CallVoidMethod(globalRef, env->GetMethodID(env->GetObjectClass(globalRef), "onSignalingData", "([B)V"), arr);
env->CallVoidMethod(globalRef, env->GetMethodID(NativeInstanceClass, "onSignalingData", "([B)V"), arr);
env->DeleteLocalRef(arr);
});
},
};
@ -336,6 +343,7 @@ JNIEXPORT jlong JNICALL Java_org_telegram_messenger_voip_NativeInstance_makeNati
holder->nativeInstance = tgcalls::Meta::Create(v, std::move(descriptor));
holder->javaInstance = globalRef;
holder->_videoCapture = videoCapture;
holder->_platformContext = platformContext;
holder->nativeInstance->setIncomingVideoOutput(webrtc::JavaToNativeVideoSink(env, remoteSink));
holder->nativeInstance->setNetworkType(parseNetworkType(networkType));
return reinterpret_cast<jlong>(holder);
@ -396,7 +404,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_stopNativ
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
const std::string &path = tgvoip::jni::JavaStringToStdString(env, JavaObject(env, instance->javaInstance).getStringField("persistentStateFilePath"));
savePersistentState(path.c_str(), finalState.persistentState);
env->CallVoidMethod(instance->javaInstance, env->GetMethodID(env->GetObjectClass(instance->javaInstance), "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState));
env->CallVoidMethod(instance->javaInstance, env->GetMethodID(NativeInstanceClass, "onStop", "(Lorg/telegram/messenger/voip/Instance$FinalState;)V"), asJavaFinalState(env, finalState));
env->DeleteGlobalRef(instance->javaInstance);
delete instance;
});
@ -446,7 +454,7 @@ JNIEXPORT void JNICALL Java_org_telegram_messenger_voip_NativeInstance_setupOutg
if (instance->_videoCapture) {
return;
}
instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(std::make_shared<AndroidContext>(env));
instance->_videoCapture = tgcalls::VideoCaptureInterface::Create(instance->_platformContext);
instance->_videoCapture->setOutput(webrtc::JavaToNativeVideoSink(env, localSink));
instance->_videoCapture->setState(VideoState::Active);
instance->nativeInstance->setVideoCapture(instance->_videoCapture);

View File

@ -773,8 +773,10 @@ std::shared_ptr<LOTAsset> LottieParserImpl::parseAsset() {
return sharedAsset;
}
std::shared_ptr<LOTData> layer = parseLayer();
staticFlag = staticFlag && layer->isStatic();
asset->mLayers.push_back(layer);
if (layer) {
staticFlag = staticFlag && layer->isStatic();
asset->mLayers.push_back(layer);
}
}
if (!IsValid()) {
parsingError = true;

View File

@ -23,7 +23,7 @@ bool CompareFormats(const VideoFormat &a, const VideoFormat &b) {
}
}
int FormatPriority(const VideoFormat &format, const std::vector<std::string> &preferredCodecs) {
int FormatPriority(const VideoFormat &format, const std::vector<std::string> &preferredCodecs, std::shared_ptr<PlatformContext> platformContext) {
static const auto kCodecs = {
std::string(cricket::kAv1CodecName),
std::string(cricket::kVp9CodecName),
@ -31,13 +31,13 @@ int FormatPriority(const VideoFormat &format, const std::vector<std::string> &pr
std::string(cricket::kH264CodecName),
std::string(cricket::kVp8CodecName),
};
static const auto kSupported = [] {
static const auto kSupported = [platformContext] {
const auto platform = PlatformInterface::SharedInstance();
auto result = std::vector<std::string>();
result.reserve(kCodecs.size());
for (const auto &codec : kCodecs) {
if (platform->supportsEncoding(codec)) {
if (platform->supportsEncoding(codec, platformContext)) {
result.push_back(codec);
}
}
@ -62,19 +62,19 @@ int FormatPriority(const VideoFormat &format, const std::vector<std::string> &pr
return -1;
}
bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector<std::string> &preferredCodecs) {
return FormatPriority(a, preferredCodecs) < FormatPriority(b, preferredCodecs);
bool ComparePriorities(const VideoFormat &a, const VideoFormat &b, const std::vector<std::string> &preferredCodecs, std::shared_ptr<PlatformContext> platformContext) {
return FormatPriority(a, preferredCodecs, platformContext) < FormatPriority(b, preferredCodecs, platformContext);
}
std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list, const std::vector<std::string> &preferredCodecs) {
std::vector<VideoFormat> FilterAndSortEncoders(std::vector<VideoFormat> list, const std::vector<std::string> &preferredCodecs, std::shared_ptr<PlatformContext> platformContext) {
const auto listBegin = begin(list);
const auto listEnd = end(list);
std::sort(listBegin, listEnd, [&preferredCodecs](const VideoFormat &lhs, const VideoFormat &rhs) {
return ComparePriorities(lhs, rhs, preferredCodecs);
std::sort(listBegin, listEnd, [&preferredCodecs, platformContext](const VideoFormat &lhs, const VideoFormat &rhs) {
return ComparePriorities(lhs, rhs, preferredCodecs, platformContext);
});
auto eraseFrom = listBegin;
auto eraseTill = eraseFrom;
while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs) == -1) {
while (eraseTill != listEnd && FormatPriority(*eraseTill, preferredCodecs, platformContext) == -1) {
++eraseTill;
}
if (eraseTill != eraseFrom) {
@ -142,8 +142,9 @@ void AddDefaultFeedbackParams(cricket::VideoCodec *codec) {
VideoFormatsMessage ComposeSupportedFormats(
std::vector<VideoFormat> encoders,
std::vector<VideoFormat> decoders,
const std::vector<std::string> &preferredCodecs) {
encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs);
const std::vector<std::string> &preferredCodecs,
std::shared_ptr<PlatformContext> platformContext) {
encoders = FilterAndSortEncoders(std::move(encoders), preferredCodecs, platformContext);
auto result = VideoFormatsMessage();
result.encodersCount = (int)encoders.size();

View File

@ -6,6 +6,8 @@
namespace tgcalls {
class PlatformContext;
struct CommonFormats {
std::vector<webrtc::SdpVideoFormat> list;
int myEncoderIndex = -1;
@ -19,7 +21,8 @@ struct CommonCodecs {
VideoFormatsMessage ComposeSupportedFormats(
std::vector<webrtc::SdpVideoFormat> encoders,
std::vector<webrtc::SdpVideoFormat> decoders,
const std::vector<std::string> &preferredCodecs);
const std::vector<std::string> &preferredCodecs,
std::shared_ptr<PlatformContext> platformContext);
CommonFormats ComputeCommonFormats(
const VideoFormatsMessage &my,

View File

@ -19,6 +19,7 @@ class VideoFrame;
namespace tgcalls {
class VideoCaptureInterface;
class PlatformContext;
struct Proxy {
std::string host;
@ -203,6 +204,7 @@ struct Descriptor {
std::function<void(AudioState, VideoState)> remoteMediaStateUpdated;
std::function<void(float)> remotePrefferedAspectRatioUpdated;
std::function<void(const std::vector<uint8_t> &)> signalingDataEmitted;
std::shared_ptr<PlatformContext> platformContext;
};
class Meta {

View File

@ -0,0 +1,13 @@
#include "JsonConfig.h"

#include <utility>
namespace tgcalls {
// Takes ownership of the configuration map. Moved rather than copied:
// Values is a std::map of variants and may be arbitrarily large.
JsonConfig::JsonConfig(Values values) : _values(std::move(values)) {
}

// Returns the value stored under |key|, or a default-constructed Value
// (variant holding int 0) when the key is absent.
// Fix: the original used _values[key], whose operator[] *inserts* a
// default entry into the map on every miss — a read accessor should not
// mutate state. find() returns the same value without the side effect.
Value JsonConfig::getValue(std::string key) {
	auto it = _values.find(key);
	return it != _values.end() ? it->second : Value();
}
} // namespace tgcalls

View File

@ -0,0 +1,25 @@
#ifndef TGCALLS_JSON_CONFIG_H
#define TGCALLS_JSON_CONFIG_H
#include <string>
#include <map>
#include "absl/types/variant.h"
namespace tgcalls {
typedef absl::variant<int, double, bool, std::string> Value;
typedef std::map<std::string, Value> Values;
// Lightweight configuration store mapping string keys to variant values
// (int, double, bool or std::string — see the Value/Values typedefs above).
class JsonConfig {
public:
// Constructs the config from a prepared key/value map.
JsonConfig(Values values);
// Looks up |key| in the stored map and returns its Value.
// NOTE(review): behavior for a missing key is defined by the .cpp
// implementation — confirm whether callers rely on a default Value.
Value getValue(std::string key);
private:
// Backing storage for all configuration entries.
Values _values;
};
} // namespace tgcalls
#endif

View File

@ -49,7 +49,8 @@ _remotePrefferedAspectRatioUpdated(std::move(descriptor.remotePrefferedAspectRat
_signalingDataEmitted(std::move(descriptor.signalingDataEmitted)),
_signalBarsUpdated(std::move(descriptor.signalBarsUpdated)),
_localPreferredVideoAspectRatio(descriptor.config.preferredAspectRatio),
_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo) {
_enableHighBitrateVideo(descriptor.config.enableHighBitrateVideo),
_platformContext(descriptor.platformContext) {
assert(_thread->IsCurrent());
assert(_stateUpdated != nullptr);
assert(_signalingDataEmitted != nullptr);
@ -166,7 +167,7 @@ void Manager::start() {
});
}));
bool isOutgoing = _encryptionKey.isOutgoing;
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs]() {
_mediaManager.reset(new ThreadLocalObject<MediaManager>(getMediaThread(), [weak, isOutgoing, thread, sendSignalingMessage, videoCapture = _videoCapture, localPreferredVideoAspectRatio = _localPreferredVideoAspectRatio, enableHighBitrateVideo = _enableHighBitrateVideo, signalBarsUpdated = _signalBarsUpdated, preferredCodecs = _preferredCodecs, platformContext = _platformContext]() {
return new MediaManager(
getMediaThread(),
isOutgoing,
@ -184,7 +185,8 @@ void Manager::start() {
signalBarsUpdated,
localPreferredVideoAspectRatio,
enableHighBitrateVideo,
preferredCodecs);
preferredCodecs,
platformContext);
}));
_networkManager->perform(RTC_FROM_HERE, [](NetworkManager *networkManager) {
networkManager->start();

View File

@ -57,6 +57,8 @@ private:
bool _localNetworkIsLowCost = false;
bool _remoteNetworkIsLowCost = false;
std::shared_ptr<PlatformContext> _platformContext;
};
} // namespace tgcalls

View File

@ -59,7 +59,8 @@ MediaManager::MediaManager(
std::function<void(int)> signalBarsUpdated,
float localPreferredVideoAspectRatio,
bool enableHighBitrateVideo,
std::vector<std::string> preferredCodecs) :
std::vector<std::string> preferredCodecs,
std::shared_ptr<PlatformContext> platformContext) :
_thread(thread),
_eventLog(std::make_unique<webrtc::RtcEventLogNull>()),
_taskQueueFactory(webrtc::CreateDefaultTaskQueueFactory()),
@ -69,7 +70,8 @@ _signalBarsUpdated(std::move(signalBarsUpdated)),
_outgoingVideoState(videoCapture ? VideoState::Active : VideoState::Inactive),
_videoCapture(std::move(videoCapture)),
_localPreferredVideoAspectRatio(localPreferredVideoAspectRatio),
_enableHighBitrateVideo(enableHighBitrateVideo) {
_enableHighBitrateVideo(enableHighBitrateVideo),
_platformContext(platformContext) {
_ssrcAudio.incoming = isOutgoing ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.outgoing = (!isOutgoing) ? ssrcAudioIncoming : ssrcAudioOutgoing;
_ssrcAudio.fecIncoming = isOutgoing ? ssrcAudioFecIncoming : ssrcAudioFecOutgoing;
@ -99,13 +101,14 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {
mediaDeps.audio_encoder_factory = webrtc::CreateAudioEncoderFactory<webrtc::AudioEncoderOpus>();
mediaDeps.audio_decoder_factory = webrtc::CreateAudioDecoderFactory<webrtc::AudioDecoderOpus>();
mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory();
mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory();
mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext);
mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext);
_myVideoFormats = ComposeSupportedFormats(
mediaDeps.video_encoder_factory->GetSupportedFormats(),
mediaDeps.video_decoder_factory->GetSupportedFormats(),
preferredCodecs);
preferredCodecs,
_platformContext);
mediaDeps.audio_processing = webrtc::AudioProcessingBuilder().Create();
_mediaEngine = cricket::CreateMediaEngine(std::move(mediaDeps));
@ -121,6 +124,9 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {
audioOptions.noise_suppression = true;
audioOptions.audio_jitter_buffer_fast_accelerate = true;
std::vector<std::string> streamIds;
streamIds.push_back("1");
_audioChannel.reset(_mediaEngine->voice().CreateMediaChannel(_call.get(), cricket::MediaConfig(), audioOptions, webrtc::CryptoOptions::NoGcm()));
_videoChannel.reset(_mediaEngine->video().CreateMediaChannel(_call.get(), cricket::MediaConfig(), cricket::VideoOptions(), webrtc::CryptoOptions::NoGcm(), _videoBitrateAllocatorFactory.get()));
@ -166,7 +172,9 @@ _enableHighBitrateVideo(enableHighBitrateVideo) {
audioRecvParameters.rtcp.remote_estimate = true;
_audioChannel->SetRecvParameters(audioRecvParameters);
_audioChannel->AddRecvStream(cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming));
cricket::StreamParams audioRecvStreamParams = cricket::StreamParams::CreateLegacy(_ssrcAudio.incoming);
audioRecvStreamParams.set_stream_ids(streamIds);
_audioChannel->AddRecvStream(audioRecvStreamParams);
_audioChannel->SetPlayout(true);
_videoChannel->SetInterface(_videoNetworkInterface.get());
@ -506,6 +514,9 @@ void MediaManager::checkIsReceivingVideoChanged(bool wasReceiving) {
videoRecvStreamParams.ssrcs = {_ssrcVideo.incoming};
videoRecvStreamParams.ssrc_groups.push_back(videoRecvSsrcGroup);
videoRecvStreamParams.cname = "cname";
std::vector<std::string> streamIds;
streamIds.push_back("1");
videoRecvStreamParams.set_stream_ids(streamIds);
_videoChannel->SetRecvParameters(videoRecvParameters);
_videoChannel->AddRecvStream(videoRecvStreamParams);

View File

@ -43,7 +43,8 @@ public:
std::function<void(int)> signalBarsUpdated,
float localPreferredVideoAspectRatio,
bool enableHighBitrateVideo,
std::vector<std::string> preferredCodecs);
std::vector<std::string> preferredCodecs,
std::shared_ptr<PlatformContext> platformContext);
~MediaManager();
void start();
@ -140,6 +141,8 @@ private:
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _audioNetworkInterface;
std::unique_ptr<MediaManager::NetworkInterfaceImpl> _videoNetworkInterface;
std::shared_ptr<PlatformContext> _platformContext;
};
} // namespace tgcalls

View File

@ -17,7 +17,7 @@ namespace rtc {
class BasicPacketSocketFactory;
class BasicNetworkManager;
class PacketTransportInternal;
class NetworkRoute;
struct NetworkRoute;
} // namespace rtc
namespace cricket {

View File

@ -35,6 +35,9 @@ public:
virtual void setState(VideoState state) = 0;
virtual void setPreferredAspectRatio(float aspectRatio) = 0;
virtual void setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) = 0;
virtual std::shared_ptr<PlatformContext> getPlatformContext() {
return nullptr;
}
};

View File

@ -16,7 +16,7 @@ VideoCaptureInterfaceObject::VideoCaptureInterfaceObject(std::shared_ptr<Platfor
if (this->_stateUpdated) {
this->_stateUpdated(state);
}
}, platformContext, _videoCapturerResolution);
}, _platformContext, _videoCapturerResolution);
}
}
@ -61,14 +61,14 @@ void VideoCaptureInterfaceObject::setPreferredAspectRatio(float aspectRatio) {
if (aspectRatio > 0.01 && _videoCapturerResolution.first != 0 && _videoCapturerResolution.second != 0) {
float originalWidth = (float)_videoCapturerResolution.first;
float originalHeight = (float)_videoCapturerResolution.second;
float width = (originalWidth > aspectRatio * originalHeight)
? int(std::round(aspectRatio * originalHeight))
: originalWidth;
float height = (originalWidth > aspectRatio * originalHeight)
? originalHeight
: int(std::round(originalHeight / aspectRatio));
PlatformInterface::SharedInstance()->adaptVideoSource(_videoSource, (int)width, (int)height, 30);
}
}
@ -86,7 +86,8 @@ void VideoCaptureInterfaceObject::setStateUpdated(std::function<void(VideoState)
}
VideoCaptureInterfaceImpl::VideoCaptureInterfaceImpl(std::shared_ptr<PlatformContext> platformContext) :
_impl(Manager::getMediaThread(), [platformContext]() {
_platformContext(platformContext),
_impl(Manager::getMediaThread(), [platformContext]() {
return new VideoCaptureInterfaceObject(platformContext);
}) {
}
@ -117,8 +118,12 @@ void VideoCaptureInterfaceImpl::setOutput(std::shared_ptr<rtc::VideoSinkInterfac
});
}
std::shared_ptr<PlatformContext> VideoCaptureInterfaceImpl::getPlatformContext() {
return _platformContext;
}
ThreadLocalObject<VideoCaptureInterfaceObject> *VideoCaptureInterfaceImpl::object() {
return &_impl;
}
} // namespace tgcalls
}// namespace tgcalls

View File

@ -44,11 +44,13 @@ public:
void setState(VideoState state) override;
void setPreferredAspectRatio(float aspectRatio) override;
void setOutput(std::shared_ptr<rtc::VideoSinkInterface<webrtc::VideoFrame>> sink) override;
std::shared_ptr<PlatformContext> getPlatformContext() override;
ThreadLocalObject<VideoCaptureInterfaceObject> *object();
private:
ThreadLocalObject<VideoCaptureInterfaceObject> _impl;
std::shared_ptr<PlatformContext> _platformContext;
};

View File

@ -23,9 +23,9 @@ public:
virtual float getDisplayAspectRatio() {
return 0.0f;
}
virtual std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() = 0;
virtual std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() = 0;
virtual bool supportsEncoding(const std::string &codecName) = 0;
virtual std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext) = 0;
virtual std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) = 0;
virtual bool supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) = 0;
virtual rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) = 0;
virtual void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) = 0;
virtual std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) = 0;

View File

@ -16,6 +16,7 @@
#include "api/video_codecs/builtin_video_encoder_factory.h"
#include "api/video_codecs/builtin_video_decoder_factory.h"
#include "api/video_track_source_proxy.h"
#include "AndroidContext.h"
namespace tgcalls {
@ -28,29 +29,29 @@ float AndroidInterface::getDisplayAspectRatio() {
return 0;
}
std::unique_ptr<webrtc::VideoEncoderFactory> AndroidInterface::makeVideoEncoderFactory() {
std::unique_ptr<webrtc::VideoEncoderFactory> AndroidInterface::makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
webrtc::ScopedJavaLocalRef<jclass> factory_class =
webrtc::GetClass(env, "org/webrtc/DefaultVideoEncoderFactory");
jmethodID factory_constructor = env->GetMethodID(
factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(
env, env->NewObject(factory_class.obj(), factory_constructor,
nullptr /* shared_context */,
false /* enable_intel_vp8_encoder */,
true /* enable_h264_high_profile */));
AndroidContext *context = (AndroidContext *) platformContext.get();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
webrtc::ScopedJavaLocalRef<jclass> factory_class = webrtc::GetClass(env, "org/webrtc/DefaultVideoEncoderFactory");
jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext, false, true));
return webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj());
}
std::unique_ptr<webrtc::VideoDecoderFactory> AndroidInterface::makeVideoDecoderFactory() {
std::unique_ptr<webrtc::VideoDecoderFactory> AndroidInterface::makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
webrtc::ScopedJavaLocalRef<jclass> factory_class =
webrtc::GetClass(env, "org/webrtc/DefaultVideoDecoderFactory");
jmethodID factory_constructor = env->GetMethodID(
factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(
env, env->NewObject(factory_class.obj(), factory_constructor,
nullptr /* shared_context */));
AndroidContext *context = (AndroidContext *) platformContext.get();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
webrtc::ScopedJavaLocalRef<jclass> factory_class = webrtc::GetClass(env, "org/webrtc/DefaultVideoDecoderFactory");
jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext));
return webrtc::JavaToNativeVideoDecoderFactory(env, factory_object.obj());
}
@ -64,18 +65,17 @@ rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> AndroidInterface::makeVide
return webrtc::VideoTrackSourceProxy::Create(signalingThread, workerThread, _source);
}
bool AndroidInterface::supportsEncoding(const std::string &codecName) {
bool AndroidInterface::supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) {
if (hardwareVideoEncoderFactory == nullptr) {
JNIEnv *env = webrtc::AttachCurrentThreadIfNeeded();
webrtc::ScopedJavaLocalRef<jclass> factory_class =
webrtc::GetClass(env, "org/webrtc/HardwareVideoEncoderFactory");
jmethodID factory_constructor = env->GetMethodID(
factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(
env, env->NewObject(factory_class.obj(), factory_constructor,
nullptr,
false,
true));
AndroidContext *context = (AndroidContext *) platformContext.get();
jmethodID methodId = env->GetMethodID(context->getJavaCapturerClass(), "getSharedEGLContext", "()Lorg/webrtc/EglBase$Context;");
jobject eglContext = env->CallObjectMethod(context->getJavaCapturer(), methodId);
webrtc::ScopedJavaLocalRef<jclass> factory_class = webrtc::GetClass(env, "org/webrtc/HardwareVideoEncoderFactory");
jmethodID factory_constructor = env->GetMethodID(factory_class.obj(), "<init>", "(Lorg/webrtc/EglBase$Context;ZZ)V");
webrtc::ScopedJavaLocalRef<jobject> factory_object(env, env->NewObject(factory_class.obj(), factory_constructor, eglContext, false, true));
hardwareVideoEncoderFactory = webrtc::JavaToNativeVideoEncoderFactory(env, factory_object.obj());
}
auto formats = hardwareVideoEncoderFactory->GetSupportedFormats();

View File

@ -11,9 +11,9 @@ class AndroidInterface : public PlatformInterface {
public:
void configurePlatformAudio() override;
float getDisplayAspectRatio() override;
std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory() override;
std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory() override;
bool supportsEncoding(const std::string &codecName) override;
std::unique_ptr<webrtc::VideoEncoderFactory> makeVideoEncoderFactory(std::shared_ptr<PlatformContext> platformContext) override;
std::unique_ptr<webrtc::VideoDecoderFactory> makeVideoDecoderFactory(std::shared_ptr<PlatformContext> platformContext) override;
bool supportsEncoding(const std::string &codecName, std::shared_ptr<PlatformContext> platformContext) override;
rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> makeVideoSource(rtc::Thread *signalingThread, rtc::Thread *workerThread) override;
void adaptVideoSource(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> videoSource, int width, int height, int fps) override;
std::unique_ptr<VideoCapturerInterface> makeVideoCapturer(rtc::scoped_refptr<webrtc::VideoTrackSourceInterface> source, bool useFrontCamera, std::function<void(VideoState)> stateUpdated, std::shared_ptr<PlatformContext> platformContext, std::pair<int, int> &outResolution) override;

View File

@ -241,7 +241,8 @@ public:
_videoCapture(descriptor.videoCapture),
_localPreferredVideoAspectRatio(descriptor.config.preferredAspectRatio),
_state(State::Reconnecting),
_videoState(_videoCapture ? VideoState::Active : VideoState::Inactive) {
_videoState(_videoCapture ? VideoState::Active : VideoState::Inactive),
_platformContext(descriptor.platformContext) {
assert(getMediaThread()->IsCurrent());
rtc::LogMessage::LogToDebug(rtc::LS_INFO);
@ -304,8 +305,8 @@ public:
mediaDeps.task_queue_factory = dependencies.task_queue_factory.get();
mediaDeps.audio_encoder_factory = webrtc::CreateBuiltinAudioEncoderFactory();
mediaDeps.audio_decoder_factory = webrtc::CreateBuiltinAudioDecoderFactory();
mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory();
mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory();
mediaDeps.video_encoder_factory = PlatformInterface::SharedInstance()->makeVideoEncoderFactory(_platformContext);
mediaDeps.video_decoder_factory = PlatformInterface::SharedInstance()->makeVideoDecoderFactory(_platformContext);
webrtc::AudioProcessing *apm = webrtc::AudioProcessingBuilder().Create();
webrtc::AudioProcessing::Config audioConfig;
@ -926,6 +927,8 @@ private:
bool _didSetRemoteDescription = false;
std::vector<std::shared_ptr<IceCandidateData>> _pendingRemoteIceCandidates;
std::shared_ptr<PlatformContext> _platformContext;
};
InstanceImplReference::InstanceImplReference(Descriptor &&descriptor) :

View File

@ -880,7 +880,12 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg
roundRect.set(drawRegion);
if (isRoundRect) {
canvas.drawRoundRect(roundRect,roundRadius[0], roundRadius[0],roundPaint);
try {
canvas.drawRoundRect(roundRect,roundRadius[0], roundRadius[0],roundPaint);
} catch (Exception e) {
onBitmapException(bitmapDrawable);
FileLog.e(e);
}
} else {
for (int a = 0; a < roundRadius.length; a++) {
radii[a * 2] = roundRadius[a];
@ -930,7 +935,12 @@ public class ImageReceiver implements NotificationCenter.NotificationCenterDeleg
roundPaint.setAlpha(alpha);
if (isRoundRect) {
canvas.drawRoundRect(roundRect, roundRadius[0], roundRadius[0], roundPaint);
try {
canvas.drawRoundRect(roundRect, roundRadius[0], roundRadius[0], roundPaint);
} catch (Exception e) {
onBitmapException(bitmapDrawable);
FileLog.e(e);
}
} else {
for (int a = 0; a < roundRadius.length; a++) {
radii[a * 2] = roundRadius[a];

View File

@ -3193,8 +3193,14 @@ public class NotificationsController extends BaseController {
int selfUserId = getUserConfig().getClientUserId();
boolean waitingForPasscode = AndroidUtilities.needShowPasscode() || SharedConfig.isWaitingForPasscodeEnter;
int maxCount;
if (UserConfig.getActivatedAccountsCount() >= 3) {
maxCount = 7;
} else {
maxCount = 10;
}
for (int b = 0, size = sortedDialogs.size(); b < size; b++) {
if (holders.size() >= 15) {
if (holders.size() >= maxCount) {
break;
}
long dialog_id = sortedDialogs.get(b);

View File

@ -11,7 +11,6 @@ import androidx.annotation.Nullable;
import org.telegram.messenger.ApplicationLoader;
import java.io.FileNotFoundException;
import java.io.IOException;
/**
* This is a very dirty hack to allow Telegram calls to respect user's DND settings.
@ -66,20 +65,20 @@ public class CallNotificationSoundProvider extends ContentProvider {
throw new FileNotFoundException("Unexpected application state");
}
VoIPBaseService srv = VoIPBaseService.getSharedInstance();
if (srv != null) {
srv.startRingtoneAndVibration();
}
try {
VoIPBaseService srv = VoIPBaseService.getSharedInstance();
if (srv != null) {
srv.startRingtoneAndVibration();
}
ParcelFileDescriptor[] pipe = ParcelFileDescriptor.createPipe();
ParcelFileDescriptor.AutoCloseOutputStream outputStream = new ParcelFileDescriptor.AutoCloseOutputStream(pipe[1]);
byte[] silentWav = {82, 73, 70, 70, 41, 0, 0, 0, 87, 65, 86, 69, 102, 109, 116, 32, 16, 0, 0, 0, 1, 0, 1, 0, 68, (byte) 172, 0, 0, 16, (byte) 177, 2, 0, 2, 0, 16, 0, 100, 97, 116, 97, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0};
outputStream.write(silentWav);
outputStream.close();
return pipe[0];
} catch (IOException x) {
throw new FileNotFoundException(x.getMessage());
} catch (Exception e) {
throw new FileNotFoundException(e.getMessage());
}
}
}

View File

@ -330,6 +330,15 @@ public final class Instance {
public final boolean useSystemAec;
public final double hangupUiTimeout;
public final boolean enable_vp8_encoder;
public final boolean enable_vp8_decoder;
public final boolean enable_vp9_encoder;
public final boolean enable_vp9_decoder;
public final boolean enable_h265_encoder;
public final boolean enable_h265_decoder;
public final boolean enable_h264_encoder;
public final boolean enable_h264_decoder;
private final JSONObject jsonObject;
private ServerConfig(JSONObject jsonObject) {
@ -337,6 +346,15 @@ public final class Instance {
this.useSystemNs = jsonObject.optBoolean("use_system_ns", true);
this.useSystemAec = jsonObject.optBoolean("use_system_aec", true);
this.hangupUiTimeout = jsonObject.optDouble("hangup_ui_timeout", 5);
this.enable_vp8_encoder = jsonObject.optBoolean("enable_vp8_encoder", true);
this.enable_vp8_decoder = jsonObject.optBoolean("enable_vp8_decoder", true);
this.enable_vp9_encoder = jsonObject.optBoolean("enable_vp9_encoder", true);
this.enable_vp9_decoder = jsonObject.optBoolean("enable_vp9_decoder", true);
this.enable_h265_encoder = jsonObject.optBoolean("enable_h265_encoder", true);
this.enable_h265_decoder = jsonObject.optBoolean("enable_h265_decoder", true);
this.enable_h264_encoder = jsonObject.optBoolean("enable_h264_encoder", true);
this.enable_h264_decoder = jsonObject.optBoolean("enable_h264_decoder", true);
}
public String getString(String key) {

View File

@ -5,6 +5,8 @@ import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import com.google.android.datatransport.runtime.logging.Logging;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.ApplicationLoader;
import org.telegram.messenger.FileLog;
@ -44,6 +46,7 @@ public class VideoCameraCapturer {
if (Build.VERSION.SDK_INT < 18) {
return;
}
Logging.i("VideoCameraCapturer", "device model = " + Build.MANUFACTURER + Build.MODEL);
AndroidUtilities.runOnUIThread(() -> {
instance = this;
thread = new HandlerThread("CallThread");
@ -192,5 +195,12 @@ public class VideoCameraCapturer {
});
}
private EglBase.Context getSharedEGLContext() {
if (eglBase == null) {
eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
}
return eglBase != null ? eglBase.getEglBaseContext() : null;
}
private static native CapturerObserver nativeGetJavaVideoCapturerObserver(long ptr);
}

View File

@ -160,7 +160,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
protected long callStartTime;
protected boolean playingSound;
protected boolean isOutgoing;
protected boolean videoCall;
public boolean videoCall;
protected long videoCapturer;
protected Runnable timeoutRunnable;
@ -1511,7 +1511,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
}
// some non-Google devices don't implement the ConnectionService API correctly so, sadly,
// we'll have to whitelist only a handful of known-compatible devices for now
return "angler".equals(Build.PRODUCT) // Nexus 6P
return false;/*"angler".equals(Build.PRODUCT) // Nexus 6P
|| "bullhead".equals(Build.PRODUCT) // Nexus 5X
|| "sailfish".equals(Build.PRODUCT) // Pixel
|| "marlin".equals(Build.PRODUCT) // Pixel XL
@ -1519,7 +1519,7 @@ public abstract class VoIPBaseService extends Service implements SensorEventList
|| "taimen".equals(Build.PRODUCT) // Pixel 2 XL
|| "blueline".equals(Build.PRODUCT) // Pixel 3
|| "crosshatch".equals(Build.PRODUCT) // Pixel 3 XL
|| MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false);
|| MessagesController.getGlobalMainSettings().getBoolean("dbg_force_connection_service", false);*/
}
public interface StateListener {

View File

@ -941,6 +941,7 @@ public class VoIPService extends VoIPBaseService {
PendingIntent.getActivity(VoIPService.this, 0, new Intent(VoIPService.this, VoIPFeedbackActivity.class)
.putExtra("call_id", call.id)
.putExtra("call_access_hash", call.access_hash)
.putExtra("call_video", call.video)
.putExtra("account", currentAccount)
.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_SINGLE_TOP), 0).send();
} catch (Exception x) {

View File

@ -76,6 +76,12 @@ public class AcceptDeclineView extends View {
boolean retryMod;
Drawable rippleDrawable;
private boolean screenWasWakeup;
Paint linePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
Drawable arrowDrawable;
float arrowProgress;
public AcceptDeclineView(@NonNull Context context) {
super(context);
@ -107,12 +113,14 @@ public class AcceptDeclineView extends View {
cancelDrawable = ContextCompat.getDrawable(context, R.drawable.ic_close_white).mutate();
cancelDrawable.setColorFilter(new PorterDuffColorFilter(Color.BLACK, PorterDuff.Mode.MULTIPLY));
acceptCirclePaint.setColor(0x3f45bc4d);
rippleDrawable = Theme.createSimpleSelectorCircleDrawable(AndroidUtilities.dp(52), 0, ColorUtils.setAlphaComponent(Color.WHITE, (int) (255 * 0.3f)));
rippleDrawable.setCallback(this);
arrowDrawable = ContextCompat.getDrawable(context, R.drawable.call_arrow_right);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
@ -122,6 +130,8 @@ public class AcceptDeclineView extends View {
callDrawable.setBounds(padding, padding, padding + AndroidUtilities.dp(28), padding + AndroidUtilities.dp(28));
cancelDrawable.setBounds(padding, padding, padding + AndroidUtilities.dp(28), padding + AndroidUtilities.dp(28));
linePaint.setStrokeWidth(AndroidUtilities.dp(3));
linePaint.setColor(Color.WHITE);
}
@Override
@ -200,7 +210,7 @@ public class AcceptDeclineView extends View {
animator.start();
leftAnimator = animator;
if (listener != null) {
if ((!startDrag && Math.abs(dy) < touchSlop) || leftOffsetX > maxOffset * 0.8f) {
if ((!startDrag && Math.abs(dy) < touchSlop && !screenWasWakeup) || leftOffsetX > maxOffset * 0.8f) {
listener.onDicline();
}
}
@ -214,7 +224,7 @@ public class AcceptDeclineView extends View {
animator.start();
rightAnimator = animator;
if (listener != null) {
if ((!startDrag && Math.abs(dy) < touchSlop) || -rigthOffsetX > maxOffset * 0.8f) {
if ((!startDrag && Math.abs(dy) < touchSlop && !screenWasWakeup) || -rigthOffsetX > maxOffset * 0.8f) {
listener.onAccept();
}
}
@ -263,6 +273,49 @@ public class AcceptDeclineView extends View {
invalidate();
}
float k = 0.6f;
if (screenWasWakeup && !retryMod) {
arrowProgress += 16 / 1500f;
if (arrowProgress > 1) {
arrowProgress = 0;
}
int cY = (int) (AndroidUtilities.dp(40) + buttonWidth / 2f);
float startX = AndroidUtilities.dp(46) + buttonWidth + AndroidUtilities.dp(8);
float endX = getMeasuredWidth() / 2f - AndroidUtilities.dp(8);
float lineLength = AndroidUtilities.dp(10);
float stepProgress = (1f - k) / 3f;
for (int i = 0; i < 3; i++) {
int x = (int) (startX + (endX - startX - lineLength) / 3 * i);
float alpha = 0.5f;
float startAlphaFrom = i * stepProgress;
if (arrowProgress > startAlphaFrom && arrowProgress < startAlphaFrom + k) {
float p = (arrowProgress - startAlphaFrom) / k;
if (p > 0.5) p = 1f - p;
alpha = 0.5f + p;
}
canvas.save();
canvas.clipRect(leftOffsetX + AndroidUtilities.dp(46) + buttonWidth / 2,0,getMeasuredHeight(),getMeasuredWidth() >> 1);
arrowDrawable.setAlpha((int) (255 * alpha));
arrowDrawable.setBounds(x, cY - arrowDrawable.getIntrinsicHeight() / 2, x + arrowDrawable.getIntrinsicWidth(), cY + arrowDrawable.getIntrinsicHeight() / 2);
arrowDrawable.draw(canvas);
canvas.restore();
x = (int) (getMeasuredWidth() - (startX + (endX - startX - lineLength) / 3 * i));
canvas.save();
canvas.clipRect(getMeasuredWidth() >> 1, 0, rigthOffsetX + getMeasuredWidth() - AndroidUtilities.dp(46) - buttonWidth / 2, getMeasuredHeight());
canvas.rotate(180, x - arrowDrawable.getIntrinsicWidth() / 2f, cY);
arrowDrawable.setBounds(x - arrowDrawable.getIntrinsicWidth(), cY - arrowDrawable.getIntrinsicHeight() / 2, x, cY + arrowDrawable.getIntrinsicHeight() / 2);
arrowDrawable.draw(canvas);
canvas.restore();
}
invalidate();
}
bigRadius += AndroidUtilities.dp(8) * 0.005f;
canvas.save();
canvas.translate(0, AndroidUtilities.dp(40));
@ -330,6 +383,7 @@ public class AcceptDeclineView extends View {
public interface Listener {
void onAccept();
void onDicline();
}
@ -337,6 +391,7 @@ public class AcceptDeclineView extends View {
this.retryMod = retryMod;
if (retryMod) {
declineDrawable.setColor(Color.WHITE);
screenWasWakeup = false;
} else {
declineDrawable.setColor(0xFFe61e44);
}
@ -421,6 +476,10 @@ public class AcceptDeclineView extends View {
return accessibilityNodeProvider;
}
public void setScreenWasWakeup(boolean screenWasWakeup) {
this.screenWasWakeup = screenWasWakeup;
}
private static abstract class AcceptDeclineAccessibilityNodeProvider extends AccessibilityNodeProvider {
private final View hostView;
@ -514,7 +573,9 @@ public class AcceptDeclineView extends View {
}
protected abstract CharSequence getVirtualViewText(int virtualViewId);
protected abstract void getVirtualViewBoundsInScreen(int virtualViewId, Rect outRect);
protected abstract void getVirtualViewBoundsInParent(int virtualViewId, Rect outRect);
}
}

View File

@ -88,7 +88,11 @@ public class VoIPHelper {
bldr.setNeutralButton(LocaleController.getString("VoipOfflineOpenSettings", R.string.VoipOfflineOpenSettings), (dialog, which) -> activity.startActivity(settingsIntent));
}
}
bldr.show();
try {
bldr.show();
} catch (Exception e) {
FileLog.e(e);
}
return;
}
@ -227,7 +231,7 @@ public class VoIPHelper {
if (d[0].equals(call.call_id + "")) {
try {
long accessHash = Long.parseLong(d[1]);
showRateAlert(context, null, call.call_id, accessHash, UserConfig.selectedAccount, true);
showRateAlert(context, null, call.video, call.call_id, accessHash, UserConfig.selectedAccount, true);
} catch (Exception ignore) {
}
return;
@ -235,7 +239,7 @@ public class VoIPHelper {
}
}
public static void showRateAlert(final Context context, final Runnable onDismiss, final long callID, final long accessHash, final int account, final boolean userInitiative) {
public static void showRateAlert(final Context context, final Runnable onDismiss, boolean isVideo, final long callID, final long accessHash, final int account, final boolean userInitiative) {
final File log = getLogFile(callID);
final int[] page = {0};
LinearLayout alertView = new LinearLayout(context);
@ -262,32 +266,41 @@ public class VoIPHelper {
check.setChecked(!check.isChecked(), true);
};
final String[] problems = {"echo", "noise", "interruptions", "distorted_speech", "silent_local", "silent_remote", "dropped"};
final String[] problems = {isVideo ? "distorted_video" : null, isVideo ? "pixelated_video" : null, "echo", "noise", "interruptions", "distorted_speech", "silent_local", "silent_remote", "dropped"};
for (int i = 0; i < problems.length; i++) {
if (problems[i] == null) {
continue;
}
CheckBoxCell check = new CheckBoxCell(context, 1);
check.setClipToPadding(false);
check.setTag(problems[i]);
String label = null;
switch (i) {
case 0:
label = LocaleController.getString("RateCallEcho", R.string.RateCallEcho);
label = LocaleController.getString("RateCallVideoDistorted", R.string.RateCallVideoDistorted);
break;
case 1:
label = LocaleController.getString("RateCallNoise", R.string.RateCallNoise);
label = LocaleController.getString("RateCallVideoPixelated", R.string.RateCallVideoPixelated);
break;
case 2:
label = LocaleController.getString("RateCallInterruptions", R.string.RateCallInterruptions);
label = LocaleController.getString("RateCallEcho", R.string.RateCallEcho);
break;
case 3:
label = LocaleController.getString("RateCallDistorted", R.string.RateCallDistorted);
label = LocaleController.getString("RateCallNoise", R.string.RateCallNoise);
break;
case 4:
label = LocaleController.getString("RateCallSilentLocal", R.string.RateCallSilentLocal);
label = LocaleController.getString("RateCallInterruptions", R.string.RateCallInterruptions);
break;
case 5:
label = LocaleController.getString("RateCallSilentRemote", R.string.RateCallSilentRemote);
label = LocaleController.getString("RateCallDistorted", R.string.RateCallDistorted);
break;
case 6:
label = LocaleController.getString("RateCallSilentLocal", R.string.RateCallSilentLocal);
break;
case 7:
label = LocaleController.getString("RateCallSilentRemote", R.string.RateCallSilentRemote);
break;
case 8:
label = LocaleController.getString("RateCallDropped", R.string.RateCallDropped);
break;
}
@ -383,10 +396,11 @@ public class VoIPHelper {
problemTags.add("#" + check.getTag());
}
if (req.rating < 5)
if (req.rating < 5) {
req.comment = commentBox.getText().toString();
else
} else {
req.comment = "";
}
if (!problemTags.isEmpty() && !includeLogs[0]) {
req.comment += " " + TextUtils.join(" ", problemTags);
}

View File

@ -34,6 +34,7 @@ import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.ApplicationLoader;
import org.telegram.messenger.FileLog;
import org.telegram.messenger.LocaleController;
import org.telegram.messenger.NotificationCenter;
import org.telegram.messenger.R;
import org.telegram.messenger.voip.Instance;
import org.telegram.messenger.voip.VideoCameraCapturer;
@ -44,7 +45,7 @@ import org.telegram.ui.Components.LayoutHelper;
import org.telegram.ui.LaunchActivity;
import org.telegram.ui.VoIPFragment;
public class VoIPPiPView implements VoIPBaseService.StateListener {
public class VoIPPiPView implements VoIPBaseService.StateListener, NotificationCenter.NotificationCenterDelegate {
public final static int ANIMATION_ENTER_TYPE_SCALE = 0;
public final static int ANIMATION_ENTER_TYPE_TRANSITION = 1;
@ -148,6 +149,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
float y = preferences.getFloat("relativeY", 0f);
instance.setRelativePosition(x, y);
NotificationCenter.getGlobalInstance().addObserver(instance, NotificationCenter.didEndCall);
wm.addView(instance.windowView, windowLayoutParams);
instance.currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null);
@ -324,6 +326,8 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
VoIPService service = VoIPService.getSharedInstance();
if (service != null) {
service.hangUp();
} else {
finish();
}
});
@ -353,23 +357,24 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
if (service != null) {
service.unregisterStateListener(this);
}
floatingView.getRelativePosition(point);
float x = Math.min(1f, Math.max(0f, point[0]));
float y = Math.min(1f, Math.max(0f, point[1]));
SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("voippipconfig", Context.MODE_PRIVATE);
preferences.edit()
.putFloat("relativeX", x)
.putFloat("relativeY", y)
.apply();
windowView.setVisibility(View.GONE);
if (windowView.getParent() != null) {
floatingView.getRelativePosition(point);
float x = Math.min(1f, Math.max(0f, point[0]));
float y = Math.min(1f, Math.max(0f, point[1]));
SharedPreferences preferences = ApplicationLoader.applicationContext.getSharedPreferences("voippipconfig", Context.MODE_PRIVATE);
preferences.edit()
.putFloat("relativeX", x)
.putFloat("relativeY", y)
.apply();
try {
windowManager.removeView(windowView);
} catch (Throwable e) {
FileLog.e(e);
}
}
instance = null;
NotificationCenter.getGlobalInstance().removeObserver(this, NotificationCenter.didEndCall);
}
@Override
@ -477,6 +482,13 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
}
}
@Override
public void didReceivedNotification(int id, int account, Object... args) {
if (id == NotificationCenter.didEndCall) {
finish();
}
}
private class FloatingView extends FrameLayout {
@ -762,12 +774,7 @@ public class VoIPPiPView implements VoIPBaseService.StateListener {
return;
}
expandedInstance.windowView.setAlpha(0);
try {
windowManager.removeView(expandedInstance.windowView);
} catch (Throwable e) {
FileLog.e(e);
}
expandedInstance = null;
expandedInstance.finishInternal();
expandedAnimationInProgress = false;
if (expanded) {
AndroidUtilities.runOnUIThread(collapseRunnable, 3000);

View File

@ -19,18 +19,21 @@ import android.widget.TextView;
import androidx.annotation.NonNull;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.LocaleController;
import org.telegram.messenger.R;
import org.telegram.ui.Components.CubicBezierInterpolator;
import org.telegram.ui.Components.LayoutHelper;
public class VoIPStatusTextView extends FrameLayout {
TextView[] textView = new TextView[2];
TextView reconnectTextView;
VoIPTimerView timerView;
CharSequence nextTextToSet;
boolean animationInProgress;
private TextAlphaSpan[] ellSpans = ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()};
private TextAlphaSpan[] ellSpans = new TextAlphaSpan[]{new TextAlphaSpan(), new TextAlphaSpan(), new TextAlphaSpan()};
private AnimatorSet ellAnimator;
private boolean attachedToWindow;
@ -47,6 +50,23 @@ public class VoIPStatusTextView extends FrameLayout {
textView[i].setGravity(Gravity.CENTER_HORIZONTAL);
addView(textView[i]);
}
reconnectTextView = new TextView(context);
reconnectTextView.setTextSize(15);
reconnectTextView.setShadowLayer(AndroidUtilities.dp(3), 0, AndroidUtilities.dp(.666666667f), 0x4C000000);
reconnectTextView.setTextColor(Color.WHITE);
reconnectTextView.setGravity(Gravity.CENTER_HORIZONTAL);
addView(reconnectTextView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, 0, 0, 22, 0, 0));
SpannableStringBuilder ssb = new SpannableStringBuilder(LocaleController.getString("VoipReconnecting", R.string.VoipReconnecting));
SpannableString ell = new SpannableString("...");
ell.setSpan(ellSpans[0], 0, 1, 0);
ell.setSpan(ellSpans[1], 1, 2, 0);
ell.setSpan(ellSpans[2], 2, 3, 0);
ssb.append(ell);
reconnectTextView.setText(ssb);
reconnectTextView.setVisibility(View.GONE);
timerView = new VoIPTimerView(context);
addView(timerView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT));
@ -97,7 +117,6 @@ public class VoIPStatusTextView extends FrameLayout {
animated = false;
}
if (!animated) {
if (animator != null) {
animator.cancel();
@ -230,6 +249,9 @@ public class VoIPStatusTextView extends FrameLayout {
textView[0].invalidate();
textView[1].invalidate();
}
if (reconnectTextView.getVisibility() == View.VISIBLE) {
reconnectTextView.invalidate();
}
});
a.setDuration(duration);
a.setStartDelay(startDelay);
@ -237,6 +259,29 @@ public class VoIPStatusTextView extends FrameLayout {
return a;
}
public void showReconnect(boolean showReconnecting, boolean animated) {
if (!animated) {
reconnectTextView.animate().setListener(null).cancel();
reconnectTextView.setVisibility(showReconnecting ? View.VISIBLE : View.GONE);
} else {
if (showReconnecting) {
if (reconnectTextView.getVisibility() != View.VISIBLE) {
reconnectTextView.setVisibility(View.VISIBLE);
reconnectTextView.setAlpha(0);
}
reconnectTextView.animate().setListener(null).cancel();
reconnectTextView.animate().alpha(1f).setDuration(150).start();
} else {
reconnectTextView.animate().alpha(0).setListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
reconnectTextView.setVisibility(View.GONE);
}
}).setDuration(150).start();
}
}
}
private class TextAlphaSpan extends CharacterStyle {
private int alpha;

View File

@ -245,6 +245,8 @@ public class VoIPToggleButton extends FrameLayout {
textView[0].setText(text);
crossProgress = drawCross ? 1f : 0;
iconChangeColor = false;
replaceProgress = 0f;
invalidate();
} else {
if (!iconChangeColor) {
icon[1] = ContextCompat.getDrawable(getContext(), iconRes).mutate();
@ -299,6 +301,7 @@ public class VoIPToggleButton extends FrameLayout {
}
});
replaceAnimator.setDuration(150).start();
invalidate();
}
}

View File

@ -808,7 +808,7 @@ public class LaunchActivity extends Activity implements ActionBarLayout.ActionBa
if (BuildVars.LOGS_ENABLED) {
FileLog.d("OS name " + os1 + " " + os2);
}
if (os1.contains("flyme") || os2.contains("flyme")) {
if ((os1.contains("flyme") || os2.contains("flyme")) && Build.VERSION.SDK_INT <= 24) {
AndroidUtilities.incorrectDisplaySizeFix = true;
final View view = getWindow().getDecorView().getRootView();
view.getViewTreeObserver().addOnGlobalLayoutListener(onGlobalLayoutListener = () -> {

View File

@ -363,7 +363,7 @@ public class PaymentFormActivity extends BaseFragment implements NotificationCen
currentItemName = message.messageOwner.media.title;
validateRequest = request;
saveShippingInfo = true;
if (saveCard) {
if (saveCard || currentStep == 4) {
saveCardInfo = saveCard;
} else {
saveCardInfo = paymentForm.saved_credentials != null;

View File

@ -3597,6 +3597,10 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter.
color2 = Theme.getColor(Theme.key_windowBackgroundWhite);
verifiedCheckDrawable.setColorFilter(AndroidUtilities.getOffsetColor(color1, color2, value, 1.0f), PorterDuff.Mode.SRC_IN);
}
if (avatarsViewPagerIndicatorView.getSecondaryMenuItem() != null && videoCallItemVisible) {
needLayoutText(Math.min(1f, extraHeight / AndroidUtilities.dp(88f)));
}
}
@Override
@ -4128,7 +4132,8 @@ public class ProfileActivity extends BaseFragment implements NotificationCenter.
}
int viewWidth = AndroidUtilities.isTablet() ? AndroidUtilities.dp(490) : AndroidUtilities.displaySize.x;
int buttonsWidth = AndroidUtilities.dp(118 + 8 + (40 + (callItemVisible || editItemVisible || searchItem != null ? 48 : 0) + (videoCallItemVisible ? 48 : 0)));
ActionBarMenuItem item = avatarsViewPagerIndicatorView.getSecondaryMenuItem();
int buttonsWidth = AndroidUtilities.dp(118 + 8 + (40 + (item != null ? 48 * (1.0f - mediaHeaderAnimationProgress) : 0) + (videoCallItemVisible ? 48 * (1.0f - mediaHeaderAnimationProgress) : 0)));
int minWidth = viewWidth - buttonsWidth;
int width = (int) (viewWidth - buttonsWidth * Math.max(0.0f, 1.0f - (diff != 1.0f ? diff * 0.15f / (1.0f - diff) : 1.0f)) - nameTextView[1].getTranslationX());

View File

@ -16,12 +16,7 @@ public class VoIPFeedbackActivity extends Activity {
setContentView(new View(this));
VoIPHelper.showRateAlert(this, new Runnable(){
@Override
public void run(){
finish();
}
}, getIntent().getLongExtra("call_id", 0), getIntent().getLongExtra("call_access_hash", 0), getIntent().getIntExtra("account", 0), false);
VoIPHelper.showRateAlert(this, this::finish, getIntent().getBooleanExtra("call_video", false), getIntent().getLongExtra("call_id", 0), getIntent().getLongExtra("call_access_hash", 0), getIntent().getIntExtra("account", 0), false);
}
@Override

View File

@ -202,6 +202,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
}
};
private boolean lockOnScreen;
private boolean screenWasWakeup;
private boolean isVideoCall;
public static void show(Activity activity) {
show(activity, false);
@ -252,6 +255,15 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
}
};
instance.deviceIsLocked = ((KeyguardManager) activity.getSystemService(Context.KEYGUARD_SERVICE)).inKeyguardRestrictedInputMode();
PowerManager pm = (PowerManager) activity.getSystemService(Context.POWER_SERVICE);
boolean screenOn;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT_WATCH) {
screenOn = pm.isInteractive();
} else {
screenOn = pm.isScreenOn();
}
instance.screenWasWakeup = !screenOn;
windowView.setLockOnScreen(instance.deviceIsLocked);
fragment.windowView = windowView;
@ -406,6 +418,10 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
public void didReceivedNotification(int id, int account, Object... args) {
if (id == NotificationCenter.voipServiceCreated) {
if (currentState == VoIPService.STATE_BUSY && VoIPService.getSharedInstance() != null) {
currentUserTextureView.renderer.release();
callingUserTextureView.renderer.release();
callingUserMiniTextureRenderer.release();
initRenderers();
VoIPService.getSharedInstance().registerStateListener(this);
}
}
@ -430,6 +446,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
@Override
public void onMediaStateUpdated(int audioState, int videoState) {
previousState = currentState;
if (videoState == Instance.VIDEO_STATE_ACTIVE && !isVideoCall) {
isVideoCall = true;
}
updateViewState();
}
@ -442,6 +461,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
@Override
public void onVideoAvailableChange(boolean isAvailable) {
previousState = currentState;
if (isAvailable && !isVideoCall) {
isVideoCall = true;
}
updateViewState();
}
@ -559,7 +581,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
lastContentTapTime = System.currentTimeMillis();
callingUserMiniFloatingLayout.setRelativePosition(currentUserCameraFloatingLayout);
cameraForceExpanded = true;
currentState = previousState;
previousState = currentState;
updateViewState();
}
});
@ -585,7 +607,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
lastContentTapTime = System.currentTimeMillis();
currentUserCameraFloatingLayout.setRelativePosition(callingUserMiniFloatingLayout);
cameraForceExpanded = false;
currentState = previousState;
previousState = currentState;
updateViewState();
}
});
@ -700,7 +722,6 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
bottomButtons[i] = new VoIPToggleButton(context);
buttonsLayout.addView(bottomButtons[i]);
}
acceptDeclineView = new AcceptDeclineView(context);
acceptDeclineView.setListener(new AcceptDeclineView.Listener() {
@Override
@ -710,6 +731,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
intent.putExtra("user_id", callingUser.id);
intent.putExtra("is_outgoing", true);
intent.putExtra("start_incall_activity", false);
intent.putExtra("video_call", isVideoCall);
intent.putExtra("can_video_call", isVideoCall);
intent.putExtra("account", UserConfig.selectedAccount);
try {
activity.startService(intent);
@ -741,6 +764,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
}
}
});
acceptDeclineView.setScreenWasWakeup(screenWasWakeup);
frameLayout.addView(buttonsLayout, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, LayoutHelper.WRAP_CONTENT, Gravity.BOTTOM));
frameLayout.addView(acceptDeclineView, LayoutHelper.createFrame(LayoutHelper.MATCH_PARENT, 186, Gravity.BOTTOM));
@ -789,42 +813,47 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
VoIPService service = VoIPService.getSharedInstance();
if (service != null) {
if (VideoCameraCapturer.eglBase == null) {
VideoCameraCapturer.eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
if (!isVideoCall) {
isVideoCall = service.call != null && service.call.video;
}
currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
AndroidUtilities.runOnUIThread(() -> updateViewState());
}
@Override
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
}
});
callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
AndroidUtilities.runOnUIThread(() -> updateViewState());
}
@Override
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
}
}, EglBase.CONFIG_PLAIN, new GlRectDrawer());
callingUserMiniTextureRenderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null);
initRenderers();
}
return frameLayout;
}
private void initRenderers() {
if (VideoCameraCapturer.eglBase == null) {
VideoCameraCapturer.eglBase = EglBase.create(null, EglBase.CONFIG_PLAIN);
}
currentUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
AndroidUtilities.runOnUIThread(() -> updateViewState());
}
@Override
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
}
});
callingUserTextureView.renderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), new RendererCommon.RendererEvents() {
@Override
public void onFirstFrameRendered() {
AndroidUtilities.runOnUIThread(() -> updateViewState());
}
@Override
public void onFrameResolutionChanged(int videoWidth, int videoHeight, int rotation) {
}
}, EglBase.CONFIG_PLAIN, new GlRectDrawer());
callingUserMiniTextureRenderer.init(VideoCameraCapturer.eglBase.getEglBaseContext(), null);
}
public void switchToPip() {
if (isFinished || !AndroidUtilities.checkInlinePermissions(activity) || instance == null) {
return;
@ -907,6 +936,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
notificationsLayout.setAlpha(0f);
callingUserPhotoView.setAlpha(0f);
currentUserCameraFloatingLayout.switchingToPip = true;
AndroidUtilities.runOnUIThread(() -> {
VoIPPiPView.switchingToPip = false;
VoIPPiPView.finish();
@ -921,7 +951,6 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
notificationsLayout.animate().alpha(1f).setDuration(350).setInterpolator(CubicBezierInterpolator.DEFAULT).start();
callingUserPhotoView.animate().alpha(1f).setDuration(350).setInterpolator(CubicBezierInterpolator.DEFAULT).start();
currentUserCameraFloatingLayout.switchingToPip = true;
animator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
@ -936,8 +965,8 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
animator.setDuration(350);
animator.setInterpolator(CubicBezierInterpolator.DEFAULT);
animator.start();
}, 64);
}, 64);
}, 32);
}, 32);
}
@ -1113,6 +1142,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
boolean animated = previousState != -1;
boolean showAcceptDeclineView = false;
boolean showTimer = false;
boolean showReconnecting = false;
boolean showCallingAvatarMini = false;
int statusLayoutOffset = 0;
@ -1120,6 +1150,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
if (service != null) {
callingUserIsVideo = service.getCurrentVideoState() == Instance.VIDEO_STATE_ACTIVE;
currentUserIsVideo = service.getVideoState() == Instance.VIDEO_STATE_ACTIVE || service.getVideoState() == Instance.VIDEO_STATE_PAUSED;
if (currentUserIsVideo && !isVideoCall) {
isVideoCall = true;
}
}
if (animated) {
@ -1134,7 +1167,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
statusLayoutOffset = AndroidUtilities.dp(24);
acceptDeclineView.setRetryMod(false);
if (service != null && service.call.video) {
if (currentUserIsVideo) {
if (currentUserIsVideo && callingUser.photo != null) {
showCallingAvatarMini = true;
} else {
showCallingAvatarMini = false;
@ -1177,6 +1210,9 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
updateKeyView(animated);
}
showTimer = true;
if (currentState == VoIPService.STATE_RECONNECTING) {
showReconnecting = true;
}
break;
case VoIPBaseService.STATE_ENDED:
currentUserTextureView.saveCameraLastBitmap();
@ -1192,9 +1228,42 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
final String message = LocaleController.formatString("VoipPeerIncompatible", R.string.VoipPeerIncompatible, name);
showErrorDialog(AndroidUtilities.replaceTags(message));
} else if (TextUtils.equals(lastError, Instance.ERROR_PEER_OUTDATED)) {
final String name = ContactsController.formatName(callingUser.first_name, callingUser.last_name);
final String message = LocaleController.formatString("VoipPeerOutdated", R.string.VoipPeerOutdated, name);
showErrorDialog(AndroidUtilities.replaceTags(message));
if (isVideoCall) {
final String name = UserObject.getFirstName(callingUser);
final String message = LocaleController.formatString("VoipPeerVideoOutdated", R.string.VoipPeerVideoOutdated, name);
boolean[] callAgain = new boolean[1];
AlertDialog dlg = new DarkAlertDialog.Builder(activity)
.setTitle(LocaleController.getString("VoipFailed", R.string.VoipFailed))
.setMessage(AndroidUtilities.replaceTags(message))
.setNegativeButton(LocaleController.getString("Cancel", R.string.Cancel), (dialogInterface, i) -> windowView.finish())
.setPositiveButton(LocaleController.getString("VoipPeerVideoOutdatedMakeVoice", R.string.VoipPeerVideoOutdatedMakeVoice), (dialogInterface, i) -> {
callAgain[0] = true;
currentState = VoIPService.STATE_BUSY;
Intent intent = new Intent(activity, VoIPService.class);
intent.putExtra("user_id", callingUser.id);
intent.putExtra("is_outgoing", true);
intent.putExtra("start_incall_activity", false);
intent.putExtra("video_call", false);
intent.putExtra("can_video_call", false);
intent.putExtra("account", UserConfig.selectedAccount);
try {
activity.startService(intent);
} catch (Throwable e) {
FileLog.e(e);
}
})
.show();
dlg.setCanceledOnTouchOutside(true);
dlg.setOnDismissListener(dialog -> {
if (!callAgain[0]) {
windowView.finish();
}
});
} else {
final String name = UserObject.getFirstName(callingUser);
final String message = LocaleController.formatString("VoipPeerOutdated", R.string.VoipPeerOutdated, name);
showErrorDialog(AndroidUtilities.replaceTags(message));
}
} else if (TextUtils.equals(lastError, Instance.ERROR_PRIVACY)) {
final String name = ContactsController.formatName(callingUser.first_name, callingUser.last_name);
final String message = LocaleController.formatString("CallNotAvailable", R.string.CallNotAvailable, name);
@ -1286,12 +1355,17 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
backIcon.setAlpha(lockOnScreen ? 0 : 1f);
notificationsLayout.setTranslationY(-AndroidUtilities.dp(16) - (uiVisible ? AndroidUtilities.dp(80) : 0));
}
updateButtons(animated);
if (currentState != VoIPService.STATE_HANGING_UP && currentState != VoIPService.STATE_ENDED) {
updateButtons(animated);
}
if (showTimer) {
statusTextView.showTimer(animated);
}
statusTextView.showReconnect(showReconnecting, animated);
if (animated) {
if (statusLayoutOffset != statusLayoutAnimateToOffset) {
statusLayout.animate().translationY(statusLayoutOffset).setDuration(150).setInterpolator(CubicBezierInterpolator.DEFAULT).start();
@ -1749,6 +1823,7 @@ public class VoIPFragment implements VoIPBaseService.StateListener, Notification
view.announceForAccessibility(text);
}
serviceInstance.setMicMute(micMute);
previousState = currentState;
updateViewState();
}
});

View File

@ -63,9 +63,9 @@ public class GlShader {
// part of the program object."
// But in practice, detaching shaders from the program seems to break some devices. Deleting the
// shaders are fine however - it will delete them when they are no longer attached to a program.
GLES20.glDeleteShader(vertexShader);
GLES20.glDeleteShader(fragmentShader);
GlUtil.checkNoGLES2Error("Creating GlShader");
//GLES20.glDeleteShader(vertexShader); delete crashes on xiaomi
//GLES20.glDeleteShader(fragmentShader);
//GlUtil.checkNoGLES2Error("Creating GlShader");
}
public int getAttribLocation(String label) {

View File

@ -11,8 +11,10 @@
package org.webrtc;
import android.media.MediaCodecInfo;
import org.telegram.messenger.voip.Instance;
import androidx.annotation.Nullable;
import java.util.Arrays;
/** Factory for Android hardware VideoDecoders. */
public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
@ -20,7 +22,27 @@ public class HardwareVideoDecoderFactory extends MediaCodecVideoDecoderFactory {
new Predicate<MediaCodecInfo>() {
@Override
public boolean test(MediaCodecInfo arg) {
return MediaCodecUtils.isHardwareAccelerated(arg);
if (!MediaCodecUtils.isHardwareAccelerated(arg)) {
return false;
}
String[] types = arg.getSupportedTypes();
if (types == null || types.length == 0) {
return false;
}
Instance.ServerConfig config = Instance.getGlobalServerConfig();
for (int a = 0; a < types.length; a++) {
switch (types[a]) {
case "video/x-vnd.on2.vp8":
return config.enable_vp8_decoder;
case "video/x-vnd.on2.vp9":
return config.enable_vp9_decoder;
case "video/avc":
return config.enable_h264_decoder;
case "video/hevc":
return config.enable_h265_decoder;
}
}
return true;
}
};

View File

@ -18,12 +18,12 @@ import static org.webrtc.MediaCodecUtils.HISI_PREFIX;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Build;
import org.telegram.messenger.voip.Instance;
import androidx.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/** Factory for android hardware video encoders. */
@SuppressWarnings("deprecation") // API 16 requires the use of deprecated methods.
@ -40,63 +40,6 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
private final boolean enableH264HighProfile;
@Nullable private final Predicate<MediaCodecInfo> codecAllowedPredicate;
private static final List<String> H264_HW_EXCEPTION_MODELS =
Arrays.asList("samsung-sgh-i337", "nexus7", "nexus4", "pixel3xl", "pixel3");
private static final List<String> VP8_HW_EXCEPTION_MODELS =
Arrays.asList("pixel3xl", "pixel3");
private static Set<String> HW_EXCEPTION_MODELS = new HashSet<String>() {{
add("sm-a310f");
add("sm-a310f/ds");
add("sm-a310y");
add("sm-a310m");
add("sm-g920f");
add("sm-g920fd");
add("sm-g920fq");
add("sm-g920i");
add("sm-g920a");
add("sm-g920t");
add("sm-g930f");
add("sm-g930fd");
add("sm-g930w8");
add("sm-g930s");
add("sm-g930k");
add("sm-g930l");
add("sm-g935f");
add("sm-g935fd");
add("sm-g935w8");
add("sm-g935s");
add("sm-g935k");
add("sm-g935l");
add("i537");
add("sgh-i537");
add("gt-i9295");
add("sgh-i337");
add("gt-i9505g");
add("gt-i9505");
add("gt-i9515");
add("f240");
add("e980");
add("ls980");
add("e988");
add("e986");
add("f240l");
add("f240s");
add("v9815");
add("nx403a");
add("f310l");
add("f310lr");
add("onem7");
add("onemax");
add("pn071");
add("htc6500lvw");
add("butterflys");
add("mi2s");
add("n1");
}};
/**
* Creates a HardwareVideoEncoderFactory that supports surface texture encoding.
*
@ -249,13 +192,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
// Returns true if the given MediaCodecInfo indicates a hardware module that is supported on the
// current SDK.
private static String getModel() {
return Build.MODEL != null ? Build.MODEL.toLowerCase().replace(" ", "") : "nomodel";
}
private boolean isHardwareSupportedInCurrentSdk(MediaCodecInfo info, VideoCodecMimeType type) {
if (HW_EXCEPTION_MODELS.contains(getModel())) {
Instance.ServerConfig config = Instance.getGlobalServerConfig();
if (!config.enable_h264_encoder && !config.enable_h265_encoder && !config.enable_vp8_encoder && !config.enable_vp9_encoder) {
return false;
}
switch (type) {
@ -272,7 +211,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
private boolean isHardwareSupportedInCurrentSdkVp8(MediaCodecInfo info) {
if (VP8_HW_EXCEPTION_MODELS.contains(getModel())) {
if (!Instance.getGlobalServerConfig().enable_vp8_encoder) {
return false;
}
String name = info.getName();
@ -288,6 +227,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
private boolean isHardwareSupportedInCurrentSdkVp9(MediaCodecInfo info) {
if (!Instance.getGlobalServerConfig().enable_vp9_encoder) {
return false;
}
String name = info.getName();
return (name.startsWith(QCOM_PREFIX) || name.startsWith(EXYNOS_PREFIX) || name.startsWith(HISI_PREFIX))
// Both QCOM and Exynos VP9 encoders are supported in N or later.
@ -295,8 +237,7 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
private boolean isHardwareSupportedInCurrentSdkH264(MediaCodecInfo info) {
// First, H264 hardware might perform poorly on this model.
if (H264_HW_EXCEPTION_MODELS.contains(getModel())) {
if (!Instance.getGlobalServerConfig().enable_h264_encoder) {
return false;
}
String name = info.getName();
@ -308,6 +249,9 @@ public class HardwareVideoEncoderFactory implements VideoEncoderFactory {
}
private boolean isHardwareSupportedInCurrentSdkH265(MediaCodecInfo info) {
if (!Instance.getGlobalServerConfig().enable_h265_encoder) {
return false;
}
String name = info.getName();
// QCOM H265 encoder is supported in KITKAT or later.
return (name.startsWith(QCOM_PREFIX) && Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT)

Binary file not shown.

After

Width:  |  Height:  |  Size: 294 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 212 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 105 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 516 B

View File

@ -3023,6 +3023,8 @@
<string name="NoCallLog">You haven\'t made any calls yet.</string>
<string name="VoipPeerIncompatible">**%1$s**\'s app is using an incompatible protocol. They need to update their app before you can call them.</string>
<string name="VoipPeerOutdated">**%1$s**\'s app does not support calls. They need to update their app before you can call them.</string>
<string name="VoipPeerVideoOutdated">Sorry, **%1$s** is using an old version of Telegram that doesn\'t support video calls.</string>
<string name="VoipPeerVideoOutdatedMakeVoice">Make a voice call</string>
<string name="VoipRateCallAlert">Please rate the quality of your Telegram call</string>
<string name="VoipNeedMicPermission">Telegram needs access to your microphone so that you can make calls.</string>
<string name="VoipNeedMicCameraPermission">Telegram needs access to your microphone and camera so that you can make video calls.</string>
@ -3065,6 +3067,8 @@
<string name="RateCallSilentLocal">I couldn\'t hear the other side</string>
<string name="RateCallSilentRemote">The other side couldn\'t hear me</string>
<string name="RateCallDropped">Call ended unexpectedly</string>
<string name="RateCallVideoDistorted">Video was distorted</string>
<string name="RateCallVideoPixelated">Video was pixelated</string>
<string name="TapToTurnCamera">Tap here to turn on your camera</string>
<string name="VoipUnmute">Unmute</string>
<string name="VoipMute">Mute</string>
@ -3676,4 +3680,5 @@
<string name="RemindTodayAt">\'Remind today at\' HH:mm</string>
<string name="RemindDayAt">\'Remind on\' MMM d \'at\' HH:mm</string>
<string name="RemindDayYearAt">\'Remind on\' MMM d yyyy \'at\' HH:mm</string>
<string name="VoipReconnecting">Reconnecting</string>
</resources>