Merge remote-tracking branch 'upstream/master'

commit 5e8de25641
Author: 世界
Date: 2020-07-27 00:58:40 +00:00
Signature: GPG Key ID CD109927C34A63C4 (no known key found for this signature in database)
767 changed files with 44914 additions and 19687 deletions

View File

@@ -1,11 +1,11 @@
-FROM gradle:6.0.1-jdk8
+FROM gradle:6.1.1-jdk8
 ENV ANDROID_SDK_URL https://dl.google.com/android/repository/sdk-tools-linux-3859397.zip
-ENV ANDROID_API_LEVEL android-29
+ENV ANDROID_API_LEVEL android-30
-ENV ANDROID_BUILD_TOOLS_VERSION 29.0.3
+ENV ANDROID_BUILD_TOOLS_VERSION 30.0.1
 ENV ANDROID_HOME /usr/local/android-sdk-linux
 ENV ANDROID_NDK_VERSION 21.1.6352462
-ENV ANDROID_VERSION 29
+ENV ANDROID_VERSION 30
 ENV ANDROID_NDK_HOME ${ANDROID_HOME}/ndk/${ANDROID_NDK_VERSION}/
 ENV PATH ${PATH}:${ANDROID_HOME}/tools:${ANDROID_HOME}/platform-tools

View File

@@ -81,7 +81,8 @@ dependencies {
 implementation 'androidx.exifinterface:exifinterface:1.2.0'
 implementation "androidx.interpolator:interpolator:1.0.0"
 implementation 'androidx.dynamicanimation:dynamicanimation:1.0.0'
-implementation 'com.android.support:multidex:1.0.3'
+implementation 'androidx.multidex:multidex:2.0.1'
+implementation "androidx.sharetarget:sharetarget:1.0.0"
 // replace zxing with latest
 // TODO: fix problem with android L
@@ -96,7 +97,7 @@ dependencies {
 implementation 'com.stripe:stripe-android:2.0.2'
 implementation 'com.google.code.gson:gson:2.8.6'
-implementation 'org.osmdroid:osmdroid-android:6.1.7'
+implementation 'org.osmdroid:osmdroid-android:6.1.8'
 implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.3.72"
 implementation 'org.jetbrains.kotlinx:kotlinx-coroutines-android:1.3.8'
@@ -105,8 +106,8 @@ dependencies {
 implementation 'dnsjava:dnsjava:3.2.2'
 implementation 'org.dizitart:nitrite:3.4.2'
-implementation 'cn.hutool:hutool-core:5.3.9'
+implementation 'cn.hutool:hutool-core:5.3.10'
-implementation 'cn.hutool:hutool-crypto:5.3.9'
+implementation 'cn.hutool:hutool-crypto:5.3.10'
 implementation 'org.tukaani:xz:1.8'
 implementation project(":openpgp-api")
@@ -145,8 +146,8 @@ task writeUpdateInfo {
 tasks.findByName("preBuild").finalizedBy(writeUpdateInfo)
 android {
-compileSdkVersion 29
+compileSdkVersion 30
-buildToolsVersion '29.0.3'
+buildToolsVersion '30.0.1'
 ndkVersion rootProject.ext.ndkVersion
 defaultConfig.applicationId = "nekox.messenger"
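Note on the multidex change above: moving from com.android.support:multidex to androidx.multidex normally also means the application class installs multidex through the androidx API. A minimal sketch, assuming a hypothetical DemoApplication class (this is not code from this repository):

    import android.app.Application;
    import android.content.Context;
    import androidx.multidex.MultiDex;

    public class DemoApplication extends Application {
        @Override
        protected void attachBaseContext(Context base) {
            super.attachBaseContext(base);
            // androidx replacement for the old com.android.support multidex install call.
            MultiDex.install(this);
        }
    }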

View File

@@ -296,11 +296,11 @@ include $(BUILD_STATIC_LIBRARY)
 include $(CLEAR_VARS)
 LOCAL_PRELINK_MODULE := false
-LOCAL_MODULE := tmessages.30
+LOCAL_MODULE := tmessages.31
 LOCAL_CFLAGS := -w -std=c11 -Os -DNULL=0 -DSOCKLEN_T=socklen_t -DLOCALE_NOT_USED -D_LARGEFILE_SOURCE=1
 LOCAL_CFLAGS += -Drestrict='' -D__EMX__ -DOPUS_BUILD -DFIXED_POINT -DUSE_ALLOCA -DHAVE_LRINT -DHAVE_LRINTF -fno-math-errno
 LOCAL_CFLAGS += -DANDROID_NDK -DDISABLE_IMPORTGL -fno-strict-aliasing -fprefetch-loop-arrays -DAVOID_TABLES -DANDROID_TILE_BASED_DECODE -DANDROID_ARMV6_IDCT -ffast-math -D__STDC_CONSTANT_MACROS
-LOCAL_CPPFLAGS := -DBSD=1 -ffast-math -Os -funroll-loops -std=c++14 -DPACKAGE_NAME='"drinkless/org/ton"'
+LOCAL_CPPFLAGS := -DBSD=1 -ffast-math -Os -funroll-loops -std=c++14
 LOCAL_LDLIBS := -ljnigraphics -llog -lz -lEGL -lGLESv2 -landroid
 LOCAL_STATIC_LIBRARIES := webp sqlite lz4 rlottie tgnet swscale avformat avcodec avresample avutil swresample flac

View File

@@ -404,7 +404,7 @@ void setSystemLangCode(JNIEnv *env, jclass c, jint instanceNum, jstring langCode
 }
 }
-void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jint apiId, jstring deviceModel, jstring systemVersion, jstring appVersion, jstring langCode, jstring systemLangCode, jstring configPath, jstring logPath, jstring regId, jstring cFingerprint, jint timezoneOffset, jint userId, jboolean enablePushConnection, jboolean hasNetwork, jint networkType) {
+void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jint apiId, jstring deviceModel, jstring systemVersion, jstring appVersion, jstring langCode, jstring systemLangCode, jstring configPath, jstring logPath, jstring regId, jstring cFingerprint, jstring installerId, jint timezoneOffset, jint userId, jboolean enablePushConnection, jboolean hasNetwork, jint networkType) {
 const char *deviceModelStr = env->GetStringUTFChars(deviceModel, 0);
 const char *systemVersionStr = env->GetStringUTFChars(systemVersion, 0);
 const char *appVersionStr = env->GetStringUTFChars(appVersion, 0);
@@ -414,8 +414,9 @@ void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jin
 const char *logPathStr = env->GetStringUTFChars(logPath, 0);
 const char *regIdStr = env->GetStringUTFChars(regId, 0);
 const char *cFingerprintStr = env->GetStringUTFChars(cFingerprint, 0);
+const char *installerIdStr = env->GetStringUTFChars(installerId, 0);
-ConnectionsManager::getInstance(instanceNum).init((uint32_t) version, layer, apiId, std::string(deviceModelStr), std::string(systemVersionStr), std::string(appVersionStr), std::string(langCodeStr), std::string(systemLangCodeStr), std::string(configPathStr), std::string(logPathStr), std::string(regIdStr), std::string(cFingerprintStr), timezoneOffset, userId, true, enablePushConnection, hasNetwork, networkType);
+ConnectionsManager::getInstance(instanceNum).init((uint32_t) version, layer, apiId, std::string(deviceModelStr), std::string(systemVersionStr), std::string(appVersionStr), std::string(langCodeStr), std::string(systemLangCodeStr), std::string(configPathStr), std::string(logPathStr), std::string(regIdStr), std::string(cFingerprintStr), std::string(installerIdStr), timezoneOffset, userId, true, enablePushConnection, hasNetwork, networkType);
 if (deviceModelStr != 0) {
 env->ReleaseStringUTFChars(deviceModel, deviceModelStr);
@@ -444,6 +445,9 @@ void init(JNIEnv *env, jclass c, jint instanceNum, jint version, jint layer, jin
 if (cFingerprintStr != 0) {
 env->ReleaseStringUTFChars(cFingerprint, cFingerprintStr);
 }
+if (installerIdStr != 0) {
+env->ReleaseStringUTFChars(installerId, installerIdStr);
+}
 }
 void setJava(JNIEnv *env, jclass c, jboolean useJavaByteBuffers) {
@@ -472,7 +476,7 @@ static JNINativeMethod ConnectionsManagerMethods[] = {
 {"native_setProxySettings", "(ILjava/lang/String;ILjava/lang/String;Ljava/lang/String;Ljava/lang/String;)V", (void *) setProxySettings},
 {"native_getConnectionState", "(I)I", (void *) getConnectionState},
 {"native_setUserId", "(II)V", (void *) setUserId},
-{"native_init", "(IIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;IIZZI)V", (void *) init},
+{"native_init", "(IIIILjava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;Ljava/lang/String;IIZZI)V", (void *) init},
 {"native_setLangCode", "(ILjava/lang/String;)V", (void *) setLangCode},
 {"native_setRegId", "(ILjava/lang/String;)V", (void *) setRegId},
 {"native_setSystemLangCode", "(ILjava/lang/String;)V", (void *) setSystemLangCode},

View File

@@ -40,45 +40,45 @@
 JNIEnv *env, jobject thiz, ##__VA_ARGS__)
 class JavaDataSource : public DataSource {
 public:
 void setFlacDecoderJni(JNIEnv *env, jobject flacDecoderJni) {
 this->env = env;
 this->flacDecoderJni = flacDecoderJni;
 if (mid == NULL) {
 jclass cls = env->GetObjectClass(flacDecoderJni);
 mid = env->GetMethodID(cls, "read", "(Ljava/nio/ByteBuffer;)I");
 }
 }
 ssize_t readAt(off64_t offset, void *const data, size_t size) {
 jobject byteBuffer = env->NewDirectByteBuffer(data, size);
 int result = env->CallIntMethod(flacDecoderJni, mid, byteBuffer);
 if (env->ExceptionCheck()) {
 // Exception is thrown in Java when returning from the native call.
 result = -1;
 }
 return result;
 }
 private:
 JNIEnv *env;
 jobject flacDecoderJni;
 jmethodID mid;
 };
 struct Context {
 JavaDataSource *source;
 FLACParser *parser;
 Context() {
 source = new JavaDataSource();
 parser = new FLACParser(source);
 }
 ~Context() {
 delete parser;
 delete source;
 }
 };
 DECODER_FUNC(jlong, flacInit) {
@@ -99,16 +99,16 @@ DECODER_FUNC(jobject, flacDecodeMetadata, jlong jContext) {
 jclass arrayListClass = env->FindClass("java/util/ArrayList");
 jmethodID arrayListConstructor =
 env->GetMethodID(arrayListClass, "<init>", "()V");
 jobject commentList = env->NewObject(arrayListClass, arrayListConstructor);
 jmethodID arrayListAddMethod =
 env->GetMethodID(arrayListClass, "add", "(Ljava/lang/Object;)Z");
 if (context->parser->areVorbisCommentsValid()) {
 std::vector<std::string> vorbisComments =
 context->parser->getVorbisComments();
 for (std::vector<std::string>::const_iterator vorbisComment =
 vorbisComments.begin();
 vorbisComment != vorbisComments.end(); ++vorbisComment) {
 jstring commentString = env->NewStringUTF((*vorbisComment).c_str());
 env->CallBooleanMethod(commentList, arrayListAddMethod, commentString);
@@ -121,10 +121,10 @@ DECODER_FUNC(jobject, flacDecodeMetadata, jlong jContext) {
 if (picturesValid) {
 std::vector<FlacPicture> pictures = context->parser->getPictures();
 jclass pictureFrameClass = env->FindClass(
 "com/google/android/exoplayer2/metadata/flac/PictureFrame");
 jmethodID pictureFrameConstructor =
 env->GetMethodID(pictureFrameClass, "<init>",
 "(ILjava/lang/String;Ljava/lang/String;IIII[B)V");
 for (std::vector<FlacPicture>::const_iterator picture = pictures.begin();
 picture != pictures.end(); ++picture) {
 jstring mimeType = env->NewStringUTF(picture->mimeType.c_str());
@@ -133,9 +133,9 @@ DECODER_FUNC(jobject, flacDecodeMetadata, jlong jContext) {
 env->SetByteArrayRegion(pictureData, 0, picture->data.size(),
 (signed char *)&picture->data[0]);
 jobject pictureFrame = env->NewObject(
 pictureFrameClass, pictureFrameConstructor, picture->type, mimeType,
 description, picture->width, picture->height, picture->depth,
 picture->colors, pictureData);
 env->CallBooleanMethod(pictureFrames, arrayListAddMethod, pictureFrame);
 env->DeleteLocalRef(mimeType);
 env->DeleteLocalRef(description);
@@ -144,14 +144,14 @@ DECODER_FUNC(jobject, flacDecodeMetadata, jlong jContext) {
 }
 const FLAC__StreamMetadata_StreamInfo &streamInfo =
 context->parser->getStreamInfo();
 jclass flacStreamMetadataClass = env->FindClass(
 "com/google/android/exoplayer2/util/"
 "FlacStreamMetadata");
 jmethodID flacStreamMetadataConstructor =
 env->GetMethodID(flacStreamMetadataClass, "<init>",
-"(IIIIIIIJLjava/util/List;Ljava/util/List;)V");
+"(IIIIIIIJLjava/util/ArrayList;Ljava/util/ArrayList;)V");
 return env->NewObject(flacStreamMetadataClass, flacStreamMetadataConstructor,
 streamInfo.min_blocksize, streamInfo.max_blocksize,

View File

@@ -52,31 +52,31 @@ const int endian = 1;
 // with the same parameter list, but discard redundant information.
 FLAC__StreamDecoderReadStatus FLACParser::read_callback(
 const FLAC__StreamDecoder * /* decoder */, FLAC__byte buffer[],
 size_t *bytes, void *client_data) {
 return reinterpret_cast<FLACParser *>(client_data)
 ->readCallback(buffer, bytes);
 }
 FLAC__StreamDecoderSeekStatus FLACParser::seek_callback(
 const FLAC__StreamDecoder * /* decoder */,
 FLAC__uint64 absolute_byte_offset, void *client_data) {
 return reinterpret_cast<FLACParser *>(client_data)
 ->seekCallback(absolute_byte_offset);
 }
 FLAC__StreamDecoderTellStatus FLACParser::tell_callback(
 const FLAC__StreamDecoder * /* decoder */,
 FLAC__uint64 *absolute_byte_offset, void *client_data) {
 return reinterpret_cast<FLACParser *>(client_data)
 ->tellCallback(absolute_byte_offset);
 }
 FLAC__StreamDecoderLengthStatus FLACParser::length_callback(
 const FLAC__StreamDecoder * /* decoder */, FLAC__uint64 *stream_length,
 void *client_data) {
 return reinterpret_cast<FLACParser *>(client_data)
 ->lengthCallback(stream_length);
 }
 FLAC__bool FLACParser::eof_callback(const FLAC__StreamDecoder * /* decoder */,
@@ -85,10 +85,10 @@ FLAC__bool FLACParser::eof_callback(const FLAC__StreamDecoder * /* decoder */,
 }
 FLAC__StreamDecoderWriteStatus FLACParser::write_callback(
 const FLAC__StreamDecoder * /* decoder */, const FLAC__Frame *frame,
 const FLAC__int32 *const buffer[], void *client_data) {
 return reinterpret_cast<FLACParser *>(client_data)
 ->writeCallback(frame, buffer);
 }
 void FLACParser::metadata_callback(const FLAC__StreamDecoder * /* decoder */,
@@ -125,27 +125,27 @@ FLAC__StreamDecoderReadStatus FLACParser::readCallback(FLAC__byte buffer[],
 }
 FLAC__StreamDecoderSeekStatus FLACParser::seekCallback(
 FLAC__uint64 absolute_byte_offset) {
 mCurrentPos = absolute_byte_offset;
 mEOF = false;
 return FLAC__STREAM_DECODER_SEEK_STATUS_OK;
 }
 FLAC__StreamDecoderTellStatus FLACParser::tellCallback(
 FLAC__uint64 *absolute_byte_offset) {
 *absolute_byte_offset = mCurrentPos;
 return FLAC__STREAM_DECODER_TELL_STATUS_OK;
 }
 FLAC__StreamDecoderLengthStatus FLACParser::lengthCallback(
 FLAC__uint64 *stream_length) {
 return FLAC__STREAM_DECODER_LENGTH_STATUS_UNSUPPORTED;
 }
 FLAC__bool FLACParser::eofCallback() { return mEOF; }
 FLAC__StreamDecoderWriteStatus FLACParser::writeCallback(
 const FLAC__Frame *frame, const FLAC__int32 *const buffer[]) {
 if (mWriteRequested) {
 mWriteRequested = false;
 // FLAC parser doesn't free or realloc buffer until next frame or finish
@@ -168,21 +168,21 @@ void FLACParser::metadataCallback(const FLAC__StreamMetadata *metadata) {
 } else {
 ALOGE("FLACParser::metadataCallback unexpected STREAMINFO");
 }
 break;
 case FLAC__METADATA_TYPE_SEEKTABLE:
 mSeekTable = &metadata->data.seek_table;
 break;
 case FLAC__METADATA_TYPE_VORBIS_COMMENT:
 if (!mVorbisCommentsValid) {
 FLAC__StreamMetadata_VorbisComment vorbisComment =
 metadata->data.vorbis_comment;
 for (FLAC__uint32 i = 0; i < vorbisComment.num_comments; ++i) {
 FLAC__StreamMetadata_VorbisComment_Entry vorbisCommentEntry =
 vorbisComment.comments[i];
 if (vorbisCommentEntry.entry != NULL) {
 std::string comment(
 reinterpret_cast<char *>(vorbisCommentEntry.entry),
 vorbisCommentEntry.length);
 mVorbisComments.push_back(comment);
 }
 }
@@ -190,14 +190,14 @@ void FLACParser::metadataCallback(const FLAC__StreamMetadata *metadata) {
 } else {
 ALOGE("FLACParser::metadataCallback unexpected VORBISCOMMENT");
 }
 break;
 case FLAC__METADATA_TYPE_PICTURE: {
 const FLAC__StreamMetadata_Picture *parsedPicture =
 &metadata->data.picture;
 FlacPicture picture;
 picture.mimeType.assign(std::string(parsedPicture->mime_type));
 picture.description.assign(
 std::string((char *)parsedPicture->description));
 picture.data.assign(parsedPicture->data,
 parsedPicture->data + parsedPicture->data_length);
 picture.width = parsedPicture->width;
@@ -211,7 +211,7 @@ void FLACParser::metadataCallback(const FLAC__StreamMetadata *metadata) {
 }
 default:
 ALOGE("FLACParser::metadataCallback unexpected type %u", metadata->type);
 break;
 }
 }
@@ -232,7 +232,7 @@ static void copyToByteArrayBigEndian(int8_t *dst, const int *const *src,
 // and then skip the first few bytes (most significant bytes)
 // depending on the bit depth
 const int8_t *byteSrc =
 reinterpret_cast<const int8_t *>(&src[c][i]) + 4 - bytesPerSample;
 memcpy(dst, byteSrc, bytesPerSample);
 dst = dst + bytesPerSample;
 }
@@ -262,20 +262,20 @@ static void copyTrespass(int8_t * /* dst */, const int *const * /* src */,
 // FLACParser
 FLACParser::FLACParser(DataSource *source)
 : mDataSource(source),
 mCopy(copyTrespass),
 mDecoder(NULL),
-mSeekTable(NULL),
-firstFrameOffset(0LL),
-mCurrentPos(0LL),
-mEOF(false),
-mStreamInfoValid(false),
+mCurrentPos(0LL),
+mEOF(false),
+mStreamInfoValid(false),
+mSeekTable(NULL),
+firstFrameOffset(0LL),
 mVorbisCommentsValid(false),
 mPicturesValid(false),
 mWriteRequested(false),
 mWriteCompleted(false),
 mWriteBuffer(NULL),
 mErrorStatus((FLAC__StreamDecoderErrorStatus)-1) {
 ALOGV("FLACParser::FLACParser");
 memset(&mStreamInfo, 0, sizeof(mStreamInfo));
 memset(&mWriteHeader, 0, sizeof(mWriteHeader));
@@ -311,9 +311,9 @@ bool FLACParser::init() {
 FLAC__METADATA_TYPE_PICTURE);
 FLAC__StreamDecoderInitStatus initStatus;
 initStatus = FLAC__stream_decoder_init_stream(
 mDecoder, read_callback, seek_callback, tell_callback, length_callback,
 eof_callback, write_callback, metadata_callback, error_callback,
 reinterpret_cast<void *>(this));
 if (initStatus != FLAC__STREAM_DECODER_INIT_STATUS_OK) {
 // A failure here probably indicates a programming error and so is
 // unlikely to happen. But we check and log here similarly to above.
@@ -347,27 +347,7 @@ bool FLACParser::decodeMetadata() {
 break;
 default:
 ALOGE("unsupported bits per sample %u", getBitsPerSample());
 return false;
-}
-// check sample rate
-switch (getSampleRate()) {
-case 8000:
-case 11025:
-case 12000:
-case 16000:
-case 22050:
-case 24000:
-case 32000:
-case 44100:
-case 48000:
-case 88200:
-case 96000:
-case 176400:
-case 192000:
-break;
-default:
-ALOGE("unsupported sample rate %u", getSampleRate());
-return false;
 }
 // configure the appropriate copy function based on device endianness.
 if (isBigEndian()) {
@@ -410,11 +390,11 @@ size_t FLACParser::readBuffer(void *output, size_t output_size) {
 mWriteHeader.channels != getChannels() ||
 mWriteHeader.bits_per_sample != getBitsPerSample()) {
 ALOGE(
 "FLACParser::readBuffer write changed parameters mid-stream: %d/%d/%d "
 "-> %d/%d/%d",
 getSampleRate(), getChannels(), getBitsPerSample(),
 mWriteHeader.sample_rate, mWriteHeader.channels,
 mWriteHeader.bits_per_sample);
 return -1;
 }
@@ -422,9 +402,9 @@ size_t FLACParser::readBuffer(void *output, size_t output_size) {
 size_t bufferSize = blocksize * getChannels() * bytesPerSample;
 if (bufferSize > output_size) {
 ALOGE(
 "FLACParser::readBuffer not enough space in output buffer "
 "%zu < %zu",
 output_size, bufferSize);
 return -1;
 }
@@ -456,11 +436,15 @@ bool FLACParser::getSeekPositions(int64_t timeUs,
 for (unsigned i = length; i != 0; i--) {
 int64_t sampleNumber = points[i - 1].sample_number;
+if (sampleNumber == -1) { // placeholder
+continue;
+}
 if (sampleNumber <= targetSampleNumber) {
 result[0] = (sampleNumber * 1000000LL) / sampleRate;
 result[1] = firstFrameOffset + points[i - 1].stream_offset;
-if (sampleNumber == targetSampleNumber || i >= length) {
-// exact seek, or no following seek point.
+if (sampleNumber == targetSampleNumber || i >= length ||
+points[i].sample_number == -1) { // placeholder
+// exact seek, or no following non-placeholder seek point
 result[2] = result[0];
 result[3] = result[1];
 } else {

View File

@@ -56,14 +56,14 @@ static int channelCount;
 static int errorCode;
 DECODER_FUNC(jlong, opusInit, jint sampleRate, jint channelCount,
 jint numStreams, jint numCoupled, jint gain, jbyteArray jStreamMap) {
 int status = OPUS_INVALID_STATE;
 ::channelCount = channelCount;
 errorCode = 0;
 jbyte* streamMapBytes = env->GetByteArrayElements(jStreamMap, 0);
 uint8_t* streamMap = reinterpret_cast<uint8_t*>(streamMapBytes);
 OpusMSDecoder* decoder = opus_multistream_decoder_create(
 sampleRate, channelCount, numStreams, numCoupled, streamMap, &status);
 env->ReleaseByteArrayElements(jStreamMap, streamMapBytes, 0);
 if (!decoder || status != OPUS_OK) {
 LOGE("Failed to create Opus Decoder; status=%s", opus_strerror(status));
@@ -77,22 +77,22 @@ DECODER_FUNC(jlong, opusInit, jint sampleRate, jint channelCount,
 // Populate JNI References.
 const jclass outputBufferClass = env->FindClass(
 "com/google/android/exoplayer2/decoder/SimpleOutputBuffer");
 outputBufferInit = env->GetMethodID(outputBufferClass, "init",
 "(JI)Ljava/nio/ByteBuffer;");
 return reinterpret_cast<intptr_t>(decoder);
 }
 DECODER_FUNC(jint, opusDecode, jlong jDecoder, jlong jTimeUs,
 jobject jInputBuffer, jint inputSize, jobject jOutputBuffer) {
 OpusMSDecoder* decoder = reinterpret_cast<OpusMSDecoder*>(jDecoder);
 const uint8_t* inputBuffer =
 reinterpret_cast<const uint8_t*>(
 env->GetDirectBufferAddress(jInputBuffer));
 const jint outputSize =
 kMaxOpusOutputPacketSizeSamples * kBytesPerSample * channelCount;
 env->CallObjectMethod(jOutputBuffer, outputBufferInit, jTimeUs, outputSize);
 if (env->ExceptionCheck()) {
@@ -100,27 +100,27 @@ DECODER_FUNC(jint, opusDecode, jlong jDecoder, jlong jTimeUs,
 return -1;
 }
 const jobject jOutputBufferData = env->CallObjectMethod(jOutputBuffer,
 outputBufferInit, jTimeUs, outputSize);
 if (env->ExceptionCheck()) {
 // Exception is thrown in Java when returning from the native call.
 return -1;
 }
 int16_t* outputBufferData = reinterpret_cast<int16_t*>(
 env->GetDirectBufferAddress(jOutputBufferData));
 int sampleCount = opus_multistream_decode(decoder, inputBuffer, inputSize,
 outputBufferData, kMaxOpusOutputPacketSizeSamples, 0);
 // record error code
 errorCode = (sampleCount < 0) ? sampleCount : 0;
 return (sampleCount < 0) ? sampleCount
 : sampleCount * kBytesPerSample * channelCount;
 }
 DECODER_FUNC(jint, opusSecureDecode, jlong jDecoder, jlong jTimeUs,
 jobject jInputBuffer, jint inputSize, jobject jOutputBuffer,
 jint sampleRate, jobject mediaCrypto, jint inputMode, jbyteArray key,
 jbyteArray javaIv, jint inputNumSubSamples, jintArray numBytesOfClearData,
 jintArray numBytesOfEncryptedData) {
 // Doesn't support
 // Java client should have checked vpxSupportSecureDecode
 // and avoid calling this

View File

@@ -582,18 +582,149 @@ void Java_org_telegram_ui_Components_AnimatedFileDrawable_seekToMs(JNIEnv *env,
 return;
 }
 if (got_frame) {
+info->has_decoded_frames = true;
+bool finished = false;
 if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
 int64_t pkt_pts = info->frame->best_effort_timestamp;
 if (pkt_pts >= pts) {
-return;
+finished = true;
 }
 }
 av_frame_unref(info->frame);
+if (finished) {
+return;
+}
 }
 tries--;
 }
 }
 }
+static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray data, jobject bitmap, jint stride) {
+jint *dataArr = env->GetIntArrayElements(data, 0);
+int32_t wantedWidth;
+int32_t wantedHeight;
+if (dataArr != nullptr) {
+wantedWidth = dataArr[0];
+wantedHeight = dataArr[1];
+dataArr[3] = (jint) (1000 * info->frame->best_effort_timestamp * av_q2d(info->video_stream->time_base));
+env->ReleaseIntArrayElements(data, dataArr, 0);
+} else {
+AndroidBitmapInfo bitmapInfo;
+AndroidBitmap_getInfo(env, bitmap, &bitmapInfo);
+wantedWidth = bitmapInfo.width;
+wantedHeight = bitmapInfo.height;
+}
+void *pixels;
+if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) {
+if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) {
+if (info->sws_ctx == nullptr) {
+if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB) {
+info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, info->frame->width, info->frame->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
+} else if (info->video_dec_ctx->pix_fmt > AV_PIX_FMT_NONE && info->video_dec_ctx->pix_fmt < AV_PIX_FMT_NB) {
+info->sws_ctx = sws_getContext(info->video_dec_ctx->width, info->video_dec_ctx->height, info->video_dec_ctx->pix_fmt, info->video_dec_ctx->width, info->video_dec_ctx->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
+}
+}
+if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) {
+if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) {
+libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
+} else {
+libyuv::I420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
+}
+} else if (info->frame->format == AV_PIX_FMT_BGRA) {
+libyuv::ABGRToARGB(info->frame->data[0], info->frame->linesize[0], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
+}
+} else {
+info->dst_data[0] = (uint8_t *) pixels;
+info->dst_linesize[0] = stride;
+sws_scale(info->sws_ctx, info->frame->data, info->frame->linesize, 0, info->frame->height, info->dst_data, info->dst_linesize);
+}
+}
+AndroidBitmap_unlockPixels(env, bitmap);
+}
+}
+int Java_org_telegram_ui_Components_AnimatedFileDrawable_getFrameAtTime(JNIEnv *env, jclass clazz, jlong ptr, jlong ms, jobject bitmap, jintArray data, jint stride) {
+if (ptr == NULL || bitmap == nullptr || data == nullptr) {
+return 0;
+}
+VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
+info->seeking = false;
+int64_t pts = (int64_t) (ms / av_q2d(info->video_stream->time_base) / 1000);
+int ret = 0;
+if ((ret = av_seek_frame(info->fmt_ctx, info->video_stream_idx, pts, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME)) < 0) {
+LOGE("can't seek file %s, %s", info->src, av_err2str(ret));
+return 0;
+} else {
+avcodec_flush_buffers(info->video_dec_ctx);
+int got_frame = 0;
+int32_t tries = 1000;
+bool readNextPacket = true;
+while (tries > 0) {
+if (info->pkt.size == 0 && readNextPacket) {
+ret = av_read_frame(info->fmt_ctx, &info->pkt);
+if (ret >= 0) {
+info->orig_pkt = info->pkt;
+}
+}
+if (info->pkt.size > 0) {
+ret = decode_packet(info, &got_frame);
+if (ret < 0) {
+if (info->has_decoded_frames) {
+ret = 0;
+}
+info->pkt.size = 0;
+} else {
+info->pkt.data += ret;
+info->pkt.size -= ret;
+}
+if (info->pkt.size == 0) {
+av_packet_unref(&info->orig_pkt);
+}
+} else {
+info->pkt.data = NULL;
+info->pkt.size = 0;
+ret = decode_packet(info, &got_frame);
+if (ret < 0) {
+return 0;
+}
+if (got_frame == 0) {
+av_seek_frame(info->fmt_ctx, info->video_stream_idx, 0, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
+return 0;
+}
+}
+if (ret < 0) {
+return 0;
+}
+if (got_frame) {
+bool finished = false;
+if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
+int64_t pkt_pts = info->frame->best_effort_timestamp;
+bool isLastPacket = false;
+if (info->pkt.size == 0) {
+readNextPacket = false;
+isLastPacket = av_read_frame(info->fmt_ctx, &info->pkt) < 0;
+}
+if (pkt_pts >= pts || isLastPacket) {
+writeFrameToBitmap(env, info, data, bitmap, stride);
+finished = true;
+}
+}
+av_frame_unref(info->frame);
+if (finished) {
+return 1;
+}
+} else {
+readNextPacket = true;
+}
+tries--;
+}
+return 0;
+}
+}
 jint Java_org_telegram_ui_Components_AnimatedFileDrawable_getVideoFrame(JNIEnv *env, jclass clazz, jlong ptr, jobject bitmap, jintArray data, jint stride, jboolean preview, jfloat start_time, jfloat end_time) {
 if (ptr == NULL || bitmap == nullptr) {
@@ -617,7 +748,6 @@ jint Java_org_telegram_ui_Components_AnimatedFileDrawable_getVideoFrame(JNIEnv *
 } else {
 info->orig_pkt = info->pkt;
 }
 }
 }
@@ -666,55 +796,10 @@ jint Java_org_telegram_ui_Components_AnimatedFileDrawable_getVideoFrame(JNIEnv *
 if (got_frame) {
 //LOGD("decoded frame with w = %d, h = %d, format = %d", info->frame->width, info->frame->height, info->frame->format);
 if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
-jint *dataArr = env->GetIntArrayElements(data, 0);
-int32_t wantedWidth;
-int32_t wantedHeight;
-if (dataArr != nullptr) {
-wantedWidth = dataArr[0];
-wantedHeight = dataArr[1];
-dataArr[3] = (jint) (1000 * info->frame->best_effort_timestamp * av_q2d(info->video_stream->time_base));
-env->ReleaseIntArrayElements(data, dataArr, 0);
-} else {
-AndroidBitmapInfo bitmapInfo;
-AndroidBitmap_getInfo(env, bitmap, &bitmapInfo);
-wantedWidth = bitmapInfo.width;
-wantedHeight = bitmapInfo.height;
-}
-void *pixels;
-if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) {
-if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) {
-if (info->sws_ctx == nullptr) {
-if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB) {
-info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, info->frame->width, info->frame->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
-} else if (info->video_dec_ctx->pix_fmt > AV_PIX_FMT_NONE && info->video_dec_ctx->pix_fmt < AV_PIX_FMT_NB) {
-info->sws_ctx = sws_getContext(info->video_dec_ctx->width, info->video_dec_ctx->height, info->video_dec_ctx->pix_fmt, info->video_dec_ctx->width, info->video_dec_ctx->height, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
-}
-}
-if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) {
-if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) {
-if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) {
-libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
-} else {
-libyuv::I420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
-}
-} else if (info->frame->format == AV_PIX_FMT_BGRA) {
-libyuv::ABGRToARGB(info->frame->data[0], info->frame->linesize[0], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
-}
-} else {
-info->dst_data[0] = (uint8_t *) pixels;
-info->dst_linesize[0] = stride;
-sws_scale(info->sws_ctx, info->frame->data, info->frame->linesize, 0, info->frame->height, info->dst_data, info->dst_linesize);
-}
-}
-AndroidBitmap_unlockPixels(env, bitmap);
-}
+writeFrameToBitmap(env, info, data, bitmap, stride);
 }
 info->has_decoded_frames = true;
 av_frame_unref(info->frame);
-//LOGD("frame time %lld ms", ConnectionsManager::getInstance(0).getCurrentTimeMonotonicMillis() - time);
 return 1;
 }
 if (!info->has_decoded_frames) {
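The JNI symbol Java_org_telegram_ui_Components_AnimatedFileDrawable_getFrameAtTime added above implies a matching Java declaration in org.telegram.ui.Components.AnimatedFileDrawable. An illustrative sketch; the parameter names are assumptions and the real declaration is not part of this diff:

    import android.graphics.Bitmap;

    // ptr is the native VideoInfo handle, ms the requested position in milliseconds.
    // Returns 1 when a frame at or past the requested timestamp was written into the bitmap, 0 otherwise.
    public static native int getFrameAtTime(long ptr, long ms, Bitmap bitmap, int[] data, int stride);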

View File

@@ -822,8 +822,10 @@ void LottieParserImpl::parseLayers(LOTCompositionData *comp) {
 return;
 }
 std::shared_ptr<LOTData> layer = parseLayer(true);
-staticFlag = staticFlag && layer->isStatic();
-comp->mRootLayer->mChildren.push_back(layer);
+if (layer) {
+staticFlag = staticFlag && layer->isStatic();
+comp->mRootLayer->mChildren.push_back(layer);
+}
 }
 if (!IsValid()) {
 parsingError = true;

View File

@@ -1172,7 +1172,7 @@ UserProfilePhoto *UserProfilePhoto::TLdeserialize(NativeByteBuffer *stream, uint
 case 0x4f11bae1:
 result = new TL_userProfilePhotoEmpty();
 break;
-case 0xecd75d8c:
+case 0x69d3ab26:
 result = new TL_userProfilePhoto();
 break;
 default:
@@ -1189,6 +1189,8 @@ void TL_userProfilePhotoEmpty::serializeToStream(NativeByteBuffer *stream) {
 }
 void TL_userProfilePhoto::readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error) {
+flags = stream->readInt32(&error);
+has_video = (flags & 1) != 0;
 photo_id = stream->readInt64(&error);
 photo_small = std::unique_ptr<FileLocation>(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error));
 photo_big = std::unique_ptr<FileLocation>(FileLocation::TLdeserialize(stream, stream->readUint32(&error), instanceNum, error));
@@ -1197,6 +1199,8 @@ void TL_userProfilePhoto::readParams(NativeByteBuffer *stream, int32_t instanceN
 void TL_userProfilePhoto::serializeToStream(NativeByteBuffer *stream) {
 stream->writeInt32(constructor);
+flags = has_video ? (flags | 1) : (flags &~ 1);
+stream->writeInt32(flags);
 stream->writeInt64(photo_id);
 photo_small->serializeToStream(stream);
 photo_big->serializeToStream(stream);
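For readers tracking the schema change: constructor 0x69d3ab26 prepends a flags word to userProfilePhoto, and bit 0 of that word carries has_video, as the read/write pair above shows. A small illustrative helper (hypothetical, Java) that captures the same bit layout:

    // Hypothetical helper mirroring the flags handling in TL_userProfilePhoto above.
    final class UserProfilePhotoFlags {
        static final int HAS_VIDEO = 1; // bit 0

        static boolean hasVideo(int flags) {
            return (flags & HAS_VIDEO) != 0;
        }

        static int withHasVideo(int flags, boolean hasVideo) {
            return hasVideo ? (flags | HAS_VIDEO) : (flags & ~HAS_VIDEO);
        }
    }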

View File

@@ -252,6 +252,8 @@ public:
 class UserProfilePhoto : public TLObject {
 public:
+int32_t flags;
+bool has_video;
 int64_t photo_id;
 std::unique_ptr<FileLocation> photo_small;
 std::unique_ptr<FileLocation> photo_big;
@@ -271,7 +273,7 @@ public:
 class TL_userProfilePhoto : public UserProfilePhoto {
 public:
-static const uint32_t constructor = 0xecd75d8c;
+static const uint32_t constructor = 0x69d3ab26;
 void readParams(NativeByteBuffer *stream, int32_t instanceNum, bool &error);
 void serializeToStream(NativeByteBuffer *stream);

View File

@@ -210,10 +210,10 @@ public:
 Op::zero(32),
 Op::string("\x20", 1),
 Op::random(32),
-Op::string("\x00\x22", 2),
+Op::string("\x00\x20", 2),
 Op::grease(0),
 Op::string("\x13\x01\x13\x02\x13\x03\xc0\x2b\xc0\x2f\xc0\x2c\xc0\x30\xcc\xa9\xcc\xa8\xc0\x13\xc0\x14\x00\x9c"
-"\x00\x9d\x00\x2f\x00\x35\x00\x0a\x01\x00\x01\x91", 36),
+"\x00\x9d\x00\x2f\x00\x35\x01\x00\x01\x93", 34),
 Op::grease(2),
 Op::string("\x00\x00\x00\x00", 4),
 Op::begin_scope(),
@@ -228,8 +228,8 @@ public:
 Op::grease(4),
 Op::string(
 "\x00\x1d\x00\x17\x00\x18\x00\x0b\x00\x02\x01\x00\x00\x23\x00\x00\x00\x10\x00\x0e\x00\x0c\x02\x68\x32\x08"
-"\x68\x74\x74\x70\x2f\x31\x2e\x31\x00\x05\x00\x05\x01\x00\x00\x00\x00\x00\x0d\x00\x14\x00\x12\x04\x03\x08"
-"\x04\x04\x01\x05\x03\x08\x05\x05\x01\x08\x06\x06\x01\x02\x01\x00\x12\x00\x00\x00\x33\x00\x2b\x00\x29", 77),
+"\x68\x74\x74\x70\x2f\x31\x2e\x31\x00\x05\x00\x05\x01\x00\x00\x00\x00\x00\x0d\x00\x12\x00\x10\x04\x03\x08"
+"\x04\x04\x01\x05\x03\x08\x05\x05\x01\x08\x06\x06\x01\x00\x12\x00\x00\x00\x33\x00\x2b\x00\x29", 75),
 Op::grease(4),
 Op::string("\x00\x01\x00\x00\x1d\x00\x20", 7),
 Op::K(),

View File

@@ -2748,6 +2748,13 @@ std::unique_ptr<TLObject> ConnectionsManager::wrapInLayer(TLObject *object, Data
 TL_jsonObjectValue *objectValue = new TL_jsonObjectValue();
 jsonObject->value.push_back(std::unique_ptr<TL_jsonObjectValue>(objectValue));
+TL_jsonString *jsonString = new TL_jsonString();
+jsonString->value = installer;
+objectValue->key = "installer";
+objectValue->value = std::unique_ptr<JSONValue>(jsonString);
+objectValue = new TL_jsonObjectValue();
+jsonObject->value.push_back(std::unique_ptr<TL_jsonObjectValue>(objectValue));
 TL_jsonNumber *jsonNumber = new TL_jsonNumber();
 jsonNumber->value = currentDeviceTimezone;
@@ -3271,7 +3278,7 @@
 });
 }
-void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerpting, int32_t timezoneOffset, int32_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType) {
+void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerpting, std::string installerId, int32_t timezoneOffset, int32_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType) {
 currentVersion = version;
 currentLayer = layer;
 currentApiId = apiId;
@@ -3282,6 +3289,7 @@ void ConnectionsManager::init(uint32_t version, int32_t layer, int32_t apiId, st
 currentLangCode = langCode;
 currentRegId = regId;
 certFingerprint = cFingerpting;
+installer = installerId;
 currentDeviceTimezone = timezoneOffset;
 currentSystemLangCode = systemLangCode;
 currentUserId = userId;
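The new installer JSON field and the installerId init parameter above are filled from the Java layer; on Android the natural source for such a value is the installer package name. A hedged sketch of how a caller could obtain it (not code from this repository; the method name getInstallerId is hypothetical):

    import android.content.Context;

    static String getInstallerId(Context context) {
        try {
            // Package name of the app that installed us (may be null, e.g. for sideloads).
            String installer = context.getPackageManager().getInstallerPackageName(context.getPackageName());
            return installer != null ? installer : "";
        } catch (Exception e) {
            return "";
        }
    }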

View File

@@ -66,7 +66,7 @@ public:
 void pauseNetwork();
 void setNetworkAvailable(bool value, int32_t type, bool slow);
 void setUseIpv6(bool value);
-void init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerprint, int32_t timezoneOffset, int32_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType);
+void init(uint32_t version, int32_t layer, int32_t apiId, std::string deviceModel, std::string systemVersion, std::string appVersion, std::string langCode, std::string systemLangCode, std::string configPath, std::string logPath, std::string regId, std::string cFingerprint, std::string installerId, int32_t timezoneOffset, int32_t userId, bool isPaused, bool enablePushConnection, bool hasNetwork, int32_t networkType);
 void setProxySettings(std::string address, uint16_t port, std::string username, std::string password, std::string secret);
 void setLangCode(std::string langCode);
 void setRegId(std::string regId);
@@ -218,6 +218,7 @@ private:
 std::string currentLangCode;
 std::string currentRegId;
 std::string certFingerprint;
+std::string installer;
 int32_t currentDeviceTimezone = 0;
 std::string currentSystemLangCode;
 std::string currentConfigPath;

View File

@ -57,6 +57,74 @@
java.lang.Object readResolve(); java.lang.Object readResolve();
} }
# Constant folding for resource integers may mean that a resource passed to this method appears to be unused. Keep the method to prevent this from happening.
-keep class com.google.android.exoplayer2.upstream.RawResourceDataSource {
public static android.net.Uri buildRawResourceUri(int);
}
# Methods accessed via reflection in DefaultExtractorsFactory
-dontnote com.google.android.exoplayer2.ext.flac.FlacLibrary
-keepclassmembers class com.google.android.exoplayer2.ext.flac.FlacLibrary {
}
# Some members of this class are being accessed from native methods. Keep them unobfuscated.
-keep class com.google.android.exoplayer2.video.VideoDecoderOutputBuffer {
*;
}
-dontnote com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer
-keepclassmembers class com.google.android.exoplayer2.ext.opus.LibopusAudioRenderer {
<init>(android.os.Handler, com.google.android.exoplayer2.audio.AudioRendererEventListener, com.google.android.exoplayer2.audio.AudioProcessor[]);
}
-dontnote com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer
-keepclassmembers class com.google.android.exoplayer2.ext.flac.LibflacAudioRenderer {
<init>(android.os.Handler, com.google.android.exoplayer2.audio.AudioRendererEventListener, com.google.android.exoplayer2.audio.AudioProcessor[]);
}
-dontnote com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer
-keepclassmembers class com.google.android.exoplayer2.ext.ffmpeg.FfmpegAudioRenderer {
<init>(android.os.Handler, com.google.android.exoplayer2.audio.AudioRendererEventListener, com.google.android.exoplayer2.audio.AudioProcessor[]);
}
# Constructors accessed via reflection in DefaultExtractorsFactory
-dontnote com.google.android.exoplayer2.ext.flac.FlacExtractor
-keepclassmembers class com.google.android.exoplayer2.ext.flac.FlacExtractor {
<init>();
}
# Constructors accessed via reflection in DefaultDownloaderFactory
-dontnote com.google.android.exoplayer2.source.dash.offline.DashDownloader
-keepclassmembers class com.google.android.exoplayer2.source.dash.offline.DashDownloader {
<init>(android.net.Uri, java.util.List, com.google.android.exoplayer2.offline.DownloaderConstructorHelper);
}
-dontnote com.google.android.exoplayer2.source.hls.offline.HlsDownloader
-keepclassmembers class com.google.android.exoplayer2.source.hls.offline.HlsDownloader {
<init>(android.net.Uri, java.util.List, com.google.android.exoplayer2.offline.DownloaderConstructorHelper);
}
-dontnote com.google.android.exoplayer2.source.smoothstreaming.offline.SsDownloader
-keepclassmembers class com.google.android.exoplayer2.source.smoothstreaming.offline.SsDownloader {
<init>(android.net.Uri, java.util.List, com.google.android.exoplayer2.offline.DownloaderConstructorHelper);
}
# Constructors accessed via reflection in DownloadHelper
-dontnote com.google.android.exoplayer2.source.dash.DashMediaSource$Factory
-keepclasseswithmembers class com.google.android.exoplayer2.source.dash.DashMediaSource$Factory {
<init>(com.google.android.exoplayer2.upstream.DataSource$Factory);
}
-dontnote com.google.android.exoplayer2.source.hls.HlsMediaSource$Factory
-keepclasseswithmembers class com.google.android.exoplayer2.source.hls.HlsMediaSource$Factory {
<init>(com.google.android.exoplayer2.upstream.DataSource$Factory);
}
-dontnote com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource$Factory
-keepclasseswithmembers class com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource$Factory {
<init>(com.google.android.exoplayer2.upstream.DataSource$Factory);
}
# Don't warn about checkerframework and Kotlin annotations
-dontwarn org.checkerframework.**
-dontwarn javax.annotation.**
# Use -keep to explicitly keep any other classes shrinking would remove
# -dontoptimize
-dontobfuscate
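The reflection-driven keep rules above are easier to follow next to the kind of lookup they protect. A minimal sketch in Java: the loader class below is hypothetical, but the FlacExtractor class name and its no-arg constructor are the real identifiers the -keepclassmembers rules reference.

import com.google.android.exoplayer2.extractor.Extractor;

// Hypothetical helper; DefaultExtractorsFactory performs an equivalent lookup internally.
final class FlacExtensionLoader {

    static Extractor loadFlacExtractorOrNull() {
        try {
            // The class is referenced only by name, so shrinking would otherwise strip it.
            Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.flac.FlacExtractor");
            // The <init>() keep rule above is what guarantees this constructor still exists after R8.
            return (Extractor) clazz.getConstructor().newInstance();
        } catch (ClassNotFoundException e) {
            // The FLAC extension is simply not in this build; -dontnote silences the warning.
            return null;
        } catch (ReflectiveOperationException e) {
            throw new IllegalStateException("FLAC extension found but not constructible", e);
        }
    }
}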


@@ -217,11 +217,17 @@
                <data android:scheme="ton" />
            </intent-filter>
-           <meta-data
-               android:name="android.service.chooser.chooser_target_service"
-               android:value=".TgChooserTargetService" />
+           <meta-data android:name="android.service.chooser.chooser_target_service" android:value="androidx.sharetarget.ChooserTargetServiceCompat" />
+           <meta-data android:name="android.app.shortcuts" android:resource="@xml/shortcuts" />
        </activity>
+       <activity
+           android:name="org.telegram.ui.BubbleActivity"
+           android:configChanges="keyboard|keyboardHidden|orientation|screenSize|uiMode"
+           android:hardwareAccelerated="@bool/useHardwareAcceleration"
+           android:windowSoftInputMode="adjustResize"
+           android:allowEmbedded="true"
+           android:documentLaunchMode="always"
+           android:resizeableActivity="true"/>
        <activity
            android:name="org.telegram.ui.ShareActivity"
            android:configChanges="keyboard|keyboardHidden|orientation|screenSize|uiMode"
@@ -344,15 +350,6 @@
                android:resource="@xml/contacts" />
        </service>
-       <service
-           android:name="org.telegram.messenger.TgChooserTargetService"
-           android:label="@string/Nekogram"
-           android:permission="android.permission.BIND_CHOOSER_TARGET_SERVICE">
-           <intent-filter>
-               <action android:name="android.service.chooser.ChooserTargetService" />
-           </intent-filter>
-       </service>
        <service
            android:name=".KeepAliveJob"
            android:exported="false"
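For context on the manifest change above: androidx.sharetarget serves direct-share targets through ChooserTargetServiceCompat plus the sharing shortcuts declared in @xml/shortcuts, so the app publishes dynamic shortcuts instead of running its own ChooserTargetService. A rough Java sketch, assuming a recent androidx.core; the shortcut id, extra, and category string are placeholders that would have to match the <share-target> entries in res/xml/shortcuts.xml.

import android.content.Context;
import android.content.Intent;
import java.util.Collections;
import androidx.core.content.pm.ShortcutInfoCompat;
import androidx.core.content.pm.ShortcutManagerCompat;

// Hypothetical publisher of one sharing shortcut per recent chat.
final class SharingShortcutPublisher {

    static void publishChatShortcut(Context context, long dialogId, CharSequence title) {
        // A real implementation would target the activity that handles ACTION_SEND.
        Intent shareIntent = new Intent(Intent.ACTION_SEND)
                .setPackage(context.getPackageName())
                .putExtra("dialogId", dialogId); // placeholder extra
        ShortcutInfoCompat shortcut = new ShortcutInfoCompat.Builder(context, "dialog_" + dialogId)
                .setShortLabel(title)
                .setIntent(shareIntent)
                // Must match the category declared on the <share-target> in res/xml/shortcuts.xml.
                .setCategories(Collections.singleton("org.telegram.messenger.SHARE_TARGET"))
                .build();
        ShortcutManagerCompat.addDynamicShortcuts(context, Collections.singletonList(shortcut));
    }
}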


@ -8,6 +8,8 @@ import android.animation.ValueAnimator;
import android.os.Build; import android.os.Build;
import android.view.View; import android.view.View;
import android.view.ViewPropertyAnimator; import android.view.ViewPropertyAnimator;
import android.view.ViewTreeObserver;
import android.view.animation.OvershootInterpolator;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@ -20,6 +22,7 @@ import org.telegram.messenger.MessageObject;
import org.telegram.ui.Cells.BotHelpCell; import org.telegram.ui.Cells.BotHelpCell;
import org.telegram.ui.Cells.ChatMessageCell; import org.telegram.ui.Cells.ChatMessageCell;
import org.telegram.ui.ChatActivity; import org.telegram.ui.ChatActivity;
import org.telegram.ui.Components.ChatGreetingsView;
import org.telegram.ui.Components.CubicBezierInterpolator; import org.telegram.ui.Components.CubicBezierInterpolator;
import org.telegram.ui.Components.RecyclerListView; import org.telegram.ui.Components.RecyclerListView;
@ -27,8 +30,14 @@ import java.util.ArrayList;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import static androidx.recyclerview.widget.ViewInfoStore.InfoRecord.FLAG_DISAPPEARED;
public class ChatListItemAnimator extends DefaultItemAnimator { public class ChatListItemAnimator extends DefaultItemAnimator {
public static final int ANIMATION_TYPE_OUT = 1;
public static final int ANIMATION_TYPE_IN = 2;
public static final int ANIMATION_TYPE_MOVE = 3;
private final ChatActivity activity; private final ChatActivity activity;
private final RecyclerListView recyclerListView; private final RecyclerListView recyclerListView;
@ -40,6 +49,8 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
ArrayList<Runnable> runOnAnimationsEnd = new ArrayList<>(); ArrayList<Runnable> runOnAnimationsEnd = new ArrayList<>();
private boolean shouldAnimateEnterFromBottom; private boolean shouldAnimateEnterFromBottom;
private RecyclerView.ViewHolder greetingsSticker;
private ChatGreetingsView chatGreetingsView;
public ChatListItemAnimator(ChatActivity activity, RecyclerListView listView) { public ChatListItemAnimator(ChatActivity activity, RecyclerListView listView) {
this.activity = activity; this.activity = activity;
@ -52,6 +63,14 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
@Override @Override
public void runPendingAnimations() { public void runPendingAnimations() {
boolean removalsPending = !mPendingRemovals.isEmpty();
boolean movesPending = !mPendingMoves.isEmpty();
boolean changesPending = !mPendingChanges.isEmpty();
boolean additionsPending = !mPendingAdditions.isEmpty();
if (!removalsPending && !movesPending && !additionsPending && !changesPending) {
return;
}
boolean runTranslationFromBottom = false; boolean runTranslationFromBottom = false;
if (shouldAnimateEnterFromBottom) { if (shouldAnimateEnterFromBottom) {
for (int i = 0; i < mPendingAdditions.size(); i++) { for (int i = 0; i < mPendingAdditions.size(); i++) {
@ -201,7 +220,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
@Override @Override
public boolean animateAppearance(@NonNull RecyclerView.ViewHolder viewHolder, @Nullable ItemHolderInfo preLayoutInfo, @NonNull ItemHolderInfo postLayoutInfo) { public boolean animateAppearance(@NonNull RecyclerView.ViewHolder viewHolder, @Nullable ItemHolderInfo preLayoutInfo, @NonNull ItemHolderInfo postLayoutInfo) {
boolean res = super.animateAppearance(viewHolder, preLayoutInfo, postLayoutInfo); boolean res = super.animateAppearance(viewHolder, preLayoutInfo, postLayoutInfo);
-        if (res) {
+        if (res && shouldAnimateEnterFromBottom) {
boolean runTranslationFromBottom = false; boolean runTranslationFromBottom = false;
for (int i = 0; i < mPendingAdditions.size(); i++) { for (int i = 0; i < mPendingAdditions.size(); i++) {
if (mPendingAdditions.get(i).getLayoutPosition() == 0) { if (mPendingAdditions.get(i).getLayoutPosition() == 0) {
@ -532,13 +551,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
} }
} else if (holder.itemView instanceof BotHelpCell) { } else if (holder.itemView instanceof BotHelpCell) {
BotHelpCell botInfo = (BotHelpCell) holder.itemView; BotHelpCell botInfo = (BotHelpCell) holder.itemView;
-                if (!botInfo.wasDraw) {
-                    dispatchMoveFinished(holder);
-                    botInfo.setAnimating(false);
-                    return false;
-                } else {
-                    botInfo.setAnimating(true);
-                }
+                botInfo.setAnimating(true);
} else { } else {
if (deltaX == 0 && deltaY == 0) { if (deltaX == 0 && deltaY == 0) {
dispatchMoveFinished(holder); dispatchMoveFinished(holder);
@ -666,9 +679,13 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
                animatorSet.playTogether(valueAnimator);
            }
+            MessageObject.GroupedMessages group = chatMessageCell.getCurrentMessagesGroup();
+            if (group == null) {
+                moveInfoExtended.animateChangeGroupBackground = false;
+            }
            if (moveInfoExtended.animateChangeGroupBackground) {
                ValueAnimator valueAnimator = ValueAnimator.ofFloat(1f, 0);
-                MessageObject.GroupedMessages group = chatMessageCell.getCurrentMessagesGroup();
                MessageObject.GroupedMessages.TransitionParams groupTransitionParams = group.transitionParams;
                RecyclerListView recyclerListView = (RecyclerListView) holder.itemView.getParent();
@ -738,10 +755,12 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
@Override @Override
public void onAnimationEnd(Animator animator) { public void onAnimationEnd(Animator animator) {
animator.removeAllListeners(); animator.removeAllListeners();
+                restoreTransitionParams(holder.itemView);
                if (holder.itemView instanceof ChatMessageCell) {
-                    ((ChatMessageCell) holder.itemView).getTransitionParams().resetAnimation();
-                } else if (holder.itemView instanceof BotHelpCell) {
-                    ((BotHelpCell) holder.itemView).setAnimating(false);
+                    MessageObject.GroupedMessages group = ((ChatMessageCell) view).getCurrentMessagesGroup();
+                    if (group != null) {
+                        group.transitionParams.reset();
+                    }
                }
if (mMoveAnimations.remove(holder)) { if (mMoveAnimations.remove(holder)) {
dispatchMoveFinished(holder); dispatchMoveFinished(holder);
@ -935,11 +954,32 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
animator.cancel(); animator.cancel();
} }
super.endAnimation(item); super.endAnimation(item);
restoreTransitionParams(item.itemView);
if (BuildVars.LOGS_ENABLED) { if (BuildVars.LOGS_ENABLED) {
FileLog.d("end animation"); FileLog.d("end animation");
} }
} }
private void restoreTransitionParams(View view) {
view.setAlpha(1f);
view.setTranslationY(0f);
if (view instanceof BotHelpCell) {
BotHelpCell botCell = (BotHelpCell) view;
int top = recyclerListView.getMeasuredHeight() / 2 - view.getMeasuredHeight() / 2;
botCell.setAnimating(false);
if (view.getTop() > top) {
view.setTranslationY(top - view.getTop());
} else {
view.setTranslationY(0);
}
} else if (view instanceof ChatMessageCell) {
((ChatMessageCell) view).getTransitionParams().resetAnimation();
((ChatMessageCell) view).setAnimationOffsetX(0f);
} else {
view.setTranslationX(0f);
}
}
@Override @Override
public void endAnimations() { public void endAnimations() {
if (BuildVars.LOGS_ENABLED) { if (BuildVars.LOGS_ENABLED) {
@ -949,39 +989,33 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
groupedMessages.transitionParams.isNewGroup = false; groupedMessages.transitionParams.isNewGroup = false;
} }
willChangedGroups.clear(); willChangedGroups.clear();
cancelAnimators(); cancelAnimators();
if (chatGreetingsView != null) {
chatGreetingsView.stickerToSendView.setAlpha(1f);
}
greetingsSticker = null;
chatGreetingsView = null;
int count = mPendingMoves.size(); int count = mPendingMoves.size();
for (int i = count - 1; i >= 0; i--) { for (int i = count - 1; i >= 0; i--) {
MoveInfo item = mPendingMoves.get(i); MoveInfo item = mPendingMoves.get(i);
View view = item.holder.itemView; View view = item.holder.itemView;
-            view.setTranslationY(0);
-            if (view instanceof ChatMessageCell) {
-                ChatMessageCell cell = ((ChatMessageCell) view);
-                ChatMessageCell.TransitionParams params = cell.getTransitionParams();
-                cell.setAnimationOffsetX(0);
-                params.deltaLeft = 0;
-                params.deltaTop = 0;
-                params.deltaRight = 0;
-                params.deltaBottom = 0;
-                params.animateBackgroundBoundsInner = false;
-            } else {
-                view.setTranslationX(0);
-            }
+            restoreTransitionParams(view);
dispatchMoveFinished(item.holder); dispatchMoveFinished(item.holder);
mPendingMoves.remove(i); mPendingMoves.remove(i);
} }
count = mPendingRemovals.size(); count = mPendingRemovals.size();
for (int i = count - 1; i >= 0; i--) { for (int i = count - 1; i >= 0; i--) {
RecyclerView.ViewHolder item = mPendingRemovals.get(i); RecyclerView.ViewHolder item = mPendingRemovals.get(i);
restoreTransitionParams(item.itemView);
dispatchRemoveFinished(item); dispatchRemoveFinished(item);
mPendingRemovals.remove(i); mPendingRemovals.remove(i);
} }
count = mPendingAdditions.size(); count = mPendingAdditions.size();
for (int i = count - 1; i >= 0; i--) { for (int i = count - 1; i >= 0; i--) {
RecyclerView.ViewHolder item = mPendingAdditions.get(i); RecyclerView.ViewHolder item = mPendingAdditions.get(i);
-            item.itemView.setAlpha(1);
+            restoreTransitionParams(item.itemView);
dispatchAddFinished(item); dispatchAddFinished(item);
mPendingAdditions.remove(i); mPendingAdditions.remove(i);
} }
@ -1001,13 +1035,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
for (int j = count - 1; j >= 0; j--) { for (int j = count - 1; j >= 0; j--) {
MoveInfo moveInfo = moves.get(j); MoveInfo moveInfo = moves.get(j);
RecyclerView.ViewHolder item = moveInfo.holder; RecyclerView.ViewHolder item = moveInfo.holder;
-                View view = item.itemView;
-                view.setTranslationY(0);
-                if (view instanceof ChatMessageCell) {
-                    ((ChatMessageCell) view).setAnimationOffsetX(0);
-                } else {
-                    view.setTranslationX(0);
-                }
+                restoreTransitionParams(item.itemView);
dispatchMoveFinished(moveInfo.holder); dispatchMoveFinished(moveInfo.holder);
moves.remove(j); moves.remove(j);
if (moves.isEmpty()) { if (moves.isEmpty()) {
@ -1021,8 +1049,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
count = additions.size(); count = additions.size();
for (int j = count - 1; j >= 0; j--) { for (int j = count - 1; j >= 0; j--) {
RecyclerView.ViewHolder item = additions.get(j); RecyclerView.ViewHolder item = additions.get(j);
-                View view = item.itemView;
-                view.setAlpha(1);
+                restoreTransitionParams(item.itemView);
dispatchAddFinished(item); dispatchAddFinished(item);
additions.remove(j); additions.remove(j);
if (additions.isEmpty()) { if (additions.isEmpty()) {
@ -1067,14 +1094,7 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
} else { } else {
return false; return false;
} }
-        item.itemView.setAlpha(1);
-        if (item.itemView instanceof ChatMessageCell) {
-            ((ChatMessageCell) item.itemView).setAnimationOffsetX(0);
-            ((ChatMessageCell) item.itemView).getTransitionParams().resetAnimation();
-        } else {
-            item.itemView.setTranslationX(0);
-        }
-        item.itemView.setTranslationY(0);
+        restoreTransitionParams(item.itemView);
dispatchChangeFinished(item, oldItem); dispatchChangeFinished(item, oldItem);
return true; return true;
@ -1120,6 +1140,9 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
} }
final View view = holder.itemView; final View view = holder.itemView;
mAddAnimations.add(holder); mAddAnimations.add(holder);
if (holder == greetingsSticker) {
view.setAlpha(1f);
}
AnimatorSet animatorSet = new AnimatorSet(); AnimatorSet animatorSet = new AnimatorSet();
ObjectAnimator animator = ObjectAnimator.ofFloat(view, View.ALPHA, view.getAlpha(), 1f); ObjectAnimator animator = ObjectAnimator.ofFloat(view, View.ALPHA, view.getAlpha(), 1f);
@ -1136,14 +1159,76 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
} else { } else {
view.animate().translationX(0).translationY(0).setDuration(getAddDuration()).start(); view.animate().translationX(0).translationY(0).setDuration(getAddDuration()).start();
} }
-        animatorSet.setDuration(getAddDuration());
        if (view instanceof ChatMessageCell){
-            MessageObject.GroupedMessages groupedMessages = ((ChatMessageCell) view).getCurrentMessagesGroup();
-            if (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds) {
-                animatorSet.setStartDelay(140);
-            }
+            if (holder == greetingsSticker) {
+                if (chatGreetingsView != null) {
+                    chatGreetingsView.stickerToSendView.setAlpha(0f);
+                }
recyclerListView.setClipChildren(false);
ChatMessageCell messageCell = (ChatMessageCell) view;
View parentForGreetingsView = (View)chatGreetingsView.getParent();
float fromX = chatGreetingsView.stickerToSendView.getX() + chatGreetingsView.getX() + parentForGreetingsView.getX();
float fromY = chatGreetingsView.stickerToSendView.getY() + chatGreetingsView.getY() + parentForGreetingsView.getY();
float toX = messageCell.getPhotoImage().getImageX() + recyclerListView.getX() + messageCell.getX();
float toY = messageCell.getPhotoImage().getImageY() + recyclerListView.getY() + messageCell.getY();
float fromW = chatGreetingsView.stickerToSendView.getWidth();
float fromH = chatGreetingsView.stickerToSendView.getHeight();
float toW = messageCell.getPhotoImage().getImageWidth();
float toH = messageCell.getPhotoImage().getImageHeight();
float deltaX = fromX - toX;
float deltaY = fromY - toY;
toX = messageCell.getPhotoImage().getImageX();
toY = messageCell.getPhotoImage().getImageY();
messageCell.getTransitionParams().imageChangeBoundsTransition = true;
messageCell.getTransitionParams().animateDrawingTimeAlpha = true;
messageCell.getPhotoImage().setImageCoords(toX + deltaX, toX + deltaY, fromW,fromH);
ValueAnimator valueAnimator = ValueAnimator.ofFloat(0,1f);
float finalToX = toX;
float finalToY = toY;
valueAnimator.addUpdateListener(animation -> {
float v = (float) animation.getAnimatedValue();
messageCell.getTransitionParams().animateChangeProgress = v;
if (messageCell.getTransitionParams().animateChangeProgress > 1) {
messageCell.getTransitionParams().animateChangeProgress = 1f;
}
messageCell.getPhotoImage().setImageCoords(
finalToX + deltaX * (1f - v),
finalToY + deltaY * (1f - v),
fromW * (1f - v) + toW * v,
fromH * (1f - v) + toH * v);
messageCell.invalidate();
});
valueAnimator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationEnd(Animator animation) {
messageCell.getTransitionParams().resetAnimation();
messageCell.getPhotoImage().setImageCoords(finalToX, finalToY, toW, toH);
if (chatGreetingsView != null) {
chatGreetingsView.stickerToSendView.setAlpha(1f);
}
messageCell.invalidate();
}
});
animatorSet.play(valueAnimator);
} else {
MessageObject.GroupedMessages groupedMessages = ((ChatMessageCell) view).getCurrentMessagesGroup();
if (groupedMessages != null && groupedMessages.transitionParams.backgroundChangeBounds) {
animatorSet.setStartDelay(140);
}
} }
} }
if (holder == greetingsSticker) {
animatorSet.setDuration(350);
animatorSet.setInterpolator(new OvershootInterpolator());
} else {
animatorSet.setDuration(getAddDuration());
}
animatorSet.addListener(new AnimatorListenerAdapter() { animatorSet.addListener(new AnimatorListenerAdapter() {
@Override @Override
public void onAnimationStart(Animator animator) { public void onAnimationStart(Animator animator) {
@ -1232,6 +1317,31 @@ public class ChatListItemAnimator extends DefaultItemAnimator {
onAllAnimationsDone(); onAllAnimationsDone();
} }
public boolean willRemoved(View view) {
RecyclerView.ViewHolder holder = recyclerListView.getChildViewHolder(view);
if (holder != null) {
return mPendingRemovals.contains(holder) || mRemoveAnimations.contains(holder);
}
return false;
}
public boolean willAddedFromAlpha(View view) {
if (shouldAnimateEnterFromBottom) {
return false;
}
RecyclerView.ViewHolder holder = recyclerListView.getChildViewHolder(view);
if (holder != null) {
return mPendingAdditions.contains(holder) || mAddAnimations.contains(holder);
}
return false;
}
public void onGreetingStickerTransition(RecyclerView.ViewHolder holder, ChatGreetingsView greetingsViewContainer) {
greetingsSticker = holder;
chatGreetingsView = greetingsViewContainer;
shouldAnimateEnterFromBottom = false;
}
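The three hooks added above (willRemoved, willAddedFromAlpha, onGreetingStickerTransition) are meant to be queried from outside the animator. A hypothetical caller sketch in Java, assumed to live alongside ChatListItemAnimator; only the two query methods come from the diff.

import android.graphics.Canvas;
import android.view.View;

// Hypothetical helper illustrating how the new query hooks might be consumed.
final class TransitionAwareDrawer {

    private final ChatListItemAnimator animator;

    TransitionAwareDrawer(ChatListItemAnimator animator) {
        this.animator = animator;
    }

    boolean drawChildIfStable(Canvas canvas, View child) {
        if (animator.willRemoved(child) || animator.willAddedFromAlpha(child)) {
            // The animator currently owns this view's alpha/translation; let it finish first.
            return false;
        }
        child.draw(canvas);
        return true;
    }
}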
class MoveInfoExtended extends MoveInfo { class MoveInfoExtended extends MoveInfo {
public float captionDeltaX; public float captionDeltaX;


@ -30,7 +30,6 @@ import androidx.core.view.ViewCompat;
import org.telegram.messenger.BuildVars; import org.telegram.messenger.BuildVars;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.HashMap;
import java.util.List; import java.util.List;
/** /**
@ -186,7 +185,7 @@ public class DefaultItemAnimator extends SimpleItemAnimator {
long removeDuration = removalsPending ? getRemoveDuration() : 0; long removeDuration = removalsPending ? getRemoveDuration() : 0;
long moveDuration = movesPending ? getMoveDuration() : 0; long moveDuration = movesPending ? getMoveDuration() : 0;
long changeDuration = changesPending ? getChangeDuration() : 0; long changeDuration = changesPending ? getChangeDuration() : 0;
-        long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
+        long totalDelay = getAddAnimationDelay(removeDuration, moveDuration, changeDuration);
View view = additions.get(0).itemView; View view = additions.get(0).itemView;
ViewCompat.postOnAnimationDelayed(view, adder, totalDelay); ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
} else { } else {
@ -195,6 +194,10 @@ public class DefaultItemAnimator extends SimpleItemAnimator {
} }
} }
protected long getAddAnimationDelay(long removeDuration, long moveDuration, long changeDuration) {
return removeDuration + Math.max(moveDuration, changeDuration);
}
protected long getMoveAnimationDelay() { protected long getMoveAnimationDelay() {
return getRemoveDuration(); return getRemoveDuration();
} }
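The new getAddAnimationDelay hook lets a subclass reschedule when pending additions start without copying runPendingAnimations. A minimal hypothetical override (the subclass name is a placeholder; it is assumed to sit in the same package as this patched DefaultItemAnimator):

// Hypothetical subclass: start add animations immediately instead of waiting for
// the remove/move/change animations that precede them by default.
public class ImmediateAddItemAnimator extends DefaultItemAnimator {

    @Override
    protected long getAddAnimationDelay(long removeDuration, long moveDuration, long changeDuration) {
        return 0; // additions run in parallel with whatever else is pending
    }
}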


@ -158,7 +158,7 @@ final class GapWorker implements Runnable {
public void add(RecyclerView recyclerView) { public void add(RecyclerView recyclerView) {
if (RecyclerView.DEBUG && mRecyclerViews.contains(recyclerView)) { if (RecyclerView.DEBUG && mRecyclerViews.contains(recyclerView)) {
-            throw new IllegalStateException("RecyclerView already present in worker list!");
+            return;
} }
mRecyclerViews.add(recyclerView); mRecyclerViews.add(recyclerView);
} }


@ -25,9 +25,6 @@ import android.view.ViewGroup;
import androidx.core.view.accessibility.AccessibilityNodeInfoCompat; import androidx.core.view.accessibility.AccessibilityNodeInfoCompat;
import org.telegram.messenger.AndroidUtilities;
import org.telegram.messenger.BuildVars;
import java.util.Arrays; import java.util.Arrays;
/** /**
@ -38,7 +35,7 @@ import java.util.Arrays;
*/ */
public class GridLayoutManager extends LinearLayoutManager { public class GridLayoutManager extends LinearLayoutManager {
-    private static final boolean DEBUG = BuildVars.DEBUG_VERSION;
+    private static final boolean DEBUG = false;
private static final String TAG = "GridLayoutManager"; private static final String TAG = "GridLayoutManager";
public static final int DEFAULT_SPAN_COUNT = -1; public static final int DEFAULT_SPAN_COUNT = -1;
/** /**


@ -4099,48 +4099,60 @@ public class RecyclerView extends ViewGroup implements ScrollingView,
// Step 3: Find out where things are now, and process change animations. // Step 3: Find out where things are now, and process change animations.
// traverse list in reverse because we may call animateChange in the loop which may // traverse list in reverse because we may call animateChange in the loop which may
// remove the target view holder. // remove the target view holder.
+            try {
                for (int i = mChildHelper.getChildCount() - 1; i >= 0; i--) {
                    ViewHolder holder = getChildViewHolderInt(mChildHelper.getChildAt(i));
                    if (holder.shouldIgnore()) {
                        continue;
                    }
                    long key = getChangedHolderKey(holder);
                    final ItemHolderInfo animationInfo = mItemAnimator
                            .recordPostLayoutInformation(mState, holder);
                    ViewHolder oldChangeViewHolder = mViewInfoStore.getFromOldChangeHolders(key);
                    if (oldChangeViewHolder != null && !oldChangeViewHolder.shouldIgnore()) {
                        // run a change animation

                        // If an Item is CHANGED but the updated version is disappearing, it creates
                        // a conflicting case.
                        // Since a view that is marked as disappearing is likely to be going out of
                        // bounds, we run a change animation. Both views will be cleaned automatically
                        // once their animations finish.
                        // On the other hand, if it is the same view holder instance, we run a
                        // disappearing animation instead because we are not going to rebind the updated
                        // VH unless it is enforced by the layout manager.
                        final boolean oldDisappearing = mViewInfoStore.isDisappearing(
                                oldChangeViewHolder);
                        final boolean newDisappearing = mViewInfoStore.isDisappearing(holder);
                        if (oldDisappearing && oldChangeViewHolder == holder) {
                            // run disappear animation instead of change
                            mViewInfoStore.addToPostLayout(holder, animationInfo);
                        } else {
                            final ItemHolderInfo preInfo = mViewInfoStore.popFromPreLayout(
                                    oldChangeViewHolder);
                            // we add and remove so that any post info is merged.
                            mViewInfoStore.addToPostLayout(holder, animationInfo);
                            ItemHolderInfo postInfo = mViewInfoStore.popFromPostLayout(holder);
                            if (preInfo == null) {
                                handleMissingPreInfoForChangeError(key, holder, oldChangeViewHolder);
                            } else {
                                animateChange(oldChangeViewHolder, holder, preInfo, postInfo,
                                        oldDisappearing, newDisappearing);
                            }
                        }
                    } else {
                        mViewInfoStore.addToPostLayout(holder, animationInfo);
                    }
                }
+            } catch (Exception e) {
+                StringBuilder builder = new StringBuilder();
+                for (int i = mChildHelper.getChildCount() - 1; i >= 0; i--) {
+                    ViewHolder holder = getChildViewHolderInt(mChildHelper.getChildAt(i));
+                    if (holder.shouldIgnore()) {
+                        continue;
+                    }
+                    builder.append("Holder at" + i + " " + holder + "\n");
+                }
+                throw new RuntimeException(builder.toString(), e);
+            }
// Step 4: Process view info lists and trigger animations // Step 4: Process view info lists and trigger animations
@ -11511,7 +11523,7 @@ public class RecyclerView extends ViewGroup implements ScrollingView,
* to store any additional required per-child view metadata about the layout. * to store any additional required per-child view metadata about the layout.
*/ */
public static class LayoutParams extends android.view.ViewGroup.MarginLayoutParams { public static class LayoutParams extends android.view.ViewGroup.MarginLayoutParams {
-        ViewHolder mViewHolder;
+        public ViewHolder mViewHolder;
public final Rect mDecorInsets = new Rect(); public final Rect mDecorInsets = new Rect();
boolean mInsetsDirty = true; boolean mInsetsDirty = true;
// Flag is set to true if the view is bound while it is detached from RV. // Flag is set to true if the view is bound while it is detached from RV.


@ -1,5 +1,8 @@
change SimpleExoPlayer.java
+change Player.java
change VideoListener.java
change AspectRatioFrameLayout.java
change DefaultExtractorsFactory.java
+change MediaCodecVideoRenderer.java
+add SurfaceNotValidException.java
change MP4Extractor.java - MAXIMUM_READ_AHEAD_BYTES_STREAM to 1MB


@ -0,0 +1,81 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.media.AudioManager;
import android.os.Handler;
/* package */ final class AudioBecomingNoisyManager {
private final Context context;
private final AudioBecomingNoisyReceiver receiver;
private boolean receiverRegistered;
public interface EventListener {
void onAudioBecomingNoisy();
}
public AudioBecomingNoisyManager(Context context, Handler eventHandler, EventListener listener) {
this.context = context.getApplicationContext();
this.receiver = new AudioBecomingNoisyReceiver(eventHandler, listener);
}
/**
* Enables the {@link AudioBecomingNoisyManager} which calls {@link
* EventListener#onAudioBecomingNoisy()} upon receiving an intent of {@link
* AudioManager#ACTION_AUDIO_BECOMING_NOISY}.
*
* @param enabled True if the listener should be notified when audio is becoming noisy.
*/
public void setEnabled(boolean enabled) {
if (enabled && !receiverRegistered) {
context.registerReceiver(
receiver, new IntentFilter(AudioManager.ACTION_AUDIO_BECOMING_NOISY));
receiverRegistered = true;
} else if (!enabled && receiverRegistered) {
context.unregisterReceiver(receiver);
receiverRegistered = false;
}
}
private final class AudioBecomingNoisyReceiver extends BroadcastReceiver implements Runnable {
private final EventListener listener;
private final Handler eventHandler;
public AudioBecomingNoisyReceiver(Handler eventHandler, EventListener listener) {
this.eventHandler = eventHandler;
this.listener = listener;
}
@Override
public void onReceive(Context context, Intent intent) {
if (AudioManager.ACTION_AUDIO_BECOMING_NOISY.equals(intent.getAction())) {
eventHandler.post(this);
}
}
@Override
public void run() {
if (receiverRegistered) {
listener.onAudioBecomingNoisy();
}
}
}
}
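A hedged sketch of how a player-side owner could drive the manager above. The wiring below is illustrative, not SimpleExoPlayer's actual code, and it assumes the caller lives in the same package since the class is package-private.

package com.google.android.exoplayer2;

import android.content.Context;
import android.os.Handler;

// Illustrative owner: pauses playback when ACTION_AUDIO_BECOMING_NOISY fires,
// e.g. when wired headphones are unplugged.
final class BecomingNoisyPauser implements AudioBecomingNoisyManager.EventListener {

    private final AudioBecomingNoisyManager manager;
    private final Player player;

    BecomingNoisyPauser(Context context, Handler playbackHandler, Player player) {
        this.player = player;
        this.manager = new AudioBecomingNoisyManager(context, playbackHandler, this);
    }

    void setHandleAudioBecomingNoisy(boolean handle) {
        manager.setEnabled(handle); // registers/unregisters the broadcast receiver
    }

    @Override
    public void onAudioBecomingNoisy() {
        player.setPlayWhenReady(false); // pause rather than continue on the loudspeaker
    }
}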


@ -13,19 +13,17 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
-package com.google.android.exoplayer2.audio;
+package com.google.android.exoplayer2;
import android.content.Context; import android.content.Context;
import android.media.AudioFocusRequest; import android.media.AudioFocusRequest;
import android.media.AudioManager; import android.media.AudioManager;
import android.os.Handler;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi; import androidx.annotation.RequiresApi;
import androidx.annotation.VisibleForTesting; import androidx.annotation.VisibleForTesting;
-import com.google.android.exoplayer2.C;
-import com.google.android.exoplayer2.ExoPlayer;
-import com.google.android.exoplayer2.Player;
-import com.google.android.exoplayer2.source.MediaSource;
+import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
@ -35,7 +33,7 @@ import java.lang.annotation.RetentionPolicy;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** Manages requesting and responding to changes in audio focus. */ /** Manages requesting and responding to changes in audio focus. */
-public final class AudioFocusManager {
+/* package */ final class AudioFocusManager {
/** Interface to allow AudioFocusManager to give commands to a player. */ /** Interface to allow AudioFocusManager to give commands to a player. */
public interface PlayerControl { public interface PlayerControl {
@ -77,15 +75,12 @@ public final class AudioFocusManager {
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@IntDef({ @IntDef({
AUDIO_FOCUS_STATE_LOST_FOCUS,
AUDIO_FOCUS_STATE_NO_FOCUS, AUDIO_FOCUS_STATE_NO_FOCUS,
AUDIO_FOCUS_STATE_HAVE_FOCUS, AUDIO_FOCUS_STATE_HAVE_FOCUS,
AUDIO_FOCUS_STATE_LOSS_TRANSIENT, AUDIO_FOCUS_STATE_LOSS_TRANSIENT,
AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK
}) })
private @interface AudioFocusState {} private @interface AudioFocusState {}
/** No audio focus was held, but has been lost by another app taking it permanently. */
private static final int AUDIO_FOCUS_STATE_LOST_FOCUS = -1;
/** No audio focus is currently being held. */ /** No audio focus is currently being held. */
private static final int AUDIO_FOCUS_STATE_NO_FOCUS = 0; private static final int AUDIO_FOCUS_STATE_NO_FOCUS = 0;
/** The requested audio focus is currently held. */ /** The requested audio focus is currently held. */
@ -102,12 +97,12 @@ public final class AudioFocusManager {
private final AudioManager audioManager; private final AudioManager audioManager;
private final AudioFocusListener focusListener; private final AudioFocusListener focusListener;
-  private final PlayerControl playerControl;
-  private @Nullable AudioAttributes audioAttributes;
-  private @AudioFocusState int audioFocusState;
-  private int focusGain;
-  private float volumeMultiplier = 1.0f;
+  @Nullable private PlayerControl playerControl;
+  @Nullable private AudioAttributes audioAttributes;
+  @AudioFocusState private int audioFocusState;
+  @C.AudioFocusGain private int focusGain;
+  private float volumeMultiplier = VOLUME_MULTIPLIER_DEFAULT;
  private @MonotonicNonNull AudioFocusRequest audioFocusRequest;
  private boolean rebuildAudioFocusRequest;
@ -116,13 +111,14 @@ public final class AudioFocusManager {
* Constructs an AudioFocusManager to automatically handle audio focus for a player. * Constructs an AudioFocusManager to automatically handle audio focus for a player.
* *
* @param context The current context. * @param context The current context.
* @param eventHandler A {@link Handler} to for the thread on which the player is used.
* @param playerControl A {@link PlayerControl} to handle commands from this instance. * @param playerControl A {@link PlayerControl} to handle commands from this instance.
*/ */
-  public AudioFocusManager(Context context, PlayerControl playerControl) {
+  public AudioFocusManager(Context context, Handler eventHandler, PlayerControl playerControl) {
    this.audioManager =
        (AudioManager) context.getApplicationContext().getSystemService(Context.AUDIO_SERVICE);
    this.playerControl = playerControl;
-    this.focusListener = new AudioFocusListener();
+    this.focusListener = new AudioFocusListener(eventHandler);
    this.audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS;
  }
@ -134,64 +130,45 @@ public final class AudioFocusManager {
/** /**
* Sets audio attributes that should be used to manage audio focus. * Sets audio attributes that should be used to manage audio focus.
* *
* <p>Call {@link #updateAudioFocus(boolean, int)} to update the audio focus based on these
* attributes.
*
* @param audioAttributes The audio attributes or {@code null} if audio focus should not be * @param audioAttributes The audio attributes or {@code null} if audio focus should not be
* managed automatically. * managed automatically.
* @param playWhenReady The current state of {@link ExoPlayer#getPlayWhenReady()}.
* @param playerState The current player state; {@link ExoPlayer#getPlaybackState()}.
* @return A {@link PlayerCommand} to execute on the player.
*/ */
-  @PlayerCommand
-  public int setAudioAttributes(
-      @Nullable AudioAttributes audioAttributes, boolean playWhenReady, int playerState) {
+  public void setAudioAttributes(@Nullable AudioAttributes audioAttributes) {
if (!Util.areEqual(this.audioAttributes, audioAttributes)) { if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
this.audioAttributes = audioAttributes; this.audioAttributes = audioAttributes;
focusGain = convertAudioAttributesToFocusGain(audioAttributes); focusGain = convertAudioAttributesToFocusGain(audioAttributes);
Assertions.checkArgument( Assertions.checkArgument(
focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE, focusGain == C.AUDIOFOCUS_GAIN || focusGain == C.AUDIOFOCUS_NONE,
"Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME."); "Automatic handling of audio focus is only available for USAGE_MEDIA and USAGE_GAME.");
if (playWhenReady
&& (playerState == Player.STATE_BUFFERING || playerState == Player.STATE_READY)) {
return requestAudioFocus();
}
} }
return playerState == Player.STATE_IDLE
? handleIdle(playWhenReady)
: handlePrepare(playWhenReady);
} }
/** /**
* Called by a player as part of {@link ExoPlayer#prepare(MediaSource, boolean, boolean)}. * Called by the player to abandon or request audio focus based on the desired player state.
* *
* @param playWhenReady The current state of {@link ExoPlayer#getPlayWhenReady()}. * @param playWhenReady The desired value of playWhenReady.
* @param playbackState The desired playback state.
* @return A {@link PlayerCommand} to execute on the player. * @return A {@link PlayerCommand} to execute on the player.
*/ */
@PlayerCommand @PlayerCommand
-  public int handlePrepare(boolean playWhenReady) {
+  public int updateAudioFocus(boolean playWhenReady, @Player.State int playbackState) {
if (shouldAbandonAudioFocus(playbackState)) {
abandonAudioFocus();
return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
}
return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY; return playWhenReady ? requestAudioFocus() : PLAYER_COMMAND_DO_NOT_PLAY;
} }
/** /**
* Called by the player as part of {@link ExoPlayer#setPlayWhenReady(boolean)}. * Called when the manager is no longer required. Audio focus will be released without making any
* * calls to the {@link PlayerControl}.
* @param playWhenReady The desired value of playWhenReady.
* @param playerState The current state of the player.
* @return A {@link PlayerCommand} to execute on the player.
*/ */
@PlayerCommand public void release() {
public int handleSetPlayWhenReady(boolean playWhenReady, int playerState) { playerControl = null;
if (!playWhenReady) { abandonAudioFocus();
abandonAudioFocus();
return PLAYER_COMMAND_DO_NOT_PLAY;
}
return playerState == Player.STATE_IDLE ? handleIdle(playWhenReady) : requestAudioFocus();
}
/** Called by the player as part of {@link ExoPlayer#stop(boolean)}. */
public void handleStop() {
abandonAudioFocus(/* forceAbandon= */ true);
} }
// Internal methods. // Internal methods.
@ -201,62 +178,35 @@ public final class AudioFocusManager {
return focusListener; return focusListener;
} }
-  @PlayerCommand
-  private int handleIdle(boolean playWhenReady) {
-    return playWhenReady ? PLAYER_COMMAND_PLAY_WHEN_READY : PLAYER_COMMAND_DO_NOT_PLAY;
+  private boolean shouldAbandonAudioFocus(@Player.State int playbackState) {
+    return playbackState == Player.STATE_IDLE || focusGain != C.AUDIOFOCUS_GAIN;
} }
@PlayerCommand @PlayerCommand
private int requestAudioFocus() { private int requestAudioFocus() {
int focusRequestResult; if (audioFocusState == AUDIO_FOCUS_STATE_HAVE_FOCUS) {
if (focusGain == C.AUDIOFOCUS_NONE) {
if (audioFocusState != AUDIO_FOCUS_STATE_NO_FOCUS) {
abandonAudioFocus(/* forceAbandon= */ true);
}
return PLAYER_COMMAND_PLAY_WHEN_READY; return PLAYER_COMMAND_PLAY_WHEN_READY;
} }
int requestResult = Util.SDK_INT >= 26 ? requestAudioFocusV26() : requestAudioFocusDefault();
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) { if (requestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED) {
if (Util.SDK_INT >= 26) { setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
focusRequestResult = requestAudioFocusV26(); return PLAYER_COMMAND_PLAY_WHEN_READY;
} else { } else {
focusRequestResult = requestAudioFocusDefault(); setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
}
audioFocusState =
focusRequestResult == AudioManager.AUDIOFOCUS_REQUEST_GRANTED
? AUDIO_FOCUS_STATE_HAVE_FOCUS
: AUDIO_FOCUS_STATE_NO_FOCUS;
}
if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
return PLAYER_COMMAND_DO_NOT_PLAY; return PLAYER_COMMAND_DO_NOT_PLAY;
} }
return audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT
? PLAYER_COMMAND_WAIT_FOR_CALLBACK
: PLAYER_COMMAND_PLAY_WHEN_READY;
} }
private void abandonAudioFocus() { private void abandonAudioFocus() {
abandonAudioFocus(/* forceAbandon= */ false); if (audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
}
private void abandonAudioFocus(boolean forceAbandon) {
if (focusGain == C.AUDIOFOCUS_NONE && audioFocusState == AUDIO_FOCUS_STATE_NO_FOCUS) {
return; return;
} }
if (Util.SDK_INT >= 26) {
if (focusGain != C.AUDIOFOCUS_GAIN abandonAudioFocusV26();
|| audioFocusState == AUDIO_FOCUS_STATE_LOST_FOCUS } else {
|| forceAbandon) { abandonAudioFocusDefault();
if (Util.SDK_INT >= 26) {
abandonAudioFocusV26();
} else {
abandonAudioFocusDefault();
}
audioFocusState = AUDIO_FOCUS_STATE_NO_FOCUS;
} }
setAudioFocusState(AUDIO_FOCUS_STATE_NO_FOCUS);
} }
private int requestAudioFocusDefault() { private int requestAudioFocusDefault() {
@ -310,8 +260,8 @@ public final class AudioFocusManager {
* @param audioAttributes The audio attributes associated with this focus request. * @param audioAttributes The audio attributes associated with this focus request.
* @return The type of audio focus gain that should be requested. * @return The type of audio focus gain that should be requested.
*/ */
@C.AudioFocusGain
private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) { private static int convertAudioAttributesToFocusGain(@Nullable AudioAttributes audioAttributes) {
if (audioAttributes == null) { if (audioAttributes == null) {
// Don't handle audio focus. It may be either video only contents or developers // Don't handle audio focus. It may be either video only contents or developers
// want to have more finer grained control. (e.g. adding audio focus listener) // want to have more finer grained control. (e.g. adding audio focus listener)
@ -381,65 +331,67 @@ public final class AudioFocusManager {
} }
} }
private void setAudioFocusState(@AudioFocusState int audioFocusState) {
if (this.audioFocusState == audioFocusState) {
return;
}
this.audioFocusState = audioFocusState;
float volumeMultiplier =
(audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK)
? AudioFocusManager.VOLUME_MULTIPLIER_DUCK
: AudioFocusManager.VOLUME_MULTIPLIER_DEFAULT;
if (this.volumeMultiplier == volumeMultiplier) {
return;
}
this.volumeMultiplier = volumeMultiplier;
if (playerControl != null) {
playerControl.setVolumeMultiplier(volumeMultiplier);
}
}
private void handlePlatformAudioFocusChange(int focusChange) {
switch (focusChange) {
case AudioManager.AUDIOFOCUS_GAIN:
setAudioFocusState(AUDIO_FOCUS_STATE_HAVE_FOCUS);
executePlayerCommand(PLAYER_COMMAND_PLAY_WHEN_READY);
return;
case AudioManager.AUDIOFOCUS_LOSS:
executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
abandonAudioFocus();
return;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
if (focusChange == AudioManager.AUDIOFOCUS_LOSS_TRANSIENT || willPauseWhenDucked()) {
executePlayerCommand(PLAYER_COMMAND_WAIT_FOR_CALLBACK);
setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT);
} else {
setAudioFocusState(AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK);
}
return;
default:
Log.w(TAG, "Unknown focus change type: " + focusChange);
}
}
private void executePlayerCommand(@PlayerCommand int playerCommand) {
if (playerControl != null) {
playerControl.executePlayerCommand(playerCommand);
}
}
// Internal audio focus listener. // Internal audio focus listener.
private class AudioFocusListener implements AudioManager.OnAudioFocusChangeListener { private class AudioFocusListener implements AudioManager.OnAudioFocusChangeListener {
private final Handler eventHandler;
public AudioFocusListener(Handler eventHandler) {
this.eventHandler = eventHandler;
}
@Override @Override
public void onAudioFocusChange(int focusChange) { public void onAudioFocusChange(int focusChange) {
-      // Convert the platform focus change to internal state.
+      eventHandler.post(() -> handlePlatformAudioFocusChange(focusChange));
switch (focusChange) {
case AudioManager.AUDIOFOCUS_LOSS:
audioFocusState = AUDIO_FOCUS_STATE_LOST_FOCUS;
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT:
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT;
break;
case AudioManager.AUDIOFOCUS_LOSS_TRANSIENT_CAN_DUCK:
if (willPauseWhenDucked()) {
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT;
} else {
audioFocusState = AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK;
}
break;
case AudioManager.AUDIOFOCUS_GAIN:
audioFocusState = AUDIO_FOCUS_STATE_HAVE_FOCUS;
break;
default:
Log.w(TAG, "Unknown focus change type: " + focusChange);
// Early return.
return;
}
// Handle the internal state (change).
switch (audioFocusState) {
case AUDIO_FOCUS_STATE_NO_FOCUS:
// Focus was not requested; nothing to do.
break;
case AUDIO_FOCUS_STATE_LOST_FOCUS:
playerControl.executePlayerCommand(PLAYER_COMMAND_DO_NOT_PLAY);
abandonAudioFocus(/* forceAbandon= */ true);
break;
case AUDIO_FOCUS_STATE_LOSS_TRANSIENT:
playerControl.executePlayerCommand(PLAYER_COMMAND_WAIT_FOR_CALLBACK);
break;
case AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK:
// Volume will be adjusted by the code below.
break;
case AUDIO_FOCUS_STATE_HAVE_FOCUS:
playerControl.executePlayerCommand(PLAYER_COMMAND_PLAY_WHEN_READY);
break;
default:
throw new IllegalStateException("Unknown audio focus state: " + audioFocusState);
}
float volumeMultiplier =
(audioFocusState == AUDIO_FOCUS_STATE_LOSS_TRANSIENT_DUCK)
? AudioFocusManager.VOLUME_MULTIPLIER_DUCK
: AudioFocusManager.VOLUME_MULTIPLIER_DEFAULT;
if (AudioFocusManager.this.volumeMultiplier != volumeMultiplier) {
AudioFocusManager.this.volumeMultiplier = volumeMultiplier;
playerControl.setVolumeMultiplier(volumeMultiplier);
}
} }
} }
} }
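To make the reshaped API concrete, a hedged Java sketch of a caller. The class below is hypothetical; only the PlayerControl interface, updateAudioFocus and the PLAYER_COMMAND_* constants come from the code above, and the caller is assumed to sit in the same package now that the class is package-private.

package com.google.android.exoplayer2;

import android.content.Context;
import android.os.Handler;

// Illustrative playback component that lets AudioFocusManager gate playWhenReady.
final class FocusAwarePlayback implements AudioFocusManager.PlayerControl {

    private final AudioFocusManager audioFocusManager;
    private boolean playWhenReady;
    private int playbackState = Player.STATE_IDLE; // one of the Player.STATE_* values

    FocusAwarePlayback(Context context, Handler eventHandler) {
        audioFocusManager = new AudioFocusManager(context, eventHandler, this);
    }

    void setPlayWhenReady(boolean playWhenReady) {
        this.playWhenReady = playWhenReady;
        // Request or abandon focus first; only honour playWhenReady if the command allows it.
        applyCommand(audioFocusManager.updateAudioFocus(playWhenReady, playbackState));
    }

    @Override
    public void executePlayerCommand(int playerCommand) {
        applyCommand(playerCommand); // asynchronous focus changes arrive here
    }

    @Override
    public void setVolumeMultiplier(float volumeMultiplier) {
        // Forward to the audio renderers; omitted in this sketch.
    }

    private void applyCommand(int playerCommand) {
        if (playerCommand == AudioFocusManager.PLAYER_COMMAND_DO_NOT_PLAY) {
            playWhenReady = false;
        }
        // PLAYER_COMMAND_WAIT_FOR_CALLBACK keeps the current state until
        // executePlayerCommand is invoked again with a final decision.
    }
}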


@ -101,11 +101,15 @@ public abstract class BasePlayer implements Player {
@Override @Override
@Nullable @Nullable
public final Object getCurrentTag() { public final Object getCurrentTag() {
-    int windowIndex = getCurrentWindowIndex();
    Timeline timeline = getCurrentTimeline();
-    return windowIndex >= timeline.getWindowCount()
-        ? null
-        : timeline.getWindow(windowIndex, window, /* setTag= */ true).tag;
+    return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).tag;
  }

+  @Override
+  @Nullable
+  public final Object getCurrentManifest() {
+    Timeline timeline = getCurrentTimeline();
+    return timeline.isEmpty() ? null : timeline.getWindow(getCurrentWindowIndex(), window).manifest;
+  }
@Override @Override
@ -123,6 +127,12 @@ public abstract class BasePlayer implements Player {
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic; return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isDynamic;
} }
@Override
public final boolean isCurrentWindowLive() {
Timeline timeline = getCurrentTimeline();
return !timeline.isEmpty() && timeline.getWindow(getCurrentWindowIndex(), window).isLive;
}
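A small hedged example of what the new convenience accessor enables on the UI side; the badge helper below is hypothetical, and only Player#isCurrentWindowLive comes from the change above.

import android.view.View;
import android.widget.TextView;
import com.google.android.exoplayer2.Player;

// Hypothetical UI helper: show a "LIVE" badge only while the current window is a live stream.
final class LiveBadgeBinder {

    static void bind(Player player, TextView liveBadge) {
        boolean live = player.isCurrentWindowLive();
        liveBadge.setVisibility(live ? View.VISIBLE : View.GONE);
    }
}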
@Override @Override
public final boolean isCurrentWindowSeekable() { public final boolean isCurrentWindowSeekable() {
Timeline timeline = getCurrentTimeline(); Timeline timeline = getCurrentTimeline();


@ -15,13 +15,17 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import android.os.Looper;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.Util;
import java.io.IOException; import java.io.IOException;
/** /**
@ -30,6 +34,7 @@ import java.io.IOException;
public abstract class BaseRenderer implements Renderer, RendererCapabilities { public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private final int trackType; private final int trackType;
private final FormatHolder formatHolder;
private RendererConfiguration configuration; private RendererConfiguration configuration;
private int index; private int index;
@ -39,6 +44,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
private long streamOffsetUs; private long streamOffsetUs;
private long readingPositionUs; private long readingPositionUs;
private boolean streamIsFinal; private boolean streamIsFinal;
private boolean throwRendererExceptionIsExecuting;
/** /**
* @param trackType The track type that the renderer handles. One of the {@link C} * @param trackType The track type that the renderer handles. One of the {@link C}
@ -46,6 +52,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
*/ */
public BaseRenderer(int trackType) { public BaseRenderer(int trackType) {
this.trackType = trackType; this.trackType = trackType;
formatHolder = new FormatHolder();
readingPositionUs = C.TIME_END_OF_SOURCE; readingPositionUs = C.TIME_END_OF_SOURCE;
} }
@ -65,6 +72,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
} }
@Override @Override
@Nullable
public MediaClock getMediaClock() { public MediaClock getMediaClock() {
return null; return null;
} }
@ -105,6 +113,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
} }
@Override @Override
@Nullable
public final SampleStream getStream() { public final SampleStream getStream() {
return stream; return stream;
} }
@ -151,6 +160,7 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override @Override
public final void disable() { public final void disable() {
Assertions.checkState(state == STATE_ENABLED); Assertions.checkState(state == STATE_ENABLED);
formatHolder.clear();
state = STATE_DISABLED; state = STATE_DISABLED;
stream = null; stream = null;
streamFormats = null; streamFormats = null;
@ -161,12 +171,14 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
@Override @Override
public final void reset() { public final void reset() {
Assertions.checkState(state == STATE_DISABLED); Assertions.checkState(state == STATE_DISABLED);
formatHolder.clear();
onReset(); onReset();
} }
// RendererCapabilities implementation. // RendererCapabilities implementation.
@Override @Override
@AdaptiveSupport
public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
return ADAPTIVE_NOT_SUPPORTED; return ADAPTIVE_NOT_SUPPORTED;
} }
@ -269,6 +281,12 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
// Methods to be called by subclasses. // Methods to be called by subclasses.
/** Returns a clear {@link FormatHolder}. */
protected final FormatHolder getFormatHolder() {
formatHolder.clear();
return formatHolder;
}
/** Returns the formats of the currently enabled stream. */ /** Returns the formats of the currently enabled stream. */
protected final Format[] getStreamFormats() { protected final Format[] getStreamFormats() {
return streamFormats; return streamFormats;
@ -281,6 +299,35 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return configuration; return configuration;
} }
/** Returns a {@link DrmSession} ready for assignment, handling resource management. */
@Nullable
protected final <T extends ExoMediaCrypto> DrmSession<T> getUpdatedSourceDrmSession(
@Nullable Format oldFormat,
Format newFormat,
@Nullable DrmSessionManager<T> drmSessionManager,
@Nullable DrmSession<T> existingSourceSession)
throws ExoPlaybackException {
boolean drmInitDataChanged =
!Util.areEqual(newFormat.drmInitData, oldFormat == null ? null : oldFormat.drmInitData);
if (!drmInitDataChanged) {
return existingSourceSession;
}
@Nullable DrmSession<T> newSourceDrmSession = null;
if (newFormat.drmInitData != null) {
if (drmSessionManager == null) {
throw createRendererException(
new IllegalStateException("Media requires a DrmSessionManager"), newFormat);
}
newSourceDrmSession =
drmSessionManager.acquireSession(
Assertions.checkNotNull(Looper.myLooper()), newFormat.drmInitData);
}
if (existingSourceSession != null) {
existingSourceSession.release();
}
return newSourceDrmSession;
}
/** /**
* Returns the index of the renderer within the player. * Returns the index of the renderer within the player.
*/ */
@ -288,6 +335,30 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
return index; return index;
} }
/**
* Creates an {@link ExoPlaybackException} of type {@link ExoPlaybackException#TYPE_RENDERER} for
* this renderer.
*
* @param cause The cause of the exception.
* @param format The current format used by the renderer. May be null.
*/
protected final ExoPlaybackException createRendererException(
Exception cause, @Nullable Format format) {
@FormatSupport int formatSupport = RendererCapabilities.FORMAT_HANDLED;
if (format != null && !throwRendererExceptionIsExecuting) {
// Prevent recursive re-entry from subclass supportsFormat implementations.
throwRendererExceptionIsExecuting = true;
try {
formatSupport = RendererCapabilities.getFormatSupport(supportsFormat(format));
} catch (ExoPlaybackException e) {
// Ignore, we are already failing.
} finally {
throwRendererExceptionIsExecuting = false;
}
}
return ExoPlaybackException.createForRenderer(cause, getIndex(), format, formatSupport);
}
/** /**
* Reads from the enabled upstream source. If the upstream source has been read to the end then * Reads from the enabled upstream source. If the upstream source has been read to the end then
* {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been * {@link C#RESULT_BUFFER_READ} is only returned if {@link #setCurrentStreamFinal()} has been
@ -295,16 +366,16 @@ public abstract class BaseRenderer implements Renderer, RendererCapabilities {
* *
* @param formatHolder A {@link FormatHolder} to populate in the case of reading a format. * @param formatHolder A {@link FormatHolder} to populate in the case of reading a format.
* @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the * @param buffer A {@link DecoderInputBuffer} to populate in the case of reading a sample or the
-   * end of the stream. If the end of the stream has been reached, the
-   * {@link C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
+   * end of the stream. If the end of the stream has been reached, the {@link
+   * C#BUFFER_FLAG_END_OF_STREAM} flag will be set on the buffer.
* @param formatRequired Whether the caller requires that the format of the stream be read even if * @param formatRequired Whether the caller requires that the format of the stream be read even if
* it's not changing. A sample will never be read if set to true, however it is still possible * it's not changing. A sample will never be read if set to true, however it is still possible
* for the end of stream or nothing to be read. * for the end of stream or nothing to be read.
* @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or * @return The result, which can be {@link C#RESULT_NOTHING_READ}, {@link C#RESULT_FORMAT_READ} or
* {@link C#RESULT_BUFFER_READ}. * {@link C#RESULT_BUFFER_READ}.
*/ */
-  protected final int readSource(FormatHolder formatHolder, DecoderInputBuffer buffer,
-      boolean formatRequired) {
+  protected final int readSource(
+      FormatHolder formatHolder, DecoderInputBuffer buffer, boolean formatRequired) {
int result = stream.readData(formatHolder, buffer, formatRequired); int result = stream.readData(formatHolder, buffer, formatRequired);
if (result == C.RESULT_BUFFER_READ) { if (result == C.RESULT_BUFFER_READ) {
if (buffer.isEndOfStream()) { if (buffer.isEndOfStream()) {
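Editorial note: a sketch of the read loop a subclass typically builds on top of readSource() with the reformatted signature above; onInputFormatChanged() and inputStreamEnded are illustrative names, not upstream API:

    FormatHolder formatHolder = new FormatHolder();
    DecoderInputBuffer buffer =
        new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_NORMAL);
    int result = readSource(formatHolder, buffer, /* formatRequired= */ false);
    if (result == C.RESULT_FORMAT_READ) {
      onInputFormatChanged(formatHolder.format); // illustrative handler
    } else if (result == C.RESULT_BUFFER_READ && buffer.isEndOfStream()) {
      inputStreamEnded = true; // illustrative flag; no further samples will follow
    } // C.RESULT_NOTHING_READ: try again on the next render() call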
View File
@ -17,15 +17,18 @@ package com.google.android.exoplayer2;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.content.Context; import android.content.Context;
import android.media.AudioAttributes;
import android.media.AudioFormat; import android.media.AudioFormat;
import android.media.AudioManager; import android.media.AudioManager;
import android.media.MediaCodec; import android.media.MediaCodec;
import android.media.MediaFormat; import android.media.MediaFormat;
import androidx.annotation.IntDef;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.IntDef;
import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.audio.AuxEffectInfo; import com.google.android.exoplayer2.audio.AuxEffectInfo;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.SimpleDecoderVideoRenderer;
import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer;
import com.google.android.exoplayer2.video.VideoFrameMetadataListener; import com.google.android.exoplayer2.video.VideoFrameMetadataListener;
import com.google.android.exoplayer2.video.spherical.CameraMotionListener; import com.google.android.exoplayer2.video.spherical.CameraMotionListener;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
@ -92,14 +95,16 @@ public final class C {
* The name of the ASCII charset. * The name of the ASCII charset.
*/ */
public static final String ASCII_NAME = "US-ASCII"; public static final String ASCII_NAME = "US-ASCII";
/** /**
* The name of the UTF-8 charset. * The name of the UTF-8 charset.
*/ */
public static final String UTF8_NAME = "UTF-8"; public static final String UTF8_NAME = "UTF-8";
/** /** The name of the ISO-8859-1 charset. */
* The name of the UTF-16 charset. public static final String ISO88591_NAME = "ISO-8859-1";
*/
/** The name of the UTF-16 charset. */
public static final String UTF16_NAME = "UTF-16"; public static final String UTF16_NAME = "UTF-16";
/** The name of the UTF-16 little-endian charset. */ /** The name of the UTF-16 little-endian charset. */
@ -145,8 +150,8 @@ public final class C {
/** /**
* Represents an audio encoding, or an invalid or unset value. One of {@link Format#NO_VALUE}, * Represents an audio encoding, or an invalid or unset value. One of {@link Format#NO_VALUE},
* {@link #ENCODING_INVALID}, {@link #ENCODING_PCM_8BIT}, {@link #ENCODING_PCM_16BIT}, {@link * {@link #ENCODING_INVALID}, {@link #ENCODING_PCM_8BIT}, {@link #ENCODING_PCM_16BIT}, {@link
* #ENCODING_PCM_24BIT}, {@link #ENCODING_PCM_32BIT}, {@link #ENCODING_PCM_FLOAT}, {@link * #ENCODING_PCM_16BIT_BIG_ENDIAN}, {@link #ENCODING_PCM_24BIT}, {@link #ENCODING_PCM_32BIT},
* #ENCODING_PCM_MU_LAW}, {@link #ENCODING_PCM_A_LAW}, {@link #ENCODING_AC3}, {@link * {@link #ENCODING_PCM_FLOAT}, {@link #ENCODING_MP3}, {@link #ENCODING_AC3}, {@link
* #ENCODING_E_AC3}, {@link #ENCODING_E_AC3_JOC}, {@link #ENCODING_AC4}, {@link #ENCODING_DTS}, * #ENCODING_E_AC3}, {@link #ENCODING_E_AC3_JOC}, {@link #ENCODING_AC4}, {@link #ENCODING_DTS},
* {@link #ENCODING_DTS_HD} or {@link #ENCODING_DOLBY_TRUEHD}. * {@link #ENCODING_DTS_HD} or {@link #ENCODING_DOLBY_TRUEHD}.
*/ */
@ -157,26 +162,26 @@ public final class C {
ENCODING_INVALID, ENCODING_INVALID,
ENCODING_PCM_8BIT, ENCODING_PCM_8BIT,
ENCODING_PCM_16BIT, ENCODING_PCM_16BIT,
ENCODING_PCM_16BIT_BIG_ENDIAN,
ENCODING_PCM_24BIT, ENCODING_PCM_24BIT,
ENCODING_PCM_32BIT, ENCODING_PCM_32BIT,
ENCODING_PCM_FLOAT, ENCODING_PCM_FLOAT,
ENCODING_PCM_MU_LAW, ENCODING_MP3,
ENCODING_PCM_A_LAW,
ENCODING_AC3, ENCODING_AC3,
ENCODING_E_AC3, ENCODING_E_AC3,
ENCODING_E_AC3_JOC, ENCODING_E_AC3_JOC,
ENCODING_AC4, ENCODING_AC4,
ENCODING_DTS, ENCODING_DTS,
ENCODING_DTS_HD, ENCODING_DTS_HD,
ENCODING_DOLBY_TRUEHD, ENCODING_DOLBY_TRUEHD
}) })
public @interface Encoding {} public @interface Encoding {}
/** /**
* Represents a PCM audio encoding, or an invalid or unset value. One of {@link Format#NO_VALUE}, * Represents a PCM audio encoding, or an invalid or unset value. One of {@link Format#NO_VALUE},
* {@link #ENCODING_INVALID}, {@link #ENCODING_PCM_8BIT}, {@link #ENCODING_PCM_16BIT}, {@link * {@link #ENCODING_INVALID}, {@link #ENCODING_PCM_8BIT}, {@link #ENCODING_PCM_16BIT}, {@link
* #ENCODING_PCM_24BIT}, {@link #ENCODING_PCM_32BIT}, {@link #ENCODING_PCM_FLOAT}, {@link * #ENCODING_PCM_16BIT_BIG_ENDIAN}, {@link #ENCODING_PCM_24BIT}, {@link #ENCODING_PCM_32BIT},
* #ENCODING_PCM_MU_LAW} or {@link #ENCODING_PCM_A_LAW}. * {@link #ENCODING_PCM_FLOAT}.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -185,11 +190,10 @@ public final class C {
ENCODING_INVALID, ENCODING_INVALID,
ENCODING_PCM_8BIT, ENCODING_PCM_8BIT,
ENCODING_PCM_16BIT, ENCODING_PCM_16BIT,
ENCODING_PCM_16BIT_BIG_ENDIAN,
ENCODING_PCM_24BIT, ENCODING_PCM_24BIT,
ENCODING_PCM_32BIT, ENCODING_PCM_32BIT,
ENCODING_PCM_FLOAT, ENCODING_PCM_FLOAT
ENCODING_PCM_MU_LAW,
ENCODING_PCM_A_LAW
}) })
public @interface PcmEncoding {} public @interface PcmEncoding {}
/** @see AudioFormat#ENCODING_INVALID */ /** @see AudioFormat#ENCODING_INVALID */
@ -198,16 +202,16 @@ public final class C {
public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT; public static final int ENCODING_PCM_8BIT = AudioFormat.ENCODING_PCM_8BIT;
/** @see AudioFormat#ENCODING_PCM_16BIT */ /** @see AudioFormat#ENCODING_PCM_16BIT */
public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT; public static final int ENCODING_PCM_16BIT = AudioFormat.ENCODING_PCM_16BIT;
/** Like {@link #ENCODING_PCM_16BIT}, but with the bytes in big endian order. */
public static final int ENCODING_PCM_16BIT_BIG_ENDIAN = 0x10000000;
/** PCM encoding with 24 bits per sample. */ /** PCM encoding with 24 bits per sample. */
public static final int ENCODING_PCM_24BIT = 0x80000000; public static final int ENCODING_PCM_24BIT = 0x20000000;
/** PCM encoding with 32 bits per sample. */ /** PCM encoding with 32 bits per sample. */
public static final int ENCODING_PCM_32BIT = 0x40000000; public static final int ENCODING_PCM_32BIT = 0x30000000;
/** @see AudioFormat#ENCODING_PCM_FLOAT */ /** @see AudioFormat#ENCODING_PCM_FLOAT */
public static final int ENCODING_PCM_FLOAT = AudioFormat.ENCODING_PCM_FLOAT; public static final int ENCODING_PCM_FLOAT = AudioFormat.ENCODING_PCM_FLOAT;
/** Audio encoding for mu-law. */ /** @see AudioFormat#ENCODING_MP3 */
public static final int ENCODING_PCM_MU_LAW = 0x10000000; public static final int ENCODING_MP3 = AudioFormat.ENCODING_MP3;
/** Audio encoding for A-law. */
public static final int ENCODING_PCM_A_LAW = 0x20000000;
/** @see AudioFormat#ENCODING_AC3 */ /** @see AudioFormat#ENCODING_AC3 */
public static final int ENCODING_AC3 = AudioFormat.ENCODING_AC3; public static final int ENCODING_AC3 = AudioFormat.ENCODING_AC3;
/** @see AudioFormat#ENCODING_E_AC3 */ /** @see AudioFormat#ENCODING_E_AC3 */
@ -440,6 +444,21 @@ public final class C {
public static final int USAGE_VOICE_COMMUNICATION_SIGNALLING = public static final int USAGE_VOICE_COMMUNICATION_SIGNALLING =
android.media.AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING; android.media.AudioAttributes.USAGE_VOICE_COMMUNICATION_SIGNALLING;
/**
* Capture policies for {@link com.google.android.exoplayer2.audio.AudioAttributes}. One of {@link
* #ALLOW_CAPTURE_BY_ALL}, {@link #ALLOW_CAPTURE_BY_NONE} or {@link #ALLOW_CAPTURE_BY_SYSTEM}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({ALLOW_CAPTURE_BY_ALL, ALLOW_CAPTURE_BY_NONE, ALLOW_CAPTURE_BY_SYSTEM})
public @interface AudioAllowedCapturePolicy {}
/** See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_ALL}. */
public static final int ALLOW_CAPTURE_BY_ALL = AudioAttributes.ALLOW_CAPTURE_BY_ALL;
/** See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_NONE}. */
public static final int ALLOW_CAPTURE_BY_NONE = AudioAttributes.ALLOW_CAPTURE_BY_NONE;
/** See {@link android.media.AudioAttributes#ALLOW_CAPTURE_BY_SYSTEM}. */
public static final int ALLOW_CAPTURE_BY_SYSTEM = AudioAttributes.ALLOW_CAPTURE_BY_SYSTEM;
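Editorial note: a sketch of how an app might opt out of capture by other apps using these constants; setAllowedCapturePolicy is assumed to be the matching setter on com.google.android.exoplayer2.audio.AudioAttributes.Builder in this version (it is not shown in this diff):

    AudioAttributes audioAttributes =
        new AudioAttributes.Builder()
            .setUsage(C.USAGE_MEDIA)
            .setContentType(C.CONTENT_TYPE_MUSIC)
            .setAllowedCapturePolicy(C.ALLOW_CAPTURE_BY_NONE) // assumed setter
            .build();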
/** /**
* Audio focus types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link * Audio focus types. One of {@link #AUDIOFOCUS_NONE}, {@link #AUDIOFOCUS_GAIN}, {@link
* #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link * #AUDIOFOCUS_GAIN_TRANSIENT}, {@link #AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK} or {@link
@ -480,6 +499,7 @@ public final class C {
value = { value = {
BUFFER_FLAG_KEY_FRAME, BUFFER_FLAG_KEY_FRAME,
BUFFER_FLAG_END_OF_STREAM, BUFFER_FLAG_END_OF_STREAM,
BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA,
BUFFER_FLAG_LAST_SAMPLE, BUFFER_FLAG_LAST_SAMPLE,
BUFFER_FLAG_ENCRYPTED, BUFFER_FLAG_ENCRYPTED,
BUFFER_FLAG_DECODE_ONLY BUFFER_FLAG_DECODE_ONLY
@ -493,14 +513,35 @@ public final class C {
* Flag for empty buffers that signal that the end of the stream was reached. * Flag for empty buffers that signal that the end of the stream was reached.
*/ */
public static final int BUFFER_FLAG_END_OF_STREAM = MediaCodec.BUFFER_FLAG_END_OF_STREAM; public static final int BUFFER_FLAG_END_OF_STREAM = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
/** Indicates that a buffer has supplemental data. */
public static final int BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA = 1 << 28; // 0x10000000
/** Indicates that a buffer is known to contain the last media sample of the stream. */ /** Indicates that a buffer is known to contain the last media sample of the stream. */
public static final int BUFFER_FLAG_LAST_SAMPLE = 1 << 29; // 0x20000000 public static final int BUFFER_FLAG_LAST_SAMPLE = 1 << 29; // 0x20000000
/** Indicates that a buffer is (at least partially) encrypted. */ /** Indicates that a buffer is (at least partially) encrypted. */
public static final int BUFFER_FLAG_ENCRYPTED = 1 << 30; // 0x40000000 public static final int BUFFER_FLAG_ENCRYPTED = 1 << 30; // 0x40000000
/** Indicates that a buffer should be decoded but not rendered. */ /** Indicates that a buffer should be decoded but not rendered. */
@SuppressWarnings("NumericOverflow")
public static final int BUFFER_FLAG_DECODE_ONLY = 1 << 31; // 0x80000000 public static final int BUFFER_FLAG_DECODE_ONLY = 1 << 31; // 0x80000000
// LINT.IfChange
/**
* Video decoder output modes. Possible modes are {@link #VIDEO_OUTPUT_MODE_NONE}, {@link
* #VIDEO_OUTPUT_MODE_YUV} and {@link #VIDEO_OUTPUT_MODE_SURFACE_YUV}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef(value = {VIDEO_OUTPUT_MODE_NONE, VIDEO_OUTPUT_MODE_YUV, VIDEO_OUTPUT_MODE_SURFACE_YUV})
public @interface VideoOutputMode {}
/** Video decoder output mode is not set. */
public static final int VIDEO_OUTPUT_MODE_NONE = -1;
/** Video decoder output mode that outputs raw 4:2:0 YUV planes. */
public static final int VIDEO_OUTPUT_MODE_YUV = 0;
/** Video decoder output mode that renders 4:2:0 YUV planes directly to a surface. */
public static final int VIDEO_OUTPUT_MODE_SURFACE_YUV = 1;
// LINT.ThenChange(
// ../../../../../../../../../extensions/av1/src/main/jni/gav1_jni.cc,
// ../../../../../../../../../extensions/vp9/src/main/jni/vpx_jni.cc
// )
/** /**
* Video scaling modes for {@link MediaCodec}-based {@link Renderer}s. One of {@link * Video scaling modes for {@link MediaCodec}-based {@link Renderer}s. One of {@link
* #VIDEO_SCALING_MODE_SCALE_TO_FIT} or {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}. * #VIDEO_SCALING_MODE_SCALE_TO_FIT} or {@link #VIDEO_SCALING_MODE_SCALE_TO_FIT_WITH_CROPPING}.
@ -790,6 +831,17 @@ public final class C {
*/ */
public static final int MSG_SET_CAMERA_MOTION_LISTENER = 7; public static final int MSG_SET_CAMERA_MOTION_LISTENER = 7;
/**
* The type of a message that can be passed to a {@link SimpleDecoderVideoRenderer} via {@link
* ExoPlayer#createMessage(Target)}. The message payload should be the target {@link
* VideoDecoderOutputBufferRenderer}, or null.
*
* <p>This message is intended only for use with extension renderers that expect a {@link
* VideoDecoderOutputBufferRenderer}. For other use cases, an output surface should be passed via
* {@link #MSG_SET_SURFACE} instead.
*/
public static final int MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER = 8;
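Editorial note: a sketch of sending the new message type, assuming an already-built player and an extension video renderer instance (variable names are illustrative):

    player
        .createMessage(extensionVideoRenderer)                    // any PlayerMessage.Target renderer
        .setType(C.MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER)
        .setPayload(videoDecoderOutputBufferRenderer)             // or null to clear the output
        .send();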
/** /**
* Applications or extensions may define custom {@code MSG_*} constants that can be passed to * Applications or extensions may define custom {@code MSG_*} constants that can be passed to
* {@link Renderer}s. These custom constants must be greater than or equal to this value. * {@link Renderer}s. These custom constants must be greater than or equal to this value.
@ -925,8 +977,8 @@ public final class C {
/** /**
* Network connection type. One of {@link #NETWORK_TYPE_UNKNOWN}, {@link #NETWORK_TYPE_OFFLINE}, * Network connection type. One of {@link #NETWORK_TYPE_UNKNOWN}, {@link #NETWORK_TYPE_OFFLINE},
* {@link #NETWORK_TYPE_WIFI}, {@link #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, {@link * {@link #NETWORK_TYPE_WIFI}, {@link #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, {@link
* #NETWORK_TYPE_4G}, {@link #NETWORK_TYPE_CELLULAR_UNKNOWN}, {@link #NETWORK_TYPE_ETHERNET} or * #NETWORK_TYPE_4G}, {@link #NETWORK_TYPE_5G}, {@link #NETWORK_TYPE_CELLULAR_UNKNOWN}, {@link
* {@link #NETWORK_TYPE_OTHER}. * #NETWORK_TYPE_ETHERNET} or {@link #NETWORK_TYPE_OTHER}.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -937,6 +989,7 @@ public final class C {
NETWORK_TYPE_2G, NETWORK_TYPE_2G,
NETWORK_TYPE_3G, NETWORK_TYPE_3G,
NETWORK_TYPE_4G, NETWORK_TYPE_4G,
NETWORK_TYPE_5G,
NETWORK_TYPE_CELLULAR_UNKNOWN, NETWORK_TYPE_CELLULAR_UNKNOWN,
NETWORK_TYPE_ETHERNET, NETWORK_TYPE_ETHERNET,
NETWORK_TYPE_OTHER NETWORK_TYPE_OTHER
@ -954,6 +1007,8 @@ public final class C {
public static final int NETWORK_TYPE_3G = 4; public static final int NETWORK_TYPE_3G = 4;
/** Network type for a 4G cellular connection. */ /** Network type for a 4G cellular connection. */
public static final int NETWORK_TYPE_4G = 5; public static final int NETWORK_TYPE_4G = 5;
/** Network type for a 5G cellular connection. */
public static final int NETWORK_TYPE_5G = 9;
/** /**
* Network type for cellular connections which cannot be mapped to one of {@link * Network type for cellular connections which cannot be mapped to one of {@link
* #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, or {@link #NETWORK_TYPE_4G}. * #NETWORK_TYPE_2G}, {@link #NETWORK_TYPE_3G}, or {@link #NETWORK_TYPE_4G}.
@ -961,19 +1016,48 @@ public final class C {
public static final int NETWORK_TYPE_CELLULAR_UNKNOWN = 6; public static final int NETWORK_TYPE_CELLULAR_UNKNOWN = 6;
/** Network type for an Ethernet connection. */ /** Network type for an Ethernet connection. */
public static final int NETWORK_TYPE_ETHERNET = 7; public static final int NETWORK_TYPE_ETHERNET = 7;
/** /** Network type for other connections which are not Wifi or cellular (e.g. VPN, Bluetooth). */
* Network type for other connections which are not Wifi or cellular (e.g. Ethernet, VPN,
* Bluetooth).
*/
public static final int NETWORK_TYPE_OTHER = 8; public static final int NETWORK_TYPE_OTHER = 8;
/**
* Mode specifying whether the player should hold a WakeLock and a WifiLock. One of {@link
* #WAKE_MODE_NONE}, {@link #WAKE_MODE_LOCAL} and {@link #WAKE_MODE_NETWORK}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({WAKE_MODE_NONE, WAKE_MODE_LOCAL, WAKE_MODE_NETWORK})
public @interface WakeMode {}
/**
* A wake mode that will not cause the player to hold any locks.
*
* <p>This is suitable for applications that do not play media with the screen off.
*/
public static final int WAKE_MODE_NONE = 0;
/**
* A wake mode that will cause the player to hold a {@link android.os.PowerManager.WakeLock}
* during playback.
*
* <p>This is suitable for applications that play media with the screen off and do not load media
* over wifi.
*/
public static final int WAKE_MODE_LOCAL = 1;
/**
* A wake mode that will cause the player to hold a {@link android.os.PowerManager.WakeLock} and a
* {@link android.net.wifi.WifiManager.WifiLock} during playback.
*
* <p>This is suitable for applications that play media with the screen off and may load media
* over wifi.
*/
public static final int WAKE_MODE_NETWORK = 2;
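Editorial note: a sketch of picking a mode for audio that keeps playing with the screen off over the network; setWakeMode is assumed to be the corresponding setter on SimpleExoPlayer in this version:

    // Holds both a WakeLock and a WifiLock while playing (assumed SimpleExoPlayer setter).
    simpleExoPlayer.setWakeMode(C.WAKE_MODE_NETWORK);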
/** /**
* Track role flags. Possible flag values are {@link #ROLE_FLAG_MAIN}, {@link * Track role flags. Possible flag values are {@link #ROLE_FLAG_MAIN}, {@link
* #ROLE_FLAG_ALTERNATE}, {@link #ROLE_FLAG_SUPPLEMENTARY}, {@link #ROLE_FLAG_COMMENTARY}, {@link * #ROLE_FLAG_ALTERNATE}, {@link #ROLE_FLAG_SUPPLEMENTARY}, {@link #ROLE_FLAG_COMMENTARY}, {@link
* #ROLE_FLAG_DUB}, {@link #ROLE_FLAG_EMERGENCY}, {@link #ROLE_FLAG_CAPTION}, {@link * #ROLE_FLAG_DUB}, {@link #ROLE_FLAG_EMERGENCY}, {@link #ROLE_FLAG_CAPTION}, {@link
* #ROLE_FLAG_SUBTITLE}, {@link #ROLE_FLAG_SIGN}, {@link #ROLE_FLAG_DESCRIBES_VIDEO}, {@link * #ROLE_FLAG_SUBTITLE}, {@link #ROLE_FLAG_SIGN}, {@link #ROLE_FLAG_DESCRIBES_VIDEO}, {@link
* #ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND}, {@link #ROLE_FLAG_ENHANCED_DIALOG_INTELLIGIBILITY}, * #ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND}, {@link #ROLE_FLAG_ENHANCED_DIALOG_INTELLIGIBILITY},
* {@link #ROLE_FLAG_TRANSCRIBES_DIALOG} and {@link #ROLE_FLAG_EASY_TO_READ}. * {@link #ROLE_FLAG_TRANSCRIBES_DIALOG}, {@link #ROLE_FLAG_EASY_TO_READ} and {@link
* #ROLE_FLAG_TRICK_PLAY}.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -993,7 +1077,8 @@ public final class C {
ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND, ROLE_FLAG_DESCRIBES_MUSIC_AND_SOUND,
ROLE_FLAG_ENHANCED_DIALOG_INTELLIGIBILITY, ROLE_FLAG_ENHANCED_DIALOG_INTELLIGIBILITY,
ROLE_FLAG_TRANSCRIBES_DIALOG, ROLE_FLAG_TRANSCRIBES_DIALOG,
ROLE_FLAG_EASY_TO_READ ROLE_FLAG_EASY_TO_READ,
ROLE_FLAG_TRICK_PLAY
}) })
public @interface RoleFlags {} public @interface RoleFlags {}
/** Indicates a main track. */ /** Indicates a main track. */
@ -1039,6 +1124,8 @@ public final class C {
public static final int ROLE_FLAG_TRANSCRIBES_DIALOG = 1 << 12; public static final int ROLE_FLAG_TRANSCRIBES_DIALOG = 1 << 12;
/** Indicates the track contains a text that has been edited for ease of reading. */ /** Indicates the track contains a text that has been edited for ease of reading. */
public static final int ROLE_FLAG_EASY_TO_READ = 1 << 13; public static final int ROLE_FLAG_EASY_TO_READ = 1 << 13;
/** Indicates the track is intended for trick play. */
public static final int ROLE_FLAG_TRICK_PLAY = 1 << 14;
/** /**
* Converts a time in microseconds to the corresponding time in milliseconds, preserving * Converts a time in microseconds to the corresponding time in milliseconds, preserving
View File
@ -32,19 +32,21 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
public interface PlaybackParameterListener { public interface PlaybackParameterListener {
/** /**
* Called when the active playback parameters changed. * Called when the active playback parameters changed. Will not be called for {@link
* #setPlaybackParameters(PlaybackParameters)}.
* *
* @param newPlaybackParameters The newly active {@link PlaybackParameters}. * @param newPlaybackParameters The newly active {@link PlaybackParameters}.
*/ */
void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters); void onPlaybackParametersChanged(PlaybackParameters newPlaybackParameters);
} }
private final StandaloneMediaClock standaloneMediaClock; private final StandaloneMediaClock standaloneClock;
private final PlaybackParameterListener listener; private final PlaybackParameterListener listener;
private @Nullable Renderer rendererClockSource; @Nullable private Renderer rendererClockSource;
private @Nullable MediaClock rendererClock; @Nullable private MediaClock rendererClock;
private boolean isUsingStandaloneClock;
private boolean standaloneClockIsStarted;
/** /**
* Creates a new instance with listener for playback parameter changes and a {@link Clock} to use * Creates a new instance with listener for playback parameter changes and a {@link Clock} to use
@ -56,21 +58,24 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
*/ */
public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) { public DefaultMediaClock(PlaybackParameterListener listener, Clock clock) {
this.listener = listener; this.listener = listener;
this.standaloneMediaClock = new StandaloneMediaClock(clock); this.standaloneClock = new StandaloneMediaClock(clock);
isUsingStandaloneClock = true;
} }
/** /**
* Starts the standalone fallback clock. * Starts the standalone fallback clock.
*/ */
public void start() { public void start() {
standaloneMediaClock.start(); standaloneClockIsStarted = true;
standaloneClock.start();
} }
/** /**
* Stops the standalone fallback clock. * Stops the standalone fallback clock.
*/ */
public void stop() { public void stop() {
standaloneMediaClock.stop(); standaloneClockIsStarted = false;
standaloneClock.stop();
} }
/** /**
@ -79,7 +84,7 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
* @param positionUs The position to set in microseconds. * @param positionUs The position to set in microseconds.
*/ */
public void resetPosition(long positionUs) { public void resetPosition(long positionUs) {
standaloneMediaClock.resetPosition(positionUs); standaloneClock.resetPosition(positionUs);
} }
/** /**
@ -99,8 +104,7 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
} }
this.rendererClock = rendererMediaClock; this.rendererClock = rendererMediaClock;
this.rendererClockSource = renderer; this.rendererClockSource = renderer;
rendererClock.setPlaybackParameters(standaloneMediaClock.getPlaybackParameters()); rendererClock.setPlaybackParameters(standaloneClock.getPlaybackParameters());
ensureSynced();
} }
} }
@ -114,65 +118,80 @@ import com.google.android.exoplayer2.util.StandaloneMediaClock;
if (renderer == rendererClockSource) { if (renderer == rendererClockSource) {
this.rendererClock = null; this.rendererClock = null;
this.rendererClockSource = null; this.rendererClockSource = null;
isUsingStandaloneClock = true;
} }
} }
/** /**
* Syncs internal clock if needed and returns current clock position in microseconds. * Syncs internal clock if needed and returns current clock position in microseconds.
*
* @param isReadingAhead Whether the renderers are reading ahead.
*/ */
public long syncAndGetPositionUs() { public long syncAndGetPositionUs(boolean isReadingAhead) {
if (isUsingRendererClock()) { syncClocks(isReadingAhead);
ensureSynced(); return getPositionUs();
return rendererClock.getPositionUs();
} else {
return standaloneMediaClock.getPositionUs();
}
} }
// MediaClock implementation. // MediaClock implementation.
@Override @Override
public long getPositionUs() { public long getPositionUs() {
if (isUsingRendererClock()) { return isUsingStandaloneClock ? standaloneClock.getPositionUs() : rendererClock.getPositionUs();
return rendererClock.getPositionUs();
} else {
return standaloneMediaClock.getPositionUs();
}
} }
@Override @Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) { public void setPlaybackParameters(PlaybackParameters playbackParameters) {
if (rendererClock != null) { if (rendererClock != null) {
playbackParameters = rendererClock.setPlaybackParameters(playbackParameters); rendererClock.setPlaybackParameters(playbackParameters);
playbackParameters = rendererClock.getPlaybackParameters();
} }
standaloneMediaClock.setPlaybackParameters(playbackParameters); standaloneClock.setPlaybackParameters(playbackParameters);
listener.onPlaybackParametersChanged(playbackParameters);
return playbackParameters;
} }
@Override @Override
public PlaybackParameters getPlaybackParameters() { public PlaybackParameters getPlaybackParameters() {
return rendererClock != null ? rendererClock.getPlaybackParameters() return rendererClock != null
: standaloneMediaClock.getPlaybackParameters(); ? rendererClock.getPlaybackParameters()
: standaloneClock.getPlaybackParameters();
} }
private void ensureSynced() { private void syncClocks(boolean isReadingAhead) {
if (shouldUseStandaloneClock(isReadingAhead)) {
isUsingStandaloneClock = true;
if (standaloneClockIsStarted) {
standaloneClock.start();
}
return;
}
long rendererClockPositionUs = rendererClock.getPositionUs(); long rendererClockPositionUs = rendererClock.getPositionUs();
standaloneMediaClock.resetPosition(rendererClockPositionUs); if (isUsingStandaloneClock) {
// Ensure enabling the renderer clock doesn't jump backwards in time.
if (rendererClockPositionUs < standaloneClock.getPositionUs()) {
standaloneClock.stop();
return;
}
isUsingStandaloneClock = false;
if (standaloneClockIsStarted) {
standaloneClock.start();
}
}
// Continuously sync stand-alone clock to renderer clock so that it can take over if needed.
standaloneClock.resetPosition(rendererClockPositionUs);
PlaybackParameters playbackParameters = rendererClock.getPlaybackParameters(); PlaybackParameters playbackParameters = rendererClock.getPlaybackParameters();
if (!playbackParameters.equals(standaloneMediaClock.getPlaybackParameters())) { if (!playbackParameters.equals(standaloneClock.getPlaybackParameters())) {
standaloneMediaClock.setPlaybackParameters(playbackParameters); standaloneClock.setPlaybackParameters(playbackParameters);
listener.onPlaybackParametersChanged(playbackParameters); listener.onPlaybackParametersChanged(playbackParameters);
} }
} }
private boolean isUsingRendererClock() { private boolean shouldUseStandaloneClock(boolean isReadingAhead) {
// Use the renderer clock if the providing renderer has not ended or needs the next sample // Use the standalone clock if the clock providing renderer is not set or has ended. Also use
// stream to reenter the ready state. The latter case uses the standalone clock to avoid getting // the standalone clock if the renderer is not ready and we have finished reading the stream or
// stuck if tracks in the current period have uneven durations. // are reading ahead to avoid getting stuck if tracks in the current period have uneven
// See: https://github.com/google/ExoPlayer/issues/1874. // durations. See: https://github.com/google/ExoPlayer/issues/1874.
return rendererClockSource != null && !rendererClockSource.isEnded() return rendererClockSource == null
&& (rendererClockSource.isReady() || !rendererClockSource.hasReadStreamToEnd()); || rendererClockSource.isEnded()
|| (!rendererClockSource.isReady()
&& (isReadingAhead || rendererClockSource.hasReadStreamToEnd()));
} }
} }
View File
@ -104,7 +104,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
/** /**
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager} * @deprecated Use {@link #DefaultRenderersFactory(Context)} and pass {@link DrmSessionManager}
* directly to {@link SimpleExoPlayer} or {@link ExoPlayerFactory}. * directly to {@link SimpleExoPlayer.Builder}.
*/ */
@Deprecated @Deprecated
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
@ -127,7 +127,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
/** /**
* @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link * @deprecated Use {@link #DefaultRenderersFactory(Context)} and {@link
* #setExtensionRendererMode(int)}, and pass {@link DrmSessionManager} directly to {@link * #setExtensionRendererMode(int)}, and pass {@link DrmSessionManager} directly to {@link
* SimpleExoPlayer} or {@link ExoPlayerFactory}. * SimpleExoPlayer.Builder}.
*/ */
@Deprecated @Deprecated
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
@ -154,7 +154,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
/** /**
* @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link * @deprecated Use {@link #DefaultRenderersFactory(Context)}, {@link
* #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}, and pass * #setExtensionRendererMode(int)} and {@link #setAllowedVideoJoiningTimeMs(long)}, and pass
* {@link DrmSessionManager} directly to {@link SimpleExoPlayer} or {@link ExoPlayerFactory}. * {@link DrmSessionManager} directly to {@link SimpleExoPlayer.Builder}.
*/ */
@Deprecated @Deprecated
public DefaultRenderersFactory( public DefaultRenderersFactory(
@ -365,6 +365,33 @@ public class DefaultRenderersFactory implements RenderersFactory {
// The extension is present, but instantiation failed. // The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating VP9 extension", e); throw new RuntimeException("Error instantiating VP9 extension", e);
} }
try {
// Full class names used for constructor args so the LINT rule triggers if any of them move.
// LINT.IfChange
Class<?> clazz = Class.forName("com.google.android.exoplayer2.ext.av1.Libgav1VideoRenderer");
Constructor<?> constructor =
clazz.getConstructor(
long.class,
android.os.Handler.class,
com.google.android.exoplayer2.video.VideoRendererEventListener.class,
int.class);
// LINT.ThenChange(../../../../../../../proguard-rules.txt)
Renderer renderer =
(Renderer)
constructor.newInstance(
allowedVideoJoiningTimeMs,
eventHandler,
eventListener,
MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY);
out.add(extensionRendererIndex++, renderer);
Log.i(TAG, "Loaded Libgav1VideoRenderer.");
} catch (ClassNotFoundException e) {
// Expected if the app was built without the extension.
} catch (Exception e) {
// The extension is present, but instantiation failed.
throw new RuntimeException("Error instantiating AV1 extension", e);
}
} }
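Editorial note: the reflective lookup above only runs when extension renderers are enabled. A sketch of enabling it from application code, assuming context is a valid Android Context:

    DefaultRenderersFactory renderersFactory =
        new DefaultRenderersFactory(context)
            .setExtensionRendererMode(DefaultRenderersFactory.EXTENSION_RENDERER_MODE_PREFER);
    // EXTENSION_RENDERER_MODE_PREFER lists the extension renderers (including the AV1 one
    // loaded above) before the MediaCodec-based ones; EXTENSION_RENDERER_MODE_ON lists them after.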
/** /**
View File
@ -15,8 +15,10 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import android.os.SystemClock;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.RendererCapabilities.FormatSupport;
import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import java.io.IOException; import java.io.IOException;
@ -73,6 +75,22 @@ public final class ExoPlaybackException extends Exception {
*/ */
public final int rendererIndex; public final int rendererIndex;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the {@link Format} the renderer was using
* at the time of the exception, or null if the renderer wasn't using a {@link Format}.
*/
@Nullable public final Format rendererFormat;
/**
* If {@link #type} is {@link #TYPE_RENDERER}, this is the level of {@link FormatSupport} of the
* renderer for {@link #rendererFormat}. If {@link #rendererFormat} is null, this is {@link
* RendererCapabilities#FORMAT_HANDLED}.
*/
@FormatSupport public final int rendererFormatSupport;
/** The value of {@link SystemClock#elapsedRealtime()} when this exception was created. */
public final long timestampMs;
@Nullable private final Throwable cause; @Nullable private final Throwable cause;
/** /**
@ -82,7 +100,7 @@ public final class ExoPlaybackException extends Exception {
* @return The created instance. * @return The created instance.
*/ */
public static ExoPlaybackException createForSource(IOException cause) { public static ExoPlaybackException createForSource(IOException cause) {
return new ExoPlaybackException(TYPE_SOURCE, cause, /* rendererIndex= */ C.INDEX_UNSET); return new ExoPlaybackException(TYPE_SOURCE, cause);
} }
/** /**
@ -90,10 +108,23 @@ public final class ExoPlaybackException extends Exception {
* *
* @param cause The cause of the failure. * @param cause The cause of the failure.
* @param rendererIndex The index of the renderer in which the failure occurred. * @param rendererIndex The index of the renderer in which the failure occurred.
* @param rendererFormat The {@link Format} the renderer was using at the time of the exception,
* or null if the renderer wasn't using a {@link Format}.
* @param rendererFormatSupport The {@link FormatSupport} of the renderer for {@code
* rendererFormat}. Ignored if {@code rendererFormat} is null.
* @return The created instance. * @return The created instance.
*/ */
public static ExoPlaybackException createForRenderer(Exception cause, int rendererIndex) { public static ExoPlaybackException createForRenderer(
return new ExoPlaybackException(TYPE_RENDERER, cause, rendererIndex); Exception cause,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
return new ExoPlaybackException(
TYPE_RENDERER,
cause,
rendererIndex,
rendererFormat,
rendererFormat == null ? RendererCapabilities.FORMAT_HANDLED : rendererFormatSupport);
} }
/** /**
@ -103,7 +134,7 @@ public final class ExoPlaybackException extends Exception {
* @return The created instance. * @return The created instance.
*/ */
public static ExoPlaybackException createForUnexpected(RuntimeException cause) { public static ExoPlaybackException createForUnexpected(RuntimeException cause) {
return new ExoPlaybackException(TYPE_UNEXPECTED, cause, /* rendererIndex= */ C.INDEX_UNSET); return new ExoPlaybackException(TYPE_UNEXPECTED, cause);
} }
/** /**
@ -123,21 +154,41 @@ public final class ExoPlaybackException extends Exception {
* @return The created instance. * @return The created instance.
*/ */
public static ExoPlaybackException createForOutOfMemoryError(OutOfMemoryError cause) { public static ExoPlaybackException createForOutOfMemoryError(OutOfMemoryError cause) {
return new ExoPlaybackException(TYPE_OUT_OF_MEMORY, cause, /* rendererIndex= */ C.INDEX_UNSET); return new ExoPlaybackException(TYPE_OUT_OF_MEMORY, cause);
} }
private ExoPlaybackException(@Type int type, Throwable cause, int rendererIndex) { private ExoPlaybackException(@Type int type, Throwable cause) {
this(
type,
cause,
/* rendererIndex= */ C.INDEX_UNSET,
/* rendererFormat= */ null,
/* rendererFormatSupport= */ RendererCapabilities.FORMAT_HANDLED);
}
private ExoPlaybackException(
@Type int type,
Throwable cause,
int rendererIndex,
@Nullable Format rendererFormat,
@FormatSupport int rendererFormatSupport) {
super(cause); super(cause);
this.type = type; this.type = type;
this.cause = cause; this.cause = cause;
this.rendererIndex = rendererIndex; this.rendererIndex = rendererIndex;
this.rendererFormat = rendererFormat;
this.rendererFormatSupport = rendererFormatSupport;
timestampMs = SystemClock.elapsedRealtime();
} }
private ExoPlaybackException(@Type int type, String message) { private ExoPlaybackException(@Type int type, String message) {
super(message); super(message);
this.type = type; this.type = type;
rendererIndex = C.INDEX_UNSET; rendererIndex = C.INDEX_UNSET;
rendererFormat = null;
rendererFormatSupport = RendererCapabilities.FORMAT_UNSUPPORTED_TYPE;
cause = null; cause = null;
timestampMs = SystemClock.elapsedRealtime();
} }
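Editorial note: a sketch of consuming the new diagnostic fields from a Player.EventListener implementation; android.util.Log is assumed to be available as usual:

    @Override
    public void onPlayerError(ExoPlaybackException error) {
      if (error.type == ExoPlaybackException.TYPE_RENDERER) {
        android.util.Log.e(
            "Playback",
            "renderer " + error.rendererIndex
                + " failed for format " + error.rendererFormat
                + " (formatSupport=" + error.rendererFormatSupport
                + ") at " + error.timestampMs + " ms");
      }
    }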
/** /**
View File
@ -15,8 +15,11 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import android.content.Context;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer; import com.google.android.exoplayer2.audio.MediaCodecAudioRenderer;
import com.google.android.exoplayer2.metadata.MetadataRenderer; import com.google.android.exoplayer2.metadata.MetadataRenderer;
import com.google.android.exoplayer2.source.ClippingMediaSource; import com.google.android.exoplayer2.source.ClippingMediaSource;
@ -29,12 +32,17 @@ import com.google.android.exoplayer2.source.SingleSampleMediaSource;
import com.google.android.exoplayer2.text.TextRenderer; import com.google.android.exoplayer2.text.TextRenderer;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.upstream.BandwidthMeter;
import com.google.android.exoplayer2.upstream.DataSource; import com.google.android.exoplayer2.upstream.DataSource;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.MediaCodecVideoRenderer; import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
/** /**
* An extensible media player that plays {@link MediaSource}s. Instances can be obtained from {@link * An extensible media player that plays {@link MediaSource}s. Instances can be obtained from {@link
* ExoPlayerFactory}. * SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder}.
* *
* <h3>Player components</h3> * <h3>Player components</h3>
* *
@ -85,8 +93,8 @@ import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
* *
* <p>The figure below shows ExoPlayer's threading model. * <p>The figure below shows ExoPlayer's threading model.
* *
* <p align="center"><img src="doc-files/exoplayer-threading-model.svg" alt="ExoPlayer's threading * <p style="align:center"><img src="doc-files/exoplayer-threading-model.svg" alt="ExoPlayer's
* model"> * threading model">
* *
* <ul> * <ul>
* <li>ExoPlayer instances must be accessed from a single application thread. For the vast * <li>ExoPlayer instances must be accessed from a single application thread. For the vast
@ -117,27 +125,200 @@ import com.google.android.exoplayer2.video.MediaCodecVideoRenderer;
*/ */
public interface ExoPlayer extends Player { public interface ExoPlayer extends Player {
/** @deprecated Use {@link PlayerMessage.Target} instead. */ /**
@Deprecated * A builder for {@link ExoPlayer} instances.
interface ExoPlayerComponent extends PlayerMessage.Target {} *
* <p>See {@link #Builder(Context, Renderer...)} for the list of default values.
*/
final class Builder {
/** @deprecated Use {@link PlayerMessage} instead. */ private final Renderer[] renderers;
@Deprecated
final class ExoPlayerMessage {
/** The target to receive the message. */ private Clock clock;
public final PlayerMessage.Target target; private TrackSelector trackSelector;
/** The type of the message. */ private LoadControl loadControl;
public final int messageType; private BandwidthMeter bandwidthMeter;
/** The message. */ private Looper looper;
public final Object message; private AnalyticsCollector analyticsCollector;
private boolean useLazyPreparation;
private boolean buildCalled;
/** @deprecated Use {@link ExoPlayer#createMessage(PlayerMessage.Target)} instead. */ /**
@Deprecated * Creates a builder with a list of {@link Renderer Renderers}.
public ExoPlayerMessage(PlayerMessage.Target target, int messageType, Object message) { *
this.target = target; * <p>The builder uses the following default values:
this.messageType = messageType; *
this.message = message; * <ul>
* <li>{@link TrackSelector}: {@link DefaultTrackSelector}
* <li>{@link LoadControl}: {@link DefaultLoadControl}
* <li>{@link BandwidthMeter}: {@link DefaultBandwidthMeter#getSingletonInstance(Context)}
* <li>{@link Looper}: The {@link Looper} associated with the current thread, or the {@link
* Looper} of the application's main thread if the current thread doesn't have a {@link
* Looper}
* <li>{@link AnalyticsCollector}: {@link AnalyticsCollector} with {@link Clock#DEFAULT}
* <li>{@code useLazyPreparation}: {@code true}
* <li>{@link Clock}: {@link Clock#DEFAULT}
* </ul>
*
* @param context A {@link Context}.
* @param renderers The {@link Renderer Renderers} to be used by the player.
*/
public Builder(Context context, Renderer... renderers) {
this(
renderers,
new DefaultTrackSelector(context),
new DefaultLoadControl(),
DefaultBandwidthMeter.getSingletonInstance(context),
Util.getLooper(),
new AnalyticsCollector(Clock.DEFAULT),
/* useLazyPreparation= */ true,
Clock.DEFAULT);
}
/**
* Creates a builder with the specified custom components.
*
* <p>Note that this constructor is only useful if you try to ensure that ExoPlayer's default
* components can be removed by ProGuard or R8. For most components except renderers, there is
* only a marginal benefit of doing that.
*
* @param renderers The {@link Renderer Renderers} to be used by the player.
* @param trackSelector A {@link TrackSelector}.
* @param loadControl A {@link LoadControl}.
* @param bandwidthMeter A {@link BandwidthMeter}.
* @param looper A {@link Looper} that must be used for all calls to the player.
* @param analyticsCollector An {@link AnalyticsCollector}.
* @param useLazyPreparation Whether media sources should be initialized lazily.
* @param clock A {@link Clock}. Should always be {@link Clock#DEFAULT}.
*/
public Builder(
Renderer[] renderers,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
Looper looper,
AnalyticsCollector analyticsCollector,
boolean useLazyPreparation,
Clock clock) {
Assertions.checkArgument(renderers.length > 0);
this.renderers = renderers;
this.trackSelector = trackSelector;
this.loadControl = loadControl;
this.bandwidthMeter = bandwidthMeter;
this.looper = looper;
this.analyticsCollector = analyticsCollector;
this.useLazyPreparation = useLazyPreparation;
this.clock = clock;
}
/**
* Sets the {@link TrackSelector} that will be used by the player.
*
* @param trackSelector A {@link TrackSelector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setTrackSelector(TrackSelector trackSelector) {
Assertions.checkState(!buildCalled);
this.trackSelector = trackSelector;
return this;
}
/**
* Sets the {@link LoadControl} that will be used by the player.
*
* @param loadControl A {@link LoadControl}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLoadControl(LoadControl loadControl) {
Assertions.checkState(!buildCalled);
this.loadControl = loadControl;
return this;
}
/**
* Sets the {@link BandwidthMeter} that will be used by the player.
*
* @param bandwidthMeter A {@link BandwidthMeter}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) {
Assertions.checkState(!buildCalled);
this.bandwidthMeter = bandwidthMeter;
return this;
}
/**
* Sets the {@link Looper} that must be used for all calls to the player and that is used to
* call listeners on.
*
* @param looper A {@link Looper}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLooper(Looper looper) {
Assertions.checkState(!buildCalled);
this.looper = looper;
return this;
}
/**
* Sets the {@link AnalyticsCollector} that will collect and forward all player events.
*
* @param analyticsCollector An {@link AnalyticsCollector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setAnalyticsCollector(AnalyticsCollector analyticsCollector) {
Assertions.checkState(!buildCalled);
this.analyticsCollector = analyticsCollector;
return this;
}
/**
* Sets whether media sources should be initialized lazily.
*
* <p>If false, all initial preparation steps (e.g., manifest loads) happen immediately. If
* true, these initial preparations are triggered only when the player starts buffering the
* media.
*
* @param useLazyPreparation Whether to use lazy preparation.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setUseLazyPreparation(boolean useLazyPreparation) {
Assertions.checkState(!buildCalled);
this.useLazyPreparation = useLazyPreparation;
return this;
}
/**
* Sets the {@link Clock} that will be used by the player. Should only be set for testing
* purposes.
*
* @param clock A {@link Clock}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@VisibleForTesting
public Builder setClock(Clock clock) {
Assertions.checkState(!buildCalled);
this.clock = clock;
return this;
}
/**
* Builds an {@link ExoPlayer} instance.
*
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public ExoPlayer build() {
Assertions.checkState(!buildCalled);
buildCalled = true;
return new ExoPlayerImpl(
renderers, trackSelector, loadControl, bandwidthMeter, clock, looper);
} }
} }
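Editorial note: a sketch of the new builder in use, assuming an Android Context and the two-argument constructors of the MediaCodec-based renderers from the same library version (they are not shown in this diff):

    // Assumes imports from com.google.android.exoplayer2.video, .audio and .mediacodec.
    ExoPlayer buildPlayer(Context context) {
      Renderer videoRenderer = new MediaCodecVideoRenderer(context, MediaCodecSelector.DEFAULT);
      Renderer audioRenderer = new MediaCodecAudioRenderer(context, MediaCodecSelector.DEFAULT);
      return new ExoPlayer.Builder(context, videoRenderer, audioRenderer)
          .setUseLazyPreparation(true) // the default; shown for illustration
          .build();
    }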
@ -151,8 +332,8 @@ public interface ExoPlayer extends Player {
void retry(); void retry();
/** /**
* Prepares the player to play the provided {@link MediaSource}. Equivalent to * Prepares the player to play the provided {@link MediaSource}. Equivalent to {@code
* {@code prepare(mediaSource, true, true)}. * prepare(mediaSource, true, true)}.
*/ */
void prepare(MediaSource mediaSource); void prepare(MediaSource mediaSource);
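Editorial note: a sketch of preparing a built player, assuming ProgressiveMediaSource and DefaultDataSourceFactory from the same library version; the user agent and URL are purely illustrative:

    // Assumes: com.google.android.exoplayer2.source.ProgressiveMediaSource,
    // com.google.android.exoplayer2.upstream.DefaultDataSourceFactory, android.net.Uri.
    DataSource.Factory dataSourceFactory =
        new DefaultDataSourceFactory(context, /* userAgent= */ "myApp");
    MediaSource mediaSource =
        new ProgressiveMediaSource.Factory(dataSourceFactory)
            .createMediaSource(Uri.parse("https://example.com/media.mp4"));
    player.prepare(mediaSource);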
@ -181,19 +362,6 @@ public interface ExoPlayer extends Player {
*/ */
PlayerMessage createMessage(PlayerMessage.Target target); PlayerMessage createMessage(PlayerMessage.Target target);
/** @deprecated Use {@link #createMessage(PlayerMessage.Target)} instead. */
@Deprecated
@SuppressWarnings("deprecation")
void sendMessages(ExoPlayerMessage... messages);
/**
* @deprecated Use {@link #createMessage(PlayerMessage.Target)} with {@link
* PlayerMessage#blockUntilDelivered()}.
*/
@Deprecated
@SuppressWarnings("deprecation")
void blockingSendMessages(ExoPlayerMessage... messages);
/** /**
* Sets the parameters that control how seek operations are performed. * Sets the parameters that control how seek operations are performed.
* *
View File
@ -21,6 +21,7 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.analytics.AnalyticsCollector; import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector; import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.upstream.BandwidthMeter;
@ -28,30 +29,19 @@ import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} or {@link ExoPlayer.Builder} instead. */
* A factory for {@link ExoPlayer} instances. @Deprecated
*/
public final class ExoPlayerFactory { public final class ExoPlayerFactory {
private static @Nullable BandwidthMeter singletonBandwidthMeter;
private ExoPlayerFactory() {} private ExoPlayerFactory() {}
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode, which determines if and how available
* extension renderers are used. Note that extensions must be included in the application
* build for them to be considered available.
* @deprecated Use {@link #newSimpleInstance(Context, RenderersFactory, TrackSelector,
* LoadControl, DrmSessionManager)}.
*/ */
@Deprecated @Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
TrackSelector trackSelector, TrackSelector trackSelector,
@ -65,22 +55,12 @@ public final class ExoPlayerFactory {
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param extensionRendererMode The extension renderer mode, which determines if and how available
* extension renderers are used. Note that extensions must be included in the application
* build for them to be considered available.
* @param allowedVideoJoiningTimeMs The maximum duration for which a video renderer can attempt to
* seamlessly join an ongoing playback.
* @deprecated Use {@link #newSimpleInstance(Context, RenderersFactory, TrackSelector,
* LoadControl, DrmSessionManager)}.
*/ */
@Deprecated @Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
TrackSelector trackSelector, TrackSelector trackSelector,
@ -96,44 +76,31 @@ public final class ExoPlayerFactory {
context, renderersFactory, trackSelector, loadControl, drmSessionManager); context, renderersFactory, trackSelector, loadControl, drmSessionManager);
} }
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
* Creates a {@link SimpleExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
*/
public static SimpleExoPlayer newSimpleInstance(Context context) { public static SimpleExoPlayer newSimpleInstance(Context context) {
return newSimpleInstance(context, new DefaultTrackSelector()); return newSimpleInstance(context, new DefaultTrackSelector(context));
} }
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
* Creates a {@link SimpleExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
*/
public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) { public static SimpleExoPlayer newSimpleInstance(Context context, TrackSelector trackSelector) {
return newSimpleInstance(context, new DefaultRenderersFactory(context), trackSelector); return newSimpleInstance(context, new DefaultRenderersFactory(context), trackSelector);
} }
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
* Creates a {@link SimpleExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
*/
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, RenderersFactory renderersFactory, TrackSelector trackSelector) { Context context, RenderersFactory renderersFactory, TrackSelector trackSelector) {
return newSimpleInstance(context, renderersFactory, trackSelector, new DefaultLoadControl()); return newSimpleInstance(context, renderersFactory, trackSelector, new DefaultLoadControl());
} }
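Editorial note: a sketch of the replacement these deprecation notices point to, assuming an Android Context; where DRM is needed, the DrmSessionManager is injected into the MediaSource factory rather than the player:

    SimpleExoPlayer player =
        new SimpleExoPlayer.Builder(context)
            .setTrackSelector(new DefaultTrackSelector(context))
            .build();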
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
* Creates a {@link SimpleExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
*/
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, TrackSelector trackSelector, LoadControl loadControl) { Context context, TrackSelector trackSelector, LoadControl loadControl) {
RenderersFactory renderersFactory = new DefaultRenderersFactory(context); RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
@ -141,14 +108,12 @@ public final class ExoPlayerFactory {
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. Available extension renderers are not used. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
TrackSelector trackSelector, TrackSelector trackSelector,
@ -160,14 +125,12 @@ public final class ExoPlayerFactory {
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -177,14 +140,9 @@ public final class ExoPlayerFactory {
context, renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager); context, renderersFactory, trackSelector, new DefaultLoadControl(), drmSessionManager);
} }
/** /** @deprecated Use {@link SimpleExoPlayer.Builder} instead. */
* Creates a {@link SimpleExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
*/
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -200,15 +158,12 @@ public final class ExoPlayerFactory {
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -220,16 +175,12 @@ public final class ExoPlayerFactory {
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -244,51 +195,41 @@ public final class ExoPlayerFactory {
loadControl, loadControl,
drmSessionManager, drmSessionManager,
bandwidthMeter, bandwidthMeter,
new AnalyticsCollector.Factory(), new AnalyticsCollector(Clock.DEFAULT),
Util.getLooper()); Util.getLooper());
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
* will collect and forward all player events.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
TrackSelector trackSelector, TrackSelector trackSelector,
LoadControl loadControl, LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
AnalyticsCollector.Factory analyticsCollectorFactory) { AnalyticsCollector analyticsCollector) {
return newSimpleInstance( return newSimpleInstance(
context, context,
renderersFactory, renderersFactory,
trackSelector, trackSelector,
loadControl, loadControl,
drmSessionManager, drmSessionManager,
analyticsCollectorFactory, analyticsCollector,
Util.getLooper()); Util.getLooper());
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -302,31 +243,24 @@ public final class ExoPlayerFactory {
trackSelector, trackSelector,
loadControl, loadControl,
drmSessionManager, drmSessionManager,
new AnalyticsCollector.Factory(), new AnalyticsCollector(Clock.DEFAULT),
looper); looper);
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
* will collect and forward all player events.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
TrackSelector trackSelector, TrackSelector trackSelector,
LoadControl loadControl, LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
AnalyticsCollector.Factory analyticsCollectorFactory, AnalyticsCollector analyticsCollector,
Looper looper) { Looper looper) {
return newSimpleInstance( return newSimpleInstance(
context, context,
@ -334,25 +268,18 @@ public final class ExoPlayerFactory {
trackSelector, trackSelector,
loadControl, loadControl,
drmSessionManager, drmSessionManager,
getDefaultBandwidthMeter(context), DefaultBandwidthMeter.getSingletonInstance(context),
analyticsCollectorFactory, analyticsCollector,
looper); looper);
} }
/** /**
* Creates a {@link SimpleExoPlayer} instance. * @deprecated Use {@link SimpleExoPlayer.Builder} instead. The {@link DrmSessionManager} cannot
* * be passed to {@link SimpleExoPlayer.Builder} and should instead be injected into the {@link
* @param context A {@link Context}. * MediaSource} factories.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
* will collect and forward all player events.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/ */
@SuppressWarnings("deprecation")
@Deprecated
public static SimpleExoPlayer newSimpleInstance( public static SimpleExoPlayer newSimpleInstance(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -360,7 +287,7 @@ public final class ExoPlayerFactory {
LoadControl loadControl, LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
BandwidthMeter bandwidthMeter, BandwidthMeter bandwidthMeter,
AnalyticsCollector.Factory analyticsCollectorFactory, AnalyticsCollector analyticsCollector,
Looper looper) { Looper looper) {
return new SimpleExoPlayer( return new SimpleExoPlayer(
context, context,
@ -369,45 +296,30 @@ public final class ExoPlayerFactory {
loadControl, loadControl,
drmSessionManager, drmSessionManager,
bandwidthMeter, bandwidthMeter,
analyticsCollectorFactory, analyticsCollector,
Clock.DEFAULT,
looper); looper);
} }
/** /** @deprecated Use {@link ExoPlayer.Builder} instead. */
* Creates an {@link ExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
*/
public static ExoPlayer newInstance( public static ExoPlayer newInstance(
Context context, Renderer[] renderers, TrackSelector trackSelector) { Context context, Renderer[] renderers, TrackSelector trackSelector) {
return newInstance(context, renderers, trackSelector, new DefaultLoadControl()); return newInstance(context, renderers, trackSelector, new DefaultLoadControl());
} }
/** /** @deprecated Use {@link ExoPlayer.Builder} instead. */
* Creates an {@link ExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
*/
public static ExoPlayer newInstance( public static ExoPlayer newInstance(
Context context, Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) { Context context, Renderer[] renderers, TrackSelector trackSelector, LoadControl loadControl) {
return newInstance(context, renderers, trackSelector, loadControl, Util.getLooper()); return newInstance(context, renderers, trackSelector, loadControl, Util.getLooper());
} }
/** /** @deprecated Use {@link ExoPlayer.Builder} instead. */
* Creates an {@link ExoPlayer} instance. @Deprecated
* @SuppressWarnings("deprecation")
* @param context A {@link Context}.
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/
public static ExoPlayer newInstance( public static ExoPlayer newInstance(
Context context, Context context,
Renderer[] renderers, Renderer[] renderers,
@ -415,21 +327,16 @@ public final class ExoPlayerFactory {
LoadControl loadControl, LoadControl loadControl,
Looper looper) { Looper looper) {
return newInstance( return newInstance(
context, renderers, trackSelector, loadControl, getDefaultBandwidthMeter(context), looper); context,
renderers,
trackSelector,
loadControl,
DefaultBandwidthMeter.getSingletonInstance(context),
looper);
} }
/** /** @deprecated Use {@link ExoPlayer.Builder} instead. */
* Creates an {@link ExoPlayer} instance. @Deprecated
*
* @param context A {@link Context}.
* @param renderers The {@link Renderer}s that will be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/
@SuppressWarnings("unused")
public static ExoPlayer newInstance( public static ExoPlayer newInstance(
Context context, Context context,
Renderer[] renderers, Renderer[] renderers,
@ -440,11 +347,4 @@ public final class ExoPlayerFactory {
return new ExoPlayerImpl( return new ExoPlayerImpl(
renderers, trackSelector, loadControl, bandwidthMeter, Clock.DEFAULT, looper); renderers, trackSelector, loadControl, bandwidthMeter, Clock.DEFAULT, looper);
} }
private static synchronized BandwidthMeter getDefaultBandwidthMeter(Context context) {
if (singletonBandwidthMeter == null) {
singletonBandwidthMeter = new DefaultBandwidthMeter.Builder(context).build();
}
return singletonBandwidthMeter;
}
} }
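
For orientation, a minimal sketch (not part of this diff) of what the deprecated ExoPlayerFactory.newSimpleInstance overloads map to on the 2.11 API surface referenced above. The class name PlayerFactoryMigration and the helper buildPlayer are illustrative only; per the new javadoc, any DrmSessionManager is assumed to be injected into the MediaSource factories rather than into the player.

import android.content.Context;
import com.google.android.exoplayer2.DefaultLoadControl;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;

final class PlayerFactoryMigration {
  // Roughly equivalent to ExoPlayerFactory.newSimpleInstance(context, trackSelector, loadControl):
  // the Builder carries the track selector and load control; DRM moves to the MediaSource level.
  static SimpleExoPlayer buildPlayer(Context context) {
    return new SimpleExoPlayer.Builder(context)
        .setTrackSelector(new DefaultTrackSelector(context))
        .setLoadControl(new DefaultLoadControl())
        .build();
  }
}
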
View File
@ -19,8 +19,8 @@ import android.annotation.SuppressLint;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import android.os.Message; import android.os.Message;
import androidx.annotation.Nullable;
import android.util.Pair; import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
@ -35,11 +35,11 @@ import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import java.util.ArrayDeque; import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CopyOnWriteArrayList;
/** An {@link ExoPlayer} implementation. Instances can be obtained from {@link ExoPlayerFactory}. */ /**
* An {@link ExoPlayer} implementation. Instances can be obtained from {@link ExoPlayer.Builder}.
*/
/* package */ final class ExoPlayerImpl extends BasePlayer implements ExoPlayer { /* package */ final class ExoPlayerImpl extends BasePlayer implements ExoPlayer {
private static final String TAG = "ExoPlayerImpl"; private static final String TAG = "ExoPlayerImpl";
@ -71,9 +71,9 @@ import java.util.concurrent.CopyOnWriteArrayList;
private boolean hasPendingPrepare; private boolean hasPendingPrepare;
private boolean hasPendingSeek; private boolean hasPendingSeek;
private boolean foregroundMode; private boolean foregroundMode;
private int pendingSetPlaybackParametersAcks;
private PlaybackParameters playbackParameters; private PlaybackParameters playbackParameters;
private SeekParameters seekParameters; private SeekParameters seekParameters;
private @Nullable ExoPlaybackException playbackError;
// Playback information when there is no pending seek/set source operation. // Playback information when there is no pending seek/set source operation.
private PlaybackInfo playbackInfo; private PlaybackInfo playbackInfo;
@ -194,10 +194,12 @@ import java.util.concurrent.CopyOnWriteArrayList;
} }
@Override @Override
@State
public int getPlaybackState() { public int getPlaybackState() {
return playbackInfo.playbackState; return playbackInfo.playbackState;
} }
@Override
@PlaybackSuppressionReason @PlaybackSuppressionReason
public int getPlaybackSuppressionReason() { public int getPlaybackSuppressionReason() {
return playbackSuppressionReason; return playbackSuppressionReason;
@ -206,13 +208,12 @@ import java.util.concurrent.CopyOnWriteArrayList;
@Override @Override
@Nullable @Nullable
public ExoPlaybackException getPlaybackError() { public ExoPlaybackException getPlaybackError() {
return playbackError; return playbackInfo.playbackError;
} }
@Override @Override
public void retry() { public void retry() {
if (mediaSource != null if (mediaSource != null && playbackInfo.playbackState == Player.STATE_IDLE) {
&& (playbackError != null || playbackInfo.playbackState == Player.STATE_IDLE)) {
prepare(mediaSource, /* resetPosition= */ false, /* resetState= */ false); prepare(mediaSource, /* resetPosition= */ false, /* resetState= */ false);
} }
} }
@ -224,11 +225,13 @@ import java.util.concurrent.CopyOnWriteArrayList;
@Override @Override
public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) { public void prepare(MediaSource mediaSource, boolean resetPosition, boolean resetState) {
playbackError = null;
this.mediaSource = mediaSource; this.mediaSource = mediaSource;
PlaybackInfo playbackInfo = PlaybackInfo playbackInfo =
getResetPlaybackInfo( getResetPlaybackInfo(
resetPosition, resetState, /* playbackState= */ Player.STATE_BUFFERING); resetPosition,
resetState,
/* resetError= */ true,
/* playbackState= */ Player.STATE_BUFFERING);
// Trigger internal prepare first before updating the playback info and notifying external // Trigger internal prepare first before updating the playback info and notifying external
// listeners to ensure that new operations issued in the listener notifications reach the // listeners to ensure that new operations issued in the listener notifications reach the
// player after this prepare. The internal player can't change the playback info immediately // player after this prepare. The internal player can't change the playback info immediately
@ -244,6 +247,7 @@ import java.util.concurrent.CopyOnWriteArrayList;
/* seekProcessed= */ false); /* seekProcessed= */ false);
} }
@Override @Override
public void setPlayWhenReady(boolean playWhenReady) { public void setPlayWhenReady(boolean playWhenReady) {
setPlayWhenReady(playWhenReady, PLAYBACK_SUPPRESSION_REASON_NONE); setPlayWhenReady(playWhenReady, PLAYBACK_SUPPRESSION_REASON_NONE);
@ -363,7 +367,14 @@ import java.util.concurrent.CopyOnWriteArrayList;
if (playbackParameters == null) { if (playbackParameters == null) {
playbackParameters = PlaybackParameters.DEFAULT; playbackParameters = PlaybackParameters.DEFAULT;
} }
if (this.playbackParameters.equals(playbackParameters)) {
return;
}
pendingSetPlaybackParametersAcks++;
this.playbackParameters = playbackParameters;
internalPlayer.setPlaybackParameters(playbackParameters); internalPlayer.setPlaybackParameters(playbackParameters);
PlaybackParameters playbackParametersToNotify = playbackParameters;
notifyListeners(listener -> listener.onPlaybackParametersChanged(playbackParametersToNotify));
} }
@Override @Override
@ -398,13 +409,13 @@ import java.util.concurrent.CopyOnWriteArrayList;
@Override @Override
public void stop(boolean reset) { public void stop(boolean reset) {
if (reset) { if (reset) {
playbackError = null;
mediaSource = null; mediaSource = null;
} }
PlaybackInfo playbackInfo = PlaybackInfo playbackInfo =
getResetPlaybackInfo( getResetPlaybackInfo(
/* resetPosition= */ reset, /* resetPosition= */ reset,
/* resetState= */ reset, /* resetState= */ reset,
/* resetError= */ reset,
/* playbackState= */ Player.STATE_IDLE); /* playbackState= */ Player.STATE_IDLE);
// Trigger internal stop first before updating the playback info and notifying external // Trigger internal stop first before updating the playback info and notifying external
// listeners to ensure that new operations issued in the listener notifications reach the // listeners to ensure that new operations issued in the listener notifications reach the
@ -432,18 +443,10 @@ import java.util.concurrent.CopyOnWriteArrayList;
getResetPlaybackInfo( getResetPlaybackInfo(
/* resetPosition= */ false, /* resetPosition= */ false,
/* resetState= */ false, /* resetState= */ false,
/* resetError= */ false,
/* playbackState= */ Player.STATE_IDLE); /* playbackState= */ Player.STATE_IDLE);
} }
@Override
@Deprecated
@SuppressWarnings("deprecation")
public void sendMessages(ExoPlayerMessage... messages) {
for (ExoPlayerMessage message : messages) {
createMessage(message.target).setType(message.messageType).setPayload(message.message).send();
}
}
@Override @Override
public PlayerMessage createMessage(Target target) { public PlayerMessage createMessage(Target target) {
return new PlayerMessage( return new PlayerMessage(
@ -454,36 +457,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
internalPlayerHandler); internalPlayerHandler);
} }
@Override
@Deprecated
@SuppressWarnings("deprecation")
public void blockingSendMessages(ExoPlayerMessage... messages) {
List<PlayerMessage> playerMessages = new ArrayList<>();
for (ExoPlayerMessage message : messages) {
playerMessages.add(
createMessage(message.target)
.setType(message.messageType)
.setPayload(message.message)
.send());
}
boolean wasInterrupted = false;
for (PlayerMessage message : playerMessages) {
boolean blockMessage = true;
while (blockMessage) {
try {
message.blockUntilDelivered();
blockMessage = false;
} catch (InterruptedException e) {
wasInterrupted = true;
}
}
}
if (wasInterrupted) {
// Restore the interrupted status.
Thread.currentThread().interrupt();
}
}
@Override @Override
public int getCurrentPeriodIndex() { public int getCurrentPeriodIndex() {
if (shouldMaskPosition()) { if (shouldMaskPosition()) {
@ -615,11 +588,6 @@ import java.util.concurrent.CopyOnWriteArrayList;
return playbackInfo.timeline; return playbackInfo.timeline;
} }
@Override
public Object getCurrentManifest() {
return playbackInfo.manifest;
}
// Not private so it can be called from an inner class without going through a thunk method. // Not private so it can be called from an inner class without going through a thunk method.
/* package */ void handleEvent(Message msg) { /* package */ void handleEvent(Message msg) {
switch (msg.what) { switch (msg.what) {
@ -631,22 +599,26 @@ import java.util.concurrent.CopyOnWriteArrayList;
/* positionDiscontinuityReason= */ msg.arg2); /* positionDiscontinuityReason= */ msg.arg2);
break; break;
case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED: case ExoPlayerImplInternal.MSG_PLAYBACK_PARAMETERS_CHANGED:
PlaybackParameters playbackParameters = (PlaybackParameters) msg.obj; handlePlaybackParameters((PlaybackParameters) msg.obj, /* operationAck= */ msg.arg1 != 0);
if (!this.playbackParameters.equals(playbackParameters)) {
this.playbackParameters = playbackParameters;
notifyListeners(listener -> listener.onPlaybackParametersChanged(playbackParameters));
}
break;
case ExoPlayerImplInternal.MSG_ERROR:
ExoPlaybackException playbackError = (ExoPlaybackException) msg.obj;
this.playbackError = playbackError;
notifyListeners(listener -> listener.onPlayerError(playbackError));
break; break;
default: default:
throw new IllegalStateException(); throw new IllegalStateException();
} }
} }
private void handlePlaybackParameters(
PlaybackParameters playbackParameters, boolean operationAck) {
if (operationAck) {
pendingSetPlaybackParametersAcks--;
}
if (pendingSetPlaybackParametersAcks == 0) {
if (!this.playbackParameters.equals(playbackParameters)) {
this.playbackParameters = playbackParameters;
notifyListeners(listener -> listener.onPlaybackParametersChanged(playbackParameters));
}
}
}
private void handlePlaybackInfo( private void handlePlaybackInfo(
PlaybackInfo playbackInfo, PlaybackInfo playbackInfo,
int operationAcks, int operationAcks,
@ -657,8 +629,11 @@ import java.util.concurrent.CopyOnWriteArrayList;
if (playbackInfo.startPositionUs == C.TIME_UNSET) { if (playbackInfo.startPositionUs == C.TIME_UNSET) {
// Replace internal unset start position with externally visible start position of zero. // Replace internal unset start position with externally visible start position of zero.
playbackInfo = playbackInfo =
playbackInfo.resetToNewPosition( playbackInfo.copyWithNewPosition(
playbackInfo.periodId, /* startPositionUs= */ 0, playbackInfo.contentPositionUs); playbackInfo.periodId,
/* positionUs= */ 0,
playbackInfo.contentPositionUs,
playbackInfo.totalBufferedDurationUs);
} }
if (!this.playbackInfo.timeline.isEmpty() && playbackInfo.timeline.isEmpty()) { if (!this.playbackInfo.timeline.isEmpty() && playbackInfo.timeline.isEmpty()) {
// Update the masking variables, which are used when the timeline becomes empty. // Update the masking variables, which are used when the timeline becomes empty.
@ -684,7 +659,10 @@ import java.util.concurrent.CopyOnWriteArrayList;
} }
private PlaybackInfo getResetPlaybackInfo( private PlaybackInfo getResetPlaybackInfo(
boolean resetPosition, boolean resetState, int playbackState) { boolean resetPosition,
boolean resetState,
boolean resetError,
@Player.State int playbackState) {
if (resetPosition) { if (resetPosition) {
maskingWindowIndex = 0; maskingWindowIndex = 0;
maskingPeriodIndex = 0; maskingPeriodIndex = 0;
@ -698,17 +676,17 @@ import java.util.concurrent.CopyOnWriteArrayList;
resetPosition = resetPosition || resetState; resetPosition = resetPosition || resetState;
MediaPeriodId mediaPeriodId = MediaPeriodId mediaPeriodId =
resetPosition resetPosition
? playbackInfo.getDummyFirstMediaPeriodId(shuffleModeEnabled, window) ? playbackInfo.getDummyFirstMediaPeriodId(shuffleModeEnabled, window, period)
: playbackInfo.periodId; : playbackInfo.periodId;
long startPositionUs = resetPosition ? 0 : playbackInfo.positionUs; long startPositionUs = resetPosition ? 0 : playbackInfo.positionUs;
long contentPositionUs = resetPosition ? C.TIME_UNSET : playbackInfo.contentPositionUs; long contentPositionUs = resetPosition ? C.TIME_UNSET : playbackInfo.contentPositionUs;
return new PlaybackInfo( return new PlaybackInfo(
resetState ? Timeline.EMPTY : playbackInfo.timeline, resetState ? Timeline.EMPTY : playbackInfo.timeline,
resetState ? null : playbackInfo.manifest,
mediaPeriodId, mediaPeriodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
resetError ? null : playbackInfo.playbackError,
/* isLoading= */ false, /* isLoading= */ false,
resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups, resetState ? TrackGroupArray.EMPTY : playbackInfo.trackGroups,
resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult, resetState ? emptyTrackSelectorResult : playbackInfo.trackSelectorResult,
@ -781,7 +759,8 @@ import java.util.concurrent.CopyOnWriteArrayList;
private final @Player.TimelineChangeReason int timelineChangeReason; private final @Player.TimelineChangeReason int timelineChangeReason;
private final boolean seekProcessed; private final boolean seekProcessed;
private final boolean playbackStateChanged; private final boolean playbackStateChanged;
private final boolean timelineOrManifestChanged; private final boolean playbackErrorChanged;
private final boolean timelineChanged;
private final boolean isLoadingChanged; private final boolean isLoadingChanged;
private final boolean trackSelectorResultChanged; private final boolean trackSelectorResultChanged;
private final boolean playWhenReady; private final boolean playWhenReady;
@ -808,9 +787,10 @@ import java.util.concurrent.CopyOnWriteArrayList;
this.playWhenReady = playWhenReady; this.playWhenReady = playWhenReady;
this.isPlayingChanged = isPlayingChanged; this.isPlayingChanged = isPlayingChanged;
playbackStateChanged = previousPlaybackInfo.playbackState != playbackInfo.playbackState; playbackStateChanged = previousPlaybackInfo.playbackState != playbackInfo.playbackState;
timelineOrManifestChanged = playbackErrorChanged =
previousPlaybackInfo.timeline != playbackInfo.timeline previousPlaybackInfo.playbackError != playbackInfo.playbackError
|| previousPlaybackInfo.manifest != playbackInfo.manifest; && playbackInfo.playbackError != null;
timelineChanged = previousPlaybackInfo.timeline != playbackInfo.timeline;
isLoadingChanged = previousPlaybackInfo.isLoading != playbackInfo.isLoading; isLoadingChanged = previousPlaybackInfo.isLoading != playbackInfo.isLoading;
trackSelectorResultChanged = trackSelectorResultChanged =
previousPlaybackInfo.trackSelectorResult != playbackInfo.trackSelectorResult; previousPlaybackInfo.trackSelectorResult != playbackInfo.trackSelectorResult;
@ -818,18 +798,19 @@ import java.util.concurrent.CopyOnWriteArrayList;
@Override @Override
public void run() { public void run() {
if (timelineOrManifestChanged || timelineChangeReason == TIMELINE_CHANGE_REASON_PREPARED) { if (timelineChanged || timelineChangeReason == TIMELINE_CHANGE_REASON_PREPARED) {
invokeAll( invokeAll(
listenerSnapshot, listenerSnapshot,
listener -> listener -> listener.onTimelineChanged(playbackInfo.timeline, timelineChangeReason));
listener.onTimelineChanged(
playbackInfo.timeline, playbackInfo.manifest, timelineChangeReason));
} }
if (positionDiscontinuity) { if (positionDiscontinuity) {
invokeAll( invokeAll(
listenerSnapshot, listenerSnapshot,
listener -> listener.onPositionDiscontinuity(positionDiscontinuityReason)); listener -> listener.onPositionDiscontinuity(positionDiscontinuityReason));
} }
if (playbackErrorChanged) {
invokeAll(listenerSnapshot, listener -> listener.onPlayerError(playbackInfo.playbackError));
}
if (trackSelectorResultChanged) { if (trackSelectorResultChanged) {
trackSelector.onSelectionActivated(playbackInfo.trackSelectorResult.info); trackSelector.onSelectionActivated(playbackInfo.trackSelectorResult.info);
invokeAll( invokeAll(
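
The setPlaybackParameters change above swaps the old "notify when the internal thread reports back" behavior for an ack-counted mask: the externally visible value and listeners are updated immediately, and stale updates from the internal thread are ignored while acks are outstanding. A self-contained sketch of that pattern (plain Java with illustrative names, not ExoPlayer source):

final class MaskedSpeedSetter {
  interface Listener { void onSpeedChanged(float speed); }

  private final Listener listener;
  private int pendingAcks;   // setter calls not yet acknowledged by the internal player
  private float speed = 1f;  // externally visible (masked) value

  MaskedSpeedSetter(Listener listener) { this.listener = listener; }

  void setSpeed(float newSpeed) {
    if (speed == newSpeed) {
      return;
    }
    pendingAcks++;
    speed = newSpeed;
    // ...forward newSpeed to the internal playback thread here...
    listener.onSpeedChanged(newSpeed);  // listeners see the new value right away
  }

  // Called when the internal thread reports a speed change; operationAck is true when this is the
  // acknowledgement of one of our own setSpeed() calls.
  void onInternalSpeedChanged(float internalSpeed, boolean operationAck) {
    if (operationAck) {
      pendingAcks--;
    }
    if (pendingAcks == 0 && speed != internalSpeed) {
      speed = internalSpeed;  // adopt internally driven changes only once all acks arrived
      listener.onSpeedChanged(internalSpeed);
    }
  }
}
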
View File
@ -29,11 +29,11 @@ public final class ExoPlayerLibraryInfo {
/** The version of the library expressed as a string, for example "1.2.3". */ /** The version of the library expressed as a string, for example "1.2.3". */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION_INT) or vice versa.
public static final String VERSION = "2.10.6"; public static final String VERSION = "2.11.7";
/** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */ /** The version of the library expressed as {@code "ExoPlayerLib/" + VERSION}. */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final String VERSION_SLASHY = "ExoPlayerLib/2.10.6"; public static final String VERSION_SLASHY = "ExoPlayerLib/2.11.7";
/** /**
* The version of the library expressed as an integer, for example 1002003. * The version of the library expressed as an integer, for example 1002003.
@ -43,7 +43,7 @@ public final class ExoPlayerLibraryInfo {
* integer version 123045006 (123-045-006). * integer version 123045006 (123-045-006).
*/ */
// Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa. // Intentionally hardcoded. Do not derive from other constants (e.g. VERSION) or vice versa.
public static final int VERSION_INT = 2010006; public static final int VERSION_INT = 2011007;
/** /**
* Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions} * Whether the library was compiled with {@link com.google.android.exoplayer2.util.Assertions}
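
A quick arithmetic check of the version constants above, assuming the major/minor/micro packing the comment describes (the helper name is illustrative):

final class VersionInt {
  // "2.11.7" -> 2 * 1_000_000 + 11 * 1_000 + 7 = 2_011_007, matching VERSION_INT = 2011007 above;
  // the previous release packs the same way: 2.10.6 -> 2010006.
  static int pack(int major, int minor, int micro) {
    return major * 1_000_000 + minor * 1_000 + micro;
  }
}
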
View File
@ -19,6 +19,8 @@ import android.os.Parcel;
import android.os.Parcelable; import android.os.Parcelable;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.metadata.Metadata; import com.google.android.exoplayer2.metadata.Metadata;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
@ -45,9 +47,9 @@ public final class Format implements Parcelable {
public static final long OFFSET_SAMPLE_RELATIVE = Long.MAX_VALUE; public static final long OFFSET_SAMPLE_RELATIVE = Long.MAX_VALUE;
/** An identifier for the format, or null if unknown or not applicable. */ /** An identifier for the format, or null if unknown or not applicable. */
public final @Nullable String id; @Nullable public final String id;
/** The human readable label, or null if unknown or not applicable. */ /** The human readable label, or null if unknown or not applicable. */
public final @Nullable String label; @Nullable public final String label;
/** Track selection flags. */ /** Track selection flags. */
@C.SelectionFlags public final int selectionFlags; @C.SelectionFlags public final int selectionFlags;
/** Track role flags. */ /** Track role flags. */
@ -57,14 +59,14 @@ public final class Format implements Parcelable {
*/ */
public final int bitrate; public final int bitrate;
/** Codecs of the format as described in RFC 6381, or null if unknown or not applicable. */ /** Codecs of the format as described in RFC 6381, or null if unknown or not applicable. */
public final @Nullable String codecs; @Nullable public final String codecs;
/** Metadata, or null if unknown or not applicable. */ /** Metadata, or null if unknown or not applicable. */
public final @Nullable Metadata metadata; @Nullable public final Metadata metadata;
// Container specific. // Container specific.
/** The mime type of the container, or null if unknown or not applicable. */ /** The mime type of the container, or null if unknown or not applicable. */
public final @Nullable String containerMimeType; @Nullable public final String containerMimeType;
// Elementary stream specific. // Elementary stream specific.
@ -72,7 +74,7 @@ public final class Format implements Parcelable {
* The mime type of the elementary stream (i.e. the individual samples), or null if unknown or not * The mime type of the elementary stream (i.e. the individual samples), or null if unknown or not
* applicable. * applicable.
*/ */
public final @Nullable String sampleMimeType; @Nullable public final String sampleMimeType;
/** /**
* The maximum size of a buffer of data (typically one sample), or {@link #NO_VALUE} if unknown or * The maximum size of a buffer of data (typically one sample), or {@link #NO_VALUE} if unknown or
* not applicable. * not applicable.
@ -84,7 +86,7 @@ public final class Format implements Parcelable {
*/ */
public final List<byte[]> initializationData; public final List<byte[]> initializationData;
/** DRM initialization data if the stream is protected, or null otherwise. */ /** DRM initialization data if the stream is protected, or null otherwise. */
public final @Nullable DrmInitData drmInitData; @Nullable public final DrmInitData drmInitData;
/** /**
* For samples that contain subsamples, this is an offset that should be added to subsample * For samples that contain subsamples, this is an offset that should be added to subsample
@ -122,9 +124,9 @@ public final class Format implements Parcelable {
@C.StereoMode @C.StereoMode
public final int stereoMode; public final int stereoMode;
/** The projection data for 360/VR video, or null if not applicable. */ /** The projection data for 360/VR video, or null if not applicable. */
public final @Nullable byte[] projectionData; @Nullable public final byte[] projectionData;
/** The color metadata associated with the video, helps with accurate color reproduction. */ /** The color metadata associated with the video, helps with accurate color reproduction. */
public final @Nullable ColorInfo colorInfo; @Nullable public final ColorInfo colorInfo;
// Audio specific. // Audio specific.
@ -136,13 +138,7 @@ public final class Format implements Parcelable {
* The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable. * The audio sampling rate in Hz, or {@link #NO_VALUE} if unknown or not applicable.
*/ */
public final int sampleRate; public final int sampleRate;
/** /** The {@link C.PcmEncoding} for PCM audio. Set to {@link #NO_VALUE} for other media types. */
* The encoding for PCM audio streams. If {@link #sampleMimeType} is {@link MimeTypes#AUDIO_RAW}
* then one of {@link C#ENCODING_PCM_8BIT}, {@link C#ENCODING_PCM_16BIT}, {@link
* C#ENCODING_PCM_24BIT}, {@link C#ENCODING_PCM_32BIT}, {@link C#ENCODING_PCM_FLOAT}, {@link
* C#ENCODING_PCM_MU_LAW} or {@link C#ENCODING_PCM_A_LAW}. Set to {@link #NO_VALUE} for other
* media types.
*/
public final @C.PcmEncoding int pcmEncoding; public final @C.PcmEncoding int pcmEncoding;
/** /**
* The number of frames to trim from the start of the decoded audio stream, or 0 if not * The number of frames to trim from the start of the decoded audio stream, or 0 if not
@ -157,12 +153,21 @@ public final class Format implements Parcelable {
// Audio and text specific. // Audio and text specific.
/** The language as an IETF BCP 47 conformant tag, or null if unknown or not applicable. */ /** The language as an IETF BCP 47 conformant tag, or null if unknown or not applicable. */
public final @Nullable String language; @Nullable public final String language;
/** /**
* The Accessibility channel, or {@link #NO_VALUE} if not known or applicable. * The Accessibility channel, or {@link #NO_VALUE} if not known or applicable.
*/ */
public final int accessibilityChannel; public final int accessibilityChannel;
// Provided by source.
/**
* The type of the {@link ExoMediaCrypto} provided by the media source, if the media source can
* acquire a {@link DrmSession} for {@link #drmInitData}. Null if the media source cannot acquire
* a session for {@link #drmInitData}, or if not applicable.
*/
@Nullable public final Class<? extends ExoMediaCrypto> exoMediaCryptoType;
// Lazily initialized hashcode. // Lazily initialized hashcode.
private int hashCode; private int hashCode;
@ -176,8 +181,8 @@ public final class Format implements Parcelable {
public static Format createVideoContainerFormat( public static Format createVideoContainerFormat(
@Nullable String id, @Nullable String id,
@Nullable String containerMimeType, @Nullable String containerMimeType,
String sampleMimeType, @Nullable String sampleMimeType,
String codecs, @Nullable String codecs,
int bitrate, int bitrate,
int width, int width,
int height, int height,
@ -204,8 +209,8 @@ public final class Format implements Parcelable {
@Nullable String id, @Nullable String id,
@Nullable String label, @Nullable String label,
@Nullable String containerMimeType, @Nullable String containerMimeType,
String sampleMimeType, @Nullable String sampleMimeType,
String codecs, @Nullable String codecs,
@Nullable Metadata metadata, @Nullable Metadata metadata,
int bitrate, int bitrate,
int width, int width,
@ -242,7 +247,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
/* language= */ null, /* language= */ null,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
public static Format createVideoSampleFormat( public static Format createVideoSampleFormat(
@ -314,7 +320,7 @@ public final class Format implements Parcelable {
@Nullable List<byte[]> initializationData, @Nullable List<byte[]> initializationData,
int rotationDegrees, int rotationDegrees,
float pixelWidthHeightRatio, float pixelWidthHeightRatio,
byte[] projectionData, @Nullable byte[] projectionData,
@C.StereoMode int stereoMode, @C.StereoMode int stereoMode,
@Nullable ColorInfo colorInfo, @Nullable ColorInfo colorInfo,
@Nullable DrmInitData drmInitData) { @Nullable DrmInitData drmInitData) {
@ -346,7 +352,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
/* language= */ null, /* language= */ null,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
// Audio. // Audio.
@ -425,7 +432,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
language, language,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
public static Format createAudioSampleFormat( public static Format createAudioSampleFormat(
@ -530,7 +538,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
// Text. // Text.
@ -597,12 +606,13 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
language, language,
accessibilityChannel); accessibilityChannel,
/* exoMediaCryptoType= */ null);
} }
public static Format createTextSampleFormat( public static Format createTextSampleFormat(
@Nullable String id, @Nullable String id,
String sampleMimeType, @Nullable String sampleMimeType,
@C.SelectionFlags int selectionFlags, @C.SelectionFlags int selectionFlags,
@Nullable String language) { @Nullable String language) {
return createTextSampleFormat(id, sampleMimeType, selectionFlags, language, null); return createTextSampleFormat(id, sampleMimeType, selectionFlags, language, null);
@ -610,7 +620,7 @@ public final class Format implements Parcelable {
public static Format createTextSampleFormat( public static Format createTextSampleFormat(
@Nullable String id, @Nullable String id,
String sampleMimeType, @Nullable String sampleMimeType,
@C.SelectionFlags int selectionFlags, @C.SelectionFlags int selectionFlags,
@Nullable String language, @Nullable String language,
@Nullable DrmInitData drmInitData) { @Nullable DrmInitData drmInitData) {
@ -681,7 +691,7 @@ public final class Format implements Parcelable {
int accessibilityChannel, int accessibilityChannel,
@Nullable DrmInitData drmInitData, @Nullable DrmInitData drmInitData,
long subsampleOffsetUs, long subsampleOffsetUs,
List<byte[]> initializationData) { @Nullable List<byte[]> initializationData) {
return new Format( return new Format(
id, id,
/* label= */ null, /* label= */ null,
@ -710,7 +720,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
language, language,
accessibilityChannel); accessibilityChannel,
/* exoMediaCryptoType= */ null);
} }
// Image. // Image.
@ -752,11 +763,16 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
language, language,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
// Generic. // Generic.
/**
* @deprecated Use {@link #createContainerFormat(String, String, String, String, String, int, int,
* int, String)} instead.
*/
@Deprecated @Deprecated
public static Format createContainerFormat( public static Format createContainerFormat(
@Nullable String id, @Nullable String id,
@ -816,7 +832,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
language, language,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
public static Format createSampleFormat( public static Format createSampleFormat(
@ -849,7 +866,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
/* language= */ null, /* language= */ null,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
public static Format createSampleFormat( public static Format createSampleFormat(
@ -886,7 +904,8 @@ public final class Format implements Parcelable {
/* encoderDelay= */ NO_VALUE, /* encoderDelay= */ NO_VALUE,
/* encoderPadding= */ NO_VALUE, /* encoderPadding= */ NO_VALUE,
/* language= */ null, /* language= */ null,
/* accessibilityChannel= */ NO_VALUE); /* accessibilityChannel= */ NO_VALUE,
/* exoMediaCryptoType= */ null);
} }
/* package */ Format( /* package */ Format(
@ -922,7 +941,9 @@ public final class Format implements Parcelable {
int encoderPadding, int encoderPadding,
// Audio and text specific. // Audio and text specific.
@Nullable String language, @Nullable String language,
int accessibilityChannel) { int accessibilityChannel,
// Provided by source.
@Nullable Class<? extends ExoMediaCrypto> exoMediaCryptoType) {
this.id = id; this.id = id;
this.label = label; this.label = label;
this.selectionFlags = selectionFlags; this.selectionFlags = selectionFlags;
@ -958,6 +979,8 @@ public final class Format implements Parcelable {
// Audio and text specific. // Audio and text specific.
this.language = Util.normalizeLanguageCode(language); this.language = Util.normalizeLanguageCode(language);
this.accessibilityChannel = accessibilityChannel; this.accessibilityChannel = accessibilityChannel;
// Provided by source.
this.exoMediaCryptoType = exoMediaCryptoType;
} }
@SuppressWarnings("ResourceType") @SuppressWarnings("ResourceType")
@ -1000,6 +1023,8 @@ public final class Format implements Parcelable {
// Audio and text specific. // Audio and text specific.
language = in.readString(); language = in.readString();
accessibilityChannel = in.readInt(); accessibilityChannel = in.readInt();
// Provided by source.
exoMediaCryptoType = null;
} }
public Format copyWithMaxInputSize(int maxInputSize) { public Format copyWithMaxInputSize(int maxInputSize) {
@ -1031,7 +1056,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithSubsampleOffsetUs(long subsampleOffsetUs) { public Format copyWithSubsampleOffsetUs(long subsampleOffsetUs) {
@ -1063,7 +1089,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithLabel(@Nullable String label) { public Format copyWithLabel(@Nullable String label) {
@ -1095,7 +1122,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithContainerInfo( public Format copyWithContainerInfo(
@ -1143,7 +1171,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
@SuppressWarnings("ReferenceEquality") @SuppressWarnings("ReferenceEquality")
@ -1222,7 +1251,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithGaplessInfo(int encoderDelay, int encoderPadding) { public Format copyWithGaplessInfo(int encoderDelay, int encoderPadding) {
@ -1254,7 +1284,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithFrameRate(float frameRate) { public Format copyWithFrameRate(float frameRate) {
@ -1286,42 +1317,24 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithDrmInitData(@Nullable DrmInitData drmInitData) { public Format copyWithDrmInitData(@Nullable DrmInitData drmInitData) {
return new Format( return copyWithAdjustments(drmInitData, metadata);
id,
label,
selectionFlags,
roleFlags,
bitrate,
codecs,
metadata,
containerMimeType,
sampleMimeType,
maxInputSize,
initializationData,
drmInitData,
subsampleOffsetUs,
width,
height,
frameRate,
rotationDegrees,
pixelWidthHeightRatio,
projectionData,
stereoMode,
colorInfo,
channelCount,
sampleRate,
pcmEncoding,
encoderDelay,
encoderPadding,
language,
accessibilityChannel);
} }
public Format copyWithMetadata(@Nullable Metadata metadata) { public Format copyWithMetadata(@Nullable Metadata metadata) {
return copyWithAdjustments(drmInitData, metadata);
}
@SuppressWarnings("ReferenceEquality")
public Format copyWithAdjustments(
@Nullable DrmInitData drmInitData, @Nullable Metadata metadata) {
if (drmInitData == this.drmInitData && metadata == this.metadata) {
return this;
}
return new Format( return new Format(
id, id,
label, label,
@ -1350,7 +1363,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithRotationDegrees(int rotationDegrees) { public Format copyWithRotationDegrees(int rotationDegrees) {
@ -1382,7 +1396,8 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
} }
public Format copyWithBitrate(int bitrate) { public Format copyWithBitrate(int bitrate) {
@ -1414,7 +1429,75 @@ public final class Format implements Parcelable {
encoderDelay, encoderDelay,
encoderPadding, encoderPadding,
language, language,
accessibilityChannel); accessibilityChannel,
exoMediaCryptoType);
}
public Format copyWithVideoSize(int width, int height) {
return new Format(
id,
label,
selectionFlags,
roleFlags,
bitrate,
codecs,
metadata,
containerMimeType,
sampleMimeType,
maxInputSize,
initializationData,
drmInitData,
subsampleOffsetUs,
width,
height,
frameRate,
rotationDegrees,
pixelWidthHeightRatio,
projectionData,
stereoMode,
colorInfo,
channelCount,
sampleRate,
pcmEncoding,
encoderDelay,
encoderPadding,
language,
accessibilityChannel,
exoMediaCryptoType);
}
public Format copyWithExoMediaCryptoType(
@Nullable Class<? extends ExoMediaCrypto> exoMediaCryptoType) {
return new Format(
id,
label,
selectionFlags,
roleFlags,
bitrate,
codecs,
metadata,
containerMimeType,
sampleMimeType,
maxInputSize,
initializationData,
drmInitData,
subsampleOffsetUs,
width,
height,
frameRate,
rotationDegrees,
pixelWidthHeightRatio,
projectionData,
stereoMode,
colorInfo,
channelCount,
sampleRate,
pcmEncoding,
encoderDelay,
encoderPadding,
language,
accessibilityChannel,
exoMediaCryptoType);
} }
/** /**
@ -1493,6 +1576,8 @@ public final class Format implements Parcelable {
// Audio and text specific. // Audio and text specific.
result = 31 * result + (language == null ? 0 : language.hashCode()); result = 31 * result + (language == null ? 0 : language.hashCode());
result = 31 * result + accessibilityChannel; result = 31 * result + accessibilityChannel;
// Provided by source.
result = 31 * result + (exoMediaCryptoType == null ? 0 : exoMediaCryptoType.hashCode());
hashCode = result; hashCode = result;
} }
return hashCode; return hashCode;
@ -1528,6 +1613,7 @@ public final class Format implements Parcelable {
&& accessibilityChannel == other.accessibilityChannel && accessibilityChannel == other.accessibilityChannel
&& Float.compare(frameRate, other.frameRate) == 0 && Float.compare(frameRate, other.frameRate) == 0
&& Float.compare(pixelWidthHeightRatio, other.pixelWidthHeightRatio) == 0 && Float.compare(pixelWidthHeightRatio, other.pixelWidthHeightRatio) == 0
&& Util.areEqual(exoMediaCryptoType, other.exoMediaCryptoType)
&& Util.areEqual(id, other.id) && Util.areEqual(id, other.id)
&& Util.areEqual(label, other.label) && Util.areEqual(label, other.label)
&& Util.areEqual(codecs, other.codecs) && Util.areEqual(codecs, other.codecs)
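
For readers skimming the long constructor churn above, a hedged sketch of the copy-on-write idiom these copyWith* methods follow; attachDrm is an illustrative helper, not part of the diff:

import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.drm.DrmInitData;

final class FormatCopyExample {
  // Each copyWith* call returns a new immutable Format; copyWithAdjustments() (shown in the diff)
  // returns the same instance when neither drmInitData nor metadata actually changes.
  static Format attachDrm(Format format, DrmInitData drmInitData) {
    return format.copyWithAdjustments(drmInitData, format.metadata);
  }
}
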
View File
@ -16,24 +16,28 @@
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DecryptionResource; import com.google.android.exoplayer2.drm.DrmSession;
/** /**
* Holds a {@link Format}. * Holds a {@link Format}.
*/ */
public final class FormatHolder { public final class FormatHolder {
/** /** Whether the {@link #format} setter also sets the {@link #drmSession} field. */
* Whether the object expected to populate {@link #format} is also expected to populate {@link
* #decryptionResource}.
*/
// TODO: Remove once all Renderers and MediaSources have migrated to the new DRM model [Internal // TODO: Remove once all Renderers and MediaSources have migrated to the new DRM model [Internal
// ref: b/129764794]. // ref: b/129764794].
public boolean decryptionResourceIsProvided; public boolean includesDrmSession;
/** An accompanying context for decrypting samples in the format. */ /** An accompanying context for decrypting samples in the format. */
@Nullable public DecryptionResource<?> decryptionResource; @Nullable public DrmSession<?> drmSession;
/** The held {@link Format}. */ /** The held {@link Format}. */
@Nullable public Format format; @Nullable public Format format;
/** Clears the holder. */
public void clear() {
includesDrmSession = false;
drmSession = null;
format = null;
}
} }
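
A small usage sketch of the reworked holder, assuming the caller consumes the populated fields and then resets them with the new clear() method (FormatHolderUsage and resetAfterRead are illustrative names):

import com.google.android.exoplayer2.FormatHolder;

final class FormatHolderUsage {
  // After a read, the holder may carry a Format and, when includesDrmSession is set, a DrmSession;
  // clear() nulls both fields and resets the flag before the holder is reused.
  static void resetAfterRead(FormatHolder holder) {
    // ... consume holder.format (and holder.drmSession when holder.includesDrmSession) ...
    holder.clear();
  }
}
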
View File
@ -59,8 +59,8 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
private final MediaSource mediaSource; private final MediaSource mediaSource;
@Nullable private MediaPeriodHolder next; @Nullable private MediaPeriodHolder next;
@Nullable private TrackGroupArray trackGroups; private TrackGroupArray trackGroups;
@Nullable private TrackSelectorResult trackSelectorResult; private TrackSelectorResult trackSelectorResult;
private long rendererPositionOffsetUs; private long rendererPositionOffsetUs;
/** /**
@ -72,6 +72,8 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
* @param allocator The allocator. * @param allocator The allocator.
* @param mediaSource The media source that produced the media period. * @param mediaSource The media source that produced the media period.
* @param info Information used to identify this media period in its timeline period. * @param info Information used to identify this media period in its timeline period.
* @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each
* renderer.
*/ */
public MediaPeriodHolder( public MediaPeriodHolder(
RendererCapabilities[] rendererCapabilities, RendererCapabilities[] rendererCapabilities,
@ -79,13 +81,16 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
TrackSelector trackSelector, TrackSelector trackSelector,
Allocator allocator, Allocator allocator,
MediaSource mediaSource, MediaSource mediaSource,
MediaPeriodInfo info) { MediaPeriodInfo info,
TrackSelectorResult emptyTrackSelectorResult) {
this.rendererCapabilities = rendererCapabilities; this.rendererCapabilities = rendererCapabilities;
this.rendererPositionOffsetUs = rendererPositionOffsetUs; this.rendererPositionOffsetUs = rendererPositionOffsetUs;
this.trackSelector = trackSelector; this.trackSelector = trackSelector;
this.mediaSource = mediaSource; this.mediaSource = mediaSource;
this.uid = info.id.periodUid; this.uid = info.id.periodUid;
this.info = info; this.info = info;
this.trackGroups = TrackGroupArray.EMPTY;
this.trackSelectorResult = emptyTrackSelectorResult;
sampleStreams = new SampleStream[rendererCapabilities.length]; sampleStreams = new SampleStream[rendererCapabilities.length];
mayRetainStreamFlags = new boolean[rendererCapabilities.length]; mayRetainStreamFlags = new boolean[rendererCapabilities.length];
mediaPeriod = mediaPeriod =
@ -167,8 +172,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
public void handlePrepared(float playbackSpeed, Timeline timeline) throws ExoPlaybackException { public void handlePrepared(float playbackSpeed, Timeline timeline) throws ExoPlaybackException {
prepared = true; prepared = true;
trackGroups = mediaPeriod.getTrackGroups(); trackGroups = mediaPeriod.getTrackGroups();
TrackSelectorResult selectorResult = TrackSelectorResult selectorResult = selectTracks(playbackSpeed, timeline);
Assertions.checkNotNull(selectTracks(playbackSpeed, timeline));
long newStartPositionUs = long newStartPositionUs =
applyTrackSelection( applyTrackSelection(
selectorResult, info.startPositionUs, /* forceRecreateStreams= */ false); selectorResult, info.startPositionUs, /* forceRecreateStreams= */ false);
@ -202,22 +206,20 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
} }
/** /**
* Selects tracks for the period and returns the new result if the selection changed. Must only be * Selects tracks for the period. Must only be called if {@link #prepared} is {@code true}.
* called if {@link #prepared} is {@code true}. *
* <p>The new track selection needs to be applied with {@link
* #applyTrackSelection(TrackSelectorResult, long, boolean)} before taking effect.
* *
* @param playbackSpeed The current playback speed. * @param playbackSpeed The current playback speed.
* @param timeline The current {@link Timeline}. * @param timeline The current {@link Timeline}.
* @return The {@link TrackSelectorResult} if the result changed. Or null if nothing changed. * @return The {@link TrackSelectorResult}.
* @throws ExoPlaybackException If an error occurs during track selection. * @throws ExoPlaybackException If an error occurs during track selection.
*/ */
@Nullable
public TrackSelectorResult selectTracks(float playbackSpeed, Timeline timeline) public TrackSelectorResult selectTracks(float playbackSpeed, Timeline timeline)
throws ExoPlaybackException { throws ExoPlaybackException {
TrackSelectorResult selectorResult = TrackSelectorResult selectorResult =
trackSelector.selectTracks(rendererCapabilities, getTrackGroups(), info.id, timeline); trackSelector.selectTracks(rendererCapabilities, getTrackGroups(), info.id, timeline);
if (selectorResult.isEquivalent(trackSelectorResult)) {
return null;
}
for (TrackSelection trackSelection : selectorResult.selections.getAll()) { for (TrackSelection trackSelection : selectorResult.selections.getAll()) {
if (trackSelection != null) { if (trackSelection != null) {
trackSelection.onPlaybackSpeed(playbackSpeed); trackSelection.onPlaybackSpeed(playbackSpeed);
@ -303,7 +305,6 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
/** Releases the media period. No other method should be called after the release. */ /** Releases the media period. No other method should be called after the release. */
public void release() { public void release() {
disableTrackSelectionsInResult(); disableTrackSelectionsInResult();
trackSelectorResult = null;
releaseMediaPeriod(info.endPositionUs, mediaSource, mediaPeriod); releaseMediaPeriod(info.endPositionUs, mediaSource, mediaPeriod);
} }
@ -331,25 +332,18 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
return next; return next;
} }
/** /** Returns the {@link TrackGroupArray} exposed by this media period. */
* Returns the {@link TrackGroupArray} exposed by this media period. Must only be called if {@link
* #prepared} is {@code true}.
*/
public TrackGroupArray getTrackGroups() { public TrackGroupArray getTrackGroups() {
return Assertions.checkNotNull(trackGroups); return trackGroups;
} }
/** /** Returns the {@link TrackSelectorResult} which is currently applied. */
* Returns the {@link TrackSelectorResult} which is currently applied. Must only be called if
* {@link #prepared} is {@code true}.
*/
public TrackSelectorResult getTrackSelectorResult() { public TrackSelectorResult getTrackSelectorResult() {
return Assertions.checkNotNull(trackSelectorResult); return trackSelectorResult;
} }
private void enableTrackSelectionsInResult() { private void enableTrackSelectionsInResult() {
TrackSelectorResult trackSelectorResult = this.trackSelectorResult; if (!isLoadingMediaPeriod()) {
if (!isLoadingMediaPeriod() || trackSelectorResult == null) {
return; return;
} }
for (int i = 0; i < trackSelectorResult.length; i++) { for (int i = 0; i < trackSelectorResult.length; i++) {
@ -362,8 +356,7 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
} }
private void disableTrackSelectionsInResult() { private void disableTrackSelectionsInResult() {
TrackSelectorResult trackSelectorResult = this.trackSelectorResult; if (!isLoadingMediaPeriod()) {
if (!isLoadingMediaPeriod() || trackSelectorResult == null) {
return; return;
} }
for (int i = 0; i < trackSelectorResult.length; i++) { for (int i = 0; i < trackSelectorResult.length; i++) {
@ -394,7 +387,6 @@ import org.checkerframework.checker.nullness.compatqual.NullableType;
*/ */
private void associateNoSampleRenderersWithEmptySampleStream( private void associateNoSampleRenderersWithEmptySampleStream(
@NullableType SampleStream[] sampleStreams) { @NullableType SampleStream[] sampleStreams) {
TrackSelectorResult trackSelectorResult = Assertions.checkNotNull(this.trackSelectorResult);
for (int i = 0; i < rendererCapabilities.length; i++) { for (int i = 0; i < rendererCapabilities.length; i++) {
if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE if (rendererCapabilities[i].getTrackType() == C.TRACK_TYPE_NONE
&& trackSelectorResult.isRendererEnabled(i)) { && trackSelectorResult.isRendererEnabled(i)) {
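The hunks above make MediaPeriodHolder's trackGroups and trackSelectorResult non-null from construction: the constructor now receives an emptyTrackSelectorResult, selectTracks always returns a result, and callers apply that result explicitly. A minimal caller-side sketch of the new contract, assuming package-private access to MediaPeriodHolder; the method name and locals (holder, positionUs) are illustrative and not taken from this diff:

    // Sketch only: selectTracks no longer returns null, so the result is applied
    // unconditionally via applyTrackSelection, as the updated Javadoc describes.
    private long reselectTracks(
        MediaPeriodHolder holder, float playbackSpeed, Timeline timeline, long positionUs)
        throws ExoPlaybackException {
      TrackSelectorResult result = holder.selectTracks(playbackSpeed, timeline);
      return holder.applyTrackSelection(result, positionUs, /* forceRecreateStreams= */ false);
    }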

View File

@ -15,13 +15,14 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import android.util.Pair; import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Player.RepeatMode; import com.google.android.exoplayer2.Player.RepeatMode;
import com.google.android.exoplayer2.source.MediaPeriod; import com.google.android.exoplayer2.source.MediaPeriod;
import com.google.android.exoplayer2.source.MediaSource; import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId; import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
import com.google.android.exoplayer2.upstream.Allocator; import com.google.android.exoplayer2.upstream.Allocator;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
@ -46,11 +47,11 @@ import com.google.android.exoplayer2.util.Assertions;
private Timeline timeline; private Timeline timeline;
private @RepeatMode int repeatMode; private @RepeatMode int repeatMode;
private boolean shuffleModeEnabled; private boolean shuffleModeEnabled;
private @Nullable MediaPeriodHolder playing; @Nullable private MediaPeriodHolder playing;
private @Nullable MediaPeriodHolder reading; @Nullable private MediaPeriodHolder reading;
private @Nullable MediaPeriodHolder loading; @Nullable private MediaPeriodHolder loading;
private int length; private int length;
private @Nullable Object oldFrontPeriodUid; @Nullable private Object oldFrontPeriodUid;
private long oldFrontPeriodWindowSequenceNumber; private long oldFrontPeriodWindowSequenceNumber;
/** Creates a new media period queue. */ /** Creates a new media period queue. */
@ -127,21 +128,24 @@ import com.google.android.exoplayer2.util.Assertions;
} }
/** /**
* Enqueues a new media period based on the specified information as the new loading media period, * Enqueues a new media period holder based on the specified information as the new loading media
* and returns it. * period, and returns it.
* *
* @param rendererCapabilities The renderer capabilities. * @param rendererCapabilities The renderer capabilities.
* @param trackSelector The track selector. * @param trackSelector The track selector.
* @param allocator The allocator. * @param allocator The allocator.
* @param mediaSource The media source that produced the media period. * @param mediaSource The media source that produced the media period.
* @param info Information used to identify this media period in its timeline period. * @param info Information used to identify this media period in its timeline period.
* @param emptyTrackSelectorResult A {@link TrackSelectorResult} with empty selections for each
* renderer.
*/ */
public MediaPeriod enqueueNextMediaPeriod( public MediaPeriodHolder enqueueNextMediaPeriodHolder(
RendererCapabilities[] rendererCapabilities, RendererCapabilities[] rendererCapabilities,
TrackSelector trackSelector, TrackSelector trackSelector,
Allocator allocator, Allocator allocator,
MediaSource mediaSource, MediaSource mediaSource,
MediaPeriodInfo info) { MediaPeriodInfo info,
TrackSelectorResult emptyTrackSelectorResult) {
long rendererPositionOffsetUs = long rendererPositionOffsetUs =
loading == null loading == null
? (info.id.isAd() && info.contentPositionUs != C.TIME_UNSET ? (info.id.isAd() && info.contentPositionUs != C.TIME_UNSET
@ -155,54 +159,44 @@ import com.google.android.exoplayer2.util.Assertions;
trackSelector, trackSelector,
allocator, allocator,
mediaSource, mediaSource,
info); info,
emptyTrackSelectorResult);
if (loading != null) { if (loading != null) {
Assertions.checkState(hasPlayingPeriod());
loading.setNext(newPeriodHolder); loading.setNext(newPeriodHolder);
} else {
playing = newPeriodHolder;
reading = newPeriodHolder;
} }
oldFrontPeriodUid = null; oldFrontPeriodUid = null;
loading = newPeriodHolder; loading = newPeriodHolder;
length++; length++;
return newPeriodHolder.mediaPeriod; return newPeriodHolder;
} }
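With the rename to enqueueNextMediaPeriodHolder the queue hands back the holder instead of the bare MediaPeriod, and it needs the emptyTrackSelectorResult to seed that holder. A rough caller sketch of the change; queue, callback and the other locals are illustrative rather than taken from this diff:

    // Old: MediaPeriod period = queue.enqueueNextMediaPeriod(...); then prepare the period.
    // New: the holder is returned, and the MediaPeriod stays reachable through it.
    MediaPeriodHolder holder =
        queue.enqueueNextMediaPeriodHolder(
            rendererCapabilities, trackSelector, allocator, mediaSource, info,
            emptyTrackSelectorResult);
    holder.mediaPeriod.prepare(callback, info.startPositionUs);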
/** /**
* Returns the loading period holder which is at the end of the queue, or null if the queue is * Returns the loading period holder which is at the end of the queue, or null if the queue is
* empty. * empty.
*/ */
@Nullable
public MediaPeriodHolder getLoadingPeriod() { public MediaPeriodHolder getLoadingPeriod() {
return loading; return loading;
} }
/** /**
* Returns the playing period holder which is at the front of the queue, or null if the queue is * Returns the playing period holder which is at the front of the queue, or null if the queue is
* empty or hasn't started playing. * empty.
*/ */
@Nullable
public MediaPeriodHolder getPlayingPeriod() { public MediaPeriodHolder getPlayingPeriod() {
return playing; return playing;
} }
/** /** Returns the reading period holder, or null if the queue is empty. */
* Returns the reading period holder, or null if the queue is empty or the player hasn't started @Nullable
* reading.
*/
public MediaPeriodHolder getReadingPeriod() { public MediaPeriodHolder getReadingPeriod() {
return reading; return reading;
} }
/**
* Returns the period holder in the front of the queue which is the playing period holder when
* playing, or null if the queue is empty.
*/
public MediaPeriodHolder getFrontPeriod() {
return hasPlayingPeriod() ? playing : loading;
}
/** Returns whether the reading and playing period holders are set. */
public boolean hasPlayingPeriod() {
return playing != null;
}
/** /**
* Continues reading from the next period holder in the queue. * Continues reading from the next period holder in the queue.
* *
@ -216,28 +210,26 @@ import com.google.android.exoplayer2.util.Assertions;
/** /**
* Dequeues the playing period holder from the front of the queue and advances the playing period * Dequeues the playing period holder from the front of the queue and advances the playing period
* holder to be the next item in the queue. If the playing period holder is unset, set it to the * holder to be the next item in the queue.
* item in the front of the queue.
* *
* @return The updated playing period holder, or null if the queue is or becomes empty. * @return The updated playing period holder, or null if the queue is or becomes empty.
*/ */
@Nullable
public MediaPeriodHolder advancePlayingPeriod() { public MediaPeriodHolder advancePlayingPeriod() {
if (playing != null) { if (playing == null) {
if (playing == reading) { return null;
reading = playing.getNext();
}
playing.release();
length--;
if (length == 0) {
loading = null;
oldFrontPeriodUid = playing.uid;
oldFrontPeriodWindowSequenceNumber = playing.info.id.windowSequenceNumber;
}
playing = playing.getNext();
} else {
playing = loading;
reading = loading;
} }
if (playing == reading) {
reading = playing.getNext();
}
playing.release();
length--;
if (length == 0) {
loading = null;
oldFrontPeriodUid = playing.uid;
oldFrontPeriodWindowSequenceNumber = playing.info.id.windowSequenceNumber;
}
playing = playing.getNext();
return playing; return playing;
} }
@ -273,12 +265,12 @@ import com.google.android.exoplayer2.util.Assertions;
* of queue (typically the playing one) for later reuse. * of queue (typically the playing one) for later reuse.
*/ */
public void clear(boolean keepFrontPeriodUid) { public void clear(boolean keepFrontPeriodUid) {
MediaPeriodHolder front = getFrontPeriod(); MediaPeriodHolder front = playing;
if (front != null) { if (front != null) {
oldFrontPeriodUid = keepFrontPeriodUid ? front.uid : null; oldFrontPeriodUid = keepFrontPeriodUid ? front.uid : null;
oldFrontPeriodWindowSequenceNumber = front.info.id.windowSequenceNumber; oldFrontPeriodWindowSequenceNumber = front.info.id.windowSequenceNumber;
front.release();
removeAfter(front); removeAfter(front);
front.release();
} else if (!keepFrontPeriodUid) { } else if (!keepFrontPeriodUid) {
oldFrontPeriodUid = null; oldFrontPeriodUid = null;
} }
@ -305,7 +297,7 @@ import com.google.android.exoplayer2.util.Assertions;
// is set, once all cases handled by ExoPlayerImplInternal.handleSourceInfoRefreshed can be // is set, once all cases handled by ExoPlayerImplInternal.handleSourceInfoRefreshed can be
// handled here. // handled here.
MediaPeriodHolder previousPeriodHolder = null; MediaPeriodHolder previousPeriodHolder = null;
MediaPeriodHolder periodHolder = getFrontPeriod(); MediaPeriodHolder periodHolder = playing;
while (periodHolder != null) { while (periodHolder != null) {
MediaPeriodInfo oldPeriodInfo = periodHolder.info; MediaPeriodInfo oldPeriodInfo = periodHolder.info;
@ -441,7 +433,7 @@ import com.google.android.exoplayer2.util.Assertions;
} }
} }
} }
MediaPeriodHolder mediaPeriodHolder = getFrontPeriod(); MediaPeriodHolder mediaPeriodHolder = playing;
while (mediaPeriodHolder != null) { while (mediaPeriodHolder != null) {
if (mediaPeriodHolder.uid.equals(periodUid)) { if (mediaPeriodHolder.uid.equals(periodUid)) {
// Reuse window sequence number of first exact period match. // Reuse window sequence number of first exact period match.
@ -449,7 +441,7 @@ import com.google.android.exoplayer2.util.Assertions;
} }
mediaPeriodHolder = mediaPeriodHolder.getNext(); mediaPeriodHolder = mediaPeriodHolder.getNext();
} }
mediaPeriodHolder = getFrontPeriod(); mediaPeriodHolder = playing;
while (mediaPeriodHolder != null) { while (mediaPeriodHolder != null) {
int indexOfHolderInTimeline = timeline.getIndexOfPeriod(mediaPeriodHolder.uid); int indexOfHolderInTimeline = timeline.getIndexOfPeriod(mediaPeriodHolder.uid);
if (indexOfHolderInTimeline != C.INDEX_UNSET) { if (indexOfHolderInTimeline != C.INDEX_UNSET) {
@ -462,7 +454,13 @@ import com.google.android.exoplayer2.util.Assertions;
mediaPeriodHolder = mediaPeriodHolder.getNext(); mediaPeriodHolder = mediaPeriodHolder.getNext();
} }
// If no match is found, create new sequence number. // If no match is found, create new sequence number.
return nextWindowSequenceNumber++; long windowSequenceNumber = nextWindowSequenceNumber++;
if (playing == null) {
// If the queue is empty, save it as old front uid to allow later reuse.
oldFrontPeriodUid = periodUid;
oldFrontPeriodWindowSequenceNumber = windowSequenceNumber;
}
return windowSequenceNumber;
} }
/** /**
@ -486,7 +484,7 @@ import com.google.android.exoplayer2.util.Assertions;
*/ */
private boolean updateForPlaybackModeChange() { private boolean updateForPlaybackModeChange() {
// Find the last existing period holder that matches the new period order. // Find the last existing period holder that matches the new period order.
MediaPeriodHolder lastValidPeriodHolder = getFrontPeriod(); MediaPeriodHolder lastValidPeriodHolder = playing;
if (lastValidPeriodHolder == null) { if (lastValidPeriodHolder == null) {
return true; return true;
} }
@ -519,7 +517,7 @@ import com.google.android.exoplayer2.util.Assertions;
lastValidPeriodHolder.info = getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info); lastValidPeriodHolder.info = getUpdatedMediaPeriodInfo(lastValidPeriodHolder.info);
// If renderers may have read from a period that's been removed, it is necessary to restart. // If renderers may have read from a period that's been removed, it is necessary to restart.
return !readingPeriodRemoved || !hasPlayingPeriod(); return !readingPeriodRemoved;
} }
/** /**

View File

@ -20,6 +20,7 @@ import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException; import java.io.IOException;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/** /**
* A {@link Renderer} implementation whose track type is {@link C#TRACK_TYPE_NONE} and does not * A {@link Renderer} implementation whose track type is {@link C#TRACK_TYPE_NONE} and does not
@ -27,10 +28,10 @@ import java.io.IOException;
*/ */
public abstract class NoSampleRenderer implements Renderer, RendererCapabilities { public abstract class NoSampleRenderer implements Renderer, RendererCapabilities {
private RendererConfiguration configuration; @MonotonicNonNull private RendererConfiguration configuration;
private int index; private int index;
private int state; private int state;
private SampleStream stream; @Nullable private SampleStream stream;
private boolean streamIsFinal; private boolean streamIsFinal;
@Override @Override
@ -49,6 +50,7 @@ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities
} }
@Override @Override
@Nullable
public MediaClock getMediaClock() { public MediaClock getMediaClock() {
return null; return null;
} }
@ -113,6 +115,7 @@ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities
} }
@Override @Override
@Nullable
public final SampleStream getStream() { public final SampleStream getStream() {
return stream; return stream;
} }
@ -182,11 +185,13 @@ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities
// RendererCapabilities implementation. // RendererCapabilities implementation.
@Override @Override
@Capabilities
public int supportsFormat(Format format) throws ExoPlaybackException { public int supportsFormat(Format format) throws ExoPlaybackException {
return FORMAT_UNSUPPORTED_TYPE; return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
} }
@Override @Override
@AdaptiveSupport
public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException { public int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException {
return ADAPTIVE_NOT_SUPPORTED; return ADAPTIVE_NOT_SUPPORTED;
} }
@ -283,8 +288,10 @@ public abstract class NoSampleRenderer implements Renderer, RendererCapabilities
// Methods to be called by subclasses. // Methods to be called by subclasses.
/** /**
* Returns the configuration set when the renderer was most recently enabled. * Returns the configuration set when the renderer was most recently enabled, or {@code null} if
* the renderer has never been enabled.
*/ */
@Nullable
protected final RendererConfiguration getConfiguration() { protected final RendererConfiguration getConfiguration() {
return configuration; return configuration;
} }

View File

@ -35,8 +35,6 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
/** The current {@link Timeline}. */ /** The current {@link Timeline}. */
public final Timeline timeline; public final Timeline timeline;
/** The current manifest. */
public final @Nullable Object manifest;
/** The {@link MediaPeriodId} of the currently playing media period in the {@link #timeline}. */ /** The {@link MediaPeriodId} of the currently playing media period in the {@link #timeline}. */
public final MediaPeriodId periodId; public final MediaPeriodId periodId;
/** /**
@ -53,7 +51,9 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
*/ */
public final long contentPositionUs; public final long contentPositionUs;
/** The current playback state. One of the {@link Player}.STATE_ constants. */ /** The current playback state. One of the {@link Player}.STATE_ constants. */
public final int playbackState; @Player.State public final int playbackState;
/** The current playback error, or null if this is not an error state. */
@Nullable public final ExoPlaybackException playbackError;
/** Whether the player is currently loading. */ /** Whether the player is currently loading. */
public final boolean isLoading; public final boolean isLoading;
/** The currently available track groups. */ /** The currently available track groups. */
@ -92,11 +92,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
long startPositionUs, TrackSelectorResult emptyTrackSelectorResult) { long startPositionUs, TrackSelectorResult emptyTrackSelectorResult) {
return new PlaybackInfo( return new PlaybackInfo(
Timeline.EMPTY, Timeline.EMPTY,
/* manifest= */ null,
DUMMY_MEDIA_PERIOD_ID, DUMMY_MEDIA_PERIOD_ID,
startPositionUs, startPositionUs,
/* contentPositionUs= */ C.TIME_UNSET, /* contentPositionUs= */ C.TIME_UNSET,
Player.STATE_IDLE, Player.STATE_IDLE,
/* playbackError= */ null,
/* isLoading= */ false, /* isLoading= */ false,
TrackGroupArray.EMPTY, TrackGroupArray.EMPTY,
emptyTrackSelectorResult, emptyTrackSelectorResult,
@ -110,7 +110,6 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
* Create playback info. * Create playback info.
* *
* @param timeline See {@link #timeline}. * @param timeline See {@link #timeline}.
* @param manifest See {@link #manifest}.
* @param periodId See {@link #periodId}. * @param periodId See {@link #periodId}.
* @param startPositionUs See {@link #startPositionUs}. * @param startPositionUs See {@link #startPositionUs}.
* @param contentPositionUs See {@link #contentPositionUs}. * @param contentPositionUs See {@link #contentPositionUs}.
@ -125,11 +124,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
*/ */
public PlaybackInfo( public PlaybackInfo(
Timeline timeline, Timeline timeline,
@Nullable Object manifest,
MediaPeriodId periodId, MediaPeriodId periodId,
long startPositionUs, long startPositionUs,
long contentPositionUs, long contentPositionUs,
int playbackState, @Player.State int playbackState,
@Nullable ExoPlaybackException playbackError,
boolean isLoading, boolean isLoading,
TrackGroupArray trackGroups, TrackGroupArray trackGroups,
TrackSelectorResult trackSelectorResult, TrackSelectorResult trackSelectorResult,
@ -138,11 +137,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
long totalBufferedDurationUs, long totalBufferedDurationUs,
long positionUs) { long positionUs) {
this.timeline = timeline; this.timeline = timeline;
this.manifest = manifest;
this.periodId = periodId; this.periodId = periodId;
this.startPositionUs = startPositionUs; this.startPositionUs = startPositionUs;
this.contentPositionUs = contentPositionUs; this.contentPositionUs = contentPositionUs;
this.playbackState = playbackState; this.playbackState = playbackState;
this.playbackError = playbackError;
this.isLoading = isLoading; this.isLoading = isLoading;
this.trackGroups = trackGroups; this.trackGroups = trackGroups;
this.trackSelectorResult = trackSelectorResult; this.trackSelectorResult = trackSelectorResult;
@ -157,49 +156,30 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
* *
* @param shuffleModeEnabled Whether shuffle mode is enabled. * @param shuffleModeEnabled Whether shuffle mode is enabled.
* @param window A writable {@link Timeline.Window}. * @param window A writable {@link Timeline.Window}.
* @param period A writable {@link Timeline.Period}.
* @return A dummy media period id for the first-to-be-played period of the current timeline. * @return A dummy media period id for the first-to-be-played period of the current timeline.
*/ */
public MediaPeriodId getDummyFirstMediaPeriodId( public MediaPeriodId getDummyFirstMediaPeriodId(
boolean shuffleModeEnabled, Timeline.Window window) { boolean shuffleModeEnabled, Timeline.Window window, Timeline.Period period) {
if (timeline.isEmpty()) { if (timeline.isEmpty()) {
return DUMMY_MEDIA_PERIOD_ID; return DUMMY_MEDIA_PERIOD_ID;
} }
int firstPeriodIndex = int firstWindowIndex = timeline.getFirstWindowIndex(shuffleModeEnabled);
timeline.getWindow(timeline.getFirstWindowIndex(shuffleModeEnabled), window) int firstPeriodIndex = timeline.getWindow(firstWindowIndex, window).firstPeriodIndex;
.firstPeriodIndex; int currentPeriodIndex = timeline.getIndexOfPeriod(periodId.periodUid);
return new MediaPeriodId(timeline.getUidOfPeriod(firstPeriodIndex)); long windowSequenceNumber = C.INDEX_UNSET;
if (currentPeriodIndex != C.INDEX_UNSET) {
int currentWindowIndex = timeline.getPeriod(currentPeriodIndex, period).windowIndex;
if (firstWindowIndex == currentWindowIndex) {
// Keep window sequence number if the new position is still in the same window.
windowSequenceNumber = periodId.windowSequenceNumber;
}
}
return new MediaPeriodId(timeline.getUidOfPeriod(firstPeriodIndex), windowSequenceNumber);
} }
/** /**
* Copies playback info and resets playing and loading position. * Copies playback info with new playing position.
*
* @param periodId New playing and loading {@link MediaPeriodId}.
* @param startPositionUs New start position. See {@link #startPositionUs}.
* @param contentPositionUs New content position. See {@link #contentPositionUs}. Value is ignored
* if {@code periodId.isAd()} is true.
* @return Copied playback info with reset position.
*/
@CheckResult
public PlaybackInfo resetToNewPosition(
MediaPeriodId periodId, long startPositionUs, long contentPositionUs) {
return new PlaybackInfo(
timeline,
manifest,
periodId,
startPositionUs,
periodId.isAd() ? contentPositionUs : C.TIME_UNSET,
playbackState,
isLoading,
trackGroups,
trackSelectorResult,
periodId,
startPositionUs,
/* totalBufferedDurationUs= */ 0,
startPositionUs);
}
/**
* Copied playback info with new playing position.
* *
* @param periodId New playing media period. See {@link #periodId}. * @param periodId New playing media period. See {@link #periodId}.
* @param positionUs New position. See {@link #positionUs}. * @param positionUs New position. See {@link #positionUs}.
@ -216,11 +196,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
long totalBufferedDurationUs) { long totalBufferedDurationUs) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
positionUs, positionUs,
periodId.isAd() ? contentPositionUs : C.TIME_UNSET, periodId.isAd() ? contentPositionUs : C.TIME_UNSET,
playbackState, playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
@ -231,21 +211,20 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
} }
/** /**
* Copies playback info with new timeline and manifest. * Copies playback info with the new timeline.
* *
* @param timeline New timeline. See {@link #timeline}. * @param timeline New timeline. See {@link #timeline}.
* @param manifest New manifest. See {@link #manifest}. * @return Copied playback info with the new timeline.
* @return Copied playback info with new timeline and manifest.
*/ */
@CheckResult @CheckResult
public PlaybackInfo copyWithTimeline(Timeline timeline, Object manifest) { public PlaybackInfo copyWithTimeline(Timeline timeline) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
@ -265,11 +244,35 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
public PlaybackInfo copyWithPlaybackState(int playbackState) { public PlaybackInfo copyWithPlaybackState(int playbackState) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
playbackError,
isLoading,
trackGroups,
trackSelectorResult,
loadingMediaPeriodId,
bufferedPositionUs,
totalBufferedDurationUs,
positionUs);
}
/**
* Copies playback info with a playback error.
*
* @param playbackError The error. See {@link #playbackError}.
* @return Copied playback info with the playback error.
*/
@CheckResult
public PlaybackInfo copyWithPlaybackError(@Nullable ExoPlaybackException playbackError) {
return new PlaybackInfo(
timeline,
periodId,
startPositionUs,
contentPositionUs,
playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
@ -289,11 +292,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
public PlaybackInfo copyWithIsLoading(boolean isLoading) { public PlaybackInfo copyWithIsLoading(boolean isLoading) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
@ -315,11 +318,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) { TrackGroupArray trackGroups, TrackSelectorResult trackSelectorResult) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
@ -339,11 +342,11 @@ import com.google.android.exoplayer2.trackselection.TrackSelectorResult;
public PlaybackInfo copyWithLoadingMediaPeriodId(MediaPeriodId loadingMediaPeriodId) { public PlaybackInfo copyWithLoadingMediaPeriodId(MediaPeriodId loadingMediaPeriodId) {
return new PlaybackInfo( return new PlaybackInfo(
timeline, timeline,
manifest,
periodId, periodId,
startPositionUs, startPositionUs,
contentPositionUs, contentPositionUs,
playbackState, playbackState,
playbackError,
isLoading, isLoading,
trackGroups, trackGroups,
trackSelectorResult, trackSelectorResult,
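PlaybackInfo remains an immutable value object, so the new playbackError field travels through the copy-with methods like every other field. A small sketch of how the copies chain, assuming package-private access; the createDummy factory name and emptyTrackSelectorResult follow the surrounding ExoPlayer source and are assumptions as far as this diff shows:

    // Sketch only: each copyWith* call returns a new PlaybackInfo with one field changed.
    PlaybackInfo info =
        PlaybackInfo.createDummy(/* startPositionUs= */ 0, emptyTrackSelectorResult);
    info = info.copyWithPlaybackState(Player.STATE_BUFFERING);
    info = info.copyWithPlaybackError(
        ExoPlaybackException.createForSource(new IOException("load failed")));
    info = info.copyWithPlaybackError(/* playbackError= */ null); // error handled, clear it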

View File

@ -16,12 +16,12 @@
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import android.view.Surface; import android.view.Surface;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import android.view.TextureView; import android.view.TextureView;
import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C.VideoScalingMode; import com.google.android.exoplayer2.C.VideoScalingMode;
import com.google.android.exoplayer2.audio.AudioAttributes; import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AudioListener; import com.google.android.exoplayer2.audio.AudioListener;
@ -31,6 +31,7 @@ import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.text.TextOutput; import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer;
import com.google.android.exoplayer2.video.VideoFrameMetadataListener; import com.google.android.exoplayer2.video.VideoFrameMetadataListener;
import com.google.android.exoplayer2.video.VideoListener; import com.google.android.exoplayer2.video.VideoListener;
import com.google.android.exoplayer2.video.spherical.CameraMotionListener; import com.google.android.exoplayer2.video.spherical.CameraMotionListener;
@ -216,7 +217,7 @@ public interface Player {
* *
* @param surface The surface to clear. * @param surface The surface to clear.
*/ */
void clearVideoSurface(Surface surface); void clearVideoSurface(@Nullable Surface surface);
/** /**
* Sets the {@link Surface} onto which video will be rendered. The caller is responsible for * Sets the {@link Surface} onto which video will be rendered. The caller is responsible for
@ -239,7 +240,7 @@ public interface Player {
* *
* @param surfaceHolder The surface holder. * @param surfaceHolder The surface holder.
*/ */
void setVideoSurfaceHolder(SurfaceHolder surfaceHolder); void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder);
/** /**
* Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being * Clears the {@link SurfaceHolder} that holds the {@link Surface} onto which video is being
@ -247,7 +248,7 @@ public interface Player {
* *
* @param surfaceHolder The surface holder to clear. * @param surfaceHolder The surface holder to clear.
*/ */
void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder); void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder);
/** /**
* Sets the {@link SurfaceView} onto which video will be rendered. The player will track the * Sets the {@link SurfaceView} onto which video will be rendered. The player will track the
@ -255,7 +256,7 @@ public interface Player {
* *
* @param surfaceView The surface view. * @param surfaceView The surface view.
*/ */
void setVideoSurfaceView(SurfaceView surfaceView); void setVideoSurfaceView(@Nullable SurfaceView surfaceView);
/** /**
* Clears the {@link SurfaceView} onto which video is being rendered if it matches the one * Clears the {@link SurfaceView} onto which video is being rendered if it matches the one
@ -263,7 +264,7 @@ public interface Player {
* *
* @param surfaceView The texture view to clear. * @param surfaceView The texture view to clear.
*/ */
void clearVideoSurfaceView(SurfaceView surfaceView); void clearVideoSurfaceView(@Nullable SurfaceView surfaceView);
/** /**
* Sets the {@link TextureView} onto which video will be rendered. The player will track the * Sets the {@link TextureView} onto which video will be rendered. The player will track the
@ -271,7 +272,7 @@ public interface Player {
* *
* @param textureView The texture view. * @param textureView The texture view.
*/ */
void setVideoTextureView(TextureView textureView); void setVideoTextureView(@Nullable TextureView textureView);
/** /**
* Clears the {@link TextureView} onto which video is being rendered if it matches the one * Clears the {@link TextureView} onto which video is being rendered if it matches the one
@ -279,7 +280,31 @@ public interface Player {
* *
* @param textureView The texture view to clear. * @param textureView The texture view to clear.
*/ */
void clearVideoTextureView(TextureView textureView); void clearVideoTextureView(@Nullable TextureView textureView);
/**
* Sets the video decoder output buffer renderer. This is intended for use only with extension
* renderers that accept {@link C#MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER}. For most use
* cases, an output surface or view should be passed via {@link #setVideoSurface(Surface)} or
* {@link #setVideoSurfaceView(SurfaceView)} instead.
*
* @param videoDecoderOutputBufferRenderer The video decoder output buffer renderer, or {@code
* null} to clear the output buffer renderer.
*/
void setVideoDecoderOutputBufferRenderer(
@Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer);
/** Clears the video decoder output buffer renderer. */
void clearVideoDecoderOutputBufferRenderer();
/**
* Clears the video decoder output buffer renderer if it matches the one passed. Else does
* nothing.
*
* @param videoDecoderOutputBufferRenderer The video decoder output buffer renderer to clear.
*/
void clearVideoDecoderOutputBufferRenderer(
@Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer);
} }
/** The text component of a {@link Player}. */ /** The text component of a {@link Player}. */
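The new VideoComponent methods route decoded output to a VideoDecoderOutputBufferRenderer instead of a Surface, aimed at extension renderers that handle C.MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER. A hedged usage sketch; videoComponent and outputBufferRenderer stand for whatever VideoComponent instance and renderer implementation the app already has:

    // Sketch only: attach an extension output, then detach it only if it is still current.
    videoComponent.setVideoDecoderOutputBufferRenderer(outputBufferRenderer);
    // Later, when tearing this output down:
    videoComponent.clearVideoDecoderOutputBufferRenderer(outputBufferRenderer);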
@ -324,6 +349,29 @@ public interface Player {
*/ */
interface EventListener { interface EventListener {
/**
* Called when the timeline has been refreshed.
*
* <p>Note that if the timeline has changed then a position discontinuity may also have
* occurred. For example, the current period index may have changed as a result of periods being
* added or removed from the timeline. This will <em>not</em> be reported via a separate call to
* {@link #onPositionDiscontinuity(int)}.
*
* @param timeline The latest timeline. Never null, but may be empty.
* @param reason The {@link TimelineChangeReason} responsible for this timeline change.
*/
@SuppressWarnings("deprecation")
default void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) {
Object manifest = null;
if (timeline.getWindowCount() == 1) {
// Legacy behavior was to report the manifest for single window timelines only.
Timeline.Window window = new Timeline.Window();
manifest = timeline.getWindow(0, window).manifest;
}
// Call deprecated version.
onTimelineChanged(timeline, manifest, reason);
}
/** /**
* Called when the timeline and/or manifest has been refreshed. * Called when the timeline and/or manifest has been refreshed.
* *
@ -335,7 +383,11 @@ public interface Player {
* @param timeline The latest timeline. Never null, but may be empty. * @param timeline The latest timeline. Never null, but may be empty.
* @param manifest The latest manifest. May be null. * @param manifest The latest manifest. May be null.
* @param reason The {@link TimelineChangeReason} responsible for this timeline change. * @param reason The {@link TimelineChangeReason} responsible for this timeline change.
* @deprecated Use {@link #onTimelineChanged(Timeline, int)} instead. The manifest can be
* accessed by using {@link #getCurrentManifest()} or {@code timeline.getWindow(windowIndex,
* window).manifest} for a given window index.
*/ */
@Deprecated
default void onTimelineChanged( default void onTimelineChanged(
Timeline timeline, @Nullable Object manifest, @TimelineChangeReason int reason) {} Timeline timeline, @Nullable Object manifest, @TimelineChangeReason int reason) {}
@ -361,9 +413,9 @@ public interface Player {
* #getPlaybackState()} changes. * #getPlaybackState()} changes.
* *
* @param playWhenReady Whether playback will proceed when ready. * @param playWhenReady Whether playback will proceed when ready.
* @param playbackState One of the {@code STATE} constants. * @param playbackState The new {@link State playback state}.
*/ */
default void onPlayerStateChanged(boolean playWhenReady, int playbackState) {} default void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) {}
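Taken together, the listener changes above deprecate the manifest-carrying timeline callback and give the playback state a typed annotation. A sketch of a listener written against the new signatures; the manifest lookup mirrors the default bridge shown above, and the method bodies are placeholders:

    // Sketch only: an EventListener using the non-deprecated callbacks.
    Player.EventListener listener =
        new Player.EventListener() {
          @Override
          public void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) {
            if (!timeline.isEmpty()) {
              Object manifest = timeline.getWindow(/* windowIndex= */ 0, new Timeline.Window()).manifest;
              // React to the new timeline / manifest here.
            }
          }

          @Override
          public void onPlayerStateChanged(boolean playWhenReady, @Player.State int playbackState) {
            if (playbackState == Player.STATE_READY && playWhenReady) {
              // Playback is progressing.
            }
          }
        };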
/** /**
* Called when the value returned from {@link #getPlaybackSuppressionReason()} changes. * Called when the value returned from {@link #getPlaybackSuppressionReason()} changes.
@ -411,8 +463,7 @@ public interface Player {
* when the source introduces a discontinuity internally). * when the source introduces a discontinuity internally).
* *
* <p>When a position discontinuity occurs as a result of a change to the timeline this method * <p>When a position discontinuity occurs as a result of a change to the timeline this method
* is <em>not</em> called. {@link #onTimelineChanged(Timeline, Object, int)} is called in this * is <em>not</em> called. {@link #onTimelineChanged(Timeline, int)} is called in this case.
* case.
* *
* @param reason The {@link DiscontinuityReason} responsible for the discontinuity. * @param reason The {@link DiscontinuityReason} responsible for the discontinuity.
*/ */
@ -443,6 +494,18 @@ public interface Player {
@Deprecated @Deprecated
abstract class DefaultEventListener implements EventListener { abstract class DefaultEventListener implements EventListener {
@Override
public void onTimelineChanged(Timeline timeline, @TimelineChangeReason int reason) {
Object manifest = null;
if (timeline.getWindowCount() == 1) {
// Legacy behavior was to report the manifest for single window timelines only.
Timeline.Window window = new Timeline.Window();
manifest = timeline.getWindow(0, window).manifest;
}
// Call deprecated version.
onTimelineChanged(timeline, manifest, reason);
}
@Override @Override
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public void onTimelineChanged( public void onTimelineChanged(
@ -451,13 +514,21 @@ public interface Player {
onTimelineChanged(timeline, manifest); onTimelineChanged(timeline, manifest);
} }
/** @deprecated Use {@link EventListener#onTimelineChanged(Timeline, Object, int)} instead. */ /** @deprecated Use {@link EventListener#onTimelineChanged(Timeline, int)} instead. */
@Deprecated @Deprecated
public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) { public void onTimelineChanged(Timeline timeline, @Nullable Object manifest) {
// Do nothing. // Do nothing.
} }
} }
/**
* Playback state. One of {@link #STATE_IDLE}, {@link #STATE_BUFFERING}, {@link #STATE_READY} or
* {@link #STATE_ENDED}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({STATE_IDLE, STATE_BUFFERING, STATE_READY, STATE_ENDED})
@interface State {}
/** /**
* The player does not have any media to play. * The player does not have any media to play.
*/ */
@ -548,8 +619,8 @@ public interface Player {
int DISCONTINUITY_REASON_INTERNAL = 4; int DISCONTINUITY_REASON_INTERNAL = 4;
/** /**
* Reasons for timeline and/or manifest changes. One of {@link #TIMELINE_CHANGE_REASON_PREPARED}, * Reasons for timeline changes. One of {@link #TIMELINE_CHANGE_REASON_PREPARED}, {@link
* {@link #TIMELINE_CHANGE_REASON_RESET} or {@link #TIMELINE_CHANGE_REASON_DYNAMIC}. * #TIMELINE_CHANGE_REASON_RESET} or {@link #TIMELINE_CHANGE_REASON_DYNAMIC}.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -559,13 +630,9 @@ public interface Player {
TIMELINE_CHANGE_REASON_DYNAMIC TIMELINE_CHANGE_REASON_DYNAMIC
}) })
@interface TimelineChangeReason {} @interface TimelineChangeReason {}
/** /** Timeline and manifest changed as a result of a player initialization with new media. */
* Timeline and manifest changed as a result of a player initialization with new media.
*/
int TIMELINE_CHANGE_REASON_PREPARED = 0; int TIMELINE_CHANGE_REASON_PREPARED = 0;
/** /** Timeline and manifest changed as a result of a player reset. */
* Timeline and manifest changed as a result of a player reset.
*/
int TIMELINE_CHANGE_REASON_RESET = 1; int TIMELINE_CHANGE_REASON_RESET = 1;
/** /**
* Timeline or manifest changed as a result of a dynamic update introduced by the played media. * Timeline or manifest changed as a result of a dynamic update introduced by the played media.
@ -613,10 +680,11 @@ public interface Player {
void removeListener(EventListener listener); void removeListener(EventListener listener);
/** /**
* Returns the current state of the player. * Returns the current {@link State playback state} of the player.
* *
* @return One of the {@code STATE} constants defined in this interface. * @return The current {@link State playback state}.
*/ */
@State
int getPlaybackState(); int getPlaybackState();
/** /**
@ -677,7 +745,7 @@ public interface Player {
/** /**
* Sets the {@link RepeatMode} to be used for playback. * Sets the {@link RepeatMode} to be used for playback.
* *
* @param repeatMode A repeat mode. * @param repeatMode The repeat mode.
*/ */
void setRepeatMode(@RepeatMode int repeatMode); void setRepeatMode(@RepeatMode int repeatMode);
@ -772,13 +840,10 @@ public interface Player {
/** /**
* Attempts to set the playback parameters. Passing {@code null} sets the parameters to the * Attempts to set the playback parameters. Passing {@code null} sets the parameters to the
* default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment. * default, {@link PlaybackParameters#DEFAULT}, which means there is no speed or pitch adjustment.
* <p> *
* Playback parameters changes may cause the player to buffer. * <p>Playback parameters changes may cause the player to buffer. {@link
* {@link EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever * EventListener#onPlaybackParametersChanged(PlaybackParameters)} will be called whenever the
* the currently active playback parameters change. When that listener is called, the parameters * currently active playback parameters change.
* passed to it may not match {@code playbackParameters}. For example, the chosen speed or pitch
* may be out of range, in which case they are constrained to a set of permitted values. If it is
* not possible to change the playback parameters, the listener will not be invoked.
* *
* @param playbackParameters The playback parameters, or {@code null} to use the defaults. * @param playbackParameters The playback parameters, or {@code null} to use the defaults.
*/ */
@ -920,6 +985,13 @@ public interface Player {
*/ */
boolean isCurrentWindowDynamic(); boolean isCurrentWindowDynamic();
/**
* Returns whether the current window is live, or {@code false} if the {@link Timeline} is empty.
*
* @see Timeline.Window#isLive
*/
boolean isCurrentWindowLive();
/** /**
* Returns whether the current window is seekable, or {@code false} if the {@link Timeline} is * Returns whether the current window is seekable, or {@code false} if the {@link Timeline} is
* empty. * empty.

View File

@ -55,7 +55,7 @@ public final class PlayerMessage {
private final Timeline timeline; private final Timeline timeline;
private int type; private int type;
private @Nullable Object payload; @Nullable private Object payload;
private Handler handler; private Handler handler;
private int windowIndex; private int windowIndex;
private long positionMs; private long positionMs;
@ -134,7 +134,8 @@ public final class PlayerMessage {
} }
/** Returns the message payload forwarded to {@link Target#handleMessage(int, Object)}. */ /** Returns the message payload forwarded to {@link Target#handleMessage(int, Object)}. */
public @Nullable Object getPayload() { @Nullable
public Object getPayload() {
return payload; return payload;
} }

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.source.SampleStream; import com.google.android.exoplayer2.source.SampleStream;
import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.MediaClock;
import java.io.IOException; import java.io.IOException;
@ -31,7 +32,8 @@ import java.lang.annotation.RetentionPolicy;
* valid state transitions are shown below, annotated with the methods that are called during each * valid state transitions are shown below, annotated with the methods that are called during each
* transition. * transition.
* *
* <p align="center"><img src="doc-files/renderer-states.svg" alt="Renderer state transitions"> * <p style="align:center"><img src="doc-files/renderer-states.svg" alt="Renderer state
* transitions">
*/ */
public interface Renderer extends PlayerMessage.Target { public interface Renderer extends PlayerMessage.Target {
@ -87,11 +89,12 @@ public interface Renderer extends PlayerMessage.Target {
/** /**
* If the renderer advances its own playback position then this method returns a corresponding * If the renderer advances its own playback position then this method returns a corresponding
* {@link MediaClock}. If provided, the player will use the returned {@link MediaClock} as its * {@link MediaClock}. If provided, the player will use the returned {@link MediaClock} as its
* source of time during playback. A player may have at most one renderer that returns a * source of time during playback. A player may have at most one renderer that returns a {@link
* {@link MediaClock} from this method. * MediaClock} from this method.
* *
* @return The {@link MediaClock} tracking the playback position of the renderer, or null. * @return The {@link MediaClock} tracking the playback position of the renderer, or null.
*/ */
@Nullable
MediaClock getMediaClock(); MediaClock getMediaClock();
/** /**
@ -147,9 +150,8 @@ public interface Renderer extends PlayerMessage.Target {
void replaceStream(Format[] formats, SampleStream stream, long offsetUs) void replaceStream(Format[] formats, SampleStream stream, long offsetUs)
throws ExoPlaybackException; throws ExoPlaybackException;
/** /** Returns the {@link SampleStream} being consumed, or null if the renderer is disabled. */
* Returns the {@link SampleStream} being consumed, or null if the renderer is disabled. @Nullable
*/
SampleStream getStream(); SampleStream getStream();
/** /**

View File

@ -15,7 +15,12 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import android.annotation.SuppressLint;
import androidx.annotation.IntDef;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/** /**
* Defines the capabilities of a {@link Renderer}. * Defines the capabilities of a {@link Renderer}.
@ -23,10 +28,22 @@ import com.google.android.exoplayer2.util.MimeTypes;
public interface RendererCapabilities { public interface RendererCapabilities {
/** /**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of * Level of renderer support for a format. One of {@link #FORMAT_HANDLED}, {@link
* {@link #FORMAT_HANDLED}, {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, * #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM}, {@link
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}. * #FORMAT_UNSUPPORTED_SUBTYPE} or {@link #FORMAT_UNSUPPORTED_TYPE}.
*/ */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({
FORMAT_HANDLED,
FORMAT_EXCEEDS_CAPABILITIES,
FORMAT_UNSUPPORTED_DRM,
FORMAT_UNSUPPORTED_SUBTYPE,
FORMAT_UNSUPPORTED_TYPE
})
@interface FormatSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link FormatSupport} only. */
int FORMAT_SUPPORT_MASK = 0b111; int FORMAT_SUPPORT_MASK = 0b111;
/** /**
* The {@link Renderer} is capable of rendering the format. * The {@link Renderer} is capable of rendering the format.
@ -72,9 +89,15 @@ public interface RendererCapabilities {
int FORMAT_UNSUPPORTED_TYPE = 0b000; int FORMAT_UNSUPPORTED_TYPE = 0b000;
/** /**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of * Level of renderer support for adaptive format switches. One of {@link #ADAPTIVE_SEAMLESS},
* {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and {@link #ADAPTIVE_NOT_SUPPORTED}. * {@link #ADAPTIVE_NOT_SEAMLESS} or {@link #ADAPTIVE_NOT_SUPPORTED}.
*/ */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({ADAPTIVE_SEAMLESS, ADAPTIVE_NOT_SEAMLESS, ADAPTIVE_NOT_SUPPORTED})
@interface AdaptiveSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link AdaptiveSupport} only. */
int ADAPTIVE_SUPPORT_MASK = 0b11000; int ADAPTIVE_SUPPORT_MASK = 0b11000;
/** /**
* The {@link Renderer} can seamlessly adapt between formats. * The {@link Renderer} can seamlessly adapt between formats.
@ -91,9 +114,15 @@ public interface RendererCapabilities {
int ADAPTIVE_NOT_SUPPORTED = 0b00000; int ADAPTIVE_NOT_SUPPORTED = 0b00000;
/** /**
* A mask to apply to the result of {@link #supportsFormat(Format)} to obtain one of * Level of renderer support for tunneling. One of {@link #TUNNELING_SUPPORTED} or {@link
* {@link #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}. * #TUNNELING_NOT_SUPPORTED}.
*/ */
@Documented
@Retention(RetentionPolicy.SOURCE)
@IntDef({TUNNELING_SUPPORTED, TUNNELING_NOT_SUPPORTED})
@interface TunnelingSupport {}
/** A mask to apply to {@link Capabilities} to obtain the {@link TunnelingSupport} only. */
int TUNNELING_SUPPORT_MASK = 0b100000; int TUNNELING_SUPPORT_MASK = 0b100000;
/** /**
* The {@link Renderer} supports tunneled output. * The {@link Renderer} supports tunneled output.
@ -104,6 +133,133 @@ public interface RendererCapabilities {
*/ */
int TUNNELING_NOT_SUPPORTED = 0b000000; int TUNNELING_NOT_SUPPORTED = 0b000000;
/**
* Combined renderer capabilities.
*
* <p>This is a bitwise OR of {@link FormatSupport}, {@link AdaptiveSupport} and {@link
* TunnelingSupport}. Use {@link #getFormatSupport(int)}, {@link #getAdaptiveSupport(int)} or
* {@link #getTunnelingSupport(int)} to obtain the individual flags. And use {@link #create(int)}
* or {@link #create(int, int, int)} to create the combined capabilities.
*
* <p>Possible values:
*
* <ul>
* <li>{@link FormatSupport}: The level of support for the format itself. One of {@link
* #FORMAT_HANDLED}, {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM},
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}.
* <li>{@link AdaptiveSupport}: The level of support for adapting from the format to another
* format of the same mime type. One of {@link #ADAPTIVE_SEAMLESS}, {@link
* #ADAPTIVE_NOT_SEAMLESS} and {@link #ADAPTIVE_NOT_SUPPORTED}. Only set if the level of
* support for the format itself is {@link #FORMAT_HANDLED} or {@link
* #FORMAT_EXCEEDS_CAPABILITIES}.
* <li>{@link TunnelingSupport}: The level of support for tunneling. One of {@link
* #TUNNELING_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}. Only set if the level of
* support for the format itself is {@link #FORMAT_HANDLED} or {@link
* #FORMAT_EXCEEDS_CAPABILITIES}.
* </ul>
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
// Intentionally empty to prevent assignment or comparison with individual flags without masking.
@IntDef({})
@interface Capabilities {}
/**
* Returns {@link Capabilities} for the given {@link FormatSupport}.
*
* <p>The {@link AdaptiveSupport} is set to {@link #ADAPTIVE_NOT_SUPPORTED} and {@link
* TunnelingSupport} is set to {@link #TUNNELING_NOT_SUPPORTED}.
*
* @param formatSupport The {@link FormatSupport}.
* @return The combined {@link Capabilities} of the given {@link FormatSupport}, {@link
* #ADAPTIVE_NOT_SUPPORTED} and {@link #TUNNELING_NOT_SUPPORTED}.
*/
@Capabilities
static int create(@FormatSupport int formatSupport) {
return create(formatSupport, ADAPTIVE_NOT_SUPPORTED, TUNNELING_NOT_SUPPORTED);
}
/**
* Returns {@link Capabilities} combining the given {@link FormatSupport}, {@link AdaptiveSupport}
* and {@link TunnelingSupport}.
*
* @param formatSupport The {@link FormatSupport}.
* @param adaptiveSupport The {@link AdaptiveSupport}.
* @param tunnelingSupport The {@link TunnelingSupport}.
* @return The combined {@link Capabilities}.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@Capabilities
static int create(
@FormatSupport int formatSupport,
@AdaptiveSupport int adaptiveSupport,
@TunnelingSupport int tunnelingSupport) {
return formatSupport | adaptiveSupport | tunnelingSupport;
}
/**
* Returns the {@link FormatSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link FormatSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@FormatSupport
static int getFormatSupport(@Capabilities int supportFlags) {
return supportFlags & FORMAT_SUPPORT_MASK;
}
/**
* Returns the {@link AdaptiveSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link AdaptiveSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@AdaptiveSupport
static int getAdaptiveSupport(@Capabilities int supportFlags) {
return supportFlags & ADAPTIVE_SUPPORT_MASK;
}
/**
* Returns the {@link TunnelingSupport} from the combined {@link Capabilities}.
*
* @param supportFlags The combined {@link Capabilities}.
* @return The {@link TunnelingSupport} only.
*/
// Suppression needed for IntDef casting.
@SuppressLint("WrongConstant")
@TunnelingSupport
static int getTunnelingSupport(@Capabilities int supportFlags) {
return supportFlags & TUNNELING_SUPPORT_MASK;
}
/**
* Returns string representation of a {@link FormatSupport} flag.
*
* @param formatSupport A {@link FormatSupport} flag.
* @return A string representation of the flag.
*/
static String getFormatSupportString(@FormatSupport int formatSupport) {
switch (formatSupport) {
case RendererCapabilities.FORMAT_HANDLED:
return "YES";
case RendererCapabilities.FORMAT_EXCEEDS_CAPABILITIES:
return "NO_EXCEEDS_CAPABILITIES";
case RendererCapabilities.FORMAT_UNSUPPORTED_DRM:
return "NO_UNSUPPORTED_DRM";
case RendererCapabilities.FORMAT_UNSUPPORTED_SUBTYPE:
return "NO_UNSUPPORTED_TYPE";
case RendererCapabilities.FORMAT_UNSUPPORTED_TYPE:
return "NO";
default:
throw new IllegalStateException();
}
}
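For orientation, a minimal caller-side sketch (not part of the patch itself) of how these static helpers compose: flags are combined with create(...) and unpacked with the getXxxSupport(...) accessors. Only the constants and methods defined in this interface are used; the surrounding variable names are arbitrary.
// Illustrative only: combine renderer capability flags, then read them back.
@Capabilities
int capabilities =
    RendererCapabilities.create(
        RendererCapabilities.FORMAT_HANDLED,
        RendererCapabilities.ADAPTIVE_SEAMLESS,
        RendererCapabilities.TUNNELING_NOT_SUPPORTED);
@FormatSupport int formatSupport = RendererCapabilities.getFormatSupport(capabilities);
@AdaptiveSupport int adaptiveSupport = RendererCapabilities.getAdaptiveSupport(capabilities);
@TunnelingSupport int tunnelingSupport = RendererCapabilities.getTunnelingSupport(capabilities);
// formatSupport == FORMAT_HANDLED here, so getFormatSupportString(formatSupport) returns "YES".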
/** /**
* Returns the track type that the {@link Renderer} handles. For example, a video renderer will * Returns the track type that the {@link Renderer} handles. For example, a video renderer will
* return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will return {@link C#TRACK_TYPE_AUDIO}, a * return {@link C#TRACK_TYPE_VIDEO}, an audio renderer will return {@link C#TRACK_TYPE_AUDIO}, a
@ -115,39 +271,23 @@ public interface RendererCapabilities {
int getTrackType(); int getTrackType();
/** /**
* Returns the extent to which the {@link Renderer} supports a given format. The returned value is * Returns the extent to which the {@link Renderer} supports a given format.
* the bitwise OR of three properties:
* <ul>
* <li>The level of support for the format itself. One of {@link #FORMAT_HANDLED},
* {@link #FORMAT_EXCEEDS_CAPABILITIES}, {@link #FORMAT_UNSUPPORTED_DRM},
* {@link #FORMAT_UNSUPPORTED_SUBTYPE} and {@link #FORMAT_UNSUPPORTED_TYPE}.</li>
* <li>The level of support for adapting from the format to another format of the same mime type.
* One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and
* {@link #ADAPTIVE_NOT_SUPPORTED}. Only set if the level of support for the format itself is
* {@link #FORMAT_HANDLED} or {@link #FORMAT_EXCEEDS_CAPABILITIES}.</li>
* <li>The level of support for tunneling. One of {@link #TUNNELING_SUPPORTED} and
* {@link #TUNNELING_NOT_SUPPORTED}. Only set if the level of support for the format itself is
* {@link #FORMAT_HANDLED} or {@link #FORMAT_EXCEEDS_CAPABILITIES}.</li>
* </ul>
* The individual properties can be retrieved by performing a bitwise AND with
* {@link #FORMAT_SUPPORT_MASK}, {@link #ADAPTIVE_SUPPORT_MASK} and
* {@link #TUNNELING_SUPPORT_MASK} respectively.
* *
* @param format The format. * @param format The format.
* @return The extent to which the renderer is capable of supporting the given format. * @return The {@link Capabilities} for this format.
* @throws ExoPlaybackException If an error occurs. * @throws ExoPlaybackException If an error occurs.
*/ */
@Capabilities
int supportsFormat(Format format) throws ExoPlaybackException; int supportsFormat(Format format) throws ExoPlaybackException;
/** /**
* Returns the extent to which the {@link Renderer} supports adapting between supported formats * Returns the extent to which the {@link Renderer} supports adapting between supported formats
* that have different mime types. * that have different MIME types.
* *
* @return The extent to which the renderer supports adapting between supported formats that have * @return The {@link AdaptiveSupport} for adapting between supported formats that have different
* different mime types. One of {@link #ADAPTIVE_SEAMLESS}, {@link #ADAPTIVE_NOT_SEAMLESS} and * MIME types.
* {@link #ADAPTIVE_NOT_SUPPORTED}.
* @throws ExoPlaybackException If an error occurs. * @throws ExoPlaybackException If an error occurs.
*/ */
@AdaptiveSupport
int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException; int supportsMixedMimeTypeAdaptation() throws ExoPlaybackException;
} }

View File

@ -23,15 +23,15 @@ import android.media.MediaCodec;
import android.media.PlaybackParams; import android.media.PlaybackParams;
import android.os.Handler; import android.os.Handler;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.Nullable;
import android.view.Surface; import android.view.Surface;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.view.SurfaceView; import android.view.SurfaceView;
import android.view.TextureView; import android.view.TextureView;
import androidx.annotation.Nullable;
import androidx.annotation.VisibleForTesting;
import com.google.android.exoplayer2.analytics.AnalyticsCollector; import com.google.android.exoplayer2.analytics.AnalyticsCollector;
import com.google.android.exoplayer2.analytics.AnalyticsListener; import com.google.android.exoplayer2.analytics.AnalyticsListener;
import com.google.android.exoplayer2.audio.AudioAttributes; import com.google.android.exoplayer2.audio.AudioAttributes;
import com.google.android.exoplayer2.audio.AudioFocusManager;
import com.google.android.exoplayer2.audio.AudioListener; import com.google.android.exoplayer2.audio.AudioListener;
import com.google.android.exoplayer2.audio.AudioRendererEventListener; import com.google.android.exoplayer2.audio.AudioRendererEventListener;
import com.google.android.exoplayer2.audio.AuxEffectInfo; import com.google.android.exoplayer2.audio.AuxEffectInfo;
@ -45,14 +45,17 @@ import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.TrackGroupArray; import com.google.android.exoplayer2.source.TrackGroupArray;
import com.google.android.exoplayer2.text.Cue; import com.google.android.exoplayer2.text.Cue;
import com.google.android.exoplayer2.text.TextOutput; import com.google.android.exoplayer2.text.TextOutput;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.trackselection.TrackSelectionArray; import com.google.android.exoplayer2.trackselection.TrackSelectionArray;
import com.google.android.exoplayer2.trackselection.TrackSelector; import com.google.android.exoplayer2.trackselection.TrackSelector;
import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.upstream.BandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Clock;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.PriorityTaskManager; import com.google.android.exoplayer2.util.PriorityTaskManager;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import com.google.android.exoplayer2.video.VideoDecoderOutputBufferRenderer;
import com.google.android.exoplayer2.video.VideoFrameMetadataListener; import com.google.android.exoplayer2.video.VideoFrameMetadataListener;
import com.google.android.exoplayer2.video.VideoRendererEventListener; import com.google.android.exoplayer2.video.VideoRendererEventListener;
import com.google.android.exoplayer2.video.spherical.CameraMotionListener; import com.google.android.exoplayer2.video.spherical.CameraMotionListener;
@ -66,7 +69,7 @@ import java.util.concurrent.CopyOnWriteArraySet;
/** /**
* An {@link ExoPlayer} implementation that uses default {@link Renderer} components. Instances can * An {@link ExoPlayer} implementation that uses default {@link Renderer} components. Instances can
* be obtained from {@link ExoPlayerFactory}. * be obtained from {@link SimpleExoPlayer.Builder}.
*/ */
public class SimpleExoPlayer extends BasePlayer public class SimpleExoPlayer extends BasePlayer
implements ExoPlayer, implements ExoPlayer,
@ -79,6 +82,232 @@ public class SimpleExoPlayer extends BasePlayer
@Deprecated @Deprecated
public interface VideoListener extends com.google.android.exoplayer2.video.VideoListener {} public interface VideoListener extends com.google.android.exoplayer2.video.VideoListener {}
/**
* A builder for {@link SimpleExoPlayer} instances.
*
* <p>See {@link #Builder(Context)} for the list of default values.
*/
public static final class Builder {
private final Context context;
private final RenderersFactory renderersFactory;
private Clock clock;
private TrackSelector trackSelector;
private LoadControl loadControl;
private BandwidthMeter bandwidthMeter;
private AnalyticsCollector analyticsCollector;
private Looper looper;
private boolean useLazyPreparation;
private boolean buildCalled;
/**
* Creates a builder.
*
* <p>Use {@link #Builder(Context, RenderersFactory)} instead, if you intend to provide a custom
* {@link RenderersFactory}. This is to ensure that ProGuard or R8 can remove ExoPlayer's {@link
* DefaultRenderersFactory} from the APK.
*
* <p>The builder uses the following default values:
*
* <ul>
* <li>{@link RenderersFactory}: {@link DefaultRenderersFactory}
* <li>{@link TrackSelector}: {@link DefaultTrackSelector}
* <li>{@link LoadControl}: {@link DefaultLoadControl}
* <li>{@link BandwidthMeter}: {@link DefaultBandwidthMeter#getSingletonInstance(Context)}
* <li>{@link Looper}: The {@link Looper} associated with the current thread, or the {@link
* Looper} of the application's main thread if the current thread doesn't have a {@link
* Looper}
* <li>{@link AnalyticsCollector}: {@link AnalyticsCollector} with {@link Clock#DEFAULT}
* <li>{@code useLazyPreparation}: {@code true}
* <li>{@link Clock}: {@link Clock#DEFAULT}
* </ul>
*
* @param context A {@link Context}.
*/
public Builder(Context context) {
this(context, new DefaultRenderersFactory(context));
}
/**
* Creates a builder with a custom {@link RenderersFactory}.
*
* <p>See {@link #Builder(Context)} for a list of default values.
*
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the
* player.
*/
public Builder(Context context, RenderersFactory renderersFactory) {
this(
context,
renderersFactory,
new DefaultTrackSelector(context),
new DefaultLoadControl(),
DefaultBandwidthMeter.getSingletonInstance(context),
Util.getLooper(),
new AnalyticsCollector(Clock.DEFAULT),
/* useLazyPreparation= */ true,
Clock.DEFAULT);
}
/**
* Creates a builder with the specified custom components.
*
* <p>Note that this constructor is only useful if you try to ensure that ExoPlayer's default
* components can be removed by ProGuard or R8. For most components except renderers, there is
* only a marginal benefit of doing that.
*
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer Renderers} to be used by the
* player.
* @param trackSelector A {@link TrackSelector}.
* @param loadControl A {@link LoadControl}.
* @param bandwidthMeter A {@link BandwidthMeter}.
* @param looper A {@link Looper} that must be used for all calls to the player.
* @param analyticsCollector An {@link AnalyticsCollector}.
* @param useLazyPreparation Whether media sources should be initialized lazily.
* @param clock A {@link Clock}. Should always be {@link Clock#DEFAULT}.
*/
public Builder(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
Looper looper,
AnalyticsCollector analyticsCollector,
boolean useLazyPreparation,
Clock clock) {
this.context = context;
this.renderersFactory = renderersFactory;
this.trackSelector = trackSelector;
this.loadControl = loadControl;
this.bandwidthMeter = bandwidthMeter;
this.looper = looper;
this.analyticsCollector = analyticsCollector;
this.useLazyPreparation = useLazyPreparation;
this.clock = clock;
}
/**
* Sets the {@link TrackSelector} that will be used by the player.
*
* @param trackSelector A {@link TrackSelector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setTrackSelector(TrackSelector trackSelector) {
Assertions.checkState(!buildCalled);
this.trackSelector = trackSelector;
return this;
}
/**
* Sets the {@link LoadControl} that will be used by the player.
*
* @param loadControl A {@link LoadControl}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLoadControl(LoadControl loadControl) {
Assertions.checkState(!buildCalled);
this.loadControl = loadControl;
return this;
}
/**
* Sets the {@link BandwidthMeter} that will be used by the player.
*
* @param bandwidthMeter A {@link BandwidthMeter}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setBandwidthMeter(BandwidthMeter bandwidthMeter) {
Assertions.checkState(!buildCalled);
this.bandwidthMeter = bandwidthMeter;
return this;
}
/**
* Sets the {@link Looper} that must be used for all calls to the player and that is used to
* call listeners on.
*
* @param looper A {@link Looper}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setLooper(Looper looper) {
Assertions.checkState(!buildCalled);
this.looper = looper;
return this;
}
/**
* Sets the {@link AnalyticsCollector} that will collect and forward all player events.
*
* @param analyticsCollector An {@link AnalyticsCollector}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setAnalyticsCollector(AnalyticsCollector analyticsCollector) {
Assertions.checkState(!buildCalled);
this.analyticsCollector = analyticsCollector;
return this;
}
/**
* Sets whether media sources should be initialized lazily.
*
* <p>If false, all initial preparation steps (e.g., manifest loads) happen immediately. If
* true, these initial preparations are triggered only when the player starts buffering the
* media.
*
* @param useLazyPreparation Whether to use lazy preparation.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public Builder setUseLazyPreparation(boolean useLazyPreparation) {
Assertions.checkState(!buildCalled);
this.useLazyPreparation = useLazyPreparation;
return this;
}
/**
* Sets the {@link Clock} that will be used by the player. Should only be set for testing
* purposes.
*
* @param clock A {@link Clock}.
* @return This builder.
* @throws IllegalStateException If {@link #build()} has already been called.
*/
@VisibleForTesting
public Builder setClock(Clock clock) {
Assertions.checkState(!buildCalled);
this.clock = clock;
return this;
}
/**
* Builds a {@link SimpleExoPlayer} instance.
*
* @throws IllegalStateException If {@link #build()} has already been called.
*/
public SimpleExoPlayer build() {
Assertions.checkState(!buildCalled);
buildCalled = true;
return new SimpleExoPlayer(
context,
renderersFactory,
trackSelector,
loadControl,
bandwidthMeter,
analyticsCollector,
clock,
looper);
}
}
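For orientation, a minimal usage sketch of the builder introduced above, assuming an Android Context named context is in scope; every component not set explicitly falls back to the defaults listed in Builder(Context).
// Illustrative only: build a player with a custom track selector; all other
// components use the documented defaults.
DefaultTrackSelector trackSelector = new DefaultTrackSelector(context);
SimpleExoPlayer player =
    new SimpleExoPlayer.Builder(context)
        .setTrackSelector(trackSelector)
        .setUseLazyPreparation(true)
        .build();
// build() may be called at most once; any later setter call fails the
// Assertions.checkState(!buildCalled) check and throws IllegalStateException.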
private static final String TAG = "SimpleExoPlayer"; private static final String TAG = "SimpleExoPlayer";
protected final Renderer[] renderers; protected final Renderer[] renderers;
@ -96,13 +325,17 @@ public class SimpleExoPlayer extends BasePlayer
private final BandwidthMeter bandwidthMeter; private final BandwidthMeter bandwidthMeter;
private final AnalyticsCollector analyticsCollector; private final AnalyticsCollector analyticsCollector;
private final AudioBecomingNoisyManager audioBecomingNoisyManager;
private final AudioFocusManager audioFocusManager; private final AudioFocusManager audioFocusManager;
private final WakeLockManager wakeLockManager;
private final WifiLockManager wifiLockManager;
private boolean needSetSurface = true; private boolean needSetSurface = true;
@Nullable private Format videoFormat; @Nullable private Format videoFormat;
@Nullable private Format audioFormat; @Nullable private Format audioFormat;
@Nullable private VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer;
@Nullable private Surface surface; @Nullable private Surface surface;
private boolean ownsSurface; private boolean ownsSurface;
private @C.VideoScalingMode int videoScalingMode; private @C.VideoScalingMode int videoScalingMode;
@ -122,6 +355,7 @@ public class SimpleExoPlayer extends BasePlayer
private boolean hasNotifiedFullWrongThreadWarning; private boolean hasNotifiedFullWrongThreadWarning;
@Nullable private PriorityTaskManager priorityTaskManager; @Nullable private PriorityTaskManager priorityTaskManager;
private boolean isPriorityTaskManagerRegistered; private boolean isPriorityTaskManagerRegistered;
private boolean playerReleased;
/** /**
* @param context A {@link Context}. * @param context A {@link Context}.
@ -129,79 +363,54 @@ public class SimpleExoPlayer extends BasePlayer
* @param trackSelector The {@link TrackSelector} that will be used by the instance. * @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance. * @param loadControl The {@link LoadControl} that will be used by the instance.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance. * @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance * @param analyticsCollector A factory for creating the {@link AnalyticsCollector} that will
* will not be used for DRM protected playbacks. * collect and forward all player events.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/
protected SimpleExoPlayer(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
Looper looper) {
this(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
bandwidthMeter,
new AnalyticsCollector.Factory(),
looper);
}
/**
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
* @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
* will collect and forward all player events.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
*/
protected SimpleExoPlayer(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
BandwidthMeter bandwidthMeter,
AnalyticsCollector.Factory analyticsCollectorFactory,
Looper looper) {
this(
context,
renderersFactory,
trackSelector,
loadControl,
drmSessionManager,
bandwidthMeter,
analyticsCollectorFactory,
Clock.DEFAULT,
looper);
}
/**
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
* @param analyticsCollectorFactory A factory for creating the {@link AnalyticsCollector} that
* will collect and forward all player events.
* @param clock The {@link Clock} that will be used by the instance. Should always be {@link * @param clock The {@link Clock} that will be used by the instance. Should always be {@link
* Clock#DEFAULT}, unless the player is being used from a test. * Clock#DEFAULT}, unless the player is being used from a test.
* @param looper The {@link Looper} which must be used for all calls to the player and which is * @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on. * used to call listeners on.
*/ */
@SuppressWarnings("deprecation")
protected SimpleExoPlayer(
Context context,
RenderersFactory renderersFactory,
TrackSelector trackSelector,
LoadControl loadControl,
BandwidthMeter bandwidthMeter,
AnalyticsCollector analyticsCollector,
Clock clock,
Looper looper) {
this(
context,
renderersFactory,
trackSelector,
loadControl,
DrmSessionManager.getDummyDrmSessionManager(),
bandwidthMeter,
analyticsCollector,
clock,
looper);
}
/**
* @param context A {@link Context}.
* @param renderersFactory A factory for creating {@link Renderer}s to be used by the instance.
* @param trackSelector The {@link TrackSelector} that will be used by the instance.
* @param loadControl The {@link LoadControl} that will be used by the instance.
* @param drmSessionManager An optional {@link DrmSessionManager}. May be null if the instance
* will not be used for DRM protected playbacks.
* @param bandwidthMeter The {@link BandwidthMeter} that will be used by the instance.
* @param analyticsCollector The {@link AnalyticsCollector} that will collect and forward all
* player events.
* @param clock The {@link Clock} that will be used by the instance. Should always be {@link
* Clock#DEFAULT}, unless the player is being used from a test.
* @param looper The {@link Looper} which must be used for all calls to the player and which is
* used to call listeners on.
* @deprecated Use {@link #SimpleExoPlayer(Context, RenderersFactory, TrackSelector, LoadControl,
* BandwidthMeter, AnalyticsCollector, Clock, Looper)} instead, and pass the {@link
* DrmSessionManager} to the {@link MediaSource} factories.
*/
@Deprecated
protected SimpleExoPlayer( protected SimpleExoPlayer(
Context context, Context context,
RenderersFactory renderersFactory, RenderersFactory renderersFactory,
@ -209,10 +418,11 @@ public class SimpleExoPlayer extends BasePlayer
LoadControl loadControl, LoadControl loadControl,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, @Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
BandwidthMeter bandwidthMeter, BandwidthMeter bandwidthMeter,
AnalyticsCollector.Factory analyticsCollectorFactory, AnalyticsCollector analyticsCollector,
Clock clock, Clock clock,
Looper looper) { Looper looper) {
this.bandwidthMeter = bandwidthMeter; this.bandwidthMeter = bandwidthMeter;
this.analyticsCollector = analyticsCollector;
componentListener = new ComponentListener(); componentListener = new ComponentListener();
videoListeners = new CopyOnWriteArraySet<>(); videoListeners = new CopyOnWriteArraySet<>();
audioListeners = new CopyOnWriteArraySet<>(); audioListeners = new CopyOnWriteArraySet<>();
@ -240,9 +450,9 @@ public class SimpleExoPlayer extends BasePlayer
// Build the player and associated objects. // Build the player and associated objects.
player = player =
new ExoPlayerImpl(renderers, trackSelector, loadControl, bandwidthMeter, clock, looper); new ExoPlayerImpl(renderers, trackSelector, loadControl, bandwidthMeter, clock, looper);
analyticsCollector = analyticsCollectorFactory.createAnalyticsCollector(player, clock); analyticsCollector.setPlayer(player);
addListener(analyticsCollector); player.addListener(analyticsCollector);
addListener(componentListener); player.addListener(componentListener);
videoDebugListeners.add(analyticsCollector); videoDebugListeners.add(analyticsCollector);
videoListeners.add(analyticsCollector); videoListeners.add(analyticsCollector);
audioDebugListeners.add(analyticsCollector); audioDebugListeners.add(analyticsCollector);
@ -252,7 +462,11 @@ public class SimpleExoPlayer extends BasePlayer
if (drmSessionManager instanceof DefaultDrmSessionManager) { if (drmSessionManager instanceof DefaultDrmSessionManager) {
((DefaultDrmSessionManager) drmSessionManager).addListener(eventHandler, analyticsCollector); ((DefaultDrmSessionManager) drmSessionManager).addListener(eventHandler, analyticsCollector);
} }
audioFocusManager = new AudioFocusManager(context, componentListener); audioBecomingNoisyManager =
new AudioBecomingNoisyManager(context, eventHandler, componentListener);
audioFocusManager = new AudioFocusManager(context, eventHandler, componentListener);
wakeLockManager = new WakeLockManager(context);
wifiLockManager = new WifiLockManager(context);
} }
@Override @Override
@ -310,14 +524,16 @@ public class SimpleExoPlayer extends BasePlayer
@Override @Override
public void clearVideoSurface() { public void clearVideoSurface() {
verifyApplicationThread(); verifyApplicationThread();
setVideoSurface(null); removeSurfaceCallbacks();
setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ false);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
} }
@Override @Override
public void clearVideoSurface(Surface surface) { public void clearVideoSurface(@Nullable Surface surface) {
verifyApplicationThread(); verifyApplicationThread();
if (surface != null && surface == this.surface) { if (surface != null && surface == this.surface) {
setVideoSurface(null); clearVideoSurface();
} }
} }
@ -325,18 +541,24 @@ public class SimpleExoPlayer extends BasePlayer
public void setVideoSurface(@Nullable Surface surface) { public void setVideoSurface(@Nullable Surface surface) {
verifyApplicationThread(); verifyApplicationThread();
removeSurfaceCallbacks(); removeSurfaceCallbacks();
setVideoSurfaceInternal(surface, false); if (surface != null) {
clearVideoDecoderOutputBufferRenderer();
}
setVideoSurfaceInternal(surface, /* ownsSurface= */ false);
int newSurfaceSize = surface == null ? 0 : C.LENGTH_UNSET; int newSurfaceSize = surface == null ? 0 : C.LENGTH_UNSET;
maybeNotifySurfaceSizeChanged(/* width= */ newSurfaceSize, /* height= */ newSurfaceSize); maybeNotifySurfaceSizeChanged(/* width= */ newSurfaceSize, /* height= */ newSurfaceSize);
} }
@Override @Override
public void setVideoSurfaceHolder(SurfaceHolder surfaceHolder) { public void setVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) {
verifyApplicationThread(); verifyApplicationThread();
removeSurfaceCallbacks(); removeSurfaceCallbacks();
if (surfaceHolder != null) {
clearVideoDecoderOutputBufferRenderer();
}
this.surfaceHolder = surfaceHolder; this.surfaceHolder = surfaceHolder;
if (surfaceHolder == null) { if (surfaceHolder == null) {
setVideoSurfaceInternal(null, false); setVideoSurfaceInternal(null, /* ownsSurface= */ false);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
} else { } else {
surfaceHolder.addCallback(componentListener); surfaceHolder.addCallback(componentListener);
@ -353,7 +575,7 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void clearVideoSurfaceHolder(SurfaceHolder surfaceHolder) { public void clearVideoSurfaceHolder(@Nullable SurfaceHolder surfaceHolder) {
verifyApplicationThread(); verifyApplicationThread();
if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) { if (surfaceHolder != null && surfaceHolder == this.surfaceHolder) {
setVideoSurfaceHolder(null); setVideoSurfaceHolder(null);
@ -361,34 +583,37 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void setVideoSurfaceView(SurfaceView surfaceView) { public void setVideoSurfaceView(@Nullable SurfaceView surfaceView) {
setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); setVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
} }
@Override @Override
public void clearVideoSurfaceView(SurfaceView surfaceView) { public void clearVideoSurfaceView(@Nullable SurfaceView surfaceView) {
clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder()); clearVideoSurfaceHolder(surfaceView == null ? null : surfaceView.getHolder());
} }
@Override @Override
public void setVideoTextureView(TextureView textureView) { public void setVideoTextureView(@Nullable TextureView textureView) {
if (this.textureView == textureView) { if (this.textureView == textureView) {
return; return;
} }
verifyApplicationThread(); verifyApplicationThread();
removeSurfaceCallbacks(); removeSurfaceCallbacks();
if (textureView != null) {
clearVideoDecoderOutputBufferRenderer();
}
this.textureView = textureView; this.textureView = textureView;
needSetSurface = true; needSetSurface = true;
if (textureView == null) { if (textureView == null) {
setVideoSurfaceInternal(null, true); setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
} else { } else {
if (textureView.getSurfaceTextureListener() != null) { if (textureView.getSurfaceTextureListener() != null) {
Log.w(TAG, "Replacing existing SurfaceTextureListener."); Log.w(TAG, "Replacing existing SurfaceTextureListener.");
} }
textureView.setSurfaceTextureListener(componentListener); textureView.setSurfaceTextureListener(componentListener);
SurfaceTexture surfaceTexture = textureView.isAvailable() ? textureView.getSurfaceTexture() SurfaceTexture surfaceTexture =
: null; textureView.isAvailable() ? textureView.getSurfaceTexture() : null;
if (surfaceTexture == null) { if (surfaceTexture == null) {
setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true); setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
@ -400,13 +625,39 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void clearVideoTextureView(TextureView textureView) { public void clearVideoTextureView(@Nullable TextureView textureView) {
verifyApplicationThread(); verifyApplicationThread();
if (textureView != null && textureView == this.textureView) { if (textureView != null && textureView == this.textureView) {
setVideoTextureView(null); setVideoTextureView(null);
} }
} }
@Override
public void setVideoDecoderOutputBufferRenderer(
@Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) {
verifyApplicationThread();
if (videoDecoderOutputBufferRenderer != null) {
clearVideoSurface();
}
setVideoDecoderOutputBufferRendererInternal(videoDecoderOutputBufferRenderer);
}
@Override
public void clearVideoDecoderOutputBufferRenderer() {
verifyApplicationThread();
setVideoDecoderOutputBufferRendererInternal(/* videoDecoderOutputBufferRenderer= */ null);
}
@Override
public void clearVideoDecoderOutputBufferRenderer(
@Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) {
verifyApplicationThread();
if (videoDecoderOutputBufferRenderer != null
&& videoDecoderOutputBufferRenderer == this.videoDecoderOutputBufferRenderer) {
clearVideoDecoderOutputBufferRenderer();
}
}
@Override @Override
public void addAudioListener(AudioListener listener) { public void addAudioListener(AudioListener listener) {
audioListeners.add(listener); audioListeners.add(listener);
@ -425,6 +676,9 @@ public class SimpleExoPlayer extends BasePlayer
@Override @Override
public void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) { public void setAudioAttributes(AudioAttributes audioAttributes, boolean handleAudioFocus) {
verifyApplicationThread(); verifyApplicationThread();
if (playerReleased) {
return;
}
if (!Util.areEqual(this.audioAttributes, audioAttributes)) { if (!Util.areEqual(this.audioAttributes, audioAttributes)) {
this.audioAttributes = audioAttributes; this.audioAttributes = audioAttributes;
for (Renderer renderer : renderers) { for (Renderer renderer : renderers) {
@ -441,11 +695,11 @@ public class SimpleExoPlayer extends BasePlayer
} }
} }
audioFocusManager.setAudioAttributes(handleAudioFocus ? audioAttributes : null);
boolean playWhenReady = getPlayWhenReady();
@AudioFocusManager.PlayerCommand @AudioFocusManager.PlayerCommand
int playerCommand = int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState());
audioFocusManager.setAudioAttributes( updatePlayWhenReady(playWhenReady, playerCommand);
handleAudioFocus ? audioAttributes : null, getPlayWhenReady(), getPlaybackState());
updatePlayWhenReady(getPlayWhenReady(), playerCommand);
} }
@Override @Override
@ -498,12 +752,12 @@ public class SimpleExoPlayer extends BasePlayer
/** /**
* Sets the stream type for audio playback, used by the underlying audio track. * Sets the stream type for audio playback, used by the underlying audio track.
* <p> *
* Setting the stream type during playback may introduce a short gap in audio output as the audio * <p>Setting the stream type during playback may introduce a short gap in audio output as the
* track is recreated. A new audio session id will also be generated. * audio track is recreated. A new audio session id will also be generated.
* <p> *
* Calling this method overwrites any attributes set previously by calling * <p>Calling this method overwrites any attributes set previously by calling {@link
* {@link #setAudioAttributes(AudioAttributes)}. * #setAudioAttributes(AudioAttributes)}.
* *
* @deprecated Use {@link #setAudioAttributes(AudioAttributes)}. * @deprecated Use {@link #setAudioAttributes(AudioAttributes)}.
* @param streamType The stream type for audio playback. * @param streamType The stream type for audio playback.
@ -552,6 +806,25 @@ public class SimpleExoPlayer extends BasePlayer
analyticsCollector.removeListener(listener); analyticsCollector.removeListener(listener);
} }
/**
* Sets whether the player should pause automatically when audio is rerouted from a headset to
* device speakers. See the <a
* href="https://developer.android.com/guide/topics/media-apps/volume-and-earphones#becoming-noisy">audio
* becoming noisy</a> documentation for more information.
*
* <p>This feature is not enabled by default.
*
* @param handleAudioBecomingNoisy Whether the player should pause automatically when audio is
* rerouted from a headset to device speakers.
*/
public void setHandleAudioBecomingNoisy(boolean handleAudioBecomingNoisy) {
verifyApplicationThread();
if (playerReleased) {
return;
}
audioBecomingNoisyManager.setEnabled(handleAudioBecomingNoisy);
}
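A one-line usage sketch, assuming a SimpleExoPlayer instance named player used on the application thread:
// Illustrative only: pause automatically when audio would be rerouted to the
// device speaker, e.g. when headphones are unplugged. Not enabled by default.
player.setHandleAudioBecomingNoisy(true);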
/** /**
* Sets a {@link PriorityTaskManager}, or null to clear a previously set priority task manager. * Sets a {@link PriorityTaskManager}, or null to clear a previously set priority task manager.
* *
@ -882,11 +1155,13 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
@State
public int getPlaybackState() { public int getPlaybackState() {
verifyApplicationThread(); verifyApplicationThread();
return player.getPlaybackState(); return player.getPlaybackState();
} }
@Override
@PlaybackSuppressionReason @PlaybackSuppressionReason
public int getPlaybackSuppressionReason() { public int getPlaybackSuppressionReason() {
verifyApplicationThread(); verifyApplicationThread();
@ -923,9 +1198,10 @@ public class SimpleExoPlayer extends BasePlayer
} }
this.mediaSource = mediaSource; this.mediaSource = mediaSource;
mediaSource.addEventListener(eventHandler, analyticsCollector); mediaSource.addEventListener(eventHandler, analyticsCollector);
boolean playWhenReady = getPlayWhenReady();
@AudioFocusManager.PlayerCommand @AudioFocusManager.PlayerCommand
int playerCommand = audioFocusManager.handlePrepare(getPlayWhenReady()); int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, Player.STATE_BUFFERING);
updatePlayWhenReady(getPlayWhenReady(), playerCommand); updatePlayWhenReady(playWhenReady, playerCommand);
player.prepare(mediaSource, resetPosition, resetState); player.prepare(mediaSource, resetPosition, resetState);
} }
@ -933,7 +1209,7 @@ public class SimpleExoPlayer extends BasePlayer
public void setPlayWhenReady(boolean playWhenReady) { public void setPlayWhenReady(boolean playWhenReady) {
verifyApplicationThread(); verifyApplicationThread();
@AudioFocusManager.PlayerCommand @AudioFocusManager.PlayerCommand
int playerCommand = audioFocusManager.handleSetPlayWhenReady(playWhenReady, getPlaybackState()); int playerCommand = audioFocusManager.updateAudioFocus(playWhenReady, getPlaybackState());
updatePlayWhenReady(playWhenReady, playerCommand); updatePlayWhenReady(playWhenReady, playerCommand);
} }
@ -1012,6 +1288,7 @@ public class SimpleExoPlayer extends BasePlayer
@Override @Override
public void stop(boolean reset) { public void stop(boolean reset) {
verifyApplicationThread(); verifyApplicationThread();
audioFocusManager.updateAudioFocus(getPlayWhenReady(), Player.STATE_IDLE);
player.stop(reset); player.stop(reset);
if (mediaSource != null) { if (mediaSource != null) {
mediaSource.removeEventListener(analyticsCollector); mediaSource.removeEventListener(analyticsCollector);
@ -1020,14 +1297,16 @@ public class SimpleExoPlayer extends BasePlayer
mediaSource = null; mediaSource = null;
} }
} }
audioFocusManager.handleStop();
currentCues = Collections.emptyList(); currentCues = Collections.emptyList();
} }
@Override @Override
public void release(boolean async) { public void release(boolean async) {
verifyApplicationThread(); verifyApplicationThread();
audioFocusManager.handleStop(); audioBecomingNoisyManager.setEnabled(false);
wakeLockManager.setStayAwake(false);
wifiLockManager.setStayAwake(false);
audioFocusManager.release();
if (async) { if (async) {
Utilities.globalQueue.postRunnable(() -> player.release(async)); Utilities.globalQueue.postRunnable(() -> player.release(async));
} else { } else {
@ -1050,13 +1329,7 @@ public class SimpleExoPlayer extends BasePlayer
} }
bandwidthMeter.removeEventListener(analyticsCollector); bandwidthMeter.removeEventListener(analyticsCollector);
currentCues = Collections.emptyList(); currentCues = Collections.emptyList();
} playerReleased = true;
@Override
@Deprecated
@SuppressWarnings("deprecation")
public void sendMessages(ExoPlayerMessage... messages) {
player.sendMessages(messages);
} }
@Override @Override
@ -1065,13 +1338,6 @@ public class SimpleExoPlayer extends BasePlayer
return player.createMessage(target); return player.createMessage(target);
} }
@Override
@Deprecated
@SuppressWarnings("deprecation")
public void blockingSendMessages(ExoPlayerMessage... messages) {
player.blockingSendMessages(messages);
}
@Override @Override
public int getRendererCount() { public int getRendererCount() {
verifyApplicationThread(); verifyApplicationThread();
@ -1102,13 +1368,6 @@ public class SimpleExoPlayer extends BasePlayer
return player.getCurrentTimeline(); return player.getCurrentTimeline();
} }
@Override
@Nullable
public Object getCurrentManifest() {
verifyApplicationThread();
return player.getCurrentManifest();
}
@Override @Override
public int getCurrentPeriodIndex() { public int getCurrentPeriodIndex() {
verifyApplicationThread(); verifyApplicationThread();
@ -1175,6 +1434,62 @@ public class SimpleExoPlayer extends BasePlayer
return player.getContentBufferedPosition(); return player.getContentBufferedPosition();
} }
/**
* Sets whether the player should use a {@link android.os.PowerManager.WakeLock} to ensure the
* device stays awake for playback, even when the screen is off.
*
* <p>Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission.
* It should be used together with a foreground {@link android.app.Service} for use cases where
* playback can occur when the screen is off (e.g. background audio playback). It is not useful if
* the screen will always be on during playback (e.g. foreground video playback).
*
* <p>This feature is not enabled by default. If enabled, a WakeLock is held whenever the player
* is in the {@link #STATE_READY READY} or {@link #STATE_BUFFERING BUFFERING} states with {@code
* playWhenReady = true}.
*
* @param handleWakeLock Whether the player should use a {@link android.os.PowerManager.WakeLock}
* to ensure the device stays awake for playback, even when the screen is off.
* @deprecated Use {@link #setWakeMode(int)} instead.
*/
@Deprecated
public void setHandleWakeLock(boolean handleWakeLock) {
setWakeMode(handleWakeLock ? C.WAKE_MODE_LOCAL : C.WAKE_MODE_NONE);
}
/**
* Sets how the player should keep the device awake for playback when the screen is off.
*
* <p>Enabling this feature requires the {@link android.Manifest.permission#WAKE_LOCK} permission.
* It should be used together with a foreground {@link android.app.Service} for use cases where
* playback occurs and the screen is off (e.g. background audio playback). It is not useful when
* the screen will be kept on during playback (e.g. foreground video playback).
*
* <p>When enabled, the locks ({@link android.os.PowerManager.WakeLock} / {@link
* android.net.wifi.WifiManager.WifiLock}) will be held whenever the player is in the {@link
* #STATE_READY} or {@link #STATE_BUFFERING} states with {@code playWhenReady = true}. The locks
* held depend on the specified {@link C.WakeMode}.
*
* @param wakeMode The {@link C.WakeMode} option to keep the device awake during playback.
*/
public void setWakeMode(@C.WakeMode int wakeMode) {
switch (wakeMode) {
case C.WAKE_MODE_NONE:
wakeLockManager.setEnabled(false);
wifiLockManager.setEnabled(false);
break;
case C.WAKE_MODE_LOCAL:
wakeLockManager.setEnabled(true);
wifiLockManager.setEnabled(false);
break;
case C.WAKE_MODE_NETWORK:
wakeLockManager.setEnabled(true);
wifiLockManager.setEnabled(true);
break;
default:
break;
}
}
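A short usage sketch for the wake-mode API above (and for the deprecated setHandleWakeLock it replaces), assuming a player that streams audio from a foreground Service with the WAKE_LOCK permission declared:
// Illustrative only: hold both a WakeLock and a WifiLock while the player is
// READY or BUFFERING with playWhenReady == true, so network playback survives
// the screen turning off.
player.setWakeMode(C.WAKE_MODE_NETWORK);
// For purely local playback, C.WAKE_MODE_LOCAL holds only the WakeLock;
// C.WAKE_MODE_NONE holds neither, matching the default behaviour described above.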
// Internal methods. // Internal methods.
private void removeSurfaceCallbacks() { private void removeSurfaceCallbacks() {
@ -1220,6 +1535,20 @@ public class SimpleExoPlayer extends BasePlayer
this.ownsSurface = ownsSurface; this.ownsSurface = ownsSurface;
} }
private void setVideoDecoderOutputBufferRendererInternal(
@Nullable VideoDecoderOutputBufferRenderer videoDecoderOutputBufferRenderer) {
for (Renderer renderer : renderers) {
if (renderer.getTrackType() == C.TRACK_TYPE_VIDEO) {
player
.createMessage(renderer)
.setType(C.MSG_SET_VIDEO_DECODER_OUTPUT_BUFFER_RENDERER)
.setPayload(videoDecoderOutputBufferRenderer)
.send();
}
}
this.videoDecoderOutputBufferRenderer = videoDecoderOutputBufferRenderer;
}
private void maybeNotifySurfaceSizeChanged(int width, int height) { private void maybeNotifySurfaceSizeChanged(int width, int height) {
if (width != surfaceWidth || height != surfaceHeight) { if (width != surfaceWidth || height != surfaceHeight) {
surfaceWidth = width; surfaceWidth = width;
@ -1261,6 +1590,24 @@ public class SimpleExoPlayer extends BasePlayer
} }
} }
private void updateWakeAndWifiLock() {
@State int playbackState = getPlaybackState();
switch (playbackState) {
case Player.STATE_READY:
case Player.STATE_BUFFERING:
wakeLockManager.setStayAwake(getPlayWhenReady());
wifiLockManager.setStayAwake(getPlayWhenReady());
break;
case Player.STATE_ENDED:
case Player.STATE_IDLE:
wakeLockManager.setStayAwake(false);
wifiLockManager.setStayAwake(false);
break;
default:
throw new IllegalStateException();
}
}
private final class ComponentListener private final class ComponentListener
implements VideoRendererEventListener, implements VideoRendererEventListener,
AudioRendererEventListener, AudioRendererEventListener,
@ -1269,6 +1616,7 @@ public class SimpleExoPlayer extends BasePlayer
SurfaceHolder.Callback, SurfaceHolder.Callback,
TextureView.SurfaceTextureListener, TextureView.SurfaceTextureListener,
AudioFocusManager.PlayerControl, AudioFocusManager.PlayerControl,
AudioBecomingNoisyManager.EventListener,
Player.EventListener { Player.EventListener {
// VideoRendererEventListener implementation // VideoRendererEventListener implementation
@ -1282,11 +1630,11 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void onVideoDecoderInitialized(String decoderName, long initializedTimestampMs, public void onVideoDecoderInitialized(
long initializationDurationMs) { String decoderName, long initializedTimestampMs, long initializationDurationMs) {
for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoDecoderInitialized(decoderName, initializedTimestampMs, videoDebugListener.onVideoDecoderInitialized(
initializationDurationMs); decoderName, initializedTimestampMs, initializationDurationMs);
} }
} }
@ -1306,8 +1654,8 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void onVideoSizeChanged(int width, int height, int unappliedRotationDegrees, public void onVideoSizeChanged(
float pixelWidthHeightRatio) { int width, int height, int unappliedRotationDegrees, float pixelWidthHeightRatio) {
for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) { for (com.google.android.exoplayer2.video.VideoListener videoListener : videoListeners) {
// Prevent duplicate notification if a listener is both a VideoRendererEventListener and // Prevent duplicate notification if a listener is both a VideoRendererEventListener and
// a VideoListener, as they have the same method signature. // a VideoListener, as they have the same method signature.
@ -1317,8 +1665,8 @@ public class SimpleExoPlayer extends BasePlayer
} }
} }
for (VideoRendererEventListener videoDebugListener : videoDebugListeners) { for (VideoRendererEventListener videoDebugListener : videoDebugListeners) {
videoDebugListener.onVideoSizeChanged(width, height, unappliedRotationDegrees, videoDebugListener.onVideoSizeChanged(
pixelWidthHeightRatio); width, height, unappliedRotationDegrees, pixelWidthHeightRatio);
} }
} }
@ -1372,11 +1720,11 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void onAudioDecoderInitialized(String decoderName, long initializedTimestampMs, public void onAudioDecoderInitialized(
long initializationDurationMs) { String decoderName, long initializedTimestampMs, long initializationDurationMs) {
for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioDecoderInitialized(decoderName, initializedTimestampMs, audioDebugListener.onAudioDecoderInitialized(
initializationDurationMs); decoderName, initializedTimestampMs, initializationDurationMs);
} }
} }
@ -1389,8 +1737,8 @@ public class SimpleExoPlayer extends BasePlayer
} }
@Override @Override
public void onAudioSinkUnderrun(int bufferSize, long bufferSizeMs, public void onAudioSinkUnderrun(
long elapsedSinceLastFeedMs) { int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
for (AudioRendererEventListener audioDebugListener : audioDebugListeners) { for (AudioRendererEventListener audioDebugListener : audioDebugListeners) {
audioDebugListener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs); audioDebugListener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
} }
@ -1439,7 +1787,7 @@ public class SimpleExoPlayer extends BasePlayer
@Override @Override
public void surfaceDestroyed(SurfaceHolder holder) { public void surfaceDestroyed(SurfaceHolder holder) {
setVideoSurfaceInternal(null, false); setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ false);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
} }
@ -1451,6 +1799,7 @@ public class SimpleExoPlayer extends BasePlayer
setVideoSurfaceInternal(new Surface(surfaceTexture), true); setVideoSurfaceInternal(new Surface(surfaceTexture), true);
needSetSurface = false; needSetSurface = false;
} }
setVideoSurfaceInternal(new Surface(surfaceTexture), /* ownsSurface= */ true);
maybeNotifySurfaceSizeChanged(width, height); maybeNotifySurfaceSizeChanged(width, height);
} }
@ -1466,7 +1815,7 @@ public class SimpleExoPlayer extends BasePlayer
return false; return false;
} }
} }
setVideoSurfaceInternal(null, true); setVideoSurfaceInternal(/* surface= */ null, /* ownsSurface= */ true);
maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0); maybeNotifySurfaceSizeChanged(/* width= */ 0, /* height= */ 0);
needSetSurface = true; needSetSurface = true;
return true; return true;
@ -1492,6 +1841,13 @@ public class SimpleExoPlayer extends BasePlayer
updatePlayWhenReady(getPlayWhenReady(), playerCommand); updatePlayWhenReady(getPlayWhenReady(), playerCommand);
} }
// AudioBecomingNoisyManager.EventListener implementation.
@Override
public void onAudioBecomingNoisy() {
setPlayWhenReady(false);
}
// Player.EventListener implementation. // Player.EventListener implementation.
@Override @Override
@ -1506,5 +1862,10 @@ public class SimpleExoPlayer extends BasePlayer
} }
} }
} }
@Override
public void onPlayerStateChanged(boolean playWhenReady, @State int playbackState) {
updateWakeAndWifiLock();
}
} }
} }

View File

@ -15,10 +15,11 @@
*/ */
package com.google.android.exoplayer2; package com.google.android.exoplayer2;
import androidx.annotation.Nullable;
import android.util.Pair; import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.source.ads.AdPlaybackState; import com.google.android.exoplayer2.source.ads.AdPlaybackState;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
/** /**
* A flexible representation of the structure of media. A timeline is able to represent the * A flexible representation of the structure of media. A timeline is able to represent the
@ -26,102 +27,113 @@ import com.google.android.exoplayer2.util.Assertions;
* complex compositions of media such as playlists and streams with inserted ads. Instances are * complex compositions of media such as playlists and streams with inserted ads. Instances are
* immutable. For cases where media is changing dynamically (e.g. live streams), a timeline provides * immutable. For cases where media is changing dynamically (e.g. live streams), a timeline provides
* a snapshot of the current state. * a snapshot of the current state.
* <p> *
* A timeline consists of related {@link Period}s and {@link Window}s. A period defines a single * <p>A timeline consists of {@link Window Windows} and {@link Period Periods}.
* logical piece of media, for example a media file. It may also define groups of ads inserted into *
* the media, along with information about whether those ads have been loaded and played. A window * <ul>
* spans one or more periods, defining the region within those periods that's currently available * <li>A {@link Window} usually corresponds to one playlist item. It may span one or more periods
* for playback along with additional information such as whether seeking is supported within the * and it defines the region within those periods that's currently available for playback. The
* window. Each window defines a default position, which is the position from which playback will * window also provides additional information such as whether seeking is supported within the
* start when the player starts playing the window. The following examples illustrate timelines for * window and the default position, which is the position from which playback will start when
* various use cases. * the player starts playing the window.
* <li>A {@link Period} defines a single logical piece of media, for example a media file. It may
* also define groups of ads inserted into the media, along with information about whether
* those ads have been loaded and played.
* </ul>
*
* <p>The following examples illustrate timelines for various use cases.
* *
* <h3 id="single-file">Single media file or on-demand stream</h3> * <h3 id="single-file">Single media file or on-demand stream</h3>
* <p align="center"> *
* <img src="doc-files/timeline-single-file.svg" alt="Example timeline for a single file"> * <p style="align:center"><img src="doc-files/timeline-single-file.svg" alt="Example timeline for a
* </p> * single file"> A timeline for a single media file or on-demand stream consists of a single period
* A timeline for a single media file or on-demand stream consists of a single period and window. * and window. The window spans the whole period, indicating that all parts of the media are
* The window spans the whole period, indicating that all parts of the media are available for * available for playback. The window's default position is typically at the start of the period
* playback. The window's default position is typically at the start of the period (indicated by the * (indicated by the black dot in the figure above).
* black dot in the figure above).
* *
* <h3>Playlist of media files or on-demand streams</h3> * <h3>Playlist of media files or on-demand streams</h3>
* <p align="center"> *
* <img src="doc-files/timeline-playlist.svg" alt="Example timeline for a playlist of files"> * <p style="align:center"><img src="doc-files/timeline-playlist.svg" alt="Example timeline for a
* </p> * playlist of files"> A timeline for a playlist of media files or on-demand streams consists of
* A timeline for a playlist of media files or on-demand streams consists of multiple periods, each * multiple periods, each with its own window. Each window spans the whole of the corresponding
* with its own window. Each window spans the whole of the corresponding period, and typically has a * period, and typically has a default position at the start of the period. The properties of the
* default position at the start of the period. The properties of the periods and windows (e.g. * periods and windows (e.g. their durations and whether the window is seekable) will often only
* their durations and whether the window is seekable) will often only become known when the player * become known when the player starts buffering the corresponding file or stream.
* starts buffering the corresponding file or stream.
* *
* <h3 id="live-limited">Live stream with limited availability</h3> * <h3 id="live-limited">Live stream with limited availability</h3>
* <p align="center"> *
* <img src="doc-files/timeline-live-limited.svg" alt="Example timeline for a live stream with * <p style="align:center"><img src="doc-files/timeline-live-limited.svg" alt="Example timeline for
* limited availability"> * a live stream with limited availability"> A timeline for a live stream consists of a period whose
* </p> * duration is unknown, since it's continually extending as more content is broadcast. If content
* A timeline for a live stream consists of a period whose duration is unknown, since it's * only remains available for a limited period of time then the window may start at a non-zero
* continually extending as more content is broadcast. If content only remains available for a * position, defining the region of content that can still be played. The window will have {@link
* limited period of time then the window may start at a non-zero position, defining the region of * Window#isLive} set to true to indicate it's a live stream and {@link Window#isDynamic} set to
* content that can still be played. The window will have {@link Window#isDynamic} set to true if * true as long as we expect changes to the live window. Its default position is typically near to
* the stream is still live. Its default position is typically near to the live edge (indicated by * the live edge (indicated by the black dot in the figure above).
* the black dot in the figure above).
* *
* <h3>Live stream with indefinite availability</h3> * <h3>Live stream with indefinite availability</h3>
* <p align="center"> *
* <img src="doc-files/timeline-live-indefinite.svg" alt="Example timeline for a live stream with * <p style="align:center"><img src="doc-files/timeline-live-indefinite.svg" alt="Example timeline
* indefinite availability"> * for a live stream with indefinite availability"> A timeline for a live stream with indefinite
* </p> * availability is similar to the <a href="#live-limited">Live stream with limited availability</a>
* A timeline for a live stream with indefinite availability is similar to the * case, except that the window starts at the beginning of the period to indicate that all of the
* <a href="#live-limited">Live stream with limited availability</a> case, except that the window * previously broadcast content can still be played.
* starts at the beginning of the period to indicate that all of the previously broadcast content
* can still be played.
* *
* <h3 id="live-multi-period">Live stream with multiple periods</h3> * <h3 id="live-multi-period">Live stream with multiple periods</h3>
* <p align="center"> *
* <img src="doc-files/timeline-live-multi-period.svg" alt="Example timeline for a live stream * <p style="align:center"><img src="doc-files/timeline-live-multi-period.svg" alt="Example timeline
* with multiple periods"> * for a live stream with multiple periods"> This case arises when a live stream is explicitly
* </p> * divided into separate periods, for example at content boundaries. This case is similar to the <a
* This case arises when a live stream is explicitly divided into separate periods, for example at * href="#live-limited">Live stream with limited availability</a> case, except that the window may
* content boundaries. This case is similar to the <a href="#live-limited">Live stream with limited * span more than one period. Multiple periods are also possible in the indefinite availability
* availability</a> case, except that the window may span more than one period. Multiple periods are * case.
* also possible in the indefinite availability case.
* *
* <h3>On-demand stream followed by live stream</h3> * <h3>On-demand stream followed by live stream</h3>
* <p align="center"> *
* <img src="doc-files/timeline-advanced.svg" alt="Example timeline for an on-demand stream * <p style="align:center"><img src="doc-files/timeline-advanced.svg" alt="Example timeline for an
* followed by a live stream"> * on-demand stream followed by a live stream"> This case is the concatenation of the <a
* </p> * href="#single-file">Single media file or on-demand stream</a> and <a href="#multi-period">Live
* This case is the concatenation of the <a href="#single-file">Single media file or on-demand * stream with multiple periods</a> cases. When playback of the on-demand stream ends, playback of
* stream</a> and <a href="#multi-period">Live stream with multiple periods</a> cases. When playback * the live stream will start from its default position near the live edge.
* of the on-demand stream ends, playback of the live stream will start from its default position
* near the live edge.
* *
* <h3 id="single-file-midrolls">On-demand stream with mid-roll ads</h3> * <h3 id="single-file-midrolls">On-demand stream with mid-roll ads</h3>
* <p align="center"> *
* <img src="doc-files/timeline-single-file-midrolls.svg" alt="Example timeline for an on-demand * <p style="align:center"><img src="doc-files/timeline-single-file-midrolls.svg" alt="Example
* stream with mid-roll ad groups"> * timeline for an on-demand stream with mid-roll ad groups"> This case includes mid-roll ad groups,
* </p> * which are defined as part of the timeline's single period. The period can be queried for
* This case includes mid-roll ad groups, which are defined as part of the timeline's single period. * information about the ad groups and the ads they contain.
* The period can be queried for information about the ad groups and the ads they contain.
*/ */
public abstract class Timeline { public abstract class Timeline {
/** /**
* Holds information about a window in a {@link Timeline}. A window defines a region of media * Holds information about a window in a {@link Timeline}. A window usually corresponds to one
* currently available for playback along with additional information such as whether seeking is * playlist item and defines a region of media currently available for playback along with
* supported within the window. The figure below shows some of the information defined by a * additional information such as whether seeking is supported within the window. The figure below
* window, as well as how this information relates to corresponding {@link Period}s in the * shows some of the information defined by a window, as well as how this information relates to
* timeline. * corresponding {@link Period Periods} in the timeline.
* <p align="center"> *
* <img src="doc-files/timeline-window.svg" alt="Information defined by a timeline window"> * <p style="align:center"><img src="doc-files/timeline-window.svg" alt="Information defined by a
* </p> * timeline window">
*/ */
public static final class Window { public static final class Window {
/**
* A {@link #uid} for a window that must be used for single-window {@link Timeline Timelines}.
*/
public static final Object SINGLE_WINDOW_UID = new Object();
/**
* A unique identifier for the window. Single-window {@link Timeline Timelines} must use {@link
* #SINGLE_WINDOW_UID}.
*/
public Object uid;
/** A tag for the window. Not necessarily unique. */ /** A tag for the window. Not necessarily unique. */
@Nullable public Object tag; @Nullable public Object tag;
/** The manifest of the window. May be {@code null}. */
@Nullable public Object manifest;
/** /**
* The start time of the presentation to which this window belongs in milliseconds since the * The start time of the presentation to which this window belongs in milliseconds since the
* epoch, or {@link C#TIME_UNSET} if unknown or not applicable. For informational purposes only. * epoch, or {@link C#TIME_UNSET} if unknown or not applicable. For informational purposes only.
@@ -148,8 +160,13 @@ public abstract class Timeline {
public boolean isDynamic; public boolean isDynamic;
/** /**
* The index of the first period that belongs to this window. * Whether the media in this window is live. For informational purposes only.
*
* <p>Check {@link #isDynamic} to know whether this window may still change.
*/ */
public boolean isLive;
/** The index of the first period that belongs to this window. */
public int firstPeriodIndex; public int firstPeriodIndex;
/** /**
@@ -176,23 +193,34 @@ public abstract class Timeline {
*/ */
public long positionInFirstPeriodUs; public long positionInFirstPeriodUs;
/** Creates window. */
public Window() {
uid = SINGLE_WINDOW_UID;
}
/** Sets the data held by this window. */ /** Sets the data held by this window. */
public Window set( public Window set(
Object uid,
@Nullable Object tag, @Nullable Object tag,
@Nullable Object manifest,
long presentationStartTimeMs, long presentationStartTimeMs,
long windowStartTimeMs, long windowStartTimeMs,
boolean isSeekable, boolean isSeekable,
boolean isDynamic, boolean isDynamic,
boolean isLive,
long defaultPositionUs, long defaultPositionUs,
long durationUs, long durationUs,
int firstPeriodIndex, int firstPeriodIndex,
int lastPeriodIndex, int lastPeriodIndex,
long positionInFirstPeriodUs) { long positionInFirstPeriodUs) {
this.uid = uid;
this.tag = tag; this.tag = tag;
this.manifest = manifest;
this.presentationStartTimeMs = presentationStartTimeMs; this.presentationStartTimeMs = presentationStartTimeMs;
this.windowStartTimeMs = windowStartTimeMs; this.windowStartTimeMs = windowStartTimeMs;
this.isSeekable = isSeekable; this.isSeekable = isSeekable;
this.isDynamic = isDynamic; this.isDynamic = isDynamic;
this.isLive = isLive;
this.defaultPositionUs = defaultPositionUs; this.defaultPositionUs = defaultPositionUs;
this.durationUs = durationUs; this.durationUs = durationUs;
this.firstPeriodIndex = firstPeriodIndex; this.firstPeriodIndex = firstPeriodIndex;
@@ -251,18 +279,60 @@ public abstract class Timeline {
return positionInFirstPeriodUs; return positionInFirstPeriodUs;
} }
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null || !getClass().equals(obj.getClass())) {
return false;
}
Window that = (Window) obj;
return Util.areEqual(uid, that.uid)
&& Util.areEqual(tag, that.tag)
&& Util.areEqual(manifest, that.manifest)
&& presentationStartTimeMs == that.presentationStartTimeMs
&& windowStartTimeMs == that.windowStartTimeMs
&& isSeekable == that.isSeekable
&& isDynamic == that.isDynamic
&& isLive == that.isLive
&& defaultPositionUs == that.defaultPositionUs
&& durationUs == that.durationUs
&& firstPeriodIndex == that.firstPeriodIndex
&& lastPeriodIndex == that.lastPeriodIndex
&& positionInFirstPeriodUs == that.positionInFirstPeriodUs;
}
@Override
public int hashCode() {
int result = 7;
result = 31 * result + uid.hashCode();
result = 31 * result + (tag == null ? 0 : tag.hashCode());
result = 31 * result + (manifest == null ? 0 : manifest.hashCode());
result = 31 * result + (int) (presentationStartTimeMs ^ (presentationStartTimeMs >>> 32));
result = 31 * result + (int) (windowStartTimeMs ^ (windowStartTimeMs >>> 32));
result = 31 * result + (isSeekable ? 1 : 0);
result = 31 * result + (isDynamic ? 1 : 0);
result = 31 * result + (isLive ? 1 : 0);
result = 31 * result + (int) (defaultPositionUs ^ (defaultPositionUs >>> 32));
result = 31 * result + (int) (durationUs ^ (durationUs >>> 32));
result = 31 * result + firstPeriodIndex;
result = 31 * result + lastPeriodIndex;
result = 31 * result + (int) (positionInFirstPeriodUs ^ (positionInFirstPeriodUs >>> 32));
return result;
}
} }
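A minimal usage sketch for the window fields above, assuming an existing com.google.android.exoplayer2.Player instance named "player" (the player and the enclosing method are illustration-only; isLive is the field added by this patch):

// Inspect the window the player is currently playing, reusing one Window instance.
Timeline.Window window = new Timeline.Window();
Timeline timeline = player.getCurrentTimeline();
if (!timeline.isEmpty()) {
  timeline.getWindow(player.getCurrentWindowIndex(), window);
  boolean isLiveStream = window.isLive;        // new in this change
  boolean mayStillGrow = window.isDynamic;     // live window still being extended
  boolean canSeek = window.isSeekable;
  long defaultStartMs = window.getDefaultPositionMs();  // near the live edge for live streams
}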
/** /**
* Holds information about a period in a {@link Timeline}. A period defines a single logical piece * Holds information about a period in a {@link Timeline}. A period defines a single logical piece
* of media, for example a media file. It may also define groups of ads inserted into the media, * of media, for example a media file. It may also define groups of ads inserted into the media,
* along with information about whether those ads have been loaded and played. * along with information about whether those ads have been loaded and played.
* <p> *
* The figure below shows some of the information defined by a period, as well as how this * <p>The figure below shows some of the information defined by a period, as well as how this
* information relates to a corresponding {@link Window} in the timeline. * information relates to a corresponding {@link Window} in the timeline.
* <p align="center"> *
* <img src="doc-files/timeline-period.svg" alt="Information defined by a period"> * <p style="align:center"><img src="doc-files/timeline-period.svg" alt="Information defined by a
* </p> * period">
*/ */
public static final class Period { public static final class Period {
@@ -396,7 +466,8 @@ public abstract class Timeline {
* microseconds. * microseconds.
* *
* @param adGroupIndex The ad group index. * @param adGroupIndex The ad group index.
* @return The time of the ad group at the index, in microseconds. * @return The time of the ad group at the index relative to the start of the enclosing {@link
* Period}, in microseconds, or {@link C#TIME_END_OF_SOURCE} for a post-roll ad group.
*/ */
public long getAdGroupTimeUs(int adGroupIndex) { public long getAdGroupTimeUs(int adGroupIndex) {
return adPlaybackState.adGroupTimesUs[adGroupIndex]; return adPlaybackState.adGroupTimesUs[adGroupIndex];
@@ -439,22 +510,23 @@ public abstract class Timeline {
} }
/** /**
* Returns the index of the ad group at or before {@code positionUs}, if that ad group is * Returns the index of the ad group at or before {@code positionUs} in the period, if that ad
* unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code positionUs} has * group is unplayed. Returns {@link C#INDEX_UNSET} if the ad group at or before {@code
* no ads remaining to be played, or if there is no such ad group. * positionUs} has no ads remaining to be played, or if there is no such ad group.
* *
* @param positionUs The position at or before which to find an ad group, in microseconds. * @param positionUs The period position at or before which to find an ad group, in
* microseconds.
* @return The index of the ad group, or {@link C#INDEX_UNSET}. * @return The index of the ad group, or {@link C#INDEX_UNSET}.
*/ */
public int getAdGroupIndexForPositionUs(long positionUs) { public int getAdGroupIndexForPositionUs(long positionUs) {
return adPlaybackState.getAdGroupIndexForPositionUs(positionUs); return adPlaybackState.getAdGroupIndexForPositionUs(positionUs, durationUs);
} }
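Since the lookup now also takes the period duration into account (so post-roll groups resolve correctly), callers pass a position in period time. A hedged sketch, assuming a Player named "player"; the millisecond-to-microsecond conversion and the window-to-period offset are the only non-obvious steps:

// Find the unplayed ad group at or before the current content position.
Timeline.Period period = new Timeline.Period();
player.getCurrentTimeline().getPeriod(player.getCurrentPeriodIndex(), period);
long periodPositionUs =
    C.msToUs(player.getContentPosition()) - period.getPositionInWindowUs();
int adGroupIndex = period.getAdGroupIndexForPositionUs(periodPositionUs);
if (adGroupIndex != C.INDEX_UNSET) {
  long adGroupTimeUs = period.getAdGroupTimeUs(adGroupIndex);  // when the ads will play
}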
/** /**
* Returns the index of the next ad group after {@code positionUs} that has ads remaining to be * Returns the index of the next ad group after {@code positionUs} in the period that has ads
* played. Returns {@link C#INDEX_UNSET} if there is no such ad group. * remaining to be played. Returns {@link C#INDEX_UNSET} if there is no such ad group.
* *
* @param positionUs The position after which to find an ad group, in microseconds. * @param positionUs The period position after which to find an ad group, in microseconds.
* @return The index of the ad group, or {@link C#INDEX_UNSET}. * @return The index of the ad group, or {@link C#INDEX_UNSET}.
*/ */
public int getAdGroupIndexAfterPositionUs(long positionUs) { public int getAdGroupIndexAfterPositionUs(long positionUs) {
@@ -506,6 +578,34 @@ public abstract class Timeline {
return adPlaybackState.adResumePositionUs; return adPlaybackState.adResumePositionUs;
} }
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (obj == null || !getClass().equals(obj.getClass())) {
return false;
}
Period that = (Period) obj;
return Util.areEqual(id, that.id)
&& Util.areEqual(uid, that.uid)
&& windowIndex == that.windowIndex
&& durationUs == that.durationUs
&& positionInWindowUs == that.positionInWindowUs
&& Util.areEqual(adPlaybackState, that.adPlaybackState);
}
@Override
public int hashCode() {
int result = 7;
result = 31 * result + (id == null ? 0 : id.hashCode());
result = 31 * result + (uid == null ? 0 : uid.hashCode());
result = 31 * result + windowIndex;
result = 31 * result + (int) (durationUs ^ (durationUs >>> 32));
result = 31 * result + (int) (positionInWindowUs ^ (positionInWindowUs >>> 32));
result = 31 * result + (adPlaybackState == null ? 0 : adPlaybackState.hashCode());
return result;
}
} }
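As a hedged sketch of the ad-related accessors documented above (assuming a Period instance that has already been populated from a timeline, e.g. as in the previous sketch):

// Enumerate the ad groups defined on a populated Period.
for (int adGroupIndex = 0; adGroupIndex < period.getAdGroupCount(); adGroupIndex++) {
  long adGroupTimeUs = period.getAdGroupTimeUs(adGroupIndex);  // C.TIME_END_OF_SOURCE for a post-roll
  boolean alreadyPlayed = period.hasPlayedAdGroup(adGroupIndex);
  int adCount = period.getAdCountInAdGroup(adGroupIndex);      // C.LENGTH_UNSET until known
}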
/** An empty timeline. */ /** An empty timeline. */
@@ -518,8 +618,7 @@ public abstract class Timeline {
} }
@Override @Override
public Window getWindow( public Window getWindow(int windowIndex, Window window, long defaultPositionProjectionUs) {
int windowIndex, Window window, boolean setTag, long defaultPositionProjectionUs) {
throw new IndexOutOfBoundsException(); throw new IndexOutOfBoundsException();
} }
@@ -631,28 +730,20 @@ public abstract class Timeline {
} }
/** /**
* Populates a {@link Window} with data for the window at the specified index. Does not populate * Populates a {@link Window} with data for the window at the specified index.
* {@link Window#tag}.
* *
* @param windowIndex The index of the window. * @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null. * @param window The {@link Window} to populate. Must not be null.
* @return The populated {@link Window}, for convenience. * @return The populated {@link Window}, for convenience.
*/ */
public final Window getWindow(int windowIndex, Window window) { public final Window getWindow(int windowIndex, Window window) {
return getWindow(windowIndex, window, false); return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0);
} }
/** /** @deprecated Use {@link #getWindow(int, Window)} instead. Tags will always be set. */
* Populates a {@link Window} with data for the window at the specified index. @Deprecated
*
* @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null.
* @param setTag Whether {@link Window#tag} should be populated. If false, the field will be set
* to null. The caller should pass false for efficiency reasons unless the field is required.
* @return The populated {@link Window}, for convenience.
*/
public final Window getWindow(int windowIndex, Window window, boolean setTag) { public final Window getWindow(int windowIndex, Window window, boolean setTag) {
return getWindow(windowIndex, window, setTag, 0); return getWindow(windowIndex, window, /* defaultPositionProjectionUs= */ 0);
} }
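For call sites affected by the removal of the setTag parameter, the migration is mechanical (a sketch; windowIndex and window are whatever the caller already has):

// Before this change (tag only populated when requested):
//   timeline.getWindow(windowIndex, window, /* setTag= */ true);
// After this change (Window.tag is always populated):
timeline.getWindow(windowIndex, window);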
/** /**
@@ -660,14 +751,12 @@ public abstract class Timeline {
* *
* @param windowIndex The index of the window. * @param windowIndex The index of the window.
* @param window The {@link Window} to populate. Must not be null. * @param window The {@link Window} to populate. Must not be null.
* @param setTag Whether {@link Window#tag} should be populated. If false, the field will be set
* to null. The caller should pass false for efficiency reasons unless the field is required.
* @param defaultPositionProjectionUs A duration into the future that the populated window's * @param defaultPositionProjectionUs A duration into the future that the populated window's
* default start position should be projected. * default start position should be projected.
* @return The populated {@link Window}, for convenience. * @return The populated {@link Window}, for convenience.
*/ */
public abstract Window getWindow( public abstract Window getWindow(
int windowIndex, Window window, boolean setTag, long defaultPositionProjectionUs); int windowIndex, Window window, long defaultPositionProjectionUs);
/** /**
* Returns the number of periods in the timeline. * Returns the number of periods in the timeline.
@@ -748,7 +837,7 @@ public abstract class Timeline {
long windowPositionUs, long windowPositionUs,
long defaultPositionProjectionUs) { long defaultPositionProjectionUs) {
Assertions.checkIndex(windowIndex, 0, getWindowCount()); Assertions.checkIndex(windowIndex, 0, getWindowCount());
getWindow(windowIndex, window, false, defaultPositionProjectionUs); getWindow(windowIndex, window, defaultPositionProjectionUs);
if (windowPositionUs == C.TIME_UNSET) { if (windowPositionUs == C.TIME_UNSET) {
windowPositionUs = window.getDefaultPositionUs(); windowPositionUs = window.getDefaultPositionUs();
if (windowPositionUs == C.TIME_UNSET) { if (windowPositionUs == C.TIME_UNSET) {
@@ -802,8 +891,8 @@ public abstract class Timeline {
public abstract Period getPeriod(int periodIndex, Period period, boolean setIds); public abstract Period getPeriod(int periodIndex, Period period, boolean setIds);
/** /**
* Returns the index of the period identified by its unique {@code id}, or {@link C#INDEX_UNSET} * Returns the index of the period identified by its unique {@link Period#uid}, or {@link
* if the period is not in the timeline. * C#INDEX_UNSET} if the period is not in the timeline.
* *
* @param uid A unique identifier for a period. * @param uid A unique identifier for a period.
* @return The index of the period, or {@link C#INDEX_UNSET} if the period was not found. * @return The index of the period, or {@link C#INDEX_UNSET} if the period was not found.
@@ -817,4 +906,50 @@ public abstract class Timeline {
* @return The unique id of the period. * @return The unique id of the period.
*/ */
public abstract Object getUidOfPeriod(int periodIndex); public abstract Object getUidOfPeriod(int periodIndex);
@Override
public boolean equals(@Nullable Object obj) {
if (this == obj) {
return true;
}
if (!(obj instanceof Timeline)) {
return false;
}
Timeline other = (Timeline) obj;
if (other.getWindowCount() != getWindowCount() || other.getPeriodCount() != getPeriodCount()) {
return false;
}
Timeline.Window window = new Timeline.Window();
Timeline.Period period = new Timeline.Period();
Timeline.Window otherWindow = new Timeline.Window();
Timeline.Period otherPeriod = new Timeline.Period();
for (int i = 0; i < getWindowCount(); i++) {
if (!getWindow(i, window).equals(other.getWindow(i, otherWindow))) {
return false;
}
}
for (int i = 0; i < getPeriodCount(); i++) {
if (!getPeriod(i, period, /* setIds= */ true)
.equals(other.getPeriod(i, otherPeriod, /* setIds= */ true))) {
return false;
}
}
return true;
}
@Override
public int hashCode() {
Window window = new Window();
Period period = new Period();
int result = 7;
result = 31 * result + getWindowCount();
for (int i = 0; i < getWindowCount(); i++) {
result = 31 * result + getWindow(i, window).hashCode();
}
result = 31 * result + getPeriodCount();
for (int i = 0; i < getPeriodCount(); i++) {
result = 31 * result + getPeriod(i, period, /* setIds= */ true).hashCode();
}
return result;
}
} }
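The equals/hashCode implementations added above give timelines value semantics, which callers can use to skip redundant work on timeline updates. A hedged sketch inside an assumed Player.EventListener implementation, with lastTimeline as an assumed field of the enclosing class:

// Skip downstream refreshes when the reported timeline is structurally identical.
@Override
public void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) {
  if (timeline.equals(lastTimeline)) {
    return;  // Same windows and periods; nothing derived from the timeline changed.
  }
  lastTimeline = timeline;
  // ... rebuild whatever state depends on the timeline ...
}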

View File

@@ -0,0 +1,101 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.annotation.SuppressLint;
import android.content.Context;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Log;
/**
* Handles a {@link WakeLock}.
*
* <p>The handling of wake locks requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*/
/* package */ final class WakeLockManager {
private static final String TAG = "WakeLockManager";
private static final String WAKE_LOCK_TAG = "ExoPlayer:WakeLockManager";
@Nullable private final PowerManager powerManager;
@Nullable private WakeLock wakeLock;
private boolean enabled;
private boolean stayAwake;
public WakeLockManager(Context context) {
powerManager =
(PowerManager) context.getApplicationContext().getSystemService(Context.POWER_SERVICE);
}
/**
* Sets whether to enable the acquiring and releasing of the {@link WakeLock}.
*
* <p>By default, wake lock handling is not enabled. Enabling this will acquire the wake lock if
* necessary. Disabling this will release the wake lock if it is held.
*
* <p>Enabling the {@link WakeLock} requires the {@link android.Manifest.permission#WAKE_LOCK} permission.
*
* @param enabled True if the player should handle a {@link WakeLock}, false otherwise.
*/
public void setEnabled(boolean enabled) {
if (enabled) {
if (wakeLock == null) {
if (powerManager == null) {
Log.w(TAG, "PowerManager is null, therefore not creating the WakeLock.");
return;
}
wakeLock = powerManager.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, WAKE_LOCK_TAG);
wakeLock.setReferenceCounted(false);
}
}
this.enabled = enabled;
updateWakeLock();
}
/**
* Sets whether to acquire or release the {@link WakeLock}.
*
* <p>This method has no effect on the {@link WakeLock} unless wake lock handling has first been
* enabled through {@link #setEnabled(boolean)}.
*
* @param stayAwake True if the player should acquire the {@link WakeLock}. False if the player
* should release.
*/
public void setStayAwake(boolean stayAwake) {
this.stayAwake = stayAwake;
updateWakeLock();
}
// WakelockTimeout is suppressed because how long the wake lock will be needed is unknown (e.g.
// listening to radio with the screen off for several hours), so we cannot determine a reasonable
// timeout that would not affect the user.
@SuppressLint("WakelockTimeout")
private void updateWakeLock() {
if (wakeLock == null) {
return;
}
if (enabled && stayAwake) {
wakeLock.acquire();
} else {
wakeLock.release();
}
}
}
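A hedged usage sketch for this package-private helper (the caller is assumed to live in the same package, e.g. the player implementation; "context" and "isPlaying" are assumptions):

// Keep the device awake only while playback is actually ongoing.
WakeLockManager wakeLockManager = new WakeLockManager(context);
wakeLockManager.setEnabled(true);          // opt in; requires the WAKE_LOCK permission
wakeLockManager.setStayAwake(isPlaying);   // acquire while playing, release otherwise
// ... on player release:
wakeLockManager.setEnabled(false);         // releases the wake lock if held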

View File

@@ -0,0 +1,94 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2;
import android.content.Context;
import android.net.wifi.WifiManager;
import android.net.wifi.WifiManager.WifiLock;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.util.Log;
/**
* Handles a {@link WifiLock}.
*
* <p>The handling of wifi locks requires the {@link android.Manifest.permission#WAKE_LOCK}
* permission.
*/
/* package */ final class WifiLockManager {
private static final String TAG = "WifiLockManager";
private static final String WIFI_LOCK_TAG = "ExoPlayer:WifiLockManager";
@Nullable private final WifiManager wifiManager;
@Nullable private WifiLock wifiLock;
private boolean enabled;
private boolean stayAwake;
public WifiLockManager(Context context) {
wifiManager =
(WifiManager) context.getApplicationContext().getSystemService(Context.WIFI_SERVICE);
}
/**
* Sets whether to enable the usage of a {@link WifiLock}.
*
* <p>By default, wifi lock handling is not enabled. Enabling will acquire the wifi lock if
* necessary. Disabling will release the wifi lock if held.
*
* <p>Enabling the {@link WifiLock} requires the {@link android.Manifest.permission#WAKE_LOCK} permission.
*
* @param enabled True if the player should handle a {@link WifiLock}.
*/
public void setEnabled(boolean enabled) {
if (enabled && wifiLock == null) {
if (wifiManager == null) {
Log.w(TAG, "WifiManager is null, therefore not creating the WifiLock.");
return;
}
wifiLock = wifiManager.createWifiLock(WifiManager.WIFI_MODE_FULL_HIGH_PERF, WIFI_LOCK_TAG);
wifiLock.setReferenceCounted(false);
}
this.enabled = enabled;
updateWifiLock();
}
/**
* Sets whether to acquire or release the {@link WifiLock}.
*
* <p>The wifi lock will not be acquired unless handling has been enabled through {@link
* #setEnabled(boolean)}.
*
* @param stayAwake True if the player should acquire the {@link WifiLock}. False if it should
* release.
*/
public void setStayAwake(boolean stayAwake) {
this.stayAwake = stayAwake;
updateWifiLock();
}
private void updateWifiLock() {
if (wifiLock == null) {
return;
}
if (enabled && stayAwake) {
wifiLock.acquire();
} else {
wifiLock.release();
}
}
}
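The wifi counterpart is driven the same way and is typically toggled together with WakeLockManager so that audio-only network playback can continue with the screen off (a sketch under the same assumptions as above):

// Hold a high-performance wifi lock while streaming.
WifiLockManager wifiLockManager = new WifiLockManager(context);
wifiLockManager.setEnabled(true);          // also covered by the WAKE_LOCK permission
wifiLockManager.setStayAwake(isPlaying);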

View File

@@ -15,10 +15,9 @@
*/ */
package com.google.android.exoplayer2.analytics; package com.google.android.exoplayer2.analytics;
import androidx.annotation.Nullable;
import android.graphics.SurfaceTexture; import android.graphics.SurfaceTexture;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
@@ -70,23 +69,6 @@ public class AnalyticsCollector
VideoListener, VideoListener,
AudioListener { AudioListener {
/** Factory for an analytics collector. */
public static class Factory {
/**
* Creates an analytics collector for the specified player.
*
* @param player The {@link Player} for which data will be collected. Can be null, if the player
* is set by calling {@link AnalyticsCollector#setPlayer(Player)} before using the analytics
* collector.
* @param clock A {@link Clock} used to generate timestamps.
* @return An analytics collector.
*/
public AnalyticsCollector createAnalyticsCollector(@Nullable Player player, Clock clock) {
return new AnalyticsCollector(player, clock);
}
}
private final CopyOnWriteArraySet<AnalyticsListener> listeners; private final CopyOnWriteArraySet<AnalyticsListener> listeners;
private final Clock clock; private final Clock clock;
private final Window window; private final Window window;
@@ -95,17 +77,11 @@ public class AnalyticsCollector
private @MonotonicNonNull Player player; private @MonotonicNonNull Player player;
/** /**
* Creates an analytics collector for the specified player. * Creates an analytics collector.
* *
* @param player The {@link Player} for which data will be collected. Can be null, if the player
* is set by calling {@link AnalyticsCollector#setPlayer(Player)} before using the analytics
* collector.
* @param clock A {@link Clock} used to generate timestamps. * @param clock A {@link Clock} used to generate timestamps.
*/ */
protected AnalyticsCollector(@Nullable Player player, Clock clock) { public AnalyticsCollector(Clock clock) {
if (player != null) {
this.player = player;
}
this.clock = Assertions.checkNotNull(clock); this.clock = Assertions.checkNotNull(clock);
listeners = new CopyOnWriteArraySet<>(); listeners = new CopyOnWriteArraySet<>();
mediaPeriodQueueTracker = new MediaPeriodQueueTracker(); mediaPeriodQueueTracker = new MediaPeriodQueueTracker();
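With the Factory removed and the constructor no longer taking a Player, a collector is now constructed directly and attached once the player exists. A sketch ("player" and "myAnalyticsListener" are assumptions; Clock.DEFAULT is the standard clock from com.google.android.exoplayer2.util.Clock):

// New construction pattern after this change:
AnalyticsCollector analyticsCollector = new AnalyticsCollector(Clock.DEFAULT);
analyticsCollector.setPlayer(player);              // must happen before events are reported
analyticsCollector.addListener(myAnalyticsListener);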
@@ -450,8 +426,7 @@ public class AnalyticsCollector
// having slightly different real times. // having slightly different real times.
@Override @Override
public final void onTimelineChanged( public final void onTimelineChanged(Timeline timeline, @Player.TimelineChangeReason int reason) {
Timeline timeline, @Nullable Object manifest, @Player.TimelineChangeReason int reason) {
mediaPeriodQueueTracker.onTimelineChanged(timeline); mediaPeriodQueueTracker.onTimelineChanged(timeline);
EventTime eventTime = generatePlayingMediaPeriodEventTime(); EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) { for (AnalyticsListener listener : listeners) {
@@ -477,7 +452,7 @@ public class AnalyticsCollector
} }
@Override @Override
public final void onPlayerStateChanged(boolean playWhenReady, int playbackState) { public final void onPlayerStateChanged(boolean playWhenReady, @Player.State int playbackState) {
EventTime eventTime = generatePlayingMediaPeriodEventTime(); EventTime eventTime = generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) { for (AnalyticsListener listener : listeners) {
listener.onPlayerStateChanged(eventTime, playWhenReady, playbackState); listener.onPlayerStateChanged(eventTime, playWhenReady, playbackState);
@@ -519,10 +494,7 @@ public class AnalyticsCollector
@Override @Override
public final void onPlayerError(ExoPlaybackException error) { public final void onPlayerError(ExoPlaybackException error) {
EventTime eventTime = EventTime eventTime = generateLastReportedPlayingMediaPeriodEventTime();
error.type == ExoPlaybackException.TYPE_SOURCE
? generateLoadingMediaPeriodEventTime()
: generatePlayingMediaPeriodEventTime();
for (AnalyticsListener listener : listeners) { for (AnalyticsListener listener : listeners) {
listener.onPlayerError(eventTime, error); listener.onPlayerError(eventTime, error);
} }
@@ -717,8 +689,9 @@ public class AnalyticsCollector
private final HashMap<MediaPeriodId, MediaPeriodInfo> mediaPeriodIdToInfo; private final HashMap<MediaPeriodId, MediaPeriodInfo> mediaPeriodIdToInfo;
private final Period period; private final Period period;
private @Nullable MediaPeriodInfo lastReportedPlayingMediaPeriod; @Nullable private MediaPeriodInfo lastPlayingMediaPeriod;
private @Nullable MediaPeriodInfo readingMediaPeriod; @Nullable private MediaPeriodInfo lastReportedPlayingMediaPeriod;
@Nullable private MediaPeriodInfo readingMediaPeriod;
private Timeline timeline; private Timeline timeline;
private boolean isSeeking; private boolean isSeeking;
@@ -736,7 +709,8 @@ public class AnalyticsCollector
* always return null to reflect the uncertainty about the current playing period. May also be * always return null to reflect the uncertainty about the current playing period. May also be
* null, if the timeline is empty or no media period is active yet. * null, if the timeline is empty or no media period is active yet.
*/ */
public @Nullable MediaPeriodInfo getPlayingMediaPeriod() { @Nullable
public MediaPeriodInfo getPlayingMediaPeriod() {
return mediaPeriodInfoQueue.isEmpty() || timeline.isEmpty() || isSeeking return mediaPeriodInfoQueue.isEmpty() || timeline.isEmpty() || isSeeking
? null ? null
: mediaPeriodInfoQueue.get(0); : mediaPeriodInfoQueue.get(0);
@@ -749,7 +723,8 @@ public class AnalyticsCollector
* reported until the seek or preparation is processed. May be null, if no media period is * reported until the seek or preparation is processed. May be null, if no media period is
* active yet. * active yet.
*/ */
public @Nullable MediaPeriodInfo getLastReportedPlayingMediaPeriod() { @Nullable
public MediaPeriodInfo getLastReportedPlayingMediaPeriod() {
return lastReportedPlayingMediaPeriod; return lastReportedPlayingMediaPeriod;
} }
@@ -757,7 +732,8 @@ public class AnalyticsCollector
* Returns the {@link MediaPeriodInfo} of the media period currently being read by the player. * Returns the {@link MediaPeriodInfo} of the media period currently being read by the player.
* May be null, if the player is not reading a media period. * May be null, if the player is not reading a media period.
*/ */
public @Nullable MediaPeriodInfo getReadingMediaPeriod() { @Nullable
public MediaPeriodInfo getReadingMediaPeriod() {
return readingMediaPeriod; return readingMediaPeriod;
} }
@@ -766,14 +742,16 @@ public class AnalyticsCollector
* currently loading or will be the next one loading. May be null, if no media period is active * currently loading or will be the next one loading. May be null, if no media period is active
* yet. * yet.
*/ */
public @Nullable MediaPeriodInfo getLoadingMediaPeriod() { @Nullable
public MediaPeriodInfo getLoadingMediaPeriod() {
return mediaPeriodInfoQueue.isEmpty() return mediaPeriodInfoQueue.isEmpty()
? null ? null
: mediaPeriodInfoQueue.get(mediaPeriodInfoQueue.size() - 1); : mediaPeriodInfoQueue.get(mediaPeriodInfoQueue.size() - 1);
} }
/** Returns the {@link MediaPeriodInfo} for the given {@link MediaPeriodId}. */ /** Returns the {@link MediaPeriodInfo} for the given {@link MediaPeriodId}. */
public @Nullable MediaPeriodInfo getMediaPeriodInfo(MediaPeriodId mediaPeriodId) { @Nullable
public MediaPeriodInfo getMediaPeriodInfo(MediaPeriodId mediaPeriodId) {
return mediaPeriodIdToInfo.get(mediaPeriodId); return mediaPeriodIdToInfo.get(mediaPeriodId);
} }
@@ -786,7 +764,8 @@ public class AnalyticsCollector
* Tries to find an existing media period info from the specified window index. Only returns a * Tries to find an existing media period info from the specified window index. Only returns a
* non-null media period info if there is a unique, unambiguous match. * non-null media period info if there is a unique, unambiguous match.
*/ */
public @Nullable MediaPeriodInfo tryResolveWindowIndex(int windowIndex) { @Nullable
public MediaPeriodInfo tryResolveWindowIndex(int windowIndex) {
MediaPeriodInfo match = null; MediaPeriodInfo match = null;
for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) { for (int i = 0; i < mediaPeriodInfoQueue.size(); i++) {
MediaPeriodInfo info = mediaPeriodInfoQueue.get(i); MediaPeriodInfo info = mediaPeriodInfoQueue.get(i);
@@ -805,7 +784,7 @@ public class AnalyticsCollector
/** Updates the queue with a reported position discontinuity. */ /** Updates the queue with a reported position discontinuity. */
public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) { public void onPositionDiscontinuity(@Player.DiscontinuityReason int reason) {
updateLastReportedPlayingMediaPeriod(); lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
} }
/** Updates the queue with a reported timeline change. */ /** Updates the queue with a reported timeline change. */
@@ -820,7 +799,7 @@ public class AnalyticsCollector
readingMediaPeriod = updateMediaPeriodInfoToNewTimeline(readingMediaPeriod, timeline); readingMediaPeriod = updateMediaPeriodInfoToNewTimeline(readingMediaPeriod, timeline);
} }
this.timeline = timeline; this.timeline = timeline;
updateLastReportedPlayingMediaPeriod(); lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
} }
/** Updates the queue with a reported start of seek. */ /** Updates the queue with a reported start of seek. */
@@ -831,18 +810,23 @@ public class AnalyticsCollector
/** Updates the queue with a reported processed seek. */ /** Updates the queue with a reported processed seek. */
public void onSeekProcessed() { public void onSeekProcessed() {
isSeeking = false; isSeeking = false;
updateLastReportedPlayingMediaPeriod(); lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
} }
/** Updates the queue with a newly created media period. */ /** Updates the queue with a newly created media period. */
public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) { public void onMediaPeriodCreated(int windowIndex, MediaPeriodId mediaPeriodId) {
boolean isInTimeline = timeline.getIndexOfPeriod(mediaPeriodId.periodUid) != C.INDEX_UNSET; int periodIndex = timeline.getIndexOfPeriod(mediaPeriodId.periodUid);
boolean isInTimeline = periodIndex != C.INDEX_UNSET;
MediaPeriodInfo mediaPeriodInfo = MediaPeriodInfo mediaPeriodInfo =
new MediaPeriodInfo(mediaPeriodId, isInTimeline ? timeline : Timeline.EMPTY, windowIndex); new MediaPeriodInfo(
mediaPeriodId,
isInTimeline ? timeline : Timeline.EMPTY,
isInTimeline ? timeline.getPeriod(periodIndex, period).windowIndex : windowIndex);
mediaPeriodInfoQueue.add(mediaPeriodInfo); mediaPeriodInfoQueue.add(mediaPeriodInfo);
mediaPeriodIdToInfo.put(mediaPeriodId, mediaPeriodInfo); mediaPeriodIdToInfo.put(mediaPeriodId, mediaPeriodInfo);
lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0);
if (mediaPeriodInfoQueue.size() == 1 && !timeline.isEmpty()) { if (mediaPeriodInfoQueue.size() == 1 && !timeline.isEmpty()) {
updateLastReportedPlayingMediaPeriod(); lastReportedPlayingMediaPeriod = lastPlayingMediaPeriod;
} }
} }
@@ -860,6 +844,9 @@ public class AnalyticsCollector
if (readingMediaPeriod != null && mediaPeriodId.equals(readingMediaPeriod.mediaPeriodId)) { if (readingMediaPeriod != null && mediaPeriodId.equals(readingMediaPeriod.mediaPeriodId)) {
readingMediaPeriod = mediaPeriodInfoQueue.isEmpty() ? null : mediaPeriodInfoQueue.get(0); readingMediaPeriod = mediaPeriodInfoQueue.isEmpty() ? null : mediaPeriodInfoQueue.get(0);
} }
if (!mediaPeriodInfoQueue.isEmpty()) {
lastPlayingMediaPeriod = mediaPeriodInfoQueue.get(0);
}
return true; return true;
} }
@@ -868,12 +855,6 @@ public class AnalyticsCollector
readingMediaPeriod = mediaPeriodIdToInfo.get(mediaPeriodId); readingMediaPeriod = mediaPeriodIdToInfo.get(mediaPeriodId);
} }
private void updateLastReportedPlayingMediaPeriod() {
if (!mediaPeriodInfoQueue.isEmpty()) {
lastReportedPlayingMediaPeriod = mediaPeriodInfoQueue.get(0);
}
}
private MediaPeriodInfo updateMediaPeriodInfoToNewTimeline( private MediaPeriodInfo updateMediaPeriodInfoToNewTimeline(
MediaPeriodInfo info, Timeline newTimeline) { MediaPeriodInfo info, Timeline newTimeline) {
int newPeriodIndex = newTimeline.getIndexOfPeriod(info.mediaPeriodId.periodUid); int newPeriodIndex = newTimeline.getIndexOfPeriod(info.mediaPeriodId.periodUid);

View File

@@ -15,8 +15,8 @@
*/ */
package com.google.android.exoplayer2.analytics; package com.google.android.exoplayer2.analytics;
import androidx.annotation.Nullable;
import android.view.Surface; import android.view.Surface;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
@@ -69,7 +69,7 @@ public interface AnalyticsListener {
* Media period identifier for the media period this event belongs to, or {@code null} if the * Media period identifier for the media period this event belongs to, or {@code null} if the
* event is not associated with a specific media period. * event is not associated with a specific media period.
*/ */
public final @Nullable MediaPeriodId mediaPeriodId; @Nullable public final MediaPeriodId mediaPeriodId;
/** /**
* Position in the window or ad this event belongs to at the time of the event, in milliseconds. * Position in the window or ad this event belongs to at the time of the event, in milliseconds.
@@ -128,10 +128,10 @@ public interface AnalyticsListener {
* *
* @param eventTime The event time. * @param eventTime The event time.
* @param playWhenReady Whether the playback will proceed when ready. * @param playWhenReady Whether the playback will proceed when ready.
* @param playbackState One of the {@link Player}.STATE constants. * @param playbackState The new {@link Player.State playback state}.
*/ */
default void onPlayerStateChanged( default void onPlayerStateChanged(
EventTime eventTime, boolean playWhenReady, int playbackState) {} EventTime eventTime, boolean playWhenReady, @Player.State int playbackState) {}
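A minimal listener sketch using the now @Player.State-annotated parameter (the rebuffer counting is illustration only):

AnalyticsListener listener = new AnalyticsListener() {
  @Override
  public void onPlayerStateChanged(
      EventTime eventTime, boolean playWhenReady, @Player.State int playbackState) {
    if (playbackState == Player.STATE_BUFFERING) {
      // e.g. count rebuffers here
    }
  }
};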
/** /**
* Called when playback suppression reason changed. * Called when playback suppression reason changed.

View File

@@ -0,0 +1,371 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import android.util.Base64;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Player.DiscontinuityReason;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Random;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
/**
* Default {@link PlaybackSessionManager} which instantiates a new session for each window in the
* timeline and also for each ad within the windows.
*
* <p>Sessions are identified by Base64-encoded, URL-safe, random strings.
*/
public final class DefaultPlaybackSessionManager implements PlaybackSessionManager {
private static final Random RANDOM = new Random();
private static final int SESSION_ID_LENGTH = 12;
private final Timeline.Window window;
private final Timeline.Period period;
private final HashMap<String, SessionDescriptor> sessions;
private @MonotonicNonNull Listener listener;
private Timeline currentTimeline;
@Nullable private String currentSessionId;
/** Creates session manager. */
public DefaultPlaybackSessionManager() {
window = new Timeline.Window();
period = new Timeline.Period();
sessions = new HashMap<>();
currentTimeline = Timeline.EMPTY;
}
@Override
public void setListener(Listener listener) {
this.listener = listener;
}
@Override
public synchronized String getSessionForMediaPeriodId(
Timeline timeline, MediaPeriodId mediaPeriodId) {
int windowIndex = timeline.getPeriodByUid(mediaPeriodId.periodUid, period).windowIndex;
return getOrAddSession(windowIndex, mediaPeriodId).sessionId;
}
@Override
public synchronized boolean belongsToSession(EventTime eventTime, String sessionId) {
SessionDescriptor sessionDescriptor = sessions.get(sessionId);
if (sessionDescriptor == null) {
return false;
}
sessionDescriptor.maybeSetWindowSequenceNumber(eventTime.windowIndex, eventTime.mediaPeriodId);
return sessionDescriptor.belongsToSession(eventTime.windowIndex, eventTime.mediaPeriodId);
}
@Override
public synchronized void updateSessions(EventTime eventTime) {
Assertions.checkNotNull(listener);
@Nullable SessionDescriptor currentSession = sessions.get(currentSessionId);
if (eventTime.mediaPeriodId != null && currentSession != null) {
// If we receive an event associated with a media period, then it needs to be either part of
// the current window if it's the first created media period, or a window that will be played
// in the future. Otherwise, we know that it belongs to a session that was already finished
// and we can ignore the event.
boolean isAlreadyFinished =
currentSession.windowSequenceNumber == C.INDEX_UNSET
? currentSession.windowIndex != eventTime.windowIndex
: eventTime.mediaPeriodId.windowSequenceNumber < currentSession.windowSequenceNumber;
if (isAlreadyFinished) {
return;
}
}
SessionDescriptor eventSession =
getOrAddSession(eventTime.windowIndex, eventTime.mediaPeriodId);
if (currentSessionId == null) {
currentSessionId = eventSession.sessionId;
}
if (!eventSession.isCreated) {
eventSession.isCreated = true;
listener.onSessionCreated(eventTime, eventSession.sessionId);
}
if (eventSession.sessionId.equals(currentSessionId) && !eventSession.isActive) {
eventSession.isActive = true;
listener.onSessionActive(eventTime, eventSession.sessionId);
}
}
@Override
public synchronized void handleTimelineUpdate(EventTime eventTime) {
Assertions.checkNotNull(listener);
Timeline previousTimeline = currentTimeline;
currentTimeline = eventTime.timeline;
Iterator<SessionDescriptor> iterator = sessions.values().iterator();
while (iterator.hasNext()) {
SessionDescriptor session = iterator.next();
if (!session.tryResolvingToNewTimeline(previousTimeline, currentTimeline)) {
iterator.remove();
if (session.isCreated) {
if (session.sessionId.equals(currentSessionId)) {
currentSessionId = null;
}
listener.onSessionFinished(
eventTime, session.sessionId, /* automaticTransitionToNextPlayback= */ false);
}
}
}
handlePositionDiscontinuity(eventTime, Player.DISCONTINUITY_REASON_INTERNAL);
}
@Override
public synchronized void handlePositionDiscontinuity(
EventTime eventTime, @DiscontinuityReason int reason) {
Assertions.checkNotNull(listener);
boolean hasAutomaticTransition =
reason == Player.DISCONTINUITY_REASON_PERIOD_TRANSITION
|| reason == Player.DISCONTINUITY_REASON_AD_INSERTION;
Iterator<SessionDescriptor> iterator = sessions.values().iterator();
while (iterator.hasNext()) {
SessionDescriptor session = iterator.next();
if (session.isFinishedAtEventTime(eventTime)) {
iterator.remove();
if (session.isCreated) {
boolean isRemovingCurrentSession = session.sessionId.equals(currentSessionId);
boolean isAutomaticTransition =
hasAutomaticTransition && isRemovingCurrentSession && session.isActive;
if (isRemovingCurrentSession) {
currentSessionId = null;
}
listener.onSessionFinished(eventTime, session.sessionId, isAutomaticTransition);
}
}
}
@Nullable SessionDescriptor previousSessionDescriptor = sessions.get(currentSessionId);
SessionDescriptor currentSessionDescriptor =
getOrAddSession(eventTime.windowIndex, eventTime.mediaPeriodId);
currentSessionId = currentSessionDescriptor.sessionId;
if (eventTime.mediaPeriodId != null
&& eventTime.mediaPeriodId.isAd()
&& (previousSessionDescriptor == null
|| previousSessionDescriptor.windowSequenceNumber
!= eventTime.mediaPeriodId.windowSequenceNumber
|| previousSessionDescriptor.adMediaPeriodId == null
|| previousSessionDescriptor.adMediaPeriodId.adGroupIndex
!= eventTime.mediaPeriodId.adGroupIndex
|| previousSessionDescriptor.adMediaPeriodId.adIndexInAdGroup
!= eventTime.mediaPeriodId.adIndexInAdGroup)) {
// New ad playback started. Find corresponding content session and notify ad playback started.
MediaPeriodId contentMediaPeriodId =
new MediaPeriodId(
eventTime.mediaPeriodId.periodUid, eventTime.mediaPeriodId.windowSequenceNumber);
SessionDescriptor contentSession =
getOrAddSession(eventTime.windowIndex, contentMediaPeriodId);
if (contentSession.isCreated && currentSessionDescriptor.isCreated) {
listener.onAdPlaybackStarted(
eventTime, contentSession.sessionId, currentSessionDescriptor.sessionId);
}
}
}
@Override
public void finishAllSessions(EventTime eventTime) {
currentSessionId = null;
Iterator<SessionDescriptor> iterator = sessions.values().iterator();
while (iterator.hasNext()) {
SessionDescriptor session = iterator.next();
iterator.remove();
if (session.isCreated && listener != null) {
listener.onSessionFinished(
eventTime, session.sessionId, /* automaticTransitionToNextPlayback= */ false);
}
}
}
private SessionDescriptor getOrAddSession(
int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
// There should only be one matching session if mediaPeriodId is non-null. If mediaPeriodId is
// null, there may be multiple matching sessions with different window sequence numbers or
// adMediaPeriodIds. The best match is the one with the smaller window sequence number, and for
// windows with ads, the content session is preferred over ad sessions.
SessionDescriptor bestMatch = null;
long bestMatchWindowSequenceNumber = Long.MAX_VALUE;
for (SessionDescriptor sessionDescriptor : sessions.values()) {
sessionDescriptor.maybeSetWindowSequenceNumber(windowIndex, mediaPeriodId);
if (sessionDescriptor.belongsToSession(windowIndex, mediaPeriodId)) {
long windowSequenceNumber = sessionDescriptor.windowSequenceNumber;
if (windowSequenceNumber == C.INDEX_UNSET
|| windowSequenceNumber < bestMatchWindowSequenceNumber) {
bestMatch = sessionDescriptor;
bestMatchWindowSequenceNumber = windowSequenceNumber;
} else if (windowSequenceNumber == bestMatchWindowSequenceNumber
&& Util.castNonNull(bestMatch).adMediaPeriodId != null
&& sessionDescriptor.adMediaPeriodId != null) {
bestMatch = sessionDescriptor;
}
}
}
if (bestMatch == null) {
String sessionId = generateSessionId();
bestMatch = new SessionDescriptor(sessionId, windowIndex, mediaPeriodId);
sessions.put(sessionId, bestMatch);
}
return bestMatch;
}
private static String generateSessionId() {
byte[] randomBytes = new byte[SESSION_ID_LENGTH];
RANDOM.nextBytes(randomBytes);
return Base64.encodeToString(randomBytes, Base64.URL_SAFE | Base64.NO_WRAP);
}
/**
* Descriptor for a session.
*
* <p>The session may be described in one of three ways:
*
* <ul>
* <li>A window index with unset window sequence number and a null ad media period id
* <li>A content window with index and sequence number, but a null ad media period id.
* <li>An ad with all values set.
* </ul>
*/
private final class SessionDescriptor {
private final String sessionId;
private int windowIndex;
private long windowSequenceNumber;
private @MonotonicNonNull MediaPeriodId adMediaPeriodId;
private boolean isCreated;
private boolean isActive;
public SessionDescriptor(
String sessionId, int windowIndex, @Nullable MediaPeriodId mediaPeriodId) {
this.sessionId = sessionId;
this.windowIndex = windowIndex;
this.windowSequenceNumber =
mediaPeriodId == null ? C.INDEX_UNSET : mediaPeriodId.windowSequenceNumber;
if (mediaPeriodId != null && mediaPeriodId.isAd()) {
this.adMediaPeriodId = mediaPeriodId;
}
}
public boolean tryResolvingToNewTimeline(Timeline oldTimeline, Timeline newTimeline) {
windowIndex = resolveWindowIndexToNewTimeline(oldTimeline, newTimeline, windowIndex);
if (windowIndex == C.INDEX_UNSET) {
return false;
}
if (adMediaPeriodId == null) {
return true;
}
int newPeriodIndex = newTimeline.getIndexOfPeriod(adMediaPeriodId.periodUid);
return newPeriodIndex != C.INDEX_UNSET;
}
public boolean belongsToSession(
int eventWindowIndex, @Nullable MediaPeriodId eventMediaPeriodId) {
if (eventMediaPeriodId == null) {
// Events without concrete media period id are for all sessions of the same window.
return eventWindowIndex == windowIndex;
}
if (adMediaPeriodId == null) {
// If this is a content session, only events for content with the same window sequence
// number belong to this session.
return !eventMediaPeriodId.isAd()
&& eventMediaPeriodId.windowSequenceNumber == windowSequenceNumber;
}
// If this is an ad session, only events for this ad belong to the session.
return eventMediaPeriodId.windowSequenceNumber == adMediaPeriodId.windowSequenceNumber
&& eventMediaPeriodId.adGroupIndex == adMediaPeriodId.adGroupIndex
&& eventMediaPeriodId.adIndexInAdGroup == adMediaPeriodId.adIndexInAdGroup;
}
public void maybeSetWindowSequenceNumber(
int eventWindowIndex, @Nullable MediaPeriodId eventMediaPeriodId) {
if (windowSequenceNumber == C.INDEX_UNSET
&& eventWindowIndex == windowIndex
&& eventMediaPeriodId != null) {
// Set window sequence number for this session as soon as we have one.
windowSequenceNumber = eventMediaPeriodId.windowSequenceNumber;
}
}
public boolean isFinishedAtEventTime(EventTime eventTime) {
if (windowSequenceNumber == C.INDEX_UNSET) {
// Sessions with unspecified window sequence number are kept until we know more.
return false;
}
if (eventTime.mediaPeriodId == null) {
// For event times without media period id (e.g. after seek to new window), we only keep
// sessions of this window.
return windowIndex != eventTime.windowIndex;
}
if (eventTime.mediaPeriodId.windowSequenceNumber > windowSequenceNumber) {
// All past window sequence numbers are finished.
return true;
}
if (adMediaPeriodId == null) {
// Current or future content is not finished.
return false;
}
int eventPeriodIndex = eventTime.timeline.getIndexOfPeriod(eventTime.mediaPeriodId.periodUid);
int adPeriodIndex = eventTime.timeline.getIndexOfPeriod(adMediaPeriodId.periodUid);
if (eventTime.mediaPeriodId.windowSequenceNumber < adMediaPeriodId.windowSequenceNumber
|| eventPeriodIndex < adPeriodIndex) {
// Ads in future windows or periods are not finished.
return false;
}
if (eventPeriodIndex > adPeriodIndex) {
// Ads in past periods are finished.
return true;
}
if (eventTime.mediaPeriodId.isAd()) {
int eventAdGroup = eventTime.mediaPeriodId.adGroupIndex;
int eventAdIndex = eventTime.mediaPeriodId.adIndexInAdGroup;
// Finished if event is for an ad after this one in the same period.
return eventAdGroup > adMediaPeriodId.adGroupIndex
|| (eventAdGroup == adMediaPeriodId.adGroupIndex
&& eventAdIndex > adMediaPeriodId.adIndexInAdGroup);
} else {
// Finished if the event is for content after this ad.
return eventTime.mediaPeriodId.nextAdGroupIndex == C.INDEX_UNSET
|| eventTime.mediaPeriodId.nextAdGroupIndex > adMediaPeriodId.adGroupIndex;
}
}
private int resolveWindowIndexToNewTimeline(
Timeline oldTimeline, Timeline newTimeline, int windowIndex) {
if (windowIndex >= oldTimeline.getWindowCount()) {
return windowIndex < newTimeline.getWindowCount() ? windowIndex : C.INDEX_UNSET;
}
oldTimeline.getWindow(windowIndex, window);
for (int periodIndex = window.firstPeriodIndex;
periodIndex <= window.lastPeriodIndex;
periodIndex++) {
Object periodUid = oldTimeline.getUidOfPeriod(periodIndex);
int newPeriodIndex = newTimeline.getIndexOfPeriod(periodUid);
if (newPeriodIndex != C.INDEX_UNSET) {
return newTimeline.getPeriod(newPeriodIndex, period).windowIndex;
}
}
return C.INDEX_UNSET;
}
}
}
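A hedged sketch of how an analytics component might drive this manager; "sessionListener" is an assumed PlaybackSessionManager.Listener (one possible implementation is sketched after the PlaybackSessionManager interface below), and "eventTime" / "mediaPeriodId" come from incoming AnalyticsListener events:

PlaybackSessionManager sessionManager = new DefaultPlaybackSessionManager();
sessionManager.setListener(sessionListener);

// For every incoming event:
sessionManager.updateSessions(eventTime);
// Additionally, on timeline changes and position discontinuities:
sessionManager.handleTimelineUpdate(eventTime);
sessionManager.handlePositionDiscontinuity(eventTime, Player.DISCONTINUITY_REASON_SEEK);

// Resolve the session of a media period without triggering listener callbacks:
String sessionId =
    sessionManager.getSessionForMediaPeriodId(eventTime.timeline, mediaPeriodId);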

View File

@@ -0,0 +1,128 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import com.google.android.exoplayer2.Player.DiscontinuityReason;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import com.google.android.exoplayer2.source.MediaSource.MediaPeriodId;
/**
* Manager for active playback sessions.
*
* <p>The manager keeps track of the association between window index and/or media period id and
* session identifier.
*/
public interface PlaybackSessionManager {
/** A listener for session updates. */
interface Listener {
/**
* Called when a new session is created as a result of {@link #updateSessions(EventTime)}.
*
* @param eventTime The {@link EventTime} at which the session is created.
* @param sessionId The identifier of the new session.
*/
void onSessionCreated(EventTime eventTime, String sessionId);
/**
* Called when a session becomes active, i.e. playing in the foreground.
*
* @param eventTime The {@link EventTime} at which the session becomes active.
* @param sessionId The identifier of the session.
*/
void onSessionActive(EventTime eventTime, String sessionId);
/**
* Called when a session is interrupted by ad playback.
*
* @param eventTime The {@link EventTime} at which the ad playback starts.
* @param contentSessionId The session identifier of the content session.
* @param adSessionId The identifier of the ad session.
*/
void onAdPlaybackStarted(EventTime eventTime, String contentSessionId, String adSessionId);
/**
* Called when a session is permanently finished.
*
* @param eventTime The {@link EventTime} at which the session finished.
* @param sessionId The identifier of the finished session.
* @param automaticTransitionToNextPlayback Whether the session finished because of an automatic
* transition to the next playback item.
*/
void onSessionFinished(
EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback);
}
/**
* Sets the listener to be notified of session updates. Must be called before the session manager
* is used.
*
* @param listener The {@link Listener} to be notified of session updates.
*/
void setListener(Listener listener);
/**
* Returns the session identifier for the given media period id.
*
* <p>Note that this will reserve a new session identifier if it doesn't exist yet, but will not
* call any {@link Listener} callbacks.
*
* @param timeline The timeline that {@code mediaPeriodId} is part of.
* @param mediaPeriodId A {@link MediaPeriodId}.
*/
String getSessionForMediaPeriodId(Timeline timeline, MediaPeriodId mediaPeriodId);
/**
* Returns whether an event time belongs to a session.
*
* @param eventTime The {@link EventTime}.
* @param sessionId A session identifier.
* @return Whether the event belongs to the specified session.
*/
boolean belongsToSession(EventTime eventTime, String sessionId);
/**
* Updates or creates sessions based on a player {@link EventTime}.
*
* @param eventTime The {@link EventTime}.
*/
void updateSessions(EventTime eventTime);
/**
* Updates the session associations to a new timeline.
*
* @param eventTime The event time with the timeline change.
*/
void handleTimelineUpdate(EventTime eventTime);
/**
* Handles a position discontinuity.
*
* @param eventTime The event time of the position discontinuity.
* @param reason The {@link DiscontinuityReason}.
*/
void handlePositionDiscontinuity(EventTime eventTime, @DiscontinuityReason int reason);
/**
* Finishes all existing sessions and calls their respective {@link
* Listener#onSessionFinished(EventTime, String, boolean)} callback.
*
* @param eventTime The event time at which sessions are finished.
*/
void finishAllSessions(EventTime eventTime);
}
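As a rough orientation for how these callbacks fit together, the sketch below registers a listener on an arbitrary PlaybackSessionManager implementation. The sessionManager variable and the log tag are hypothetical; the snippet only illustrates the call order implied by the interface (setListener before any other use).

  // Minimal sketch, assuming an existing PlaybackSessionManager instance ("sessionManager")
  // and android.util.Log being available.
  sessionManager.setListener(
      new PlaybackSessionManager.Listener() {
        @Override
        public void onSessionCreated(EventTime eventTime, String sessionId) {
          Log.d("Sessions", "created: " + sessionId);
        }

        @Override
        public void onSessionActive(EventTime eventTime, String sessionId) {
          Log.d("Sessions", "active: " + sessionId);
        }

        @Override
        public void onAdPlaybackStarted(
            EventTime eventTime, String contentSessionId, String adSessionId) {
          Log.d("Sessions", "ad " + adSessionId + " interrupting " + contentSessionId);
        }

        @Override
        public void onSessionFinished(
            EventTime eventTime, String sessionId, boolean automaticTransitionToNextPlayback) {
          Log.d("Sessions", "finished: " + sessionId);
        }
      });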

View File

@ -0,0 +1,980 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.analytics;
import android.os.SystemClock;
import android.util.Pair;
import androidx.annotation.IntDef;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.analytics.AnalyticsListener.EventTime;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.util.Collections;
import java.util.List;
import org.checkerframework.checker.nullness.compatqual.NullableType;
/** Statistics about playbacks. */
public final class PlaybackStats {
/**
* State of a playback. One of {@link #PLAYBACK_STATE_NOT_STARTED}, {@link
* #PLAYBACK_STATE_JOINING_FOREGROUND}, {@link #PLAYBACK_STATE_JOINING_BACKGROUND}, {@link
* #PLAYBACK_STATE_PLAYING}, {@link #PLAYBACK_STATE_PAUSED}, {@link #PLAYBACK_STATE_SEEKING},
* {@link #PLAYBACK_STATE_BUFFERING}, {@link #PLAYBACK_STATE_PAUSED_BUFFERING}, {@link
* #PLAYBACK_STATE_SEEK_BUFFERING}, {@link #PLAYBACK_STATE_SUPPRESSED}, {@link
* #PLAYBACK_STATE_SUPPRESSED_BUFFERING}, {@link #PLAYBACK_STATE_ENDED}, {@link
* #PLAYBACK_STATE_STOPPED}, {@link #PLAYBACK_STATE_FAILED}, {@link
* #PLAYBACK_STATE_INTERRUPTED_BY_AD} or {@link #PLAYBACK_STATE_ABANDONED}.
*/
@Documented
@Retention(RetentionPolicy.SOURCE)
@Target({ElementType.TYPE_PARAMETER, ElementType.TYPE_USE})
@IntDef({
PLAYBACK_STATE_NOT_STARTED,
PLAYBACK_STATE_JOINING_BACKGROUND,
PLAYBACK_STATE_JOINING_FOREGROUND,
PLAYBACK_STATE_PLAYING,
PLAYBACK_STATE_PAUSED,
PLAYBACK_STATE_SEEKING,
PLAYBACK_STATE_BUFFERING,
PLAYBACK_STATE_PAUSED_BUFFERING,
PLAYBACK_STATE_SEEK_BUFFERING,
PLAYBACK_STATE_SUPPRESSED,
PLAYBACK_STATE_SUPPRESSED_BUFFERING,
PLAYBACK_STATE_ENDED,
PLAYBACK_STATE_STOPPED,
PLAYBACK_STATE_FAILED,
PLAYBACK_STATE_INTERRUPTED_BY_AD,
PLAYBACK_STATE_ABANDONED
})
@interface PlaybackState {}
/** Playback has not started (initial state). */
public static final int PLAYBACK_STATE_NOT_STARTED = 0;
/** Playback is buffering in the background for initial playback start. */
public static final int PLAYBACK_STATE_JOINING_BACKGROUND = 1;
/** Playback is buffering in the foreground for initial playback start. */
public static final int PLAYBACK_STATE_JOINING_FOREGROUND = 2;
/** Playback is actively playing. */
public static final int PLAYBACK_STATE_PLAYING = 3;
/** Playback is paused but ready to play. */
public static final int PLAYBACK_STATE_PAUSED = 4;
/** Playback is handling a seek. */
public static final int PLAYBACK_STATE_SEEKING = 5;
/** Playback is buffering to resume active playback. */
public static final int PLAYBACK_STATE_BUFFERING = 6;
/** Playback is buffering while paused. */
public static final int PLAYBACK_STATE_PAUSED_BUFFERING = 7;
/** Playback is buffering after a seek. */
public static final int PLAYBACK_STATE_SEEK_BUFFERING = 8;
/** Playback is suppressed (e.g. due to audio focus loss). */
public static final int PLAYBACK_STATE_SUPPRESSED = 9;
/** Playback is suppressed (e.g. due to audio focus loss) while buffering to resume a playback. */
public static final int PLAYBACK_STATE_SUPPRESSED_BUFFERING = 10;
/** Playback has reached the end of the media. */
public static final int PLAYBACK_STATE_ENDED = 11;
/** Playback is stopped and can be restarted. */
public static final int PLAYBACK_STATE_STOPPED = 12;
  /** Playback is stopped due to a fatal error and can be retried. */
public static final int PLAYBACK_STATE_FAILED = 13;
/** Playback is interrupted by an ad. */
public static final int PLAYBACK_STATE_INTERRUPTED_BY_AD = 14;
/** Playback is abandoned before reaching the end of the media. */
public static final int PLAYBACK_STATE_ABANDONED = 15;
/** Total number of playback states. */
/* package */ static final int PLAYBACK_STATE_COUNT = 16;
/** Empty playback stats. */
public static final PlaybackStats EMPTY = merge(/* nothing */ );
/**
* Returns the combined {@link PlaybackStats} for all input {@link PlaybackStats}.
*
* <p>Note that the full history of events is not kept as the history only makes sense in the
* context of a single playback.
*
* @param playbackStats Array of {@link PlaybackStats} to combine.
* @return The combined {@link PlaybackStats}.
*/
public static PlaybackStats merge(PlaybackStats... playbackStats) {
int playbackCount = 0;
long[] playbackStateDurationsMs = new long[PLAYBACK_STATE_COUNT];
long firstReportedTimeMs = C.TIME_UNSET;
int foregroundPlaybackCount = 0;
int abandonedBeforeReadyCount = 0;
int endedCount = 0;
int backgroundJoiningCount = 0;
long totalValidJoinTimeMs = C.TIME_UNSET;
int validJoinTimeCount = 0;
int totalPauseCount = 0;
int totalPauseBufferCount = 0;
int totalSeekCount = 0;
int totalRebufferCount = 0;
long maxRebufferTimeMs = C.TIME_UNSET;
int adPlaybackCount = 0;
long totalVideoFormatHeightTimeMs = 0;
long totalVideoFormatHeightTimeProduct = 0;
long totalVideoFormatBitrateTimeMs = 0;
long totalVideoFormatBitrateTimeProduct = 0;
long totalAudioFormatTimeMs = 0;
long totalAudioFormatBitrateTimeProduct = 0;
int initialVideoFormatHeightCount = 0;
int initialVideoFormatBitrateCount = 0;
int totalInitialVideoFormatHeight = C.LENGTH_UNSET;
long totalInitialVideoFormatBitrate = C.LENGTH_UNSET;
int initialAudioFormatBitrateCount = 0;
long totalInitialAudioFormatBitrate = C.LENGTH_UNSET;
long totalBandwidthTimeMs = 0;
long totalBandwidthBytes = 0;
long totalDroppedFrames = 0;
long totalAudioUnderruns = 0;
int fatalErrorPlaybackCount = 0;
int fatalErrorCount = 0;
int nonFatalErrorCount = 0;
for (PlaybackStats stats : playbackStats) {
playbackCount += stats.playbackCount;
for (int i = 0; i < PLAYBACK_STATE_COUNT; i++) {
playbackStateDurationsMs[i] += stats.playbackStateDurationsMs[i];
}
if (firstReportedTimeMs == C.TIME_UNSET) {
firstReportedTimeMs = stats.firstReportedTimeMs;
} else if (stats.firstReportedTimeMs != C.TIME_UNSET) {
firstReportedTimeMs = Math.min(firstReportedTimeMs, stats.firstReportedTimeMs);
}
foregroundPlaybackCount += stats.foregroundPlaybackCount;
abandonedBeforeReadyCount += stats.abandonedBeforeReadyCount;
endedCount += stats.endedCount;
backgroundJoiningCount += stats.backgroundJoiningCount;
if (totalValidJoinTimeMs == C.TIME_UNSET) {
totalValidJoinTimeMs = stats.totalValidJoinTimeMs;
} else if (stats.totalValidJoinTimeMs != C.TIME_UNSET) {
totalValidJoinTimeMs += stats.totalValidJoinTimeMs;
}
validJoinTimeCount += stats.validJoinTimeCount;
totalPauseCount += stats.totalPauseCount;
totalPauseBufferCount += stats.totalPauseBufferCount;
totalSeekCount += stats.totalSeekCount;
totalRebufferCount += stats.totalRebufferCount;
if (maxRebufferTimeMs == C.TIME_UNSET) {
maxRebufferTimeMs = stats.maxRebufferTimeMs;
} else if (stats.maxRebufferTimeMs != C.TIME_UNSET) {
maxRebufferTimeMs = Math.max(maxRebufferTimeMs, stats.maxRebufferTimeMs);
}
adPlaybackCount += stats.adPlaybackCount;
totalVideoFormatHeightTimeMs += stats.totalVideoFormatHeightTimeMs;
totalVideoFormatHeightTimeProduct += stats.totalVideoFormatHeightTimeProduct;
totalVideoFormatBitrateTimeMs += stats.totalVideoFormatBitrateTimeMs;
totalVideoFormatBitrateTimeProduct += stats.totalVideoFormatBitrateTimeProduct;
totalAudioFormatTimeMs += stats.totalAudioFormatTimeMs;
totalAudioFormatBitrateTimeProduct += stats.totalAudioFormatBitrateTimeProduct;
initialVideoFormatHeightCount += stats.initialVideoFormatHeightCount;
initialVideoFormatBitrateCount += stats.initialVideoFormatBitrateCount;
if (totalInitialVideoFormatHeight == C.LENGTH_UNSET) {
totalInitialVideoFormatHeight = stats.totalInitialVideoFormatHeight;
} else if (stats.totalInitialVideoFormatHeight != C.LENGTH_UNSET) {
totalInitialVideoFormatHeight += stats.totalInitialVideoFormatHeight;
}
if (totalInitialVideoFormatBitrate == C.LENGTH_UNSET) {
totalInitialVideoFormatBitrate = stats.totalInitialVideoFormatBitrate;
} else if (stats.totalInitialVideoFormatBitrate != C.LENGTH_UNSET) {
totalInitialVideoFormatBitrate += stats.totalInitialVideoFormatBitrate;
}
initialAudioFormatBitrateCount += stats.initialAudioFormatBitrateCount;
if (totalInitialAudioFormatBitrate == C.LENGTH_UNSET) {
totalInitialAudioFormatBitrate = stats.totalInitialAudioFormatBitrate;
} else if (stats.totalInitialAudioFormatBitrate != C.LENGTH_UNSET) {
totalInitialAudioFormatBitrate += stats.totalInitialAudioFormatBitrate;
}
totalBandwidthTimeMs += stats.totalBandwidthTimeMs;
totalBandwidthBytes += stats.totalBandwidthBytes;
totalDroppedFrames += stats.totalDroppedFrames;
totalAudioUnderruns += stats.totalAudioUnderruns;
fatalErrorPlaybackCount += stats.fatalErrorPlaybackCount;
fatalErrorCount += stats.fatalErrorCount;
nonFatalErrorCount += stats.nonFatalErrorCount;
}
return new PlaybackStats(
playbackCount,
playbackStateDurationsMs,
        /* playbackStateHistory= */ Collections.emptyList(),
/* mediaTimeHistory= */ Collections.emptyList(),
firstReportedTimeMs,
foregroundPlaybackCount,
abandonedBeforeReadyCount,
endedCount,
backgroundJoiningCount,
totalValidJoinTimeMs,
validJoinTimeCount,
totalPauseCount,
totalPauseBufferCount,
totalSeekCount,
totalRebufferCount,
maxRebufferTimeMs,
adPlaybackCount,
/* videoFormatHistory= */ Collections.emptyList(),
/* audioFormatHistory= */ Collections.emptyList(),
totalVideoFormatHeightTimeMs,
totalVideoFormatHeightTimeProduct,
totalVideoFormatBitrateTimeMs,
totalVideoFormatBitrateTimeProduct,
totalAudioFormatTimeMs,
totalAudioFormatBitrateTimeProduct,
initialVideoFormatHeightCount,
initialVideoFormatBitrateCount,
totalInitialVideoFormatHeight,
totalInitialVideoFormatBitrate,
initialAudioFormatBitrateCount,
totalInitialAudioFormatBitrate,
totalBandwidthTimeMs,
totalBandwidthBytes,
totalDroppedFrames,
totalAudioUnderruns,
fatalErrorPlaybackCount,
fatalErrorCount,
nonFatalErrorCount,
/* fatalErrorHistory= */ Collections.emptyList(),
/* nonFatalErrorHistory= */ Collections.emptyList());
}
/** The number of individual playbacks for which these stats were collected. */
public final int playbackCount;
// Playback state stats.
/**
* The playback state history as ordered pairs of the {@link EventTime} at which a state became
* active and the {@link PlaybackState}.
*/
public final List<Pair<EventTime, @PlaybackState Integer>> playbackStateHistory;
/**
* The media time history as an ordered list of long[2] arrays with [0] being the realtime as
* returned by {@code SystemClock.elapsedRealtime()} and [1] being the media time at this
* realtime, in milliseconds.
*/
public final List<long[]> mediaTimeHistory;
/**
* The elapsed real-time as returned by {@code SystemClock.elapsedRealtime()} of the first
* reported playback event, or {@link C#TIME_UNSET} if no event has been reported.
*/
public final long firstReportedTimeMs;
/** The number of playbacks which were the active foreground playback at some point. */
public final int foregroundPlaybackCount;
/** The number of playbacks which were abandoned before they were ready to play. */
public final int abandonedBeforeReadyCount;
/** The number of playbacks which reached the ended state at least once. */
public final int endedCount;
/** The number of playbacks which were pre-buffered in the background. */
public final int backgroundJoiningCount;
/**
* The total time spent joining the playback, in milliseconds, or {@link C#TIME_UNSET} if no valid
* join time could be determined.
*
* <p>Note that this does not include background joining time. A join time may be invalid if the
* playback never reached {@link #PLAYBACK_STATE_PLAYING} or {@link #PLAYBACK_STATE_PAUSED}, or
* joining was interrupted by a seek, stop, or error state.
*/
public final long totalValidJoinTimeMs;
/**
* The number of playbacks with a valid join time as documented in {@link #totalValidJoinTimeMs}.
*/
public final int validJoinTimeCount;
/** The total number of times a playback has been paused. */
public final int totalPauseCount;
/** The total number of times a playback has been paused while rebuffering. */
public final int totalPauseBufferCount;
/**
* The total number of times a seek occurred. This includes seeks happening before playback
* resumed after another seek.
*/
public final int totalSeekCount;
/**
* The total number of times a rebuffer occurred. This excludes initial joining and buffering
* after seek.
*/
public final int totalRebufferCount;
/**
* The maximum time spent during a single rebuffer, in milliseconds, or {@link C#TIME_UNSET} if no
* rebuffer occurred.
*/
public final long maxRebufferTimeMs;
/** The number of ad playbacks. */
public final int adPlaybackCount;
// Format stats.
/**
* The video format history as ordered pairs of the {@link EventTime} at which a format started
* being used and the {@link Format}. The {@link Format} may be null if no video format was used.
*/
public final List<Pair<EventTime, @NullableType Format>> videoFormatHistory;
/**
* The audio format history as ordered pairs of the {@link EventTime} at which a format started
* being used and the {@link Format}. The {@link Format} may be null if no audio format was used.
*/
public final List<Pair<EventTime, @NullableType Format>> audioFormatHistory;
/** The total media time for which video format height data is available, in milliseconds. */
public final long totalVideoFormatHeightTimeMs;
/**
* The accumulated sum of all video format heights, in pixels, times the time the format was used
* for playback, in milliseconds.
*/
public final long totalVideoFormatHeightTimeProduct;
/** The total media time for which video format bitrate data is available, in milliseconds. */
public final long totalVideoFormatBitrateTimeMs;
/**
* The accumulated sum of all video format bitrates, in bits per second, times the time the format
* was used for playback, in milliseconds.
*/
public final long totalVideoFormatBitrateTimeProduct;
/** The total media time for which audio format data is available, in milliseconds. */
public final long totalAudioFormatTimeMs;
/**
* The accumulated sum of all audio format bitrates, in bits per second, times the time the format
* was used for playback, in milliseconds.
*/
public final long totalAudioFormatBitrateTimeProduct;
/** The number of playbacks with initial video format height data. */
public final int initialVideoFormatHeightCount;
/** The number of playbacks with initial video format bitrate data. */
public final int initialVideoFormatBitrateCount;
/**
* The total initial video format height for all playbacks, in pixels, or {@link C#LENGTH_UNSET}
* if no initial video format data is available.
*/
public final int totalInitialVideoFormatHeight;
/**
* The total initial video format bitrate for all playbacks, in bits per second, or {@link
* C#LENGTH_UNSET} if no initial video format data is available.
*/
public final long totalInitialVideoFormatBitrate;
/** The number of playbacks with initial audio format bitrate data. */
public final int initialAudioFormatBitrateCount;
/**
* The total initial audio format bitrate for all playbacks, in bits per second, or {@link
* C#LENGTH_UNSET} if no initial audio format data is available.
*/
public final long totalInitialAudioFormatBitrate;
// Bandwidth stats.
/** The total time for which bandwidth measurement data is available, in milliseconds. */
public final long totalBandwidthTimeMs;
/** The total bytes transferred during {@link #totalBandwidthTimeMs}. */
public final long totalBandwidthBytes;
// Renderer quality stats.
/** The total number of dropped video frames. */
public final long totalDroppedFrames;
/** The total number of audio underruns. */
public final long totalAudioUnderruns;
// Error stats.
/**
   * The total number of playbacks with at least one fatal error. Errors are fatal if playback
* stopped due to this error.
*/
public final int fatalErrorPlaybackCount;
/** The total number of fatal errors. Errors are fatal if playback stopped due to this error. */
public final int fatalErrorCount;
/**
   * The total number of non-fatal errors. Errors are non-fatal if playback can recover from the
* error without stopping.
*/
public final int nonFatalErrorCount;
/**
* The history of fatal errors as ordered pairs of the {@link EventTime} at which an error
* occurred and the error. Errors are fatal if playback stopped due to this error.
*/
public final List<Pair<EventTime, Exception>> fatalErrorHistory;
/**
* The history of non-fatal errors as ordered pairs of the {@link EventTime} at which an error
   * occurred and the error. Errors are non-fatal if playback can recover from the error without
* stopping.
*/
public final List<Pair<EventTime, Exception>> nonFatalErrorHistory;
private final long[] playbackStateDurationsMs;
/* package */ PlaybackStats(
int playbackCount,
long[] playbackStateDurationsMs,
List<Pair<EventTime, @PlaybackState Integer>> playbackStateHistory,
List<long[]> mediaTimeHistory,
long firstReportedTimeMs,
int foregroundPlaybackCount,
int abandonedBeforeReadyCount,
int endedCount,
int backgroundJoiningCount,
long totalValidJoinTimeMs,
int validJoinTimeCount,
int totalPauseCount,
int totalPauseBufferCount,
int totalSeekCount,
int totalRebufferCount,
long maxRebufferTimeMs,
int adPlaybackCount,
List<Pair<EventTime, @NullableType Format>> videoFormatHistory,
List<Pair<EventTime, @NullableType Format>> audioFormatHistory,
long totalVideoFormatHeightTimeMs,
long totalVideoFormatHeightTimeProduct,
long totalVideoFormatBitrateTimeMs,
long totalVideoFormatBitrateTimeProduct,
long totalAudioFormatTimeMs,
long totalAudioFormatBitrateTimeProduct,
int initialVideoFormatHeightCount,
int initialVideoFormatBitrateCount,
int totalInitialVideoFormatHeight,
long totalInitialVideoFormatBitrate,
int initialAudioFormatBitrateCount,
long totalInitialAudioFormatBitrate,
long totalBandwidthTimeMs,
long totalBandwidthBytes,
long totalDroppedFrames,
long totalAudioUnderruns,
int fatalErrorPlaybackCount,
int fatalErrorCount,
int nonFatalErrorCount,
List<Pair<EventTime, Exception>> fatalErrorHistory,
List<Pair<EventTime, Exception>> nonFatalErrorHistory) {
this.playbackCount = playbackCount;
this.playbackStateDurationsMs = playbackStateDurationsMs;
this.playbackStateHistory = Collections.unmodifiableList(playbackStateHistory);
this.mediaTimeHistory = Collections.unmodifiableList(mediaTimeHistory);
this.firstReportedTimeMs = firstReportedTimeMs;
this.foregroundPlaybackCount = foregroundPlaybackCount;
this.abandonedBeforeReadyCount = abandonedBeforeReadyCount;
this.endedCount = endedCount;
this.backgroundJoiningCount = backgroundJoiningCount;
this.totalValidJoinTimeMs = totalValidJoinTimeMs;
this.validJoinTimeCount = validJoinTimeCount;
this.totalPauseCount = totalPauseCount;
this.totalPauseBufferCount = totalPauseBufferCount;
this.totalSeekCount = totalSeekCount;
this.totalRebufferCount = totalRebufferCount;
this.maxRebufferTimeMs = maxRebufferTimeMs;
this.adPlaybackCount = adPlaybackCount;
this.videoFormatHistory = Collections.unmodifiableList(videoFormatHistory);
this.audioFormatHistory = Collections.unmodifiableList(audioFormatHistory);
this.totalVideoFormatHeightTimeMs = totalVideoFormatHeightTimeMs;
this.totalVideoFormatHeightTimeProduct = totalVideoFormatHeightTimeProduct;
this.totalVideoFormatBitrateTimeMs = totalVideoFormatBitrateTimeMs;
this.totalVideoFormatBitrateTimeProduct = totalVideoFormatBitrateTimeProduct;
this.totalAudioFormatTimeMs = totalAudioFormatTimeMs;
this.totalAudioFormatBitrateTimeProduct = totalAudioFormatBitrateTimeProduct;
this.initialVideoFormatHeightCount = initialVideoFormatHeightCount;
this.initialVideoFormatBitrateCount = initialVideoFormatBitrateCount;
this.totalInitialVideoFormatHeight = totalInitialVideoFormatHeight;
this.totalInitialVideoFormatBitrate = totalInitialVideoFormatBitrate;
this.initialAudioFormatBitrateCount = initialAudioFormatBitrateCount;
this.totalInitialAudioFormatBitrate = totalInitialAudioFormatBitrate;
this.totalBandwidthTimeMs = totalBandwidthTimeMs;
this.totalBandwidthBytes = totalBandwidthBytes;
this.totalDroppedFrames = totalDroppedFrames;
this.totalAudioUnderruns = totalAudioUnderruns;
this.fatalErrorPlaybackCount = fatalErrorPlaybackCount;
this.fatalErrorCount = fatalErrorCount;
this.nonFatalErrorCount = nonFatalErrorCount;
this.fatalErrorHistory = Collections.unmodifiableList(fatalErrorHistory);
this.nonFatalErrorHistory = Collections.unmodifiableList(nonFatalErrorHistory);
}
/**
* Returns the total time spent in a given {@link PlaybackState}, in milliseconds.
*
* @param playbackState A {@link PlaybackState}.
   * @return The total time spent in the given playback state, in milliseconds.
*/
public long getPlaybackStateDurationMs(@PlaybackState int playbackState) {
return playbackStateDurationsMs[playbackState];
}
/**
* Returns the {@link PlaybackState} at the given time.
*
* @param realtimeMs The time as returned by {@link SystemClock#elapsedRealtime()}.
* @return The {@link PlaybackState} at that time, or {@link #PLAYBACK_STATE_NOT_STARTED} if the
* given time is before the first known playback state in the history.
*/
public @PlaybackState int getPlaybackStateAtTime(long realtimeMs) {
@PlaybackState int state = PLAYBACK_STATE_NOT_STARTED;
for (Pair<EventTime, @PlaybackState Integer> timeAndState : playbackStateHistory) {
if (timeAndState.first.realtimeMs > realtimeMs) {
break;
}
state = timeAndState.second;
}
return state;
}
/**
* Returns the estimated media time at the given realtime, in milliseconds, or {@link
* C#TIME_UNSET} if the media time history is unknown.
*
* @param realtimeMs The realtime as returned by {@link SystemClock#elapsedRealtime()}.
   * @return The estimated media time in milliseconds at this realtime, or {@link C#TIME_UNSET} if no
* estimate can be given.
*/
public long getMediaTimeMsAtRealtimeMs(long realtimeMs) {
if (mediaTimeHistory.isEmpty()) {
return C.TIME_UNSET;
}
int nextIndex = 0;
while (nextIndex < mediaTimeHistory.size()
&& mediaTimeHistory.get(nextIndex)[0] <= realtimeMs) {
nextIndex++;
}
if (nextIndex == 0) {
return mediaTimeHistory.get(0)[1];
}
if (nextIndex == mediaTimeHistory.size()) {
return mediaTimeHistory.get(mediaTimeHistory.size() - 1)[1];
}
long prevRealtimeMs = mediaTimeHistory.get(nextIndex - 1)[0];
long prevMediaTimeMs = mediaTimeHistory.get(nextIndex - 1)[1];
long nextRealtimeMs = mediaTimeHistory.get(nextIndex)[0];
long nextMediaTimeMs = mediaTimeHistory.get(nextIndex)[1];
long realtimeDurationMs = nextRealtimeMs - prevRealtimeMs;
if (realtimeDurationMs == 0) {
return prevMediaTimeMs;
}
float fraction = (float) (realtimeMs - prevRealtimeMs) / realtimeDurationMs;
return prevMediaTimeMs + (long) ((nextMediaTimeMs - prevMediaTimeMs) * fraction);
}
/**
* Returns the mean time spent joining the playback, in milliseconds, or {@link C#TIME_UNSET} if
* no valid join time is available. Only includes playbacks with valid join times as documented in
* {@link #totalValidJoinTimeMs}.
*/
public long getMeanJoinTimeMs() {
return validJoinTimeCount == 0 ? C.TIME_UNSET : totalValidJoinTimeMs / validJoinTimeCount;
}
/**
* Returns the total time spent joining the playback in foreground, in milliseconds. This does
* include invalid join times where the playback never reached {@link #PLAYBACK_STATE_PLAYING} or
* {@link #PLAYBACK_STATE_PAUSED}, or joining was interrupted by a seek, stop, or error state.
*/
public long getTotalJoinTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_JOINING_FOREGROUND);
}
/** Returns the total time spent actively playing, in milliseconds. */
public long getTotalPlayTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_PLAYING);
}
/**
* Returns the mean time spent actively playing per foreground playback, in milliseconds, or
* {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPlayTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPlayTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time spent in a paused state, in milliseconds. */
public long getTotalPausedTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED_BUFFERING);
}
/**
* Returns the mean time spent in a paused state per foreground playback, in milliseconds, or
* {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPausedTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPausedTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the total time spent rebuffering, in milliseconds. This excludes initial join times,
* buffer times after a seek and buffering while paused.
*/
public long getTotalRebufferTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING);
}
/**
* Returns the mean time spent rebuffering per foreground playback, in milliseconds, or {@link
* C#TIME_UNSET} if no playback has been in foreground. This excludes initial join times, buffer
* times after a seek and buffering while paused.
*/
public long getMeanRebufferTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalRebufferTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the mean time spent during a single rebuffer, in milliseconds, or {@link C#TIME_UNSET}
* if no rebuffer was recorded. This excludes initial join times and buffer times after a seek.
*/
public long getMeanSingleRebufferTimeMs() {
return totalRebufferCount == 0
? C.TIME_UNSET
: (getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_PAUSED_BUFFERING))
/ totalRebufferCount;
}
/**
* Returns the total time spent from the start of a seek until playback is ready again, in
* milliseconds.
*/
public long getTotalSeekTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING);
}
/**
* Returns the mean time spent per foreground playback from the start of a seek until playback is
* ready again, in milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanSeekTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalSeekTimeMs() / foregroundPlaybackCount;
}
/**
* Returns the mean time spent from the start of a single seek until playback is ready again, in
* milliseconds, or {@link C#TIME_UNSET} if no seek occurred.
*/
public long getMeanSingleSeekTimeMs() {
return totalSeekCount == 0 ? C.TIME_UNSET : getTotalSeekTimeMs() / totalSeekCount;
}
/**
* Returns the total time spent actively waiting for playback, in milliseconds. This includes all
* join times, rebuffer times and seek times, but excludes times without user intention to play,
* e.g. all paused states.
*/
public long getTotalWaitTimeMs() {
return getPlaybackStateDurationMs(PLAYBACK_STATE_JOINING_FOREGROUND)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_BUFFERING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEKING)
+ getPlaybackStateDurationMs(PLAYBACK_STATE_SEEK_BUFFERING);
}
/**
* Returns the mean time spent actively waiting for playback per foreground playback, in
* milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground. This includes all
* join times, rebuffer times and seek times, but excludes times without user intention to play,
* e.g. all paused states.
*/
public long getMeanWaitTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalWaitTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time spent playing or actively waiting for playback, in milliseconds. */
public long getTotalPlayAndWaitTimeMs() {
return getTotalPlayTimeMs() + getTotalWaitTimeMs();
}
/**
* Returns the mean time spent playing or actively waiting for playback per foreground playback,
* in milliseconds, or {@link C#TIME_UNSET} if no playback has been in foreground.
*/
public long getMeanPlayAndWaitTimeMs() {
return foregroundPlaybackCount == 0
? C.TIME_UNSET
: getTotalPlayAndWaitTimeMs() / foregroundPlaybackCount;
}
/** Returns the total time covered by any playback state, in milliseconds. */
public long getTotalElapsedTimeMs() {
long totalTimeMs = 0;
for (int i = 0; i < PLAYBACK_STATE_COUNT; i++) {
totalTimeMs += playbackStateDurationsMs[i];
}
return totalTimeMs;
}
/**
* Returns the mean time covered by any playback state per playback, in milliseconds, or {@link
* C#TIME_UNSET} if no playback was recorded.
*/
public long getMeanElapsedTimeMs() {
return playbackCount == 0 ? C.TIME_UNSET : getTotalElapsedTimeMs() / playbackCount;
}
/**
* Returns the ratio of foreground playbacks which were abandoned before they were ready to play,
* or {@code 0.0} if no playback has been in foreground.
*/
public float getAbandonedBeforeReadyRatio() {
int foregroundAbandonedBeforeReady =
abandonedBeforeReadyCount - (playbackCount - foregroundPlaybackCount);
return foregroundPlaybackCount == 0
? 0f
: (float) foregroundAbandonedBeforeReady / foregroundPlaybackCount;
}
/**
* Returns the ratio of foreground playbacks which reached the ended state at least once, or
* {@code 0.0} if no playback has been in foreground.
*/
public float getEndedRatio() {
return foregroundPlaybackCount == 0 ? 0f : (float) endedCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a playback has been paused per foreground playback, or {@code
* 0.0} if no playback has been in foreground.
*/
public float getMeanPauseCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalPauseCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a playback has been paused while rebuffering per foreground
* playback, or {@code 0.0} if no playback has been in foreground.
*/
public float getMeanPauseBufferCount() {
return foregroundPlaybackCount == 0
? 0f
: (float) totalPauseBufferCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a seek occurred per foreground playback, or {@code 0.0} if no
* playback has been in foreground. This includes seeks happening before playback resumed after
* another seek.
*/
public float getMeanSeekCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalSeekCount / foregroundPlaybackCount;
}
/**
* Returns the mean number of times a rebuffer occurred per foreground playback, or {@code 0.0} if
* no playback has been in foreground. This excludes initial joining and buffering after seek.
*/
public float getMeanRebufferCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) totalRebufferCount / foregroundPlaybackCount;
}
/**
* Returns the ratio of wait times to the total time spent playing and waiting, or {@code 0.0} if
   * no time was spent playing or waiting. This is equivalent to {@link #getTotalWaitTimeMs()} /
* {@link #getTotalPlayAndWaitTimeMs()} and also to {@link #getJoinTimeRatio()} + {@link
* #getRebufferTimeRatio()} + {@link #getSeekTimeRatio()}.
*/
public float getWaitTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalWaitTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of foreground join time to the total time spent playing and waiting, or
   * {@code 0.0} if no time was spent playing or waiting. This is equivalent to {@link
* #getTotalJoinTimeMs()} / {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getJoinTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalJoinTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of rebuffer time to the total time spent playing and waiting, or {@code 0.0}
   * if no time was spent playing or waiting. This is equivalent to {@link
* #getTotalRebufferTimeMs()} / {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getRebufferTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalRebufferTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the ratio of seek time to the total time spent playing and waiting, or {@code 0.0} if
   * no time was spent playing or waiting. This is equivalent to {@link #getTotalSeekTimeMs()} /
* {@link #getTotalPlayAndWaitTimeMs()}.
*/
public float getSeekTimeRatio() {
long playAndWaitTimeMs = getTotalPlayAndWaitTimeMs();
return playAndWaitTimeMs == 0 ? 0f : (float) getTotalSeekTimeMs() / playAndWaitTimeMs;
}
/**
* Returns the rate of rebuffer events, in rebuffers per play time second, or {@code 0.0} if no
   * time was spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenRebuffers()}.
*/
public float getRebufferRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalRebufferCount / playTimeMs;
}
/**
* Returns the mean play time between rebuffer events, in seconds. This is equivalent to 1.0 /
* {@link #getRebufferRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenRebuffers() {
return 1f / getRebufferRate();
}
/**
* Returns the mean initial video format height, in pixels, or {@link C#LENGTH_UNSET} if no video
* format data is available.
*/
public int getMeanInitialVideoFormatHeight() {
return initialVideoFormatHeightCount == 0
? C.LENGTH_UNSET
: totalInitialVideoFormatHeight / initialVideoFormatHeightCount;
}
/**
* Returns the mean initial video format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if
* no video format data is available.
*/
public int getMeanInitialVideoFormatBitrate() {
return initialVideoFormatBitrateCount == 0
? C.LENGTH_UNSET
: (int) (totalInitialVideoFormatBitrate / initialVideoFormatBitrateCount);
}
/**
* Returns the mean initial audio format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if
* no audio format data is available.
*/
public int getMeanInitialAudioFormatBitrate() {
return initialAudioFormatBitrateCount == 0
? C.LENGTH_UNSET
: (int) (totalInitialAudioFormatBitrate / initialAudioFormatBitrateCount);
}
/**
* Returns the mean video format height, in pixels, or {@link C#LENGTH_UNSET} if no video format
* data is available. This is a weighted average taking the time the format was used for playback
* into account.
*/
public int getMeanVideoFormatHeight() {
return totalVideoFormatHeightTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalVideoFormatHeightTimeProduct / totalVideoFormatHeightTimeMs);
}
/**
* Returns the mean video format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if no
* video format data is available. This is a weighted average taking the time the format was used
* for playback into account.
*/
public int getMeanVideoFormatBitrate() {
return totalVideoFormatBitrateTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalVideoFormatBitrateTimeProduct / totalVideoFormatBitrateTimeMs);
}
/**
* Returns the mean audio format bitrate, in bits per second, or {@link C#LENGTH_UNSET} if no
* audio format data is available. This is a weighted average taking the time the format was used
* for playback into account.
*/
public int getMeanAudioFormatBitrate() {
return totalAudioFormatTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalAudioFormatBitrateTimeProduct / totalAudioFormatTimeMs);
}
/**
* Returns the mean network bandwidth based on transfer measurements, in bits per second, or
* {@link C#LENGTH_UNSET} if no transfer data is available.
*/
public int getMeanBandwidth() {
return totalBandwidthTimeMs == 0
? C.LENGTH_UNSET
: (int) (totalBandwidthBytes * 8000 / totalBandwidthTimeMs);
}
/**
* Returns the mean rate at which video frames are dropped, in dropped frames per play time
* second, or {@code 0.0} if no time was spent playing.
*/
public float getDroppedFramesRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalDroppedFrames / playTimeMs;
}
/**
* Returns the mean rate at which audio underruns occurred, in underruns per play time second, or
* {@code 0.0} if no time was spent playing.
*/
public float getAudioUnderrunRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * totalAudioUnderruns / playTimeMs;
}
/**
* Returns the ratio of foreground playbacks which experienced fatal errors, or {@code 0.0} if no
* playback has been in foreground.
*/
public float getFatalErrorRatio() {
return foregroundPlaybackCount == 0
? 0f
: (float) fatalErrorPlaybackCount / foregroundPlaybackCount;
}
/**
* Returns the rate of fatal errors, in errors per play time second, or {@code 0.0} if no time was
   * spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenFatalErrors()}.
*/
public float getFatalErrorRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * fatalErrorCount / playTimeMs;
}
/**
* Returns the mean play time between fatal errors, in seconds. This is equivalent to 1.0 / {@link
* #getFatalErrorRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenFatalErrors() {
return 1f / getFatalErrorRate();
}
/**
* Returns the mean number of non-fatal errors per foreground playback, or {@code 0.0} if no
* playback has been in foreground.
*/
public float getMeanNonFatalErrorCount() {
return foregroundPlaybackCount == 0 ? 0f : (float) nonFatalErrorCount / foregroundPlaybackCount;
}
/**
* Returns the rate of non-fatal errors, in errors per play time second, or {@code 0.0} if no time
   * was spent playing. This is equivalent to 1.0 / {@link #getMeanTimeBetweenNonFatalErrors()}.
*/
public float getNonFatalErrorRate() {
long playTimeMs = getTotalPlayTimeMs();
return playTimeMs == 0 ? 0f : 1000f * nonFatalErrorCount / playTimeMs;
}
/**
* Returns the mean play time between non-fatal errors, in seconds. This is equivalent to 1.0 /
* {@link #getNonFatalErrorRate()}. Note that this may return {@link Float#POSITIVE_INFINITY}.
*/
public float getMeanTimeBetweenNonFatalErrors() {
return 1f / getNonFatalErrorRate();
}
}
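A hedged usage sketch may help tie the aggregation and the derived metrics together. It assumes two PlaybackStats instances, statsA and statsB, obtained from some analytics component (both names are placeholders), and shows which accessors remain meaningful on a merged instance.

  // Sketch only: statsA and statsB are assumed to come from elsewhere.
  PlaybackStats combined = PlaybackStats.merge(statsA, statsB);

  long meanJoinTimeMs = combined.getMeanJoinTimeMs();    // C.TIME_UNSET if no valid join time.
  float waitRatio = combined.getWaitTimeRatio();         // Join + rebuffer + seek share of play-and-wait time.
  int meanHeight = combined.getMeanVideoFormatHeight();  // C.LENGTH_UNSET if no video format data.

  // History-based lookups are not meaningful after merging, because merge() drops the
  // per-playback histories (playbackStateHistory, mediaTimeHistory, format and error histories).
  long mediaTimeMs =
      combined.getMediaTimeMsAtRealtimeMs(SystemClock.elapsedRealtime()); // C.TIME_UNSET here.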

View File

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package com.google.android.exoplayer2.analytics;
import com.google.android.exoplayer2.util.NonNullApi;

View File

@ -31,7 +31,7 @@ import java.nio.ByteBuffer;
/** /**
* Utility methods for parsing Dolby TrueHD and (E-)AC-3 syncframes. (E-)AC-3 parsing follows the * Utility methods for parsing Dolby TrueHD and (E-)AC-3 syncframes. (E-)AC-3 parsing follows the
* definition in ETSI TS 102 366 V1.2.1. * definition in ETSI TS 102 366 V1.4.1.
*/ */
public final class Ac3Util { public final class Ac3Util {
@ -39,8 +39,8 @@ public final class Ac3Util {
public static final class SyncFrameInfo { public static final class SyncFrameInfo {
/** /**
* AC3 stream types. See also ETSI TS 102 366 E.1.3.1.1. One of {@link #STREAM_TYPE_UNDEFINED}, * AC3 stream types. See also E.1.3.1.1. One of {@link #STREAM_TYPE_UNDEFINED}, {@link
* {@link #STREAM_TYPE_TYPE0}, {@link #STREAM_TYPE_TYPE1} or {@link #STREAM_TYPE_TYPE2}. * #STREAM_TYPE_TYPE0}, {@link #STREAM_TYPE_TYPE1} or {@link #STREAM_TYPE_TYPE2}.
*/ */
@Documented @Documented
@Retention(RetentionPolicy.SOURCE) @Retention(RetentionPolicy.SOURCE)
@ -114,9 +114,7 @@ public final class Ac3Util {
* The number of new samples per (E-)AC-3 audio block. * The number of new samples per (E-)AC-3 audio block.
*/ */
private static final int AUDIO_SAMPLES_PER_AUDIO_BLOCK = 256; private static final int AUDIO_SAMPLES_PER_AUDIO_BLOCK = 256;
/** /** Each syncframe has 6 blocks that provide 256 new audio samples. See subsection 4.1. */
* Each syncframe has 6 blocks that provide 256 new audio samples. See ETSI TS 102 366 4.1.
*/
private static final int AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT = 6 * AUDIO_SAMPLES_PER_AUDIO_BLOCK; private static final int AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT = 6 * AUDIO_SAMPLES_PER_AUDIO_BLOCK;
/** /**
* Number of audio blocks per E-AC-3 syncframe, indexed by numblkscod. * Number of audio blocks per E-AC-3 syncframe, indexed by numblkscod.
@ -134,20 +132,21 @@ public final class Ac3Util {
* Channel counts, indexed by acmod. * Channel counts, indexed by acmod.
*/ */
private static final int[] CHANNEL_COUNT_BY_ACMOD = new int[] {2, 1, 2, 3, 3, 4, 4, 5}; private static final int[] CHANNEL_COUNT_BY_ACMOD = new int[] {2, 1, 2, 3, 3, 4, 4, 5};
/** /** Nominal bitrates in kbps, indexed by frmsizecod / 2. (See table 4.13.) */
* Nominal bitrates in kbps, indexed by frmsizecod / 2. (See ETSI TS 102 366 table 4.13.) private static final int[] BITRATE_BY_HALF_FRMSIZECOD =
*/ new int[] {
private static final int[] BITRATE_BY_HALF_FRMSIZECOD = new int[] {32, 40, 48, 56, 64, 80, 96, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224, 256, 320, 384, 448, 512, 576, 640
112, 128, 160, 192, 224, 256, 320, 384, 448, 512, 576, 640}; };
/** /** 16-bit words per syncframe, indexed by frmsizecod / 2. (See table 4.13.) */
* 16-bit words per syncframe, indexed by frmsizecod / 2. (See ETSI TS 102 366 table 4.13.) private static final int[] SYNCFRAME_SIZE_WORDS_BY_HALF_FRMSIZECOD_44_1 =
*/ new int[] {
private static final int[] SYNCFRAME_SIZE_WORDS_BY_HALF_FRMSIZECOD_44_1 = new int[] {69, 87, 104, 69, 87, 104, 121, 139, 174, 208, 243, 278, 348, 417, 487, 557, 696, 835, 975, 1114, 1253,
121, 139, 174, 208, 243, 278, 348, 417, 487, 557, 696, 835, 975, 1114, 1253, 1393}; 1393
};
/** /**
* Returns the AC-3 format given {@code data} containing the AC3SpecificBox according to ETSI TS * Returns the AC-3 format given {@code data} containing the AC3SpecificBox according to Annex F.
* 102 366 Annex F. The reading position of {@code data} will be modified. * The reading position of {@code data} will be modified.
* *
* @param data The AC3SpecificBox to parse. * @param data The AC3SpecificBox to parse.
* @param trackId The track identifier to set on the format. * @param trackId The track identifier to set on the format.
@ -156,7 +155,7 @@ public final class Ac3Util {
* @return The AC-3 format parsed from data in the header. * @return The AC-3 format parsed from data in the header.
*/ */
public static Format parseAc3AnnexFFormat( public static Format parseAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, DrmInitData drmInitData) { ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
int fscod = (data.readUnsignedByte() & 0xC0) >> 6; int fscod = (data.readUnsignedByte() & 0xC0) >> 6;
int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod]; int sampleRate = SAMPLE_RATE_BY_FSCOD[fscod];
int nextByte = data.readUnsignedByte(); int nextByte = data.readUnsignedByte();
@ -179,8 +178,8 @@ public final class Ac3Util {
} }
/** /**
* Returns the E-AC-3 format given {@code data} containing the EC3SpecificBox according to ETSI TS * Returns the E-AC-3 format given {@code data} containing the EC3SpecificBox according to Annex
* 102 366 Annex F. The reading position of {@code data} will be modified. * F. The reading position of {@code data} will be modified.
* *
* @param data The EC3SpecificBox to parse. * @param data The EC3SpecificBox to parse.
* @param trackId The track identifier to set on the format. * @param trackId The track identifier to set on the format.
@ -189,7 +188,7 @@ public final class Ac3Util {
* @return The E-AC-3 format parsed from data in the header. * @return The E-AC-3 format parsed from data in the header.
*/ */
public static Format parseEAc3AnnexFFormat( public static Format parseEAc3AnnexFFormat(
ParsableByteArray data, String trackId, String language, DrmInitData drmInitData) { ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
data.skipBytes(2); // data_rate, num_ind_sub data.skipBytes(2); // data_rate, num_ind_sub
// Read the first independent substream. // Read the first independent substream.
@ -243,9 +242,10 @@ public final class Ac3Util {
public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) { public static SyncFrameInfo parseAc3SyncframeInfo(ParsableBitArray data) {
int initialPosition = data.getPosition(); int initialPosition = data.getPosition();
data.skipBits(40); data.skipBits(40);
boolean isEac3 = data.readBits(5) == 16; // See bsid in subsection E.1.3.1.6. // Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
boolean isEac3 = data.readBits(5) > 10;
data.setPosition(initialPosition); data.setPosition(initialPosition);
String mimeType; @Nullable String mimeType;
@StreamType int streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED; @StreamType int streamType = SyncFrameInfo.STREAM_TYPE_UNDEFINED;
int sampleRate; int sampleRate;
int acmod; int acmod;
@ -254,7 +254,7 @@ public final class Ac3Util {
boolean lfeon; boolean lfeon;
int channelCount; int channelCount;
if (isEac3) { if (isEac3) {
// Syntax from ETSI TS 102 366 V1.2.1 subsections E.1.2.1 and E.1.2.2. // Subsection E.1.2.
data.skipBits(16); // syncword data.skipBits(16); // syncword
switch (data.readBits(2)) { // strmtyp switch (data.readBits(2)) { // strmtyp
case 0: case 0:
@ -472,7 +472,8 @@ public final class Ac3Util {
if (data.length < 6) { if (data.length < 6) {
return C.LENGTH_UNSET; return C.LENGTH_UNSET;
} }
boolean isEac3 = ((data[5] & 0xFF) >> 3) == 16; // See bsid in subsection E.1.3.1.6. // Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
boolean isEac3 = ((data[5] & 0xF8) >> 3) > 10;
if (isEac3) { if (isEac3) {
int frmsiz = (data[2] & 0x07) << 8; // Most significant 3 bits. int frmsiz = (data[2] & 0x07) << 8; // Most significant 3 bits.
frmsiz |= data[3] & 0xFF; // Least significant 8 bits. frmsiz |= data[3] & 0xFF; // Least significant 8 bits.
@ -485,24 +486,22 @@ public final class Ac3Util {
} }
/** /**
* Returns the number of audio samples in an AC-3 syncframe. * Reads the number of audio samples represented by the given (E-)AC-3 syncframe. The buffer's
*/
public static int getAc3SyncframeAudioSampleCount() {
return AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
}
/**
* Reads the number of audio samples represented by the given E-AC-3 syncframe. The buffer's
* position is not modified. * position is not modified.
* *
* @param buffer The {@link ByteBuffer} from which to read the syncframe. * @param buffer The {@link ByteBuffer} from which to read the syncframe.
* @return The number of audio samples represented by the syncframe. * @return The number of audio samples represented by the syncframe.
*/ */
public static int parseEAc3SyncframeAudioSampleCount(ByteBuffer buffer) { public static int parseAc3SyncframeAudioSampleCount(ByteBuffer buffer) {
// See ETSI TS 102 366 subsection E.1.2.2. // Parse the bitstream ID for AC-3 and E-AC-3 (see subsections 4.3, E.1.2 and E.1.3.1.6).
int fscod = (buffer.get(buffer.position() + 4) & 0xC0) >> 6; boolean isEac3 = ((buffer.get(buffer.position() + 5) & 0xF8) >> 3) > 10;
return AUDIO_SAMPLES_PER_AUDIO_BLOCK * (fscod == 0x03 ? 6 if (isEac3) {
: BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[(buffer.get(buffer.position() + 4) & 0x30) >> 4]); int fscod = (buffer.get(buffer.position() + 4) & 0xC0) >> 6;
int numblkscod = fscod == 0x03 ? 3 : (buffer.get(buffer.position() + 4) & 0x30) >> 4;
return BLOCKS_PER_SYNCFRAME_BY_NUMBLKSCOD[numblkscod] * AUDIO_SAMPLES_PER_AUDIO_BLOCK;
} else {
return AC3_SYNCFRAME_AUDIO_SAMPLE_COUNT;
}
} }
/** /**
@ -535,8 +534,8 @@ public final class Ac3Util {
* contain the start of a syncframe. * contain the start of a syncframe.
*/ */
public static int parseTrueHdSyncframeAudioSampleCount(byte[] syncframe) { public static int parseTrueHdSyncframeAudioSampleCount(byte[] syncframe) {
// TODO: Link to specification if available. // See "Dolby TrueHD (MLP) high-level bitstream description" on the Dolby developer site,
// The syncword ends 0xBA for TrueHD or 0xBB for MLP. // subsections 2.2 and 4.2.1. The syncword ends 0xBA for TrueHD or 0xBB for MLP.
if (syncframe[4] != (byte) 0xF8 if (syncframe[4] != (byte) 0xF8
|| syncframe[5] != (byte) 0x72 || syncframe[5] != (byte) 0x72
|| syncframe[6] != (byte) 0x6F || syncframe[6] != (byte) 0x6F
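The substantive change in this file is the switch from an exact bsid check (== 16) to a range check (> 10) when telling AC-3 and E-AC-3 syncframes apart, alongside the merged parseAc3SyncframeAudioSampleCount. A small standalone sketch of that detection logic, written against the same byte layout the patch relies on (bsid in the top five bits of byte 5), might look as follows; the helper name is made up.

  // Hypothetical helper mirroring the bsid check introduced above.
  static boolean isEac3Syncframe(byte[] syncframe) {
    // bsid occupies the top 5 bits of byte 5; values of 10 or lower indicate AC-3,
    // higher values (up to 16) indicate E-AC-3, matching the "> 10" test in the patch.
    int bsid = (syncframe[5] & 0xF8) >> 3;
    return bsid > 10;
  }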

View File

@ -15,6 +15,7 @@
*/ */
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData;
@ -56,6 +57,11 @@ public final class Ac4Util {
/** The channel count of AC-4 stream. */ /** The channel count of AC-4 stream. */
// TODO: Parse AC-4 stream channel count. // TODO: Parse AC-4 stream channel count.
private static final int CHANNEL_COUNT_2 = 2; private static final int CHANNEL_COUNT_2 = 2;
/**
* The AC-4 sync frame header size for extractor. The seven bytes are 0xAC, 0x40, 0xFF, 0xFF,
* sizeByte1, sizeByte2, sizeByte3. See ETSI TS 103 190-1 V1.3.1, Annex G
*/
public static final int SAMPLE_HEADER_SIZE = 7;
/** /**
* The header size for AC-4 parser. Only needs to be as big as we need to read, not the full * The header size for AC-4 parser. Only needs to be as big as we need to read, not the full
* header size. * header size.
@ -95,7 +101,7 @@ public final class Ac4Util {
* @return The AC-4 format parsed from data in the header. * @return The AC-4 format parsed from data in the header.
*/ */
public static Format parseAc4AnnexEFormat( public static Format parseAc4AnnexEFormat(
ParsableByteArray data, String trackId, String language, DrmInitData drmInitData) { ParsableByteArray data, String trackId, String language, @Nullable DrmInitData drmInitData) {
data.skipBytes(1); // ac4_dsi_version, bitstream_version[0:5] data.skipBytes(1); // ac4_dsi_version, bitstream_version[0:5]
int sampleRate = ((data.readUnsignedByte() & 0x20) >> 5 == 1) ? 48000 : 44100; int sampleRate = ((data.readUnsignedByte() & 0x20) >> 5 == 1) ? 48000 : 44100;
return Format.createAudioSampleFormat( return Format.createAudioSampleFormat(
@ -217,7 +223,7 @@ public final class Ac4Util {
/** Populates {@code buffer} with an AC-4 sample header for a sample of the specified size. */ /** Populates {@code buffer} with an AC-4 sample header for a sample of the specified size. */
public static void getAc4SampleHeader(int size, ParsableByteArray buffer) { public static void getAc4SampleHeader(int size, ParsableByteArray buffer) {
// See ETSI TS 103 190-1 V1.3.1, Annex G. // See ETSI TS 103 190-1 V1.3.1, Annex G.
buffer.reset(/* limit= */ 7); buffer.reset(SAMPLE_HEADER_SIZE);
buffer.data[0] = (byte) 0xAC; buffer.data[0] = (byte) 0xAC;
buffer.data[1] = 0x40; buffer.data[1] = 0x40;
buffer.data[2] = (byte) 0xFF; buffer.data[2] = (byte) 0xFF;
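The hunk is cut off above, but the seven-byte layout documented by SAMPLE_HEADER_SIZE (sync bytes 0xAC 0x40 0xFF 0xFF followed by a 24-bit frame size) can be sketched as a standalone helper. Treat it as an illustration of the header format rather than the method's exact body.

  // Sketch of the 7-byte AC-4 sample header described by SAMPLE_HEADER_SIZE.
  static byte[] ac4SampleHeader(int size) {
    byte[] header = new byte[Ac4Util.SAMPLE_HEADER_SIZE]; // 7 bytes.
    header[0] = (byte) 0xAC;
    header[1] = 0x40;
    header[2] = (byte) 0xFF;
    header[3] = (byte) 0xFF;
    header[4] = (byte) ((size >> 16) & 0xFF); // sizeByte1
    header[5] = (byte) ((size >> 8) & 0xFF);  // sizeByte2
    header[6] = (byte) (size & 0xFF);         // sizeByte3
    return header;
  }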

View File

@ -18,6 +18,7 @@ package com.google.android.exoplayer2.audio;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;
/** /**
* Attributes for audio playback, which configure the underlying platform * Attributes for audio playback, which configure the underlying platform
@ -42,17 +43,19 @@ public final class AudioAttributes {
private @C.AudioContentType int contentType; private @C.AudioContentType int contentType;
private @C.AudioFlags int flags; private @C.AudioFlags int flags;
private @C.AudioUsage int usage; private @C.AudioUsage int usage;
private @C.AudioAllowedCapturePolicy int allowedCapturePolicy;
/** /**
* Creates a new builder for {@link AudioAttributes}. * Creates a new builder for {@link AudioAttributes}.
* <p> *
* By default the content type is {@link C#CONTENT_TYPE_UNKNOWN}, usage is * <p>By default the content type is {@link C#CONTENT_TYPE_UNKNOWN}, usage is {@link
* {@link C#USAGE_MEDIA}, and no flags are set. * C#USAGE_MEDIA}, capture policy is {@link C#ALLOW_CAPTURE_BY_ALL} and no flags are set.
*/ */
public Builder() { public Builder() {
contentType = C.CONTENT_TYPE_UNKNOWN; contentType = C.CONTENT_TYPE_UNKNOWN;
flags = 0; flags = 0;
usage = C.USAGE_MEDIA; usage = C.USAGE_MEDIA;
allowedCapturePolicy = C.ALLOW_CAPTURE_BY_ALL;
} }
/** /**
@ -79,11 +82,15 @@ public final class AudioAttributes {
return this; return this;
} }
/** /** See {@link android.media.AudioAttributes.Builder#setAllowedCapturePolicy(int)}. */
* Creates an {@link AudioAttributes} instance from this builder. public Builder setAllowedCapturePolicy(@C.AudioAllowedCapturePolicy int allowedCapturePolicy) {
*/ this.allowedCapturePolicy = allowedCapturePolicy;
return this;
}
/** Creates an {@link AudioAttributes} instance from this builder. */
public AudioAttributes build() { public AudioAttributes build() {
return new AudioAttributes(contentType, flags, usage); return new AudioAttributes(contentType, flags, usage, allowedCapturePolicy);
} }
} }
@ -91,24 +98,38 @@ public final class AudioAttributes {
public final @C.AudioContentType int contentType; public final @C.AudioContentType int contentType;
public final @C.AudioFlags int flags; public final @C.AudioFlags int flags;
public final @C.AudioUsage int usage; public final @C.AudioUsage int usage;
public final @C.AudioAllowedCapturePolicy int allowedCapturePolicy;
private @Nullable android.media.AudioAttributes audioAttributesV21; @Nullable private android.media.AudioAttributes audioAttributesV21;
private AudioAttributes(@C.AudioContentType int contentType, @C.AudioFlags int flags, private AudioAttributes(
@C.AudioUsage int usage) { @C.AudioContentType int contentType,
@C.AudioFlags int flags,
@C.AudioUsage int usage,
@C.AudioAllowedCapturePolicy int allowedCapturePolicy) {
this.contentType = contentType; this.contentType = contentType;
this.flags = flags; this.flags = flags;
this.usage = usage; this.usage = usage;
this.allowedCapturePolicy = allowedCapturePolicy;
} }
/**
* Returns a {@link android.media.AudioAttributes} from this instance.
*
* <p>Field {@link AudioAttributes#allowedCapturePolicy} is ignored for API levels prior to 29.
*/
@TargetApi(21) @TargetApi(21)
public android.media.AudioAttributes getAudioAttributesV21() { public android.media.AudioAttributes getAudioAttributesV21() {
if (audioAttributesV21 == null) { if (audioAttributesV21 == null) {
audioAttributesV21 = new android.media.AudioAttributes.Builder() android.media.AudioAttributes.Builder builder =
.setContentType(contentType) new android.media.AudioAttributes.Builder()
.setFlags(flags) .setContentType(contentType)
.setUsage(usage) .setFlags(flags)
.build(); .setUsage(usage);
if (Util.SDK_INT >= 29) {
builder.setAllowedCapturePolicy(allowedCapturePolicy);
}
audioAttributesV21 = builder.build();
} }
return audioAttributesV21; return audioAttributesV21;
} }
@ -122,8 +143,10 @@ public final class AudioAttributes {
return false; return false;
} }
AudioAttributes other = (AudioAttributes) obj; AudioAttributes other = (AudioAttributes) obj;
return this.contentType == other.contentType && this.flags == other.flags return this.contentType == other.contentType
&& this.usage == other.usage; && this.flags == other.flags
&& this.usage == other.usage
&& this.allowedCapturePolicy == other.allowedCapturePolicy;
} }
@Override @Override
@ -132,6 +155,7 @@ public final class AudioAttributes {
result = 31 * result + contentType; result = 31 * result + contentType;
result = 31 * result + flags; result = 31 * result + flags;
result = 31 * result + usage; result = 31 * result + usage;
result = 31 * result + allowedCapturePolicy;
return result; return result;
} }
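A construction sketch for the new field (C.CONTENT_TYPE_MUSIC and C.ALLOW_CAPTURE_BY_NONE are assumed constants from ExoPlayer's C class, not shown in this hunk); as noted in getAudioAttributesV21(), the policy only takes effect on API 29+:

AudioAttributes audioAttributes =
    new AudioAttributes.Builder()
        .setContentType(C.CONTENT_TYPE_MUSIC)
        .setUsage(C.USAGE_MEDIA)
        .setAllowedCapturePolicy(C.ALLOW_CAPTURE_BY_NONE) // Ignored below API 29.
        .build();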

View File

@ -16,6 +16,8 @@
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
@ -23,24 +25,56 @@ import java.nio.ByteOrder;
* Interface for audio processors, which take audio data as input and transform it, potentially * Interface for audio processors, which take audio data as input and transform it, potentially
* modifying its channel count, encoding and/or sample rate. * modifying its channel count, encoding and/or sample rate.
* *
* <p>Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then
* call {@link #isActive()} to determine whether the processor is active in the new configuration.
* {@link #queueInput(ByteBuffer)}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()}
* and {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link
* #reset()} to reset the processor to its unconfigured state and release any resources.
*
* <p>In addition to being able to modify the format of audio, implementations may allow parameters * <p>In addition to being able to modify the format of audio, implementations may allow parameters
* to be set that affect the output audio and whether the processor is active/inactive. * to be set that affect the output audio and whether the processor is active/inactive.
*/ */
public interface AudioProcessor { public interface AudioProcessor {
/** Exception thrown when a processor can't be configured for a given input audio format. */ /** PCM audio format that may be handled by an audio processor. */
final class UnhandledFormatException extends Exception { final class AudioFormat {
public static final AudioFormat NOT_SET =
new AudioFormat(
/* sampleRate= */ Format.NO_VALUE,
/* channelCount= */ Format.NO_VALUE,
/* encoding= */ Format.NO_VALUE);
public UnhandledFormatException( /** The sample rate in Hertz. */
int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) { public final int sampleRate;
super("Unhandled format: " + sampleRateHz + " Hz, " + channelCount + " channels in encoding " /** The number of interleaved channels. */
+ encoding); public final int channelCount;
/** The type of linear PCM encoding. */
@C.PcmEncoding public final int encoding;
/** The number of bytes used to represent one audio frame. */
public final int bytesPerFrame;
public AudioFormat(int sampleRate, int channelCount, @C.PcmEncoding int encoding) {
this.sampleRate = sampleRate;
this.channelCount = channelCount;
this.encoding = encoding;
bytesPerFrame =
Util.isEncodingLinearPcm(encoding)
? Util.getPcmFrameSize(encoding, channelCount)
: Format.NO_VALUE;
}
@Override
public String toString() {
return "AudioFormat["
+ "sampleRate="
+ sampleRate
+ ", channelCount="
+ channelCount
+ ", encoding="
+ encoding
+ ']';
}
}
/** Exception thrown when a processor can't be configured for a given input audio format. */
final class UnhandledAudioFormatException extends Exception {
public UnhandledAudioFormatException(AudioFormat inputAudioFormat) {
super("Unhandled format: " + inputAudioFormat);
} }
} }
@ -50,47 +84,24 @@ public interface AudioProcessor {
/** /**
* Configures the processor to process input audio with the specified format. After calling this * Configures the processor to process input audio with the specified format. After calling this
* method, call {@link #isActive()} to determine whether the audio processor is active. * method, call {@link #isActive()} to determine whether the audio processor is active. Returns
* the configured output audio format if this instance is active.
* *
* <p>If the audio processor is active after configuration, call {@link #getOutputSampleRateHz()}, * <p>After calling this method, it is necessary to {@link #flush()} the processor to apply the
* {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} to get its new output format. * new configuration. Before applying the new configuration, it is safe to queue input and get
* output in the old input/output formats. Call {@link #queueEndOfStream()} when no more input
* will be supplied in the old input format.
* *
* <p>If this method returns {@code true}, it is necessary to {@link #flush()} the processor * @param inputAudioFormat The format of audio that will be queued after the next call to {@link
* before queueing more data, but you can (optionally) first drain output in the previous * #flush()}.
* configuration by calling {@link #queueEndOfStream()} and {@link #getOutput()}. If this method * @return The configured output audio format if this instance is {@link #isActive() active}.
* returns {@code false}, it is safe to queue new input immediately. * @throws UnhandledAudioFormatException Thrown if the specified format can't be handled as input.
*
* @param sampleRateHz The sample rate of input audio in Hz.
* @param channelCount The number of interleaved channels in input audio.
* @param encoding The encoding of input audio.
* @return Whether the processor must be {@link #flush() flushed} before queueing more input.
* @throws UnhandledFormatException Thrown if the specified format can't be handled as input.
*/ */
boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException;
throws UnhandledFormatException;
/** Returns whether the processor is configured and will process input buffers. */ /** Returns whether the processor is configured and will process input buffers. */
boolean isActive(); boolean isActive();
/**
* Returns the number of audio channels in the data output by the processor. The value may change
* as a result of calling {@link #configure(int, int, int)}.
*/
int getOutputChannelCount();
/**
* Returns the audio encoding used in the data output by the processor. The value may change as a
* result of calling {@link #configure(int, int, int)}.
*/
@C.PcmEncoding
int getOutputEncoding();
/**
* Returns the sample rate of audio output by the processor, in hertz. The value may change as a
* result of calling {@link #configure(int, int, int)}.
*/
int getOutputSampleRateHz();
/** /**
* Queues audio data between the position and limit of the input {@code buffer} for processing. * Queues audio data between the position and limit of the input {@code buffer} for processing.
* {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as * {@code buffer} must be a direct byte buffer with native byte order. Its contents are treated as
@ -132,6 +143,6 @@ public interface AudioProcessor {
*/ */
void flush(); void flush();
/** Resets the processor to its unconfigured state. */ /** Resets the processor to its unconfigured state, releasing any resources. */
void reset(); void reset();
} }
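A usage sketch of the reworked configure() contract, mirroring the loop DefaultAudioSink uses later in this diff (the processors array is a placeholder): each processor reports its output format, and only active processors change the format handed to the next one.

AudioProcessor.AudioFormat format =
    new AudioProcessor.AudioFormat(
        /* sampleRate= */ 44100, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT);
for (AudioProcessor processor : processors) {
  AudioProcessor.AudioFormat nextFormat;
  try {
    nextFormat = processor.configure(format);
  } catch (AudioProcessor.UnhandledAudioFormatException e) {
    throw new IllegalStateException(e);
  }
  if (processor.isActive()) {
    format = nextFormat;
  }
}
// 'format' now describes the audio leaving the last active processor once flush() is called.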

View File

@ -15,6 +15,8 @@
*/ */
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import static com.google.android.exoplayer2.util.Util.castNonNull;
import android.os.Handler; import android.os.Handler;
import android.os.SystemClock; import android.os.SystemClock;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@ -105,8 +107,8 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}. * Invokes {@link AudioRendererEventListener#onAudioEnabled(DecoderCounters)}.
*/ */
public void enabled(final DecoderCounters decoderCounters) { public void enabled(final DecoderCounters decoderCounters) {
if (listener != null) { if (handler != null) {
handler.post(() -> listener.onAudioEnabled(decoderCounters)); handler.post(() -> castNonNull(listener).onAudioEnabled(decoderCounters));
} }
} }
@ -115,11 +117,12 @@ public interface AudioRendererEventListener {
*/ */
public void decoderInitialized(final String decoderName, public void decoderInitialized(final String decoderName,
final long initializedTimestampMs, final long initializationDurationMs) { final long initializedTimestampMs, final long initializationDurationMs) {
if (listener != null) { if (handler != null) {
handler.post( handler.post(
() -> () ->
listener.onAudioDecoderInitialized( castNonNull(listener)
decoderName, initializedTimestampMs, initializationDurationMs)); .onAudioDecoderInitialized(
decoderName, initializedTimestampMs, initializationDurationMs));
} }
} }
@ -127,8 +130,8 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}. * Invokes {@link AudioRendererEventListener#onAudioInputFormatChanged(Format)}.
*/ */
public void inputFormatChanged(final Format format) { public void inputFormatChanged(final Format format) {
if (listener != null) { if (handler != null) {
handler.post(() -> listener.onAudioInputFormatChanged(format)); handler.post(() -> castNonNull(listener).onAudioInputFormatChanged(format));
} }
} }
@ -137,9 +140,11 @@ public interface AudioRendererEventListener {
*/ */
public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs, public void audioTrackUnderrun(final int bufferSize, final long bufferSizeMs,
final long elapsedSinceLastFeedMs) { final long elapsedSinceLastFeedMs) {
if (listener != null) { if (handler != null) {
handler.post( handler.post(
() -> listener.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs)); () ->
castNonNull(listener)
.onAudioSinkUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs));
} }
} }
@ -148,11 +153,11 @@ public interface AudioRendererEventListener {
*/ */
public void disabled(final DecoderCounters counters) { public void disabled(final DecoderCounters counters) {
counters.ensureUpdated(); counters.ensureUpdated();
if (listener != null) { if (handler != null) {
handler.post( handler.post(
() -> { () -> {
counters.ensureUpdated(); counters.ensureUpdated();
listener.onAudioDisabled(counters); castNonNull(listener).onAudioDisabled(counters);
}); });
} }
} }
@ -161,11 +166,9 @@ public interface AudioRendererEventListener {
* Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}. * Invokes {@link AudioRendererEventListener#onAudioSessionId(int)}.
*/ */
public void audioSessionId(final int audioSessionId) { public void audioSessionId(final int audioSessionId) {
if (listener != null) { if (handler != null) {
handler.post(() -> listener.onAudioSessionId(audioSessionId)); handler.post(() -> castNonNull(listener).onAudioSessionId(audioSessionId));
} }
} }
} }
} }
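The dispatch methods above now branch on handler rather than listener and use castNonNull. A sketch of the constructor invariant this relies on (assumed from the surrounding EventDispatcher class, which is not part of this hunk): a handler is only retained when a listener was supplied, so the two fields are null or non-null together.

public EventDispatcher(@Nullable Handler handler, @Nullable AudioRendererEventListener listener) {
  // Keeping the handler only when a listener exists is what makes castNonNull(listener)
  // safe whenever handler != null.
  this.handler = listener != null ? Assertions.checkNotNull(handler) : null;
  this.listener = listener;
}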

View File

@ -259,13 +259,12 @@ public interface AudioSink {
boolean hasPendingData(); boolean hasPendingData();
/** /**
* Attempts to set the playback parameters and returns the active playback parameters, which may * Attempts to set the playback parameters. The audio sink may override these parameters if they
* differ from those passed in. * are not supported.
* *
* @param playbackParameters The new playback parameters to attempt to set. * @param playbackParameters The new playback parameters to attempt to set.
* @return The active playback parameters.
*/ */
PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters); void setPlaybackParameters(PlaybackParameters playbackParameters);
/** /**
* Gets the active {@link PlaybackParameters}. * Gets the active {@link PlaybackParameters}.
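With this change the setter no longer reports the parameters the sink actually applies, so callers query the sink afterwards. A call-pattern sketch (the 1.5f speed is illustrative):

sink.setPlaybackParameters(new PlaybackParameters(/* speed= */ 1.5f));
PlaybackParameters active = sink.getPlaybackParameters(); // May be DEFAULT if the sink can't apply them.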

View File

@ -37,7 +37,7 @@ import java.lang.annotation.RetentionPolicy;
* *
* <p>If {@link #hasTimestamp()} returns {@code true}, call {@link #getTimestampSystemTimeUs()} to * <p>If {@link #hasTimestamp()} returns {@code true}, call {@link #getTimestampSystemTimeUs()} to
* get the system time at which the latest timestamp was sampled and {@link * get the system time at which the latest timestamp was sampled and {@link
* #getTimestampPositionFrames()} to get its position in frames. If {@link #isTimestampAdvancing()} * #getTimestampPositionFrames()} to get its position in frames. If {@link #hasAdvancingTimestamp()}
* returns {@code true}, the caller should assume that the timestamp has been increasing in real * returns {@code true}, the caller should assume that the timestamp has been increasing in real
* time since it was sampled. Otherwise, it may be stationary. * time since it was sampled. Otherwise, it may be stationary.
* *
@ -68,7 +68,7 @@ import java.lang.annotation.RetentionPolicy;
private static final int STATE_ERROR = 4; private static final int STATE_ERROR = 4;
/** The polling interval for {@link #STATE_INITIALIZING} and {@link #STATE_TIMESTAMP}. */ /** The polling interval for {@link #STATE_INITIALIZING} and {@link #STATE_TIMESTAMP}. */
private static final int FAST_POLL_INTERVAL_US = 5_000; private static final int FAST_POLL_INTERVAL_US = 10_000;
/** /**
* The polling interval for {@link #STATE_TIMESTAMP_ADVANCING} and {@link #STATE_NO_TIMESTAMP}. * The polling interval for {@link #STATE_TIMESTAMP_ADVANCING} and {@link #STATE_NO_TIMESTAMP}.
*/ */
@ -82,7 +82,7 @@ import java.lang.annotation.RetentionPolicy;
*/ */
private static final int INITIALIZING_DURATION_US = 500_000; private static final int INITIALIZING_DURATION_US = 500_000;
private final @Nullable AudioTimestampV19 audioTimestamp; @Nullable private final AudioTimestampV19 audioTimestamp;
private @State int state; private @State int state;
private long initializeSystemTimeUs; private long initializeSystemTimeUs;
@ -110,7 +110,7 @@ import java.lang.annotation.RetentionPolicy;
* timestamp is available via {@link #getTimestampSystemTimeUs()} and {@link * timestamp is available via {@link #getTimestampSystemTimeUs()} and {@link
* #getTimestampPositionFrames()}, and the caller should call {@link #acceptTimestamp()} if the * #getTimestampPositionFrames()}, and the caller should call {@link #acceptTimestamp()} if the
* timestamp was valid, or {@link #rejectTimestamp()} otherwise. The values returned by {@link * timestamp was valid, or {@link #rejectTimestamp()} otherwise. The values returned by {@link
* #hasTimestamp()} and {@link #isTimestampAdvancing()} may be updated. * #hasTimestamp()} and {@link #hasAdvancingTimestamp()} may be updated.
* *
* @param systemTimeUs The current system time, in microseconds. * @param systemTimeUs The current system time, in microseconds.
* @return Whether the timestamp was updated. * @return Whether the timestamp was updated.
@ -200,12 +200,12 @@ import java.lang.annotation.RetentionPolicy;
} }
/** /**
* Returns whether the timestamp appears to be advancing. If {@code true}, call {@link * Returns whether this instance has an advancing timestamp. If {@code true}, call {@link
* #getTimestampSystemTimeUs()} and {@link #getTimestampSystemTimeUs()} to access the timestamp. A * #getTimestampSystemTimeUs()} and {@link #getTimestampSystemTimeUs()} to access the timestamp. A
* current position for the track can be extrapolated based on elapsed real time since the system * current position for the track can be extrapolated based on elapsed real time since the system
* time at which the timestamp was sampled. * time at which the timestamp was sampled.
*/ */
public boolean isTimestampAdvancing() { public boolean hasAdvancingTimestamp() {
return state == STATE_TIMESTAMP_ADVANCING; return state == STATE_TIMESTAMP_ADVANCING;
} }
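A polling sketch built on the renamed query, mirroring how AudioTrackPositionTracker uses the poller later in this diff (the poll method name and framesToDurationUs are assumed helpers, not shown in this hunk):

long systemTimeUs = System.nanoTime() / 1000;
audioTimestampPoller.maybePollTimestamp(systemTimeUs); // Assumed poll method, see the javadoc above.
if (audioTimestampPoller.hasAdvancingTimestamp()) {
  long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
  long positionUs =
      framesToDurationUs(audioTimestampPoller.getTimestampPositionFrames()) + elapsedSinceTimestampUs;
  // positionUs is the speed-adjusted playback position extrapolated from the timestamp.
}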

View File

@ -123,6 +123,8 @@ import java.lang.reflect.Method;
* <p>This is a fail safe that should not be required on correctly functioning devices. * <p>This is a fail safe that should not be required on correctly functioning devices.
*/ */
private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND; private static final long MAX_LATENCY_US = 5 * C.MICROS_PER_SECOND;
/** The duration of time used to smooth over an adjustment between position sampling modes. */
private static final long MODE_SWITCH_SMOOTHING_DURATION_US = C.MICROS_PER_SECOND;
private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200; private static final long FORCE_RESET_WORKAROUND_TIMEOUT_MS = 200;
@ -133,10 +135,10 @@ import java.lang.reflect.Method;
private final Listener listener; private final Listener listener;
private final long[] playheadOffsets; private final long[] playheadOffsets;
private @Nullable AudioTrack audioTrack; @Nullable private AudioTrack audioTrack;
private int outputPcmFrameSize; private int outputPcmFrameSize;
private int bufferSize; private int bufferSize;
private @Nullable AudioTimestampPoller audioTimestampPoller; @Nullable private AudioTimestampPoller audioTimestampPoller;
private int outputSampleRate; private int outputSampleRate;
private boolean needsPassthroughWorkarounds; private boolean needsPassthroughWorkarounds;
private long bufferSizeUs; private long bufferSizeUs;
@ -144,7 +146,7 @@ import java.lang.reflect.Method;
private long smoothedPlayheadOffsetUs; private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs; private long lastPlayheadSampleTimeUs;
private @Nullable Method getLatencyMethod; @Nullable private Method getLatencyMethod;
private long latencyUs; private long latencyUs;
private boolean hasData; private boolean hasData;
@ -160,6 +162,15 @@ import java.lang.reflect.Method;
private long stopPlaybackHeadPosition; private long stopPlaybackHeadPosition;
private long endPlaybackHeadPosition; private long endPlaybackHeadPosition;
// Results from the previous call to getCurrentPositionUs.
private long lastPositionUs;
private long lastSystemTimeUs;
private boolean lastSampleUsedGetTimestampMode;
// Results from the last call to getCurrentPositionUs that used a different sample mode.
private long previousModePositionUs;
private long previousModeSystemTimeUs;
/** /**
* Creates a new audio track position tracker. * Creates a new audio track position tracker.
* *
@ -206,6 +217,7 @@ import java.lang.reflect.Method;
hasData = false; hasData = false;
stopTimestampUs = C.TIME_UNSET; stopTimestampUs = C.TIME_UNSET;
forceResetWorkaroundTimeMs = C.TIME_UNSET; forceResetWorkaroundTimeMs = C.TIME_UNSET;
lastLatencySampleTimeUs = 0;
latencyUs = 0; latencyUs = 0;
} }
@ -217,18 +229,16 @@ import java.lang.reflect.Method;
// If the device supports it, use the playback timestamp from AudioTrack.getTimestamp. // If the device supports it, use the playback timestamp from AudioTrack.getTimestamp.
// Otherwise, derive a smoothed position by sampling the track's frame position. // Otherwise, derive a smoothed position by sampling the track's frame position.
long systemTimeUs = System.nanoTime() / 1000; long systemTimeUs = System.nanoTime() / 1000;
long positionUs;
AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller); AudioTimestampPoller audioTimestampPoller = Assertions.checkNotNull(this.audioTimestampPoller);
if (audioTimestampPoller.hasTimestamp()) { boolean useGetTimestampMode = audioTimestampPoller.hasAdvancingTimestamp();
if (useGetTimestampMode) {
// Calculate the speed-adjusted position using the timestamp (which may be in the future). // Calculate the speed-adjusted position using the timestamp (which may be in the future).
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames(); long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames); long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
if (!audioTimestampPoller.isTimestampAdvancing()) {
return timestampPositionUs;
}
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs(); long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
return timestampPositionUs + elapsedSinceTimestampUs; positionUs = timestampPositionUs + elapsedSinceTimestampUs;
} else { } else {
long positionUs;
if (playheadOffsetCount == 0) { if (playheadOffsetCount == 0) {
// The AudioTrack has started, but we don't have any samples to compute a smoothed position. // The AudioTrack has started, but we don't have any samples to compute a smoothed position.
positionUs = getPlaybackHeadPositionUs(); positionUs = getPlaybackHeadPositionUs();
@ -239,10 +249,31 @@ import java.lang.reflect.Method;
positionUs = systemTimeUs + smoothedPlayheadOffsetUs; positionUs = systemTimeUs + smoothedPlayheadOffsetUs;
} }
if (!sourceEnded) { if (!sourceEnded) {
positionUs -= latencyUs; positionUs = Math.max(0, positionUs - latencyUs);
} }
return positionUs;
} }
if (lastSampleUsedGetTimestampMode != useGetTimestampMode) {
// We've switched sampling mode.
previousModeSystemTimeUs = lastSystemTimeUs;
previousModePositionUs = lastPositionUs;
}
long elapsedSincePreviousModeUs = systemTimeUs - previousModeSystemTimeUs;
if (elapsedSincePreviousModeUs < MODE_SWITCH_SMOOTHING_DURATION_US) {
// Use a ramp to smooth between the old mode and the new one to avoid introducing a sudden
// jump if the two modes disagree.
long previousModeProjectedPositionUs = previousModePositionUs + elapsedSincePreviousModeUs;
// A ramp consisting of 1000 points distributed over MODE_SWITCH_SMOOTHING_DURATION_US.
long rampPoint = (elapsedSincePreviousModeUs * 1000) / MODE_SWITCH_SMOOTHING_DURATION_US;
positionUs *= rampPoint;
positionUs += (1000 - rampPoint) * previousModeProjectedPositionUs;
positionUs /= 1000;
}
lastSystemTimeUs = systemTimeUs;
lastPositionUs = positionUs;
lastSampleUsedGetTimestampMode = useGetTimestampMode;
return positionUs;
} }
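A worked instance of the smoothing ramp above, with illustrative numbers: 250 ms into the one-second window the ramp point is 250, so the new sampling mode contributes 25% and the projected old-mode position contributes 75%.

long elapsedSincePreviousModeUs = 250_000;                                      // 250 ms after the switch.
long rampPoint = (elapsedSincePreviousModeUs * 1000) / 1_000_000;               // = 250 of 1000.
long newModePositionUs = 10_200_000;                                            // Position from the new mode.
long previousModeProjectedPositionUs = 10_000_000 + elapsedSincePreviousModeUs; // = 10_250_000.
long smoothedPositionUs =
    (rampPoint * newModePositionUs + (1000 - rampPoint) * previousModeProjectedPositionUs) / 1000;
// smoothedPositionUs == 10_237_500, i.e. 25% of the way from the old-mode projection to the new value.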
/** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */ /** Starts position tracking. Must be called immediately before {@link AudioTrack#play()}. */
@ -353,7 +384,7 @@ import java.lang.reflect.Method;
} }
/** /**
* Resets the position tracker. Should be called when the audio track previous passed to {@link * Resets the position tracker. Should be called when the audio track previously passed to {@link
* #setAudioTrack(AudioTrack, int, int, int)} is no longer in use. * #setAudioTrack(AudioTrack, int, int, int)} is no longer in use.
*/ */
public void reset() { public void reset() {
@ -457,6 +488,8 @@ import java.lang.reflect.Method;
playheadOffsetCount = 0; playheadOffsetCount = 0;
nextPlayheadOffsetIndex = 0; nextPlayheadOffsetIndex = 0;
lastPlayheadSampleTimeUs = 0; lastPlayheadSampleTimeUs = 0;
lastSystemTimeUs = 0;
previousModeSystemTimeUs = 0;
} }
/** /**

View File

@ -16,24 +16,23 @@
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import androidx.annotation.CallSuper; import androidx.annotation.CallSuper;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
/** /**
* Base class for audio processors that keep an output buffer and an internal buffer that is reused * Base class for audio processors that keep an output buffer and an internal buffer that is reused
* whenever input is queued. * whenever input is queued. Subclasses should override {@link #onConfigure(AudioFormat)} to return
* the output audio format for the processor if it's active.
*/ */
public abstract class BaseAudioProcessor implements AudioProcessor { public abstract class BaseAudioProcessor implements AudioProcessor {
/** The configured input sample rate, in Hertz, or {@link Format#NO_VALUE} if not configured. */ /** The current input audio format. */
protected int sampleRateHz; protected AudioFormat inputAudioFormat;
/** The configured input channel count, or {@link Format#NO_VALUE} if not configured. */ /** The current output audio format. */
protected int channelCount; protected AudioFormat outputAudioFormat;
/** The configured input encoding, or {@link Format#NO_VALUE} if not configured. */
@C.PcmEncoding protected int encoding;
private AudioFormat pendingInputAudioFormat;
private AudioFormat pendingOutputAudioFormat;
private ByteBuffer buffer; private ByteBuffer buffer;
private ByteBuffer outputBuffer; private ByteBuffer outputBuffer;
private boolean inputEnded; private boolean inputEnded;
@ -41,29 +40,23 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
public BaseAudioProcessor() { public BaseAudioProcessor() {
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
channelCount = Format.NO_VALUE; pendingInputAudioFormat = AudioFormat.NOT_SET;
sampleRateHz = Format.NO_VALUE; pendingOutputAudioFormat = AudioFormat.NOT_SET;
encoding = Format.NO_VALUE; inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
}
@Override
public final AudioFormat configure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
pendingInputAudioFormat = inputAudioFormat;
pendingOutputAudioFormat = onConfigure(inputAudioFormat);
return isActive() ? pendingOutputAudioFormat : AudioFormat.NOT_SET;
} }
@Override @Override
public boolean isActive() { public boolean isActive() {
return sampleRateHz != Format.NO_VALUE; return pendingOutputAudioFormat != AudioFormat.NOT_SET;
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return encoding;
}
@Override
public int getOutputSampleRateHz() {
return sampleRateHz;
} }
@Override @Override
@ -91,6 +84,8 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
public final void flush() { public final void flush() {
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
inputEnded = false; inputEnded = false;
inputAudioFormat = pendingInputAudioFormat;
outputAudioFormat = pendingOutputAudioFormat;
onFlush(); onFlush();
} }
@ -98,26 +93,13 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
public final void reset() { public final void reset() {
flush(); flush();
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
sampleRateHz = Format.NO_VALUE; pendingInputAudioFormat = AudioFormat.NOT_SET;
channelCount = Format.NO_VALUE; pendingOutputAudioFormat = AudioFormat.NOT_SET;
encoding = Format.NO_VALUE; inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
onReset(); onReset();
} }
/** Sets the input format of this processor, returning whether the input format has changed. */
protected final boolean setInputFormat(
int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) {
if (sampleRateHz == this.sampleRateHz
&& channelCount == this.channelCount
&& encoding == this.encoding) {
return false;
}
this.sampleRateHz = sampleRateHz;
this.channelCount = channelCount;
this.encoding = encoding;
return true;
}
/** /**
* Replaces the current output buffer with a buffer of at least {@code count} bytes and returns * Replaces the current output buffer with a buffer of at least {@code count} bytes and returns
* it. Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be * it. Callers should write to the returned buffer then {@link ByteBuffer#flip()} it so it can be
@ -138,6 +120,12 @@ public abstract class BaseAudioProcessor implements AudioProcessor {
return outputBuffer.hasRemaining(); return outputBuffer.hasRemaining();
} }
/** Called when the processor is configured for a new input format. */
protected AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledAudioFormatException {
return AudioFormat.NOT_SET;
}
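A minimal subclass sketch under this contract (a hypothetical pass-through processor, not part of this diff; imports of C and ByteBuffer as in the class above): declare the output format in onConfigure() and produce output in queueInput().

/* package */ final class PassthroughAudioProcessor extends BaseAudioProcessor {

  @Override
  protected AudioFormat onConfigure(AudioFormat inputAudioFormat)
      throws UnhandledAudioFormatException {
    if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
      throw new UnhandledAudioFormatException(inputAudioFormat);
    }
    return inputAudioFormat; // Active, with the same output format as the input.
  }

  @Override
  public void queueInput(ByteBuffer inputBuffer) {
    // Copy the pending input straight through to the output buffer.
    replaceOutputBuffer(inputBuffer.remaining()).put(inputBuffer).flip();
  }
}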
/** Called when the end-of-stream is queued to the processor. */ /** Called when the end-of-stream is queued to the processor. */
protected void onQueueEndOfStream() { protected void onQueueEndOfStream() {
// Do nothing. // Do nothing.

View File

@ -19,22 +19,20 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.util.Arrays;
/** /**
* An {@link AudioProcessor} that applies a mapping from input channels onto specified output * An {@link AudioProcessor} that applies a mapping from input channels onto specified output
* channels. This can be used to reorder, duplicate or discard channels. * channels. This can be used to reorder, duplicate or discard channels.
*/ */
@SuppressWarnings("nullness:initialization.fields.uninitialized")
/* package */ final class ChannelMappingAudioProcessor extends BaseAudioProcessor { /* package */ final class ChannelMappingAudioProcessor extends BaseAudioProcessor {
@Nullable private int[] pendingOutputChannels; @Nullable private int[] pendingOutputChannels;
private boolean active;
@Nullable private int[] outputChannels; @Nullable private int[] outputChannels;
/** /**
* Resets the channel mapping. After calling this method, call {@link #configure(int, int, int)} * Resets the channel mapping. After calling this method, call {@link #configure(AudioFormat)} to
* to start using the new channel map. * start using the new channel map.
* *
* @param outputChannels The mapping from input to output channel indices, or {@code null} to * @param outputChannels The mapping from input to output channel indices, or {@code null} to
* leave the input unchanged. * leave the input unchanged.
@ -45,42 +43,28 @@ import java.util.Arrays;
} }
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledFormatException { throws UnhandledAudioFormatException {
boolean outputChannelsChanged = !Arrays.equals(pendingOutputChannels, outputChannels); @Nullable int[] outputChannels = pendingOutputChannels;
outputChannels = pendingOutputChannels;
int[] outputChannels = this.outputChannels;
if (outputChannels == null) { if (outputChannels == null) {
active = false; return AudioFormat.NOT_SET;
return outputChannelsChanged;
}
if (encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (!outputChannelsChanged && !setInputFormat(sampleRateHz, channelCount, encoding)) {
return false;
} }
active = channelCount != outputChannels.length; if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
}
boolean active = inputAudioFormat.channelCount != outputChannels.length;
for (int i = 0; i < outputChannels.length; i++) { for (int i = 0; i < outputChannels.length; i++) {
int channelIndex = outputChannels[i]; int channelIndex = outputChannels[i];
if (channelIndex >= channelCount) { if (channelIndex >= inputAudioFormat.channelCount) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledAudioFormatException(inputAudioFormat);
} }
active |= (channelIndex != i); active |= (channelIndex != i);
} }
return true; return active
} ? new AudioFormat(inputAudioFormat.sampleRate, outputChannels.length, C.ENCODING_PCM_16BIT)
: AudioFormat.NOT_SET;
@Override
public boolean isActive() {
return active;
}
@Override
public int getOutputChannelCount() {
return outputChannels == null ? channelCount : outputChannels.length;
} }
@Override @Override
@ -88,24 +72,28 @@ import java.util.Arrays;
int[] outputChannels = Assertions.checkNotNull(this.outputChannels); int[] outputChannels = Assertions.checkNotNull(this.outputChannels);
int position = inputBuffer.position(); int position = inputBuffer.position();
int limit = inputBuffer.limit(); int limit = inputBuffer.limit();
int frameCount = (limit - position) / (2 * channelCount); int frameCount = (limit - position) / inputAudioFormat.bytesPerFrame;
int outputSize = frameCount * outputChannels.length * 2; int outputSize = frameCount * outputAudioFormat.bytesPerFrame;
ByteBuffer buffer = replaceOutputBuffer(outputSize); ByteBuffer buffer = replaceOutputBuffer(outputSize);
while (position < limit) { while (position < limit) {
for (int channelIndex : outputChannels) { for (int channelIndex : outputChannels) {
buffer.putShort(inputBuffer.getShort(position + 2 * channelIndex)); buffer.putShort(inputBuffer.getShort(position + 2 * channelIndex));
} }
position += channelCount * 2; position += inputAudioFormat.bytesPerFrame;
} }
inputBuffer.position(limit); inputBuffer.position(limit);
buffer.flip(); buffer.flip();
} }
@Override
protected void onFlush() {
outputChannels = pendingOutputChannels;
}
@Override @Override
protected void onReset() { protected void onReset() {
outputChannels = null; outputChannels = null;
pendingOutputChannels = null; pendingOutputChannels = null;
active = false;
} }
} }
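A usage sketch of the mapping (the processor is package-private, so in practice the map arrives via DefaultAudioSink's outputChannels parameter; the 48 kHz stereo format is illustrative, and the enclosing method is assumed to declare throws UnhandledAudioFormatException): swap the left and right channels of 16-bit stereo.

channelMappingAudioProcessor.setChannelMap(new int[] {1, 0}); // Output channel 0 <- input 1, 1 <- input 0.
AudioProcessor.AudioFormat outputFormat =
    channelMappingAudioProcessor.configure(
        new AudioProcessor.AudioFormat(
            /* sampleRate= */ 48000, /* channelCount= */ 2, /* encoding= */ C.ENCODING_PCM_16BIT));
channelMappingAudioProcessor.flush(); // Applies the pending map and format before new input is queued.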

View File

@ -27,6 +27,8 @@ import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.extractor.MpegAudioHeader;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
@ -118,9 +120,20 @@ public final class DefaultAudioSink implements AudioSink {
/** /**
* Creates a new default chain of audio processors, with the user-defined {@code * Creates a new default chain of audio processors, with the user-defined {@code
* audioProcessors} applied before silence skipping and playback parameters. * audioProcessors} applied before silence skipping and speed adjustment processors.
*/ */
public DefaultAudioProcessorChain(AudioProcessor... audioProcessors) { public DefaultAudioProcessorChain(AudioProcessor... audioProcessors) {
this(audioProcessors, new SilenceSkippingAudioProcessor(), new SonicAudioProcessor());
}
/**
* Creates a new default chain of audio processors, with the user-defined {@code
* audioProcessors} applied before silence skipping and speed adjustment processors.
*/
public DefaultAudioProcessorChain(
AudioProcessor[] audioProcessors,
SilenceSkippingAudioProcessor silenceSkippingAudioProcessor,
SonicAudioProcessor sonicAudioProcessor) {
// The passed-in type may be more specialized than AudioProcessor[], so allocate a new array // The passed-in type may be more specialized than AudioProcessor[], so allocate a new array
// rather than using Arrays.copyOf. // rather than using Arrays.copyOf.
this.audioProcessors = new AudioProcessor[audioProcessors.length + 2]; this.audioProcessors = new AudioProcessor[audioProcessors.length + 2];
@ -130,8 +143,8 @@ public final class DefaultAudioSink implements AudioSink {
/* dest= */ this.audioProcessors, /* dest= */ this.audioProcessors,
/* destPos= */ 0, /* destPos= */ 0,
/* length= */ audioProcessors.length); /* length= */ audioProcessors.length);
silenceSkippingAudioProcessor = new SilenceSkippingAudioProcessor(); this.silenceSkippingAudioProcessor = silenceSkippingAudioProcessor;
sonicAudioProcessor = new SonicAudioProcessor(); this.sonicAudioProcessor = sonicAudioProcessor;
this.audioProcessors[audioProcessors.length] = silenceSkippingAudioProcessor; this.audioProcessors[audioProcessors.length] = silenceSkippingAudioProcessor;
this.audioProcessors[audioProcessors.length + 1] = sonicAudioProcessor; this.audioProcessors[audioProcessors.length + 1] = sonicAudioProcessor;
} }
@ -236,7 +249,7 @@ public final class DefaultAudioSink implements AudioSink {
@Nullable private final AudioCapabilities audioCapabilities; @Nullable private final AudioCapabilities audioCapabilities;
private final AudioProcessorChain audioProcessorChain; private final AudioProcessorChain audioProcessorChain;
private final boolean enableConvertHighResIntPcmToFloat; private final boolean enableFloatOutput;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor; private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final TrimmingAudioProcessor trimmingAudioProcessor; private final TrimmingAudioProcessor trimmingAudioProcessor;
private final AudioProcessor[] toIntPcmAvailableAudioProcessors; private final AudioProcessor[] toIntPcmAvailableAudioProcessors;
@ -246,7 +259,7 @@ public final class DefaultAudioSink implements AudioSink {
private final ArrayDeque<PlaybackParametersCheckpoint> playbackParametersCheckpoints; private final ArrayDeque<PlaybackParametersCheckpoint> playbackParametersCheckpoints;
@Nullable private Listener listener; @Nullable private Listener listener;
/** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */ /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize(long)}). */
@Nullable private AudioTrack keepSessionIdAudioTrack; @Nullable private AudioTrack keepSessionIdAudioTrack;
@Nullable private Configuration pendingConfiguration; @Nullable private Configuration pendingConfiguration;
@ -297,7 +310,7 @@ public final class DefaultAudioSink implements AudioSink {
*/ */
public DefaultAudioSink( public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) { @Nullable AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors) {
this(audioCapabilities, audioProcessors, /* enableConvertHighResIntPcmToFloat= */ false); this(audioCapabilities, audioProcessors, /* enableFloatOutput= */ false);
} }
/** /**
@ -307,19 +320,16 @@ public final class DefaultAudioSink implements AudioSink {
* default capabilities (no encoded audio passthrough support) should be assumed. * default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
* output. May be empty. * output. May be empty.
* @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
* integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer * output will be used if the input is 32-bit float, and also if the input is high resolution
* audio processing (for example, speed and pitch adjustment) will not be available when float * (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
* output is in use. * be available when float output is in use.
*/ */
public DefaultAudioSink( public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities, @Nullable AudioCapabilities audioCapabilities,
AudioProcessor[] audioProcessors, AudioProcessor[] audioProcessors,
boolean enableConvertHighResIntPcmToFloat) { boolean enableFloatOutput) {
this( this(audioCapabilities, new DefaultAudioProcessorChain(audioProcessors), enableFloatOutput);
audioCapabilities,
new DefaultAudioProcessorChain(audioProcessors),
enableConvertHighResIntPcmToFloat);
} }
/** /**
@ -330,18 +340,18 @@ public final class DefaultAudioSink implements AudioSink {
* default capabilities (no encoded audio passthrough support) should be assumed. * default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback * @param audioProcessorChain An {@link AudioProcessorChain} which is used to apply playback
* parameters adjustments. The instance passed in must not be reused in other sinks. * parameters adjustments. The instance passed in must not be reused in other sinks.
* @param enableConvertHighResIntPcmToFloat Whether to enable conversion of high resolution * @param enableFloatOutput Whether to enable 32-bit float output. Where possible, 32-bit float
* integer PCM to 32-bit float for output, if possible. Functionality that uses 16-bit integer * output will be used if the input is 32-bit float, and also if the input is high resolution
* audio processing (for example, speed and pitch adjustment) will not be available when float * (24-bit or 32-bit) integer PCM. Audio processing (for example, speed adjustment) will not
* output is in use. * be available when float output is in use.
*/ */
public DefaultAudioSink( public DefaultAudioSink(
@Nullable AudioCapabilities audioCapabilities, @Nullable AudioCapabilities audioCapabilities,
AudioProcessorChain audioProcessorChain, AudioProcessorChain audioProcessorChain,
boolean enableConvertHighResIntPcmToFloat) { boolean enableFloatOutput) {
this.audioCapabilities = audioCapabilities; this.audioCapabilities = audioCapabilities;
this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain); this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
this.enableConvertHighResIntPcmToFloat = enableConvertHighResIntPcmToFloat; this.enableFloatOutput = enableFloatOutput;
releasingConditionVariable = new ConditionVariable(true); releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener()); audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
channelMappingAudioProcessor = new ChannelMappingAudioProcessor(); channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
@ -420,34 +430,34 @@ public final class DefaultAudioSink implements AudioSink {
} }
boolean isInputPcm = Util.isEncodingLinearPcm(inputEncoding); boolean isInputPcm = Util.isEncodingLinearPcm(inputEncoding);
boolean processingEnabled = isInputPcm && inputEncoding != C.ENCODING_PCM_FLOAT; boolean processingEnabled = isInputPcm;
int sampleRate = inputSampleRate; int sampleRate = inputSampleRate;
int channelCount = inputChannelCount; int channelCount = inputChannelCount;
@C.Encoding int encoding = inputEncoding; @C.Encoding int encoding = inputEncoding;
boolean shouldConvertHighResIntPcmToFloat = boolean useFloatOutput =
enableConvertHighResIntPcmToFloat enableFloatOutput
&& supportsOutput(inputChannelCount, C.ENCODING_PCM_FLOAT) && supportsOutput(inputChannelCount, C.ENCODING_PCM_FLOAT)
&& Util.isEncodingHighResolutionIntegerPcm(inputEncoding); && Util.isEncodingHighResolutionPcm(inputEncoding);
AudioProcessor[] availableAudioProcessors = AudioProcessor[] availableAudioProcessors =
shouldConvertHighResIntPcmToFloat useFloatOutput ? toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors;
? toFloatPcmAvailableAudioProcessors
: toIntPcmAvailableAudioProcessors;
boolean flushAudioProcessors = false;
if (processingEnabled) { if (processingEnabled) {
trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames); trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames);
channelMappingAudioProcessor.setChannelMap(outputChannels); channelMappingAudioProcessor.setChannelMap(outputChannels);
AudioProcessor.AudioFormat outputFormat =
new AudioProcessor.AudioFormat(sampleRate, channelCount, encoding);
for (AudioProcessor audioProcessor : availableAudioProcessors) { for (AudioProcessor audioProcessor : availableAudioProcessors) {
try { try {
flushAudioProcessors |= audioProcessor.configure(sampleRate, channelCount, encoding); AudioProcessor.AudioFormat nextFormat = audioProcessor.configure(outputFormat);
} catch (AudioProcessor.UnhandledFormatException e) { if (audioProcessor.isActive()) {
outputFormat = nextFormat;
}
} catch (UnhandledAudioFormatException e) {
throw new ConfigurationException(e); throw new ConfigurationException(e);
} }
if (audioProcessor.isActive()) {
channelCount = audioProcessor.getOutputChannelCount();
sampleRate = audioProcessor.getOutputSampleRateHz();
encoding = audioProcessor.getOutputEncoding();
}
} }
sampleRate = outputFormat.sampleRate;
channelCount = outputFormat.channelCount;
encoding = outputFormat.encoding;
} }
int outputChannelConfig = getChannelConfig(channelCount, isInputPcm); int outputChannelConfig = getChannelConfig(channelCount, isInputPcm);
@ -459,7 +469,7 @@ public final class DefaultAudioSink implements AudioSink {
isInputPcm ? Util.getPcmFrameSize(inputEncoding, inputChannelCount) : C.LENGTH_UNSET; isInputPcm ? Util.getPcmFrameSize(inputEncoding, inputChannelCount) : C.LENGTH_UNSET;
int outputPcmFrameSize = int outputPcmFrameSize =
isInputPcm ? Util.getPcmFrameSize(encoding, channelCount) : C.LENGTH_UNSET; isInputPcm ? Util.getPcmFrameSize(encoding, channelCount) : C.LENGTH_UNSET;
boolean canApplyPlaybackParameters = processingEnabled && !shouldConvertHighResIntPcmToFloat; boolean canApplyPlaybackParameters = processingEnabled && !useFloatOutput;
Configuration pendingConfiguration = Configuration pendingConfiguration =
new Configuration( new Configuration(
isInputPcm, isInputPcm,
@ -473,11 +483,7 @@ public final class DefaultAudioSink implements AudioSink {
processingEnabled, processingEnabled,
canApplyPlaybackParameters, canApplyPlaybackParameters,
availableAudioProcessors); availableAudioProcessors);
// If we have a pending configuration already, we always drain audio processors as the preceding if (isInitialized()) {
// configuration may have required it (even if this one doesn't).
boolean drainAudioProcessors = flushAudioProcessors || this.pendingConfiguration != null;
if (isInitialized()
&& (!pendingConfiguration.canReuseAudioTrack(configuration) || drainAudioProcessors)) {
this.pendingConfiguration = pendingConfiguration; this.pendingConfiguration = pendingConfiguration;
} else { } else {
configuration = pendingConfiguration; configuration = pendingConfiguration;
@ -832,17 +838,12 @@ public final class DefaultAudioSink implements AudioSink {
} }
@Override @Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) { public void setPlaybackParameters(PlaybackParameters playbackParameters) {
if (configuration != null && !configuration.canApplyPlaybackParameters) { if (configuration != null && !configuration.canApplyPlaybackParameters) {
this.playbackParameters = PlaybackParameters.DEFAULT; this.playbackParameters = PlaybackParameters.DEFAULT;
return this.playbackParameters; return;
} }
PlaybackParameters lastSetPlaybackParameters = PlaybackParameters lastSetPlaybackParameters = getPlaybackParameters();
afterDrainPlaybackParameters != null
? afterDrainPlaybackParameters
: !playbackParametersCheckpoints.isEmpty()
? playbackParametersCheckpoints.getLast().playbackParameters
: this.playbackParameters;
if (!playbackParameters.equals(lastSetPlaybackParameters)) { if (!playbackParameters.equals(lastSetPlaybackParameters)) {
if (isInitialized()) { if (isInitialized()) {
// Drain the audio processors so we can determine the frame position at which the new // Drain the audio processors so we can determine the frame position at which the new
@ -854,12 +855,16 @@ public final class DefaultAudioSink implements AudioSink {
this.playbackParameters = playbackParameters; this.playbackParameters = playbackParameters;
} }
} }
return this.playbackParameters;
} }
@Override @Override
public PlaybackParameters getPlaybackParameters() { public PlaybackParameters getPlaybackParameters() {
return playbackParameters; // Mask the already set parameters.
return afterDrainPlaybackParameters != null
? afterDrainPlaybackParameters
: !playbackParametersCheckpoints.isEmpty()
? playbackParametersCheckpoints.getLast().playbackParameters
: playbackParameters;
} }
@Override @Override
@ -1149,9 +1154,7 @@ public final class DefaultAudioSink implements AudioSink {
case C.ENCODING_PCM_24BIT: case C.ENCODING_PCM_24BIT:
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_A_LAW:
case C.ENCODING_PCM_FLOAT: case C.ENCODING_PCM_FLOAT:
case C.ENCODING_PCM_MU_LAW:
case Format.NO_VALUE: case Format.NO_VALUE:
default: default:
throw new IllegalArgumentException(); throw new IllegalArgumentException();
@ -1159,22 +1162,26 @@ public final class DefaultAudioSink implements AudioSink {
} }
private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) { private static int getFramesPerEncodedSample(@C.Encoding int encoding, ByteBuffer buffer) {
if (encoding == C.ENCODING_DTS || encoding == C.ENCODING_DTS_HD) { switch (encoding) {
return DtsUtil.parseDtsAudioSampleCount(buffer); case C.ENCODING_MP3:
} else if (encoding == C.ENCODING_AC3) { return MpegAudioHeader.getFrameSampleCount(buffer.get(buffer.position()));
return Ac3Util.getAc3SyncframeAudioSampleCount(); case C.ENCODING_DTS:
} else if (encoding == C.ENCODING_E_AC3 || encoding == C.ENCODING_E_AC3_JOC) { case C.ENCODING_DTS_HD:
return Ac3Util.parseEAc3SyncframeAudioSampleCount(buffer); return DtsUtil.parseDtsAudioSampleCount(buffer);
} else if (encoding == C.ENCODING_AC4) { case C.ENCODING_AC3:
return Ac4Util.parseAc4SyncframeAudioSampleCount(buffer); case C.ENCODING_E_AC3:
} else if (encoding == C.ENCODING_DOLBY_TRUEHD) { case C.ENCODING_E_AC3_JOC:
int syncframeOffset = Ac3Util.findTrueHdSyncframeOffset(buffer); return Ac3Util.parseAc3SyncframeAudioSampleCount(buffer);
return syncframeOffset == C.INDEX_UNSET case C.ENCODING_AC4:
? 0 return Ac4Util.parseAc4SyncframeAudioSampleCount(buffer);
: (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset) case C.ENCODING_DOLBY_TRUEHD:
* Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT); int syncframeOffset = Ac3Util.findTrueHdSyncframeOffset(buffer);
} else { return syncframeOffset == C.INDEX_UNSET
throw new IllegalStateException("Unexpected audio encoding: " + encoding); ? 0
: (Ac3Util.parseTrueHdSyncframeAudioSampleCount(buffer, syncframeOffset)
* Ac3Util.TRUEHD_RECHUNK_SAMPLE_COUNT);
default:
throw new IllegalStateException("Unexpected audio encoding: " + encoding);
} }
} }
@ -1226,7 +1233,6 @@ public final class DefaultAudioSink implements AudioSink {
audioTrack.setVolume(volume); audioTrack.setVolume(volume);
} }
@SuppressWarnings("deprecation")
private static void setVolumeInternalV3(AudioTrack audioTrack, float volume) { private static void setVolumeInternalV3(AudioTrack audioTrack, float volume) {
audioTrack.setStereoVolume(volume, volume); audioTrack.setStereoVolume(volume, volume);
} }
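A construction sketch using the renamed flag and the new processor-chain constructor (AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES and the empty processor array are assumed, illustrative defaults):

DefaultAudioSink audioSink =
    new DefaultAudioSink(
        AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES,
        new DefaultAudioSink.DefaultAudioProcessorChain(new AudioProcessor[0]),
        /* enableFloatOutput= */ true); // High-resolution integer and float PCM input is output as float.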

View File

@ -15,6 +15,7 @@
*/ */
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.drm.DrmInitData; import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
@ -80,7 +81,7 @@ public final class DtsUtil {
* @return The DTS format parsed from data in the header. * @return The DTS format parsed from data in the header.
*/ */
public static Format parseDtsFormat( public static Format parseDtsFormat(
byte[] frame, String trackId, String language, DrmInitData drmInitData) { byte[] frame, String trackId, @Nullable String language, @Nullable DrmInitData drmInitData) {
ParsableBitArray frameBits = getNormalizedFrameHeader(frame); ParsableBitArray frameBits = getNormalizedFrameHeader(frame);
frameBits.skipBits(32 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE frameBits.skipBits(32 + 1 + 5 + 1 + 7 + 14); // SYNC, FTYPE, SHORT, CPF, NBLKS, FSIZE
int amode = frameBits.readBits(6); int amode = frameBits.readBits(6);

View File

@ -16,12 +16,19 @@
package com.google.android.exoplayer2.audio; package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
/** /**
* An {@link AudioProcessor} that converts 24-bit and 32-bit integer PCM audio to 32-bit float PCM * An {@link AudioProcessor} that converts high resolution PCM audio to 32-bit float. The following
* audio. * encodings are supported as input:
*
* <ul>
* <li>{@link C#ENCODING_PCM_24BIT}
* <li>{@link C#ENCODING_PCM_32BIT}
* <li>{@link C#ENCODING_PCM_FLOAT} ({@link #isActive()} will return {@code false})
* </ul>
*/ */
/* package */ final class FloatResamplingAudioProcessor extends BaseAudioProcessor { /* package */ final class FloatResamplingAudioProcessor extends BaseAudioProcessor {
@ -29,50 +36,56 @@ import java.nio.ByteBuffer;
private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF; private static final double PCM_32_BIT_INT_TO_PCM_32_BIT_FLOAT_FACTOR = 1.0 / 0x7FFFFFFF;
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledFormatException { throws UnhandledAudioFormatException {
if (!Util.isEncodingHighResolutionIntegerPcm(encoding)) { @C.PcmEncoding int encoding = inputAudioFormat.encoding;
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); if (!Util.isEncodingHighResolutionPcm(encoding)) {
throw new UnhandledAudioFormatException(inputAudioFormat);
} }
return setInputFormat(sampleRateHz, channelCount, encoding); return encoding != C.ENCODING_PCM_FLOAT
} ? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_FLOAT)
@Override : AudioFormat.NOT_SET;
public boolean isActive() {
return Util.isEncodingHighResolutionIntegerPcm(encoding);
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_FLOAT;
} }
@Override @Override
public void queueInput(ByteBuffer inputBuffer) { public void queueInput(ByteBuffer inputBuffer) {
boolean isInput32Bit = encoding == C.ENCODING_PCM_32BIT;
int position = inputBuffer.position(); int position = inputBuffer.position();
int limit = inputBuffer.limit(); int limit = inputBuffer.limit();
int size = limit - position; int size = limit - position;
int resampledSize = isInput32Bit ? size : (size / 3) * 4; ByteBuffer buffer;
ByteBuffer buffer = replaceOutputBuffer(resampledSize); switch (inputAudioFormat.encoding) {
if (isInput32Bit) { case C.ENCODING_PCM_24BIT:
for (int i = position; i < limit; i += 4) { buffer = replaceOutputBuffer((size / 3) * 4);
int pcm32BitInteger = for (int i = position; i < limit; i += 3) {
(inputBuffer.get(i) & 0xFF) int pcm32BitInteger =
| ((inputBuffer.get(i + 1) & 0xFF) << 8) ((inputBuffer.get(i) & 0xFF) << 8)
| ((inputBuffer.get(i + 2) & 0xFF) << 16) | ((inputBuffer.get(i + 1) & 0xFF) << 16)
| ((inputBuffer.get(i + 3) & 0xFF) << 24); | ((inputBuffer.get(i + 2) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer); writePcm32BitFloat(pcm32BitInteger, buffer);
} }
} else { break;
for (int i = position; i < limit; i += 3) { case C.ENCODING_PCM_32BIT:
int pcm32BitInteger = buffer = replaceOutputBuffer(size);
((inputBuffer.get(i) & 0xFF) << 8) for (int i = position; i < limit; i += 4) {
| ((inputBuffer.get(i + 1) & 0xFF) << 16) int pcm32BitInteger =
| ((inputBuffer.get(i + 2) & 0xFF) << 24); (inputBuffer.get(i) & 0xFF)
writePcm32BitFloat(pcm32BitInteger, buffer); | ((inputBuffer.get(i + 1) & 0xFF) << 8)
} | ((inputBuffer.get(i + 2) & 0xFF) << 16)
| ((inputBuffer.get(i + 3) & 0xFF) << 24);
writePcm32BitFloat(pcm32BitInteger, buffer);
}
break;
case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
case C.ENCODING_PCM_FLOAT:
case C.ENCODING_INVALID:
case Format.NO_VALUE:
default:
// Never happens.
throw new IllegalStateException();
} }
inputBuffer.position(inputBuffer.limit()); inputBuffer.position(inputBuffer.limit());

View File

@ -0,0 +1,151 @@
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.PlaybackParameters;
import java.nio.ByteBuffer;
/** An overridable {@link AudioSink} implementation forwarding all methods to another sink. */
public class ForwardingAudioSink implements AudioSink {
private final AudioSink sink;
public ForwardingAudioSink(AudioSink sink) {
this.sink = sink;
}
@Override
public void setListener(Listener listener) {
sink.setListener(listener);
}
@Override
public boolean supportsOutput(int channelCount, int encoding) {
return sink.supportsOutput(channelCount, encoding);
}
@Override
public long getCurrentPositionUs(boolean sourceEnded) {
return sink.getCurrentPositionUs(sourceEnded);
}
@Override
public void configure(
int inputEncoding,
int inputChannelCount,
int inputSampleRate,
int specifiedBufferSize,
@Nullable int[] outputChannels,
int trimStartFrames,
int trimEndFrames)
throws ConfigurationException {
sink.configure(
inputEncoding,
inputChannelCount,
inputSampleRate,
specifiedBufferSize,
outputChannels,
trimStartFrames,
trimEndFrames);
}
@Override
public void play() {
sink.play();
}
@Override
public void handleDiscontinuity() {
sink.handleDiscontinuity();
}
@Override
public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs)
throws InitializationException, WriteException {
return sink.handleBuffer(buffer, presentationTimeUs);
}
@Override
public void playToEndOfStream() throws WriteException {
sink.playToEndOfStream();
}
@Override
public boolean isEnded() {
return sink.isEnded();
}
@Override
public boolean hasPendingData() {
return sink.hasPendingData();
}
@Override
public void setPlaybackParameters(PlaybackParameters playbackParameters) {
sink.setPlaybackParameters(playbackParameters);
}
@Override
public PlaybackParameters getPlaybackParameters() {
return sink.getPlaybackParameters();
}
@Override
public void setAudioAttributes(AudioAttributes audioAttributes) {
sink.setAudioAttributes(audioAttributes);
}
@Override
public void setAudioSessionId(int audioSessionId) {
sink.setAudioSessionId(audioSessionId);
}
@Override
public void setAuxEffectInfo(AuxEffectInfo auxEffectInfo) {
sink.setAuxEffectInfo(auxEffectInfo);
}
@Override
public void enableTunnelingV21(int tunnelingAudioSessionId) {
sink.enableTunnelingV21(tunnelingAudioSessionId);
}
@Override
public void disableTunneling() {
sink.disableTunneling();
}
@Override
public void setVolume(float volume) {
sink.setVolume(volume);
}
@Override
public void pause() {
sink.pause();
}
@Override
public void flush() {
sink.flush();
}
@Override
public void reset() {
sink.reset();
}
}
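As a quick illustration of the decorator pattern this new class enables (the subclass and the logging below are illustrative, not part of the change): forward every call to the wrapped sink and intercept only the ones you care about.

    import com.google.android.exoplayer2.audio.AudioSink;
    import com.google.android.exoplayer2.audio.ForwardingAudioSink;

    final class VolumeLoggingAudioSink extends ForwardingAudioSink {
      VolumeLoggingAudioSink(AudioSink delegate) {
        super(delegate);
      }

      @Override
      public void setVolume(float volume) {
        // Illustrative side effect; every other AudioSink call is forwarded unchanged.
        android.util.Log.d("VolumeLoggingAudioSink", "setVolume(" + volume + ")");
        super.setVolume(volume);
      }
    }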

View File
(MediaCodecAudioRenderer.java)

@ -28,18 +28,21 @@ import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException; import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.ExoPlayer; import com.google.android.exoplayer2.ExoPlayer;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher; import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSessionManager; import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.FrameworkMediaCrypto; import com.google.android.exoplayer2.drm.FrameworkMediaCrypto;
import com.google.android.exoplayer2.mediacodec.MediaCodecInfo; import com.google.android.exoplayer2.mediacodec.MediaCodecInfo;
import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer; import com.google.android.exoplayer2.mediacodec.MediaCodecRenderer;
import com.google.android.exoplayer2.mediacodec.MediaCodecSelector; import com.google.android.exoplayer2.mediacodec.MediaCodecSelector;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil;
import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException; import com.google.android.exoplayer2.mediacodec.MediaCodecUtil.DecoderQueryException;
import com.google.android.exoplayer2.mediacodec.MediaFormatUtil; import com.google.android.exoplayer2.mediacodec.MediaFormatUtil;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.MediaClock; import com.google.android.exoplayer2.util.MediaClock;
import com.google.android.exoplayer2.util.MimeTypes; import com.google.android.exoplayer2.util.MimeTypes;
@ -76,6 +79,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private static final int MAX_PENDING_STREAM_CHANGE_COUNT = 10; private static final int MAX_PENDING_STREAM_CHANGE_COUNT = 10;
private static final String TAG = "MediaCodecAudioRenderer"; private static final String TAG = "MediaCodecAudioRenderer";
/**
* Custom key used to indicate bits per sample by some decoders on Vivo devices. For example
* OMX.vivo.alac.decoder on the Vivo Z1 Pro.
*/
private static final String VIVO_BITS_PER_SAMPLE_KEY = "v-bits-per-sample";
private final Context context; private final Context context;
private final EventDispatcher eventDispatcher; private final EventDispatcher eventDispatcher;
@ -87,10 +95,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private boolean codecNeedsDiscardChannelsWorkaround; private boolean codecNeedsDiscardChannelsWorkaround;
private boolean codecNeedsEosBufferTimestampWorkaround; private boolean codecNeedsEosBufferTimestampWorkaround;
private android.media.MediaFormat passthroughMediaFormat; private android.media.MediaFormat passthroughMediaFormat;
private @C.Encoding int pcmEncoding; @Nullable private Format inputFormat;
private int channelCount;
private int encoderDelay;
private int encoderPadding;
private long currentPositionUs; private long currentPositionUs;
private boolean allowFirstBufferPositionDiscontinuity; private boolean allowFirstBufferPositionDiscontinuity;
private boolean allowPositionDiscontinuity; private boolean allowPositionDiscontinuity;
@ -101,6 +106,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param context A context. * @param context A context.
* @param mediaCodecSelector A decoder selector. * @param mediaCodecSelector A decoder selector.
*/ */
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) { public MediaCodecAudioRenderer(Context context, MediaCodecSelector mediaCodecSelector) {
this( this(
context, context,
@ -119,7 +125,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* begin in parallel with key acquisition. This parameter specifies whether the renderer is * begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager} * permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media. * has obtained the keys necessary to decrypt encrypted regions of the media.
* @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
* AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
* {@link MediaSource} factories.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -141,6 +152,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
*/ */
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -168,7 +180,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be * @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
* AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
* {@link MediaSource} factories.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -203,7 +220,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* default capabilities (no encoded audio passthrough support) should be assumed. * default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before * @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* output. * output.
* @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
* AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
* {@link MediaSource} factories.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -237,7 +259,12 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output. * @param audioSink The sink to which audio will be output.
* @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
* AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
* {@link MediaSource} factories.
*/ */
@Deprecated
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -257,6 +284,36 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
audioSink); audioSink);
} }
/**
* @param context A context.
* @param mediaCodecSelector A decoder selector.
* @param enableDecoderFallback Whether to enable fallback to lower-priority decoders if decoder
* initialization fails. This may result in using a decoder that is slower/less efficient than
* the primary decoder.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output.
*/
@SuppressWarnings("deprecation")
public MediaCodecAudioRenderer(
Context context,
MediaCodecSelector mediaCodecSelector,
boolean enableDecoderFallback,
@Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
AudioSink audioSink) {
this(
context,
mediaCodecSelector,
/* drmSessionManager= */ null,
/* playClearSamplesWithoutKeys= */ false,
enableDecoderFallback,
eventHandler,
eventListener,
audioSink);
}
/** /**
* @param context A context. * @param context A context.
* @param mediaCodecSelector A decoder selector. * @param mediaCodecSelector A decoder selector.
@ -274,7 +331,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* null if delivery of events is not required. * null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required. * @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output. * @param audioSink The sink to which audio will be output.
* @deprecated Use {@link #MediaCodecAudioRenderer(Context, MediaCodecSelector, boolean, Handler,
* AudioRendererEventListener, AudioSink)} instead, and pass DRM-related parameters to the
* {@link MediaSource} factories.
*/ */
@Deprecated
public MediaCodecAudioRenderer( public MediaCodecAudioRenderer(
Context context, Context context,
MediaCodecSelector mediaCodecSelector, MediaCodecSelector mediaCodecSelector,
@ -300,66 +361,65 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
@Override @Override
protected int supportsFormat(MediaCodecSelector mediaCodecSelector, @Capabilities
DrmSessionManager<FrameworkMediaCrypto> drmSessionManager, Format format) protected int supportsFormat(
MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
Format format)
throws DecoderQueryException { throws DecoderQueryException {
String mimeType = format.sampleMimeType; String mimeType = format.sampleMimeType;
if (!MimeTypes.isAudio(mimeType)) { if (!MimeTypes.isAudio(mimeType)) {
return FORMAT_UNSUPPORTED_TYPE; return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
} }
@TunnelingSupport
int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED; int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
boolean supportsFormatDrm = supportsFormatDrm(drmSessionManager, format.drmInitData); boolean supportsFormatDrm =
format.drmInitData == null
|| FrameworkMediaCrypto.class.equals(format.exoMediaCryptoType)
|| (format.exoMediaCryptoType == null
&& supportsFormatDrm(drmSessionManager, format.drmInitData));
if (supportsFormatDrm if (supportsFormatDrm
&& allowPassthrough(format.channelCount, mimeType) && allowPassthrough(format.channelCount, mimeType)
&& mediaCodecSelector.getPassthroughDecoderInfo() != null) { && mediaCodecSelector.getPassthroughDecoderInfo() != null) {
return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | FORMAT_HANDLED; return RendererCapabilities.create(FORMAT_HANDLED, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport);
} }
if ((MimeTypes.AUDIO_RAW.equals(mimeType) if ((MimeTypes.AUDIO_RAW.equals(mimeType)
&& !audioSink.supportsOutput(format.channelCount, format.pcmEncoding)) && !audioSink.supportsOutput(format.channelCount, format.pcmEncoding))
|| !audioSink.supportsOutput(format.channelCount, C.ENCODING_PCM_16BIT)) { || !audioSink.supportsOutput(format.channelCount, C.ENCODING_PCM_16BIT)) {
// Assume the decoder outputs 16-bit PCM, unless the input is raw. // Assume the decoder outputs 16-bit PCM, unless the input is raw.
return FORMAT_UNSUPPORTED_SUBTYPE; return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE);
}
boolean requiresSecureDecryption = false;
DrmInitData drmInitData = format.drmInitData;
if (drmInitData != null) {
for (int i = 0; i < drmInitData.schemeDataCount; i++) {
requiresSecureDecryption |= drmInitData.get(i).requiresSecureDecryption;
}
} }
List<MediaCodecInfo> decoderInfos = List<MediaCodecInfo> decoderInfos =
mediaCodecSelector.getDecoderInfos( getDecoderInfos(mediaCodecSelector, format, /* requiresSecureDecoder= */ false);
format.sampleMimeType, requiresSecureDecryption, /* requiresTunnelingDecoder= */ false);
if (decoderInfos.isEmpty()) { if (decoderInfos.isEmpty()) {
return requiresSecureDecryption return RendererCapabilities.create(FORMAT_UNSUPPORTED_SUBTYPE);
&& !mediaCodecSelector
.getDecoderInfos(
format.sampleMimeType,
/* requiresSecureDecoder= */ false,
/* requiresTunnelingDecoder= */ false)
.isEmpty()
? FORMAT_UNSUPPORTED_DRM
: FORMAT_UNSUPPORTED_SUBTYPE;
} }
if (!supportsFormatDrm) { if (!supportsFormatDrm) {
return FORMAT_UNSUPPORTED_DRM; return RendererCapabilities.create(FORMAT_UNSUPPORTED_DRM);
} }
// Check capabilities for the first decoder in the list, which takes priority. // Check capabilities for the first decoder in the list, which takes priority.
MediaCodecInfo decoderInfo = decoderInfos.get(0); MediaCodecInfo decoderInfo = decoderInfos.get(0);
boolean isFormatSupported = decoderInfo.isFormatSupported(format); boolean isFormatSupported = decoderInfo.isFormatSupported(format);
@AdaptiveSupport
int adaptiveSupport = int adaptiveSupport =
isFormatSupported && decoderInfo.isSeamlessAdaptationSupported(format) isFormatSupported && decoderInfo.isSeamlessAdaptationSupported(format)
? ADAPTIVE_SEAMLESS ? ADAPTIVE_SEAMLESS
: ADAPTIVE_NOT_SEAMLESS; : ADAPTIVE_NOT_SEAMLESS;
@FormatSupport
int formatSupport = isFormatSupported ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES; int formatSupport = isFormatSupported ? FORMAT_HANDLED : FORMAT_EXCEEDS_CAPABILITIES;
return adaptiveSupport | tunnelingSupport | formatSupport; return RendererCapabilities.create(formatSupport, adaptiveSupport, tunnelingSupport);
} }
@Override @Override
protected List<MediaCodecInfo> getDecoderInfos( protected List<MediaCodecInfo> getDecoderInfos(
MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder) MediaCodecSelector mediaCodecSelector, Format format, boolean requiresSecureDecoder)
throws DecoderQueryException { throws DecoderQueryException {
if (allowPassthrough(format.channelCount, format.sampleMimeType)) { @Nullable String mimeType = format.sampleMimeType;
if (mimeType == null) {
return Collections.emptyList();
}
if (allowPassthrough(format.channelCount, mimeType)) {
@Nullable
MediaCodecInfo passthroughDecoderInfo = mediaCodecSelector.getPassthroughDecoderInfo(); MediaCodecInfo passthroughDecoderInfo = mediaCodecSelector.getPassthroughDecoderInfo();
if (passthroughDecoderInfo != null) { if (passthroughDecoderInfo != null) {
return Collections.singletonList(passthroughDecoderInfo); return Collections.singletonList(passthroughDecoderInfo);
@ -367,8 +427,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
List<MediaCodecInfo> decoderInfos = List<MediaCodecInfo> decoderInfos =
mediaCodecSelector.getDecoderInfos( mediaCodecSelector.getDecoderInfos(
format.sampleMimeType, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false); mimeType, requiresSecureDecoder, /* requiresTunnelingDecoder= */ false);
if (MimeTypes.AUDIO_E_AC3_JOC.equals(format.sampleMimeType)) { decoderInfos = MediaCodecUtil.getDecoderInfosSortedByFormatSupport(decoderInfos, format);
if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) {
// E-AC3 decoders can decode JOC streams, but in 2-D rather than 3-D. // E-AC3 decoders can decode JOC streams, but in 2-D rather than 3-D.
List<MediaCodecInfo> decoderInfosWithEac3 = new ArrayList<>(decoderInfos); List<MediaCodecInfo> decoderInfosWithEac3 = new ArrayList<>(decoderInfos);
decoderInfosWithEac3.addAll( decoderInfosWithEac3.addAll(
@ -398,7 +459,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
MediaCodecInfo codecInfo, MediaCodecInfo codecInfo,
MediaCodec codec, MediaCodec codec,
Format format, Format format,
MediaCrypto crypto, @Nullable MediaCrypto crypto,
float codecOperatingRate) { float codecOperatingRate) {
codecMaxInputSize = getCodecMaxInputSize(codecInfo, format, getStreamFormats()); codecMaxInputSize = getCodecMaxInputSize(codecInfo, format, getStreamFormats());
codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name); codecNeedsDiscardChannelsWorkaround = codecNeedsDiscardChannelsWorkaround(codecInfo.name);
@ -434,14 +495,37 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} else if (codecInfo.isSeamlessAdaptationSupported( } else if (codecInfo.isSeamlessAdaptationSupported(
oldFormat, newFormat, /* isNewFormatComplete= */ true)) { oldFormat, newFormat, /* isNewFormatComplete= */ true)) {
return KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION; return KEEP_CODEC_RESULT_YES_WITHOUT_RECONFIGURATION;
} else if (areCodecConfigurationCompatible(oldFormat, newFormat)) { } else if (canKeepCodecWithFlush(oldFormat, newFormat)) {
return KEEP_CODEC_RESULT_YES_WITH_FLUSH; return KEEP_CODEC_RESULT_YES_WITH_FLUSH;
} else { } else {
return KEEP_CODEC_RESULT_NO; return KEEP_CODEC_RESULT_NO;
} }
} }
/**
* Returns whether the codec can be flushed and reused when switching to a new format. Reuse is
* generally possible when the codec would be configured in an identical way after the format
* change (excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come
* from the {@link Format}).
*
* @param oldFormat The first format.
* @param newFormat The second format.
* @return Whether the codec can be flushed and reused when switching to a new format.
*/
protected boolean canKeepCodecWithFlush(Format oldFormat, Format newFormat) {
// Flush and reuse the codec if the audio format and initialization data matches. For Opus, we
// don't flush and reuse the codec because the decoder may discard samples after flushing, which
// would result in audio being dropped just after a stream change (see [Internal: b/143450854]).
return Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType)
&& oldFormat.channelCount == newFormat.channelCount
&& oldFormat.sampleRate == newFormat.sampleRate
&& oldFormat.pcmEncoding == newFormat.pcmEncoding
&& oldFormat.initializationDataEquals(newFormat)
&& !MimeTypes.AUDIO_OPUS.equals(oldFormat.sampleMimeType);
}
@Override @Override
@Nullable
public MediaClock getMediaClock() { public MediaClock getMediaClock() {
return this; return this;
} }
@ -468,39 +552,37 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
@Override @Override
protected void onInputFormatChanged(Format newFormat) throws ExoPlaybackException { protected void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
super.onInputFormatChanged(newFormat); super.onInputFormatChanged(formatHolder);
eventDispatcher.inputFormatChanged(newFormat); inputFormat = formatHolder.format;
// If the input format is anything other than PCM then we assume that the audio decoder will eventDispatcher.inputFormatChanged(inputFormat);
// output 16-bit PCM.
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
channelCount = newFormat.channelCount;
encoderDelay = newFormat.encoderDelay;
encoderPadding = newFormat.encoderPadding;
} }
@Override @Override
protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputFormat) protected void onOutputFormatChanged(MediaCodec codec, MediaFormat outputMediaFormat)
throws ExoPlaybackException { throws ExoPlaybackException {
@C.Encoding int encoding; @C.Encoding int encoding;
MediaFormat format; MediaFormat mediaFormat;
if (passthroughMediaFormat != null) { if (passthroughMediaFormat != null) {
format = passthroughMediaFormat; mediaFormat = passthroughMediaFormat;
encoding = encoding =
getPassthroughEncoding( getPassthroughEncoding(
format.getInteger(MediaFormat.KEY_CHANNEL_COUNT), mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT),
format.getString(MediaFormat.KEY_MIME)); mediaFormat.getString(MediaFormat.KEY_MIME));
} else { } else {
format = outputFormat; mediaFormat = outputMediaFormat;
encoding = pcmEncoding; if (outputMediaFormat.containsKey(VIVO_BITS_PER_SAMPLE_KEY)) {
encoding = Util.getPcmEncoding(outputMediaFormat.getInteger(VIVO_BITS_PER_SAMPLE_KEY));
} else {
encoding = getPcmEncoding(inputFormat);
}
} }
int channelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); int channelCount = mediaFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); int sampleRate = mediaFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
int[] channelMap; int[] channelMap;
if (codecNeedsDiscardChannelsWorkaround && channelCount == 6 && this.channelCount < 6) { if (codecNeedsDiscardChannelsWorkaround && channelCount == 6 && inputFormat.channelCount < 6) {
channelMap = new int[this.channelCount]; channelMap = new int[inputFormat.channelCount];
for (int i = 0; i < this.channelCount; i++) { for (int i = 0; i < inputFormat.channelCount; i++) {
channelMap[i] = i; channelMap[i] = i;
} }
} else { } else {
@ -508,10 +590,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
try { try {
audioSink.configure(encoding, channelCount, sampleRate, 0, channelMap, encoderDelay, audioSink.configure(
encoderPadding); encoding,
channelCount,
sampleRate,
0,
channelMap,
inputFormat.encoderDelay,
inputFormat.encoderPadding);
} catch (AudioSink.ConfigurationException e) { } catch (AudioSink.ConfigurationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); // TODO(internal: b/145658993) Use outputFormat instead.
throw createRendererException(e, inputFormat);
} }
} }
@ -522,7 +611,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@C.Encoding @C.Encoding
protected int getPassthroughEncoding(int channelCount, String mimeType) { protected int getPassthroughEncoding(int channelCount, String mimeType) {
if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) { if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) {
if (audioSink.supportsOutput(channelCount, C.ENCODING_E_AC3_JOC)) { // E-AC3 JOC is object-based so the output channel count is arbitrary.
if (audioSink.supportsOutput(/* channelCount= */ Format.NO_VALUE, C.ENCODING_E_AC3_JOC)) {
return MimeTypes.getEncoding(MimeTypes.AUDIO_E_AC3_JOC); return MimeTypes.getEncoding(MimeTypes.AUDIO_E_AC3_JOC);
} }
// E-AC3 receivers can decode JOC streams, but in 2-D rather than 3-D, so try to fall back. // E-AC3 receivers can decode JOC streams, but in 2-D rather than 3-D, so try to fall back.
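The JOC change above passes Format.NO_VALUE as the channel count because object-based audio has no fixed channel count. A rough, hedged sketch of the selection idea (not the renderer's actual method; the names are illustrative, and a real implementation would also re-check sink support for the fallback encoding):

    import com.google.android.exoplayer2.C;
    import com.google.android.exoplayer2.Format;
    import com.google.android.exoplayer2.audio.AudioSink;
    import com.google.android.exoplayer2.util.MimeTypes;

    final class PassthroughEncodingSketch {
      // Prefer E-AC3 JOC when the sink accepts it for an arbitrary channel count; otherwise
      // fall back to plain (2-D) E-AC3, as the surrounding comments describe.
      static int passthroughEncodingFor(AudioSink sink, String mimeType) {
        if (MimeTypes.AUDIO_E_AC3_JOC.equals(mimeType)) {
          return sink.supportsOutput(/* channelCount= */ Format.NO_VALUE, C.ENCODING_E_AC3_JOC)
              ? C.ENCODING_E_AC3_JOC
              : C.ENCODING_E_AC3;
        }
        return MimeTypes.getEncoding(mimeType);
      }
    }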
@ -659,8 +749,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
@Override @Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) { public void setPlaybackParameters(PlaybackParameters playbackParameters) {
return audioSink.setPlaybackParameters(playbackParameters); audioSink.setPlaybackParameters(playbackParameters);
} }
@Override @Override
@ -737,7 +827,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return true; return true;
} }
} catch (AudioSink.InitializationException | AudioSink.WriteException e) { } catch (AudioSink.InitializationException | AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); // TODO(internal: b/145658993) Use outputFormat instead.
throw createRendererException(e, inputFormat);
} }
return false; return false;
} }
@ -747,7 +838,8 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
try { try {
audioSink.playToEndOfStream(); audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) { } catch (AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); // TODO(internal: b/145658993) Use outputFormat instead.
throw createRendererException(e, inputFormat);
} }
} }
@ -776,7 +868,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
* will allow possible adaptation to other compatible formats in {@code streamFormats}. * will allow possible adaptation to other compatible formats in {@code streamFormats}.
* *
* @param codecInfo A {@link MediaCodecInfo} describing the decoder. * @param codecInfo A {@link MediaCodecInfo} describing the decoder.
* @param format The format for which the codec is being configured. * @param format The {@link Format} for which the codec is being configured.
* @param streamFormats The possible stream formats. * @param streamFormats The possible stream formats.
* @return A suitable maximum input size. * @return A suitable maximum input size.
*/ */
@ -798,10 +890,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} }
/** /**
* Returns a maximum input buffer size for a given format. * Returns a maximum input buffer size for a given {@link Format}.
* *
* @param codecInfo A {@link MediaCodecInfo} describing the decoder. * @param codecInfo A {@link MediaCodecInfo} describing the decoder.
* @param format The format. * @param format The {@link Format}.
* @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not * @return A maximum input buffer size in bytes, or {@link Format#NO_VALUE} if a maximum could not
* be determined. * be determined.
*/ */
@ -818,34 +910,16 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
return format.maxInputSize; return format.maxInputSize;
} }
/**
* Returns whether two {@link Format}s will cause the same codec to be configured in an identical
* way, excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come from
* the {@link Format}.
*
* @param oldFormat The first format.
* @param newFormat The second format.
* @return Whether the two formats will cause a codec to be configured in an identical way,
* excluding {@link MediaFormat#KEY_MAX_INPUT_SIZE} and configuration that does not come from
* the {@link Format}.
*/
protected boolean areCodecConfigurationCompatible(Format oldFormat, Format newFormat) {
return Util.areEqual(oldFormat.sampleMimeType, newFormat.sampleMimeType)
&& oldFormat.channelCount == newFormat.channelCount
&& oldFormat.sampleRate == newFormat.sampleRate
&& oldFormat.initializationDataEquals(newFormat);
}
/** /**
* Returns the framework {@link MediaFormat} that can be used to configure a {@link MediaCodec} * Returns the framework {@link MediaFormat} that can be used to configure a {@link MediaCodec}
* for decoding the given {@link Format} for playback. * for decoding the given {@link Format} for playback.
* *
* @param format The format of the media. * @param format The {@link Format} of the media.
* @param codecMimeType The MIME type handled by the codec. * @param codecMimeType The MIME type handled by the codec.
* @param codecMaxInputSize The maximum input size supported by the codec. * @param codecMaxInputSize The maximum input size supported by the codec.
* @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if * @param codecOperatingRate The codec operating rate, or {@link #CODEC_OPERATING_RATE_UNSET} if
* no codec operating rate should be set. * no codec operating rate should be set.
* @return The framework media format. * @return The framework {@link MediaFormat}.
*/ */
@SuppressLint("InlinedApi") @SuppressLint("InlinedApi")
protected MediaFormat getMediaFormat( protected MediaFormat getMediaFormat(
@ -927,6 +1001,15 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
|| Util.DEVICE.startsWith("ms01")); || Util.DEVICE.startsWith("ms01"));
} }
@C.Encoding
private static int getPcmEncoding(Format format) {
// If the format is anything other than PCM then we assume that the audio decoder will output
// 16-bit PCM.
return MimeTypes.AUDIO_RAW.equals(format.sampleMimeType)
? format.pcmEncoding
: C.ENCODING_PCM_16BIT;
}
private final class AudioSinkListener implements AudioSink.Listener { private final class AudioSinkListener implements AudioSink.Listener {
@Override @Override

View File
(ResamplingAudioProcessor.java)

@ -20,29 +20,36 @@ import com.google.android.exoplayer2.Format;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
/** /**
* An {@link AudioProcessor} that converts 8-bit, 24-bit and 32-bit integer PCM audio to 16-bit * An {@link AudioProcessor} that converts different PCM audio encodings to 16-bit integer PCM. The
* integer PCM audio. * following encodings are supported as input:
*
* <ul>
* <li>{@link C#ENCODING_PCM_8BIT}
* <li>{@link C#ENCODING_PCM_16BIT} ({@link #isActive()} will return {@code false})
* <li>{@link C#ENCODING_PCM_16BIT_BIG_ENDIAN}
* <li>{@link C#ENCODING_PCM_24BIT}
* <li>{@link C#ENCODING_PCM_32BIT}
* <li>{@link C#ENCODING_PCM_FLOAT}
* </ul>
*/ */
/* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor { /* package */ final class ResamplingAudioProcessor extends BaseAudioProcessor {
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledFormatException { throws UnhandledAudioFormatException {
if (encoding != C.ENCODING_PCM_8BIT && encoding != C.ENCODING_PCM_16BIT @C.PcmEncoding int encoding = inputAudioFormat.encoding;
&& encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) { if (encoding != C.ENCODING_PCM_8BIT
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); && encoding != C.ENCODING_PCM_16BIT
&& encoding != C.ENCODING_PCM_16BIT_BIG_ENDIAN
&& encoding != C.ENCODING_PCM_24BIT
&& encoding != C.ENCODING_PCM_32BIT
&& encoding != C.ENCODING_PCM_FLOAT) {
throw new UnhandledAudioFormatException(inputAudioFormat);
} }
return setInputFormat(sampleRateHz, channelCount, encoding); return encoding != C.ENCODING_PCM_16BIT
} ? new AudioFormat(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT)
@Override : AudioFormat.NOT_SET;
public boolean isActive() {
return encoding != C.ENCODING_INVALID && encoding != C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
} }
@Override @Override
@ -52,20 +59,21 @@ import java.nio.ByteBuffer;
int limit = inputBuffer.limit(); int limit = inputBuffer.limit();
int size = limit - position; int size = limit - position;
int resampledSize; int resampledSize;
switch (encoding) { switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
resampledSize = size * 2; resampledSize = size * 2;
break; break;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
resampledSize = size;
break;
case C.ENCODING_PCM_24BIT: case C.ENCODING_PCM_24BIT:
resampledSize = (size / 3) * 2; resampledSize = (size / 3) * 2;
break; break;
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
case C.ENCODING_PCM_FLOAT:
resampledSize = size / 2; resampledSize = size / 2;
break; break;
case C.ENCODING_PCM_16BIT: case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:
case C.ENCODING_PCM_A_LAW:
case C.ENCODING_PCM_MU_LAW:
case C.ENCODING_INVALID: case C.ENCODING_INVALID:
case Format.NO_VALUE: case Format.NO_VALUE:
default: default:
@ -74,32 +82,45 @@ import java.nio.ByteBuffer;
// Resample the little endian input and update the input/output buffers. // Resample the little endian input and update the input/output buffers.
ByteBuffer buffer = replaceOutputBuffer(resampledSize); ByteBuffer buffer = replaceOutputBuffer(resampledSize);
switch (encoding) { switch (inputAudioFormat.encoding) {
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
// 8->16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up. // 8 -> 16 bit resampling. Shift each byte from [0, 256) to [-128, 128) and scale up.
for (int i = position; i < limit; i++) { for (int i = position; i < limit; i++) {
buffer.put((byte) 0); buffer.put((byte) 0);
buffer.put((byte) ((inputBuffer.get(i) & 0xFF) - 128)); buffer.put((byte) ((inputBuffer.get(i) & 0xFF) - 128));
} }
break; break;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN:
// Big endian to little endian resampling. Swap the byte order.
for (int i = position; i < limit; i += 2) {
buffer.put(inputBuffer.get(i + 1));
buffer.put(inputBuffer.get(i));
}
break;
case C.ENCODING_PCM_24BIT: case C.ENCODING_PCM_24BIT:
// 24->16 bit resampling. Drop the least significant byte. // 24 -> 16 bit resampling. Drop the least significant byte.
for (int i = position; i < limit; i += 3) { for (int i = position; i < limit; i += 3) {
buffer.put(inputBuffer.get(i + 1)); buffer.put(inputBuffer.get(i + 1));
buffer.put(inputBuffer.get(i + 2)); buffer.put(inputBuffer.get(i + 2));
} }
break; break;
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
// 32->16 bit resampling. Drop the two least significant bytes. // 32 -> 16 bit resampling. Drop the two least significant bytes.
for (int i = position; i < limit; i += 4) { for (int i = position; i < limit; i += 4) {
buffer.put(inputBuffer.get(i + 2)); buffer.put(inputBuffer.get(i + 2));
buffer.put(inputBuffer.get(i + 3)); buffer.put(inputBuffer.get(i + 3));
} }
break; break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT: case C.ENCODING_PCM_FLOAT:
case C.ENCODING_PCM_A_LAW: // 32 bit floating point -> 16 bit resampling. Floating point values are in the range
case C.ENCODING_PCM_MU_LAW: // [-1.0, 1.0], so need to be scaled by Short.MAX_VALUE.
for (int i = position; i < limit; i += 4) {
short value = (short) (inputBuffer.getFloat(i) * Short.MAX_VALUE);
buffer.put((byte) (value & 0xFF));
buffer.put((byte) ((value >> 8) & 0xFF));
}
break;
case C.ENCODING_PCM_16BIT:
case C.ENCODING_INVALID: case C.ENCODING_INVALID:
case Format.NO_VALUE: case Format.NO_VALUE:
default: default:
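The new float branch above scales each sample in [-1.0, 1.0] by Short.MAX_VALUE and writes it little-endian. A standalone sketch of just that step (class and method names are illustrative):

    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;

    final class FloatTo16BitSketch {
      // Scale each little-endian 32-bit float sample by Short.MAX_VALUE and emit the
      // resulting 16-bit sample in little-endian byte order, as the branch above does.
      static void resample(ByteBuffer input, ByteBuffer output) {
        for (int i = input.position(); i < input.limit(); i += 4) {
          short value = (short) (input.getFloat(i) * Short.MAX_VALUE);
          output.put((byte) (value & 0xFF));
          output.put((byte) ((value >> 8) & 0xFF));
        }
      }

      public static void main(String[] args) {
        ByteBuffer input = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN);
        input.putFloat(0, 0.5f);
        ByteBuffer output = ByteBuffer.allocate(2);
        resample(input, output);
        // Prints roughly half of Short.MAX_VALUE (16383).
        System.out.println((output.get(0) & 0xFF) | (output.get(1) << 8));
      }
    }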

View File
(SilenceSkippingAudioProcessor.java)

@ -17,11 +17,13 @@ package com.google.android.exoplayer2.audio;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder;
/** /**
* An {@link AudioProcessor} that skips silence in the input stream. Input and output are 16-bit * An {@link AudioProcessor} that skips silence in the input stream. Input and output are 16-bit
@ -30,27 +32,20 @@ import java.nio.ByteBuffer;
public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor { public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
/** /**
* The minimum duration of audio that must be below {@link #SILENCE_THRESHOLD_LEVEL} to classify * The default value for {@link #SilenceSkippingAudioProcessor(long, long, short)
* that part of audio as silent, in microseconds. * minimumSilenceDurationUs}.
*/ */
private static final long MINIMUM_SILENCE_DURATION_US = 150_000; public static final long DEFAULT_MINIMUM_SILENCE_DURATION_US = 150_000;
/** /**
* The duration of silence by which to extend non-silent sections, in microseconds. The value must * The default value for {@link #SilenceSkippingAudioProcessor(long, long, short)
* not exceed {@link #MINIMUM_SILENCE_DURATION_US}. * paddingSilenceUs}.
*/ */
private static final long PADDING_SILENCE_US = 20_000; public static final long DEFAULT_PADDING_SILENCE_US = 20_000;
/** /**
* The absolute level below which an individual PCM sample is classified as silent. Note: the * The default value for {@link #SilenceSkippingAudioProcessor(long, long, short)
* specified value will be rounded so that the threshold check only depends on the more * silenceThresholdLevel}.
* significant byte, for efficiency.
*/ */
private static final short SILENCE_THRESHOLD_LEVEL = 1024; public static final short DEFAULT_SILENCE_THRESHOLD_LEVEL = 1024;
/**
* Threshold for classifying an individual PCM sample as silent based on its more significant
* byte. This is {@link #SILENCE_THRESHOLD_LEVEL} divided by 256 with rounding.
*/
private static final byte SILENCE_THRESHOLD_LEVEL_MSB = (SILENCE_THRESHOLD_LEVEL + 128) >> 8;
/** Trimming states. */ /** Trimming states. */
@Documented @Documented
@ -68,8 +63,10 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
/** State when the input is silent. */ /** State when the input is silent. */
private static final int STATE_SILENT = 2; private static final int STATE_SILENT = 2;
private final long minimumSilenceDurationUs;
private final long paddingSilenceUs;
private final short silenceThresholdLevel;
private int bytesPerFrame; private int bytesPerFrame;
private boolean enabled; private boolean enabled;
/** /**
@ -91,21 +88,44 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
private boolean hasOutputNoise; private boolean hasOutputNoise;
private long skippedFrames; private long skippedFrames;
/** Creates a new silence trimming audio processor. */ /** Creates a new silence skipping audio processor. */
public SilenceSkippingAudioProcessor() { public SilenceSkippingAudioProcessor() {
this(
DEFAULT_MINIMUM_SILENCE_DURATION_US,
DEFAULT_PADDING_SILENCE_US,
DEFAULT_SILENCE_THRESHOLD_LEVEL);
}
/**
* Creates a new silence skipping audio processor.
*
* @param minimumSilenceDurationUs The minimum duration of audio that must be below {@code
* silenceThresholdLevel} to classify that part of audio as silent, in microseconds.
* @param paddingSilenceUs The duration of silence by which to extend non-silent sections, in
* microseconds. The value must not exceed {@code minimumSilenceDurationUs}.
* @param silenceThresholdLevel The absolute level below which an individual PCM sample is
* classified as silent.
*/
public SilenceSkippingAudioProcessor(
long minimumSilenceDurationUs, long paddingSilenceUs, short silenceThresholdLevel) {
Assertions.checkArgument(paddingSilenceUs <= minimumSilenceDurationUs);
this.minimumSilenceDurationUs = minimumSilenceDurationUs;
this.paddingSilenceUs = paddingSilenceUs;
this.silenceThresholdLevel = silenceThresholdLevel;
maybeSilenceBuffer = Util.EMPTY_BYTE_ARRAY; maybeSilenceBuffer = Util.EMPTY_BYTE_ARRAY;
paddingBuffer = Util.EMPTY_BYTE_ARRAY; paddingBuffer = Util.EMPTY_BYTE_ARRAY;
} }
/** /**
* Sets whether to skip silence in the input. Calling this method will discard any data buffered * Sets whether to skip silence in the input. This method may only be called after draining data
* within the processor, and may update the value returned by {@link #isActive()}. * through the processor. The value returned by {@link #isActive()} may change, and the processor
* must be {@link #flush() flushed} before queueing more data.
* *
* @param enabled Whether to skip silence in the input. * @param enabled Whether to skip silence in the input.
*/ */
public void setEnabled(boolean enabled) { public void setEnabled(boolean enabled) {
this.enabled = enabled; this.enabled = enabled;
flush();
} }
/** /**
@ -119,18 +139,17 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
// AudioProcessor implementation. // AudioProcessor implementation.
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledFormatException { throws UnhandledAudioFormatException {
if (encoding != C.ENCODING_PCM_16BIT) { if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledAudioFormatException(inputAudioFormat);
} }
bytesPerFrame = channelCount * 2; return enabled ? inputAudioFormat : AudioFormat.NOT_SET;
return setInputFormat(sampleRateHz, channelCount, encoding);
} }
@Override @Override
public boolean isActive() { public boolean isActive() {
return super.isActive() && enabled; return enabled;
} }
@Override @Override
@ -165,12 +184,13 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
@Override @Override
protected void onFlush() { protected void onFlush() {
if (isActive()) { if (enabled) {
int maybeSilenceBufferSize = durationUsToFrames(MINIMUM_SILENCE_DURATION_US) * bytesPerFrame; bytesPerFrame = inputAudioFormat.bytesPerFrame;
int maybeSilenceBufferSize = durationUsToFrames(minimumSilenceDurationUs) * bytesPerFrame;
if (maybeSilenceBuffer.length != maybeSilenceBufferSize) { if (maybeSilenceBuffer.length != maybeSilenceBufferSize) {
maybeSilenceBuffer = new byte[maybeSilenceBufferSize]; maybeSilenceBuffer = new byte[maybeSilenceBufferSize];
} }
paddingSize = durationUsToFrames(PADDING_SILENCE_US) * bytesPerFrame; paddingSize = durationUsToFrames(paddingSilenceUs) * bytesPerFrame;
if (paddingBuffer.length != paddingSize) { if (paddingBuffer.length != paddingSize) {
paddingBuffer = new byte[paddingSize]; paddingBuffer = new byte[paddingSize];
} }
@ -317,7 +337,7 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
* Returns the number of input frames corresponding to {@code durationUs} microseconds of audio. * Returns the number of input frames corresponding to {@code durationUs} microseconds of audio.
*/ */
private int durationUsToFrames(long durationUs) { private int durationUsToFrames(long durationUs) {
return (int) ((durationUs * sampleRateHz) / C.MICROS_PER_SECOND); return (int) ((durationUs * inputAudioFormat.sampleRate) / C.MICROS_PER_SECOND);
} }
/** /**
@ -325,9 +345,10 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
* classified as a noisy frame, or the limit of the buffer if no such frame exists. * classified as a noisy frame, or the limit of the buffer if no such frame exists.
*/ */
private int findNoisePosition(ByteBuffer buffer) { private int findNoisePosition(ByteBuffer buffer) {
Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN);
// The input is in ByteOrder.nativeOrder(), which is little endian on Android. // The input is in ByteOrder.nativeOrder(), which is little endian on Android.
for (int i = buffer.position() + 1; i < buffer.limit(); i += 2) { for (int i = buffer.position(); i < buffer.limit(); i += 2) {
if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) { if (Math.abs(buffer.getShort(i)) > silenceThresholdLevel) {
// Round to the start of the frame. // Round to the start of the frame.
return bytesPerFrame * (i / bytesPerFrame); return bytesPerFrame * (i / bytesPerFrame);
} }
@ -340,9 +361,10 @@ public final class SilenceSkippingAudioProcessor extends BaseAudioProcessor {
* from the byte position to the limit are classified as silent. * from the byte position to the limit are classified as silent.
*/ */
private int findNoiseLimit(ByteBuffer buffer) { private int findNoiseLimit(ByteBuffer buffer) {
Assertions.checkArgument(buffer.order() == ByteOrder.LITTLE_ENDIAN);
// The input is in ByteOrder.nativeOrder(), which is little endian on Android. // The input is in ByteOrder.nativeOrder(), which is little endian on Android.
for (int i = buffer.limit() - 1; i >= buffer.position(); i -= 2) { for (int i = buffer.limit() - 2; i >= buffer.position(); i -= 2) {
if (Math.abs(buffer.get(i)) > SILENCE_THRESHOLD_LEVEL_MSB) { if (Math.abs(buffer.getShort(i)) > silenceThresholdLevel) {
// Return the start of the next frame. // Return the start of the next frame.
return bytesPerFrame * (i / bytesPerFrame) + bytesPerFrame; return bytesPerFrame * (i / bytesPerFrame) + bytesPerFrame;
} }
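A short usage sketch of the new three-argument constructor introduced above (the values are arbitrary examples, not recommendations, and the wrapper class is illustrative):

    import com.google.android.exoplayer2.audio.SilenceSkippingAudioProcessor;

    final class SilenceSkipperSetupSketch {
      static SilenceSkippingAudioProcessor create() {
        // Example values; paddingSilenceUs must not exceed minimumSilenceDurationUs.
        SilenceSkippingAudioProcessor processor =
            new SilenceSkippingAudioProcessor(
                /* minimumSilenceDurationUs= */ 200_000,
                /* paddingSilenceUs= */ 10_000,
                /* silenceThresholdLevel= */ (short) 512);
        // isActive() may change after this call; flush before queueing more input.
        processor.setEnabled(true);
        return processor;
      }
    }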

View File
(SimpleDecoderAudioRenderer.java)

@ -17,7 +17,6 @@ package com.google.android.exoplayer2.audio;
import android.media.audiofx.Virtualizer; import android.media.audiofx.Virtualizer;
import android.os.Handler; import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock; import android.os.SystemClock;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
@ -29,6 +28,7 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.FormatHolder; import com.google.android.exoplayer2.FormatHolder;
import com.google.android.exoplayer2.PlaybackParameters; import com.google.android.exoplayer2.PlaybackParameters;
import com.google.android.exoplayer2.PlayerMessage.Target; import com.google.android.exoplayer2.PlayerMessage.Target;
import com.google.android.exoplayer2.RendererCapabilities;
import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher; import com.google.android.exoplayer2.audio.AudioRendererEventListener.EventDispatcher;
import com.google.android.exoplayer2.decoder.DecoderCounters; import com.google.android.exoplayer2.decoder.DecoderCounters;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer; import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
@ -95,9 +95,9 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private final boolean playClearSamplesWithoutKeys; private final boolean playClearSamplesWithoutKeys;
private final EventDispatcher eventDispatcher; private final EventDispatcher eventDispatcher;
private final AudioSink audioSink; private final AudioSink audioSink;
private final FormatHolder formatHolder;
private final DecoderInputBuffer flagsOnlyBuffer; private final DecoderInputBuffer flagsOnlyBuffer;
private boolean drmResourcesAcquired;
private DecoderCounters decoderCounters; private DecoderCounters decoderCounters;
private Format inputFormat; private Format inputFormat;
private int encoderDelay; private int encoderDelay;
@ -213,40 +213,42 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
eventDispatcher = new EventDispatcher(eventHandler, eventListener); eventDispatcher = new EventDispatcher(eventHandler, eventListener);
this.audioSink = audioSink; this.audioSink = audioSink;
audioSink.setListener(new AudioSinkListener()); audioSink.setListener(new AudioSinkListener());
formatHolder = new FormatHolder();
flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance(); flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
decoderReinitializationState = REINITIALIZATION_STATE_NONE; decoderReinitializationState = REINITIALIZATION_STATE_NONE;
audioTrackNeedsConfigure = true; audioTrackNeedsConfigure = true;
} }
@Override @Override
@Nullable
public MediaClock getMediaClock() { public MediaClock getMediaClock() {
return this; return this;
} }
@Override @Override
@Capabilities
public final int supportsFormat(Format format) { public final int supportsFormat(Format format) {
if (!MimeTypes.isAudio(format.sampleMimeType)) { if (!MimeTypes.isAudio(format.sampleMimeType)) {
return FORMAT_UNSUPPORTED_TYPE; return RendererCapabilities.create(FORMAT_UNSUPPORTED_TYPE);
} }
int formatSupport = supportsFormatInternal(drmSessionManager, format); @FormatSupport int formatSupport = supportsFormatInternal(drmSessionManager, format);
if (formatSupport <= FORMAT_UNSUPPORTED_DRM) { if (formatSupport <= FORMAT_UNSUPPORTED_DRM) {
return formatSupport; return RendererCapabilities.create(formatSupport);
} }
@TunnelingSupport
int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED; int tunnelingSupport = Util.SDK_INT >= 21 ? TUNNELING_SUPPORTED : TUNNELING_NOT_SUPPORTED;
return ADAPTIVE_NOT_SEAMLESS | tunnelingSupport | formatSupport; return RendererCapabilities.create(formatSupport, ADAPTIVE_NOT_SEAMLESS, tunnelingSupport);
} }
/** /**
* Returns the {@link #FORMAT_SUPPORT_MASK} component of the return value for {@link * Returns the {@link FormatSupport} for the given {@link Format}.
* #supportsFormat(Format)}.
* *
* @param drmSessionManager The renderer's {@link DrmSessionManager}. * @param drmSessionManager The renderer's {@link DrmSessionManager}.
* @param format The format, which has an audio {@link Format#sampleMimeType}. * @param format The format, which has an audio {@link Format#sampleMimeType}.
* @return The extent to which the renderer supports the format itself. * @return The {@link FormatSupport} for this {@link Format}.
*/ */
@FormatSupport
protected abstract int supportsFormatInternal( protected abstract int supportsFormatInternal(
DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format); @Nullable DrmSessionManager<ExoMediaCrypto> drmSessionManager, Format format);
/** /**
* Returns whether the sink supports the audio format. * Returns whether the sink supports the audio format.
@ -263,7 +265,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
try { try {
audioSink.playToEndOfStream(); audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) { } catch (AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); throw createRendererException(e, inputFormat);
} }
return; return;
} }
@ -271,10 +273,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
// Try and read a format if we don't have one already. // Try and read a format if we don't have one already.
if (inputFormat == null) { if (inputFormat == null) {
// We don't have a format yet, so try and read one. // We don't have a format yet, so try and read one.
FormatHolder formatHolder = getFormatHolder();
flagsOnlyBuffer.clear(); flagsOnlyBuffer.clear();
int result = readSource(formatHolder, flagsOnlyBuffer, true); int result = readSource(formatHolder, flagsOnlyBuffer, true);
if (result == C.RESULT_FORMAT_READ) { if (result == C.RESULT_FORMAT_READ) {
onInputFormatChanged(formatHolder.format); onInputFormatChanged(formatHolder);
} else if (result == C.RESULT_BUFFER_READ) { } else if (result == C.RESULT_BUFFER_READ) {
// End of stream read having not read a format. // End of stream read having not read a format.
Assertions.checkState(flagsOnlyBuffer.isEndOfStream()); Assertions.checkState(flagsOnlyBuffer.isEndOfStream());
@ -299,7 +302,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
TraceUtil.endSection(); TraceUtil.endSection();
} catch (AudioDecoderException | AudioSink.ConfigurationException } catch (AudioDecoderException | AudioSink.ConfigurationException
| AudioSink.InitializationException | AudioSink.WriteException e) { | AudioSink.InitializationException | AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); throw createRendererException(e, inputFormat);
} }
decoderCounters.ensureUpdated(); decoderCounters.ensureUpdated();
} }
@ -341,21 +344,26 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
* @return The decoder. * @return The decoder.
* @throws AudioDecoderException If an error occurred creating a suitable decoder. * @throws AudioDecoderException If an error occurred creating a suitable decoder.
*/ */
protected abstract SimpleDecoder<DecoderInputBuffer, ? extends SimpleOutputBuffer, protected abstract SimpleDecoder<
? extends AudioDecoderException> createDecoder(Format format, ExoMediaCrypto mediaCrypto) DecoderInputBuffer, ? extends SimpleOutputBuffer, ? extends AudioDecoderException>
throws AudioDecoderException; createDecoder(Format format, @Nullable ExoMediaCrypto mediaCrypto)
throws AudioDecoderException;
/** /**
* Returns the format of audio buffers output by the decoder. Will not be called until the first * Returns the format of audio buffers output by the decoder. Will not be called until the first
* output buffer has been dequeued, so the decoder may use input data to determine the format. * output buffer has been dequeued, so the decoder may use input data to determine the format.
* <p>
* The default implementation returns a 16-bit PCM format with the same channel count and sample
* rate as the input.
*/ */
protected Format getOutputFormat() { protected abstract Format getOutputFormat();
return Format.createAudioSampleFormat(null, MimeTypes.AUDIO_RAW, null, Format.NO_VALUE,
Format.NO_VALUE, inputFormat.channelCount, inputFormat.sampleRate, C.ENCODING_PCM_16BIT, /**
null, null, 0, null); * Returns whether the existing decoder can be kept for a new format.
*
* @param oldFormat The previous format.
* @param newFormat The new format.
* @return True if the existing decoder can be kept.
*/
protected boolean canKeepCodec(Format oldFormat, Format newFormat) {
return false;
} }
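Since getOutputFormat() is now abstract, concrete renderers must supply it themselves. A hedged sketch of an override inside a hypothetical subclass, mirroring the deleted default implementation (the inputFormat field below is the subclass's own copy of the last input format, since the base class field is private):

    // Hypothetical subclass fragment, not part of this change.
    private Format inputFormat; // assumed to be updated in onInputFormatChanged

    @Override
    protected Format getOutputFormat() {
      return Format.createAudioSampleFormat(
          /* id= */ null,
          MimeTypes.AUDIO_RAW,
          /* codecs= */ null,
          /* bitrate= */ Format.NO_VALUE,
          /* maxInputSize= */ Format.NO_VALUE,
          inputFormat.channelCount,
          inputFormat.sampleRate,
          C.ENCODING_PCM_16BIT,
          /* initializationData= */ null,
          /* drmInitData= */ null,
          /* selectionFlags= */ 0,
          /* language= */ null);
    }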
private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException, private boolean drainOutputBuffer() throws ExoPlaybackException, AudioDecoderException,
@ -427,6 +435,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
int result; int result;
FormatHolder formatHolder = getFormatHolder();
if (waitingForKeys) { if (waitingForKeys) {
// We've already read an encrypted sample into buffer, and are waiting for keys. // We've already read an encrypted sample into buffer, and are waiting for keys.
result = C.RESULT_BUFFER_READ; result = C.RESULT_BUFFER_READ;
@ -438,7 +447,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
return false; return false;
} }
if (result == C.RESULT_FORMAT_READ) { if (result == C.RESULT_FORMAT_READ) {
onInputFormatChanged(formatHolder.format); onInputFormatChanged(formatHolder);
return true; return true;
} }
if (inputBuffer.isEndOfStream()) { if (inputBuffer.isEndOfStream()) {
@ -462,12 +471,14 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException { private boolean shouldWaitForKeys(boolean bufferEncrypted) throws ExoPlaybackException {
if (decoderDrmSession == null || (!bufferEncrypted && playClearSamplesWithoutKeys)) { if (decoderDrmSession == null
|| (!bufferEncrypted
&& (playClearSamplesWithoutKeys || decoderDrmSession.playClearSamplesWithoutKeys()))) {
return false; return false;
} }
@DrmSession.State int drmSessionState = decoderDrmSession.getState(); @DrmSession.State int drmSessionState = decoderDrmSession.getState();
if (drmSessionState == DrmSession.STATE_ERROR) { if (drmSessionState == DrmSession.STATE_ERROR) {
throw ExoPlaybackException.createForRenderer(decoderDrmSession.getError(), getIndex()); throw createRendererException(decoderDrmSession.getError(), inputFormat);
} }
return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS; return drmSessionState != DrmSession.STATE_OPENED_WITH_KEYS;
} }
@ -477,7 +488,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
try { try {
audioSink.playToEndOfStream(); audioSink.playToEndOfStream();
} catch (AudioSink.WriteException e) { } catch (AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); // TODO(internal: b/145658993) Use outputFormat for the call from drainOutputBuffer.
throw createRendererException(e, inputFormat);
} }
} }
@ -517,8 +529,8 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
@Override @Override
public PlaybackParameters setPlaybackParameters(PlaybackParameters playbackParameters) { public void setPlaybackParameters(PlaybackParameters playbackParameters) {
return audioSink.setPlaybackParameters(playbackParameters); audioSink.setPlaybackParameters(playbackParameters);
} }
@Override @Override
@ -528,6 +540,10 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
@Override @Override
protected void onEnabled(boolean joining) throws ExoPlaybackException { protected void onEnabled(boolean joining) throws ExoPlaybackException {
if (drmSessionManager != null && !drmResourcesAcquired) {
drmResourcesAcquired = true;
drmSessionManager.prepare();
}
decoderCounters = new DecoderCounters(); decoderCounters = new DecoderCounters();
eventDispatcher.enabled(decoderCounters); eventDispatcher.enabled(decoderCounters);
int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId; int tunnelingAudioSessionId = getConfiguration().tunnelingAudioSessionId;
@ -576,6 +592,14 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
} }
@Override
protected void onReset() {
if (drmSessionManager != null && drmResourcesAcquired) {
drmResourcesAcquired = false;
drmSessionManager.release();
}
}
@Override @Override
public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException { public void handleMessage(int messageType, @Nullable Object message) throws ExoPlaybackException {
switch (messageType) { switch (messageType) {
@ -628,7 +652,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
codecInitializedTimestamp - codecInitializingTimestamp); codecInitializedTimestamp - codecInitializingTimestamp);
decoderCounters.decoderInitCount++; decoderCounters.decoderInitCount++;
} catch (AudioDecoderException e) { } catch (AudioDecoderException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex()); throw createRendererException(e, inputFormat);
} }
} }
@ -646,62 +670,43 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} }
private void setSourceDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) { private void setSourceDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
DrmSession<ExoMediaCrypto> previous = sourceDrmSession; DrmSession.replaceSession(sourceDrmSession, session);
sourceDrmSession = session; sourceDrmSession = session;
releaseDrmSessionIfUnused(previous);
} }
private void setDecoderDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) { private void setDecoderDrmSession(@Nullable DrmSession<ExoMediaCrypto> session) {
DrmSession<ExoMediaCrypto> previous = decoderDrmSession; DrmSession.replaceSession(decoderDrmSession, session);
decoderDrmSession = session; decoderDrmSession = session;
releaseDrmSessionIfUnused(previous);
} }
private void releaseDrmSessionIfUnused(@Nullable DrmSession<ExoMediaCrypto> session) { @SuppressWarnings("unchecked")
if (session != null && session != decoderDrmSession && session != sourceDrmSession) { private void onInputFormatChanged(FormatHolder formatHolder) throws ExoPlaybackException {
drmSessionManager.releaseSession(session); Format newFormat = Assertions.checkNotNull(formatHolder.format);
if (formatHolder.includesDrmSession) {
setSourceDrmSession((DrmSession<ExoMediaCrypto>) formatHolder.drmSession);
} else {
sourceDrmSession =
getUpdatedSourceDrmSession(inputFormat, newFormat, drmSessionManager, sourceDrmSession);
} }
}
private void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
Format oldFormat = inputFormat; Format oldFormat = inputFormat;
inputFormat = newFormat; inputFormat = newFormat;
boolean drmInitDataChanged = !Util.areEqual(inputFormat.drmInitData, oldFormat == null ? null if (!canKeepCodec(oldFormat, inputFormat)) {
: oldFormat.drmInitData); if (decoderReceivedBuffers) {
if (drmInitDataChanged) { // Signal end of stream and wait for any final output buffers before re-initialization.
if (inputFormat.drmInitData != null) { decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
if (drmSessionManager == null) {
throw ExoPlaybackException.createForRenderer(
new IllegalStateException("Media requires a DrmSessionManager"), getIndex());
}
DrmSession<ExoMediaCrypto> session =
drmSessionManager.acquireSession(Looper.myLooper(), newFormat.drmInitData);
if (session == decoderDrmSession || session == sourceDrmSession) {
// We already had this session. The manager must be reference counting, so release it once
// to get the count attributed to this renderer back down to 1.
drmSessionManager.releaseSession(session);
}
setSourceDrmSession(session);
} else { } else {
setSourceDrmSession(null); // There aren't any final output buffers, so release the decoder immediately.
releaseDecoder();
maybeInitDecoder();
audioTrackNeedsConfigure = true;
} }
} }
if (decoderReceivedBuffers) { encoderDelay = inputFormat.encoderDelay;
// Signal end of stream and wait for any final output buffers before re-initialization. encoderPadding = inputFormat.encoderPadding;
decoderReinitializationState = REINITIALIZATION_STATE_SIGNAL_END_OF_STREAM;
} else {
// There aren't any final output buffers, so release the decoder immediately.
releaseDecoder();
maybeInitDecoder();
audioTrackNeedsConfigure = true;
}
encoderDelay = newFormat.encoderDelay; eventDispatcher.inputFormatChanged(inputFormat);
encoderPadding = newFormat.encoderPadding;
eventDispatcher.inputFormatChanged(newFormat);
} }
private void onQueueInputBuffer(DecoderInputBuffer buffer) { private void onQueueInputBuffer(DecoderInputBuffer buffer) {

View File

@ -17,7 +17,6 @@ package com.google.android.exoplayer2.audio;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.C.Encoding;
import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
@ -62,12 +61,14 @@ public final class SonicAudioProcessor implements AudioProcessor {
*/ */
private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024; private static final int MIN_BYTES_FOR_SPEEDUP_CALCULATION = 1024;
private int channelCount; private int pendingOutputSampleRate;
private int sampleRateHz;
private float speed; private float speed;
private float pitch; private float pitch;
private int outputSampleRateHz;
private int pendingOutputSampleRateHz; private AudioFormat pendingInputAudioFormat;
private AudioFormat pendingOutputAudioFormat;
private AudioFormat inputAudioFormat;
private AudioFormat outputAudioFormat;
private boolean pendingSonicRecreation; private boolean pendingSonicRecreation;
@Nullable private Sonic sonic; @Nullable private Sonic sonic;
@ -84,18 +85,20 @@ public final class SonicAudioProcessor implements AudioProcessor {
public SonicAudioProcessor() { public SonicAudioProcessor() {
speed = 1f; speed = 1f;
pitch = 1f; pitch = 1f;
channelCount = Format.NO_VALUE; pendingInputAudioFormat = AudioFormat.NOT_SET;
sampleRateHz = Format.NO_VALUE; pendingOutputAudioFormat = AudioFormat.NOT_SET;
outputSampleRateHz = Format.NO_VALUE; inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer(); shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE; pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
} }
/** /**
* Sets the playback speed. Calling this method will discard any data buffered within the * Sets the playback speed. This method may only be called after draining data through the
* processor, and may update the value returned by {@link #isActive()}. * processor. The value returned by {@link #isActive()} may change, and the processor must be
* {@link #flush() flushed} before queueing more data.
* *
* @param speed The requested new playback speed. * @param speed The requested new playback speed.
* @return The actual new playback speed. * @return The actual new playback speed.
@ -106,13 +109,13 @@ public final class SonicAudioProcessor implements AudioProcessor {
this.speed = speed; this.speed = speed;
pendingSonicRecreation = true; pendingSonicRecreation = true;
} }
flush();
return speed; return speed;
} }
/** /**
* Sets the playback pitch. Calling this method will discard any data buffered within the * Sets the playback pitch. This method may only be called after draining data through the
* processor, and may update the value returned by {@link #isActive()}. * processor. The value returned by {@link #isActive()} may change, and the processor must be
* {@link #flush() flushed} before queueing more data.
* *
* @param pitch The requested new pitch. * @param pitch The requested new pitch.
* @return The actual new pitch. * @return The actual new pitch.
@ -123,20 +126,19 @@ public final class SonicAudioProcessor implements AudioProcessor {
this.pitch = pitch; this.pitch = pitch;
pendingSonicRecreation = true; pendingSonicRecreation = true;
} }
flush();
return pitch; return pitch;
} }
/** /**
* Sets the sample rate for output audio, in hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output * Sets the sample rate for output audio, in Hertz. Pass {@link #SAMPLE_RATE_NO_CHANGE} to output
* audio at the same sample rate as the input. After calling this method, call * audio at the same sample rate as the input. After calling this method, call {@link
* {@link #configure(int, int, int)} to start using the new sample rate. * #configure(AudioFormat)} to configure the processor with the new sample rate.
* *
* @param sampleRateHz The sample rate for output audio, in hertz. * @param sampleRateHz The sample rate for output audio, in Hertz.
* @see #configure(int, int, int) * @see #configure(AudioFormat)
*/ */
public void setOutputSampleRateHz(int sampleRateHz) { public void setOutputSampleRateHz(int sampleRateHz) {
pendingOutputSampleRateHz = sampleRateHz; pendingOutputSampleRate = sampleRateHz;
} }
/** /**
@ -149,55 +151,39 @@ public final class SonicAudioProcessor implements AudioProcessor {
*/ */
public long scaleDurationForSpeedup(long duration) { public long scaleDurationForSpeedup(long duration) {
if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) { if (outputBytes >= MIN_BYTES_FOR_SPEEDUP_CALCULATION) {
return outputSampleRateHz == sampleRateHz return outputAudioFormat.sampleRate == inputAudioFormat.sampleRate
? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes) ? Util.scaleLargeTimestamp(duration, inputBytes, outputBytes)
: Util.scaleLargeTimestamp(duration, inputBytes * outputSampleRateHz, : Util.scaleLargeTimestamp(
outputBytes * sampleRateHz); duration,
inputBytes * outputAudioFormat.sampleRate,
outputBytes * inputAudioFormat.sampleRate);
} else { } else {
return (long) ((double) speed * duration); return (long) ((double) speed * duration);
} }
} }
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @Encoding int encoding) public AudioFormat configure(AudioFormat inputAudioFormat) throws UnhandledAudioFormatException {
throws UnhandledFormatException { if (inputAudioFormat.encoding != C.ENCODING_PCM_16BIT) {
if (encoding != C.ENCODING_PCM_16BIT) { throw new UnhandledAudioFormatException(inputAudioFormat);
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding);
} }
int outputSampleRateHz = pendingOutputSampleRateHz == SAMPLE_RATE_NO_CHANGE int outputSampleRateHz =
? sampleRateHz : pendingOutputSampleRateHz; pendingOutputSampleRate == SAMPLE_RATE_NO_CHANGE
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount ? inputAudioFormat.sampleRate
&& this.outputSampleRateHz == outputSampleRateHz) { : pendingOutputSampleRate;
return false; pendingInputAudioFormat = inputAudioFormat;
} pendingOutputAudioFormat =
this.sampleRateHz = sampleRateHz; new AudioFormat(outputSampleRateHz, inputAudioFormat.channelCount, C.ENCODING_PCM_16BIT);
this.channelCount = channelCount;
this.outputSampleRateHz = outputSampleRateHz;
pendingSonicRecreation = true; pendingSonicRecreation = true;
return true; return pendingOutputAudioFormat;
} }
@Override @Override
public boolean isActive() { public boolean isActive() {
return sampleRateHz != Format.NO_VALUE return pendingOutputAudioFormat.sampleRate != Format.NO_VALUE
&& (Math.abs(speed - 1f) >= CLOSE_THRESHOLD && (Math.abs(speed - 1f) >= CLOSE_THRESHOLD
|| Math.abs(pitch - 1f) >= CLOSE_THRESHOLD || Math.abs(pitch - 1f) >= CLOSE_THRESHOLD
|| outputSampleRateHz != sampleRateHz); || pendingOutputAudioFormat.sampleRate != pendingInputAudioFormat.sampleRate);
}
@Override
public int getOutputChannelCount() {
return channelCount;
}
@Override
public int getOutputEncoding() {
return C.ENCODING_PCM_16BIT;
}
@Override
public int getOutputSampleRateHz() {
return outputSampleRateHz;
} }
@Override @Override
@ -249,8 +235,16 @@ public final class SonicAudioProcessor implements AudioProcessor {
@Override @Override
public void flush() { public void flush() {
if (isActive()) { if (isActive()) {
inputAudioFormat = pendingInputAudioFormat;
outputAudioFormat = pendingOutputAudioFormat;
if (pendingSonicRecreation) { if (pendingSonicRecreation) {
sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz); sonic =
new Sonic(
inputAudioFormat.sampleRate,
inputAudioFormat.channelCount,
speed,
pitch,
outputAudioFormat.sampleRate);
} else if (sonic != null) { } else if (sonic != null) {
sonic.flush(); sonic.flush();
} }
@ -265,13 +259,14 @@ public final class SonicAudioProcessor implements AudioProcessor {
public void reset() { public void reset() {
speed = 1f; speed = 1f;
pitch = 1f; pitch = 1f;
channelCount = Format.NO_VALUE; pendingInputAudioFormat = AudioFormat.NOT_SET;
sampleRateHz = Format.NO_VALUE; pendingOutputAudioFormat = AudioFormat.NOT_SET;
outputSampleRateHz = Format.NO_VALUE; inputAudioFormat = AudioFormat.NOT_SET;
outputAudioFormat = AudioFormat.NOT_SET;
buffer = EMPTY_BUFFER; buffer = EMPTY_BUFFER;
shortBuffer = buffer.asShortBuffer(); shortBuffer = buffer.asShortBuffer();
outputBuffer = EMPTY_BUFFER; outputBuffer = EMPTY_BUFFER;
pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE; pendingOutputSampleRate = SAMPLE_RATE_NO_CHANGE;
pendingSonicRecreation = false; pendingSonicRecreation = false;
sonic = null; sonic = null;
inputBytes = 0; inputBytes = 0;
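For reference, configure now consumes and returns an AudioFormat (assumed here to be the nested AudioProcessor.AudioFormat type this diff refers to) instead of raw sample-rate/channel-count/encoding ints, and speed or pitch changes only take effect once the processor is flushed. A minimal usage sketch under those assumptions; the 44.1 kHz stereo input is illustrative:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.audio.AudioProcessor.AudioFormat;
import com.google.android.exoplayer2.audio.AudioProcessor.UnhandledAudioFormatException;
import com.google.android.exoplayer2.audio.SonicAudioProcessor;

final class SonicConfigExample {
  static SonicAudioProcessor createTimeStretcher() throws UnhandledAudioFormatException {
    SonicAudioProcessor sonic = new SonicAudioProcessor();
    sonic.setSpeed(1.5f); // takes effect after the next flush()
    sonic.setPitch(1.0f);
    // configure() now takes and returns AudioFormat instead of raw int arguments.
    AudioFormat input =
        new AudioFormat(/* sampleRate= */ 44100, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);
    AudioFormat output = sonic.configure(input); // non 16-bit PCM input throws UnhandledAudioFormatException
    sonic.flush(); // activates the pending configuration before queueing input
    return sonic;
  }
}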

View File

@ -64,8 +64,9 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
} }
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) { public AudioFormat onConfigure(AudioFormat inputAudioFormat) {
return setInputFormat(sampleRateHz, channelCount, encoding); // This processor is always active (if passed to the sink) and outputs its input.
return inputAudioFormat;
} }
@Override @Override
@ -80,8 +81,23 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
@Override @Override
protected void onFlush() { protected void onFlush() {
flushSinkIfActive();
}
@Override
protected void onQueueEndOfStream() {
flushSinkIfActive();
}
@Override
protected void onReset() {
flushSinkIfActive();
}
private void flushSinkIfActive() {
if (isActive()) { if (isActive()) {
audioBufferSink.flush(sampleRateHz, channelCount, encoding); audioBufferSink.flush(
inputAudioFormat.sampleRate, inputAudioFormat.channelCount, inputAudioFormat.encoding);
} }
} }
@ -165,7 +181,7 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
// Write the rest of the header as little endian data. // Write the rest of the header as little endian data.
scratchByteBuffer.clear(); scratchByteBuffer.clear();
scratchByteBuffer.putInt(16); scratchByteBuffer.putInt(16);
scratchByteBuffer.putShort((short) WavUtil.getTypeForEncoding(encoding)); scratchByteBuffer.putShort((short) WavUtil.getTypeForPcmEncoding(encoding));
scratchByteBuffer.putShort((short) channelCount); scratchByteBuffer.putShort((short) channelCount);
scratchByteBuffer.putInt(sampleRateHz); scratchByteBuffer.putInt(sampleRateHz);
int bytesPerSample = Util.getPcmFrameSize(encoding, channelCount); int bytesPerSample = Util.getPcmFrameSize(encoding, channelCount);
@ -190,7 +206,7 @@ public final class TeeAudioProcessor extends BaseAudioProcessor {
} }
private void reset() throws IOException { private void reset() throws IOException {
RandomAccessFile randomAccessFile = this.randomAccessFile; @Nullable RandomAccessFile randomAccessFile = this.randomAccessFile;
if (randomAccessFile == null) { if (randomAccessFile == null) {
return; return;
} }
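TeeAudioProcessor passes audio through unchanged while mirroring it to its AudioBufferSink, and the new onFlush/onQueueEndOfStream/onReset hooks above all funnel into flushSinkIfActive. A sketch of wiring it up with the bundled WAV sink; the WavFileAudioBufferSink name, its single file-prefix constructor argument, and the output path are assumptions rather than part of this diff:

import com.google.android.exoplayer2.audio.TeeAudioProcessor;
import com.google.android.exoplayer2.audio.TeeAudioProcessor.WavFileAudioBufferSink;

final class DebugTeeExample {
  static TeeAudioProcessor createDebugTee() {
    // Each flush of the sink (triggered by the hooks above) starts a new numbered .wav file
    // using the sample rate, channel count and encoding of the current input format.
    return new TeeAudioProcessor(new WavFileAudioBufferSink("/sdcard/audio-dump"));
  }
}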

View File

@ -24,11 +24,9 @@ import java.nio.ByteBuffer;
@C.PcmEncoding private static final int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT; @C.PcmEncoding private static final int OUTPUT_ENCODING = C.ENCODING_PCM_16BIT;
private boolean isActive;
private int trimStartFrames; private int trimStartFrames;
private int trimEndFrames; private int trimEndFrames;
private int bytesPerFrame; private boolean reconfigurationPending;
private boolean receivedInputSinceConfigure;
private int pendingTrimStartBytes; private int pendingTrimStartBytes;
private byte[] endBuffer; private byte[] endBuffer;
@ -42,7 +40,7 @@ import java.nio.ByteBuffer;
/** /**
* Sets the number of audio frames to trim from the start and end of audio passed to this * Sets the number of audio frames to trim from the start and end of audio passed to this
* processor. After calling this method, call {@link #configure(int, int, int)} to apply the new * processor. After calling this method, call {@link #configure(AudioFormat)} to apply the new
* trimming frame counts. * trimming frame counts.
* *
* @param trimStartFrames The number of audio frames to trim from the start of audio. * @param trimStartFrames The number of audio frames to trim from the start of audio.
@ -68,28 +66,13 @@ import java.nio.ByteBuffer;
} }
@Override @Override
public boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) public AudioFormat onConfigure(AudioFormat inputAudioFormat)
throws UnhandledFormatException { throws UnhandledAudioFormatException {
if (encoding != OUTPUT_ENCODING) { if (inputAudioFormat.encoding != OUTPUT_ENCODING) {
throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); throw new UnhandledAudioFormatException(inputAudioFormat);
} }
if (endBufferSize > 0) { reconfigurationPending = true;
trimmedFrameCount += endBufferSize / bytesPerFrame; return trimStartFrames != 0 || trimEndFrames != 0 ? inputAudioFormat : AudioFormat.NOT_SET;
}
bytesPerFrame = Util.getPcmFrameSize(OUTPUT_ENCODING, channelCount);
endBuffer = new byte[trimEndFrames * bytesPerFrame];
endBufferSize = 0;
pendingTrimStartBytes = trimStartFrames * bytesPerFrame;
boolean wasActive = isActive;
isActive = trimStartFrames != 0 || trimEndFrames != 0;
receivedInputSinceConfigure = false;
setInputFormat(sampleRateHz, channelCount, encoding);
return wasActive != isActive;
}
@Override
public boolean isActive() {
return isActive;
} }
@Override @Override
@ -101,11 +84,10 @@ import java.nio.ByteBuffer;
if (remaining == 0) { if (remaining == 0) {
return; return;
} }
receivedInputSinceConfigure = true;
// Trim any pending start bytes from the input buffer. // Trim any pending start bytes from the input buffer.
int trimBytes = Math.min(remaining, pendingTrimStartBytes); int trimBytes = Math.min(remaining, pendingTrimStartBytes);
trimmedFrameCount += trimBytes / bytesPerFrame; trimmedFrameCount += trimBytes / inputAudioFormat.bytesPerFrame;
pendingTrimStartBytes -= trimBytes; pendingTrimStartBytes -= trimBytes;
inputBuffer.position(position + trimBytes); inputBuffer.position(position + trimBytes);
if (pendingTrimStartBytes > 0) { if (pendingTrimStartBytes > 0) {
@ -142,37 +124,51 @@ import java.nio.ByteBuffer;
buffer.flip(); buffer.flip();
} }
@SuppressWarnings("ReferenceEquality")
@Override @Override
public ByteBuffer getOutput() { public ByteBuffer getOutput() {
if (super.isEnded() && endBufferSize > 0) { if (super.isEnded() && endBufferSize > 0) {
// Because audio processors may be drained in the middle of the stream we assume that the // Because audio processors may be drained in the middle of the stream we assume that the
// contents of the end buffer need to be output. For gapless transitions, configure will be // contents of the end buffer need to be output. For gapless transitions, configure will
// always be called, which clears the end buffer as needed. When audio is actually ending we // always be called, so the end buffer is cleared in onQueueEndOfStream.
// play the padding data which is incorrect. This behavior can be fixed once we have the
// timestamps associated with input buffers.
replaceOutputBuffer(endBufferSize).put(endBuffer, 0, endBufferSize).flip(); replaceOutputBuffer(endBufferSize).put(endBuffer, 0, endBufferSize).flip();
endBufferSize = 0; endBufferSize = 0;
} }
return super.getOutput(); return super.getOutput();
} }
@SuppressWarnings("ReferenceEquality")
@Override @Override
public boolean isEnded() { public boolean isEnded() {
return super.isEnded() && endBufferSize == 0; return super.isEnded() && endBufferSize == 0;
} }
@Override @Override
protected void onFlush() { protected void onQueueEndOfStream() {
if (receivedInputSinceConfigure) { if (reconfigurationPending) {
// Audio processors are flushed after initial configuration, so we leave the pending trim // Trim audio in the end buffer.
// start byte count unmodified if the processor was just configured. Otherwise we (possibly if (endBufferSize > 0) {
// incorrectly) assume that this is a seek to a non-zero position. We should instead check the trimmedFrameCount += endBufferSize / inputAudioFormat.bytesPerFrame;
// timestamp of the first input buffer queued after flushing to decide whether to trim (see }
// also [Internal: b/77292509]). endBufferSize = 0;
pendingTrimStartBytes = 0;
} }
}
@Override
protected void onFlush() {
if (reconfigurationPending) {
// Flushing activates the new configuration, so prepare to trim bytes from the start/end.
reconfigurationPending = false;
endBuffer = new byte[trimEndFrames * inputAudioFormat.bytesPerFrame];
pendingTrimStartBytes = trimStartFrames * inputAudioFormat.bytesPerFrame;
}
// TODO(internal b/77292509): Flushing occurs to activate a configuration (handled above) but
// also when seeking within a stream. This implementation currently doesn't handle seek to start
// (where we need to trim at the start again), nor seeks to non-zero positions before start
// trimming has occurred (where we should set pendingTrimStartBytes to zero). These cases can be
// fixed by trimming in queueInput based on timestamp, once that information is available.
// Any data in the end buffer should no longer be output if we are playing from a different
// position, so discard it and refill the buffer using new input.
endBufferSize = 0; endBufferSize = 0;
} }
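The reconfiguration handling above defers the byte computations to onFlush so that they use the newly activated input format's bytesPerFrame. The conversion itself is plain frames-to-bytes arithmetic; a standalone sketch with illustrative gapless-trim values (2112 frames is a typical AAC encoder delay, 288 frames of padding is made up for the example):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Util;

final class TrimMathExample {
  public static void main(String[] args) {
    int trimStartFrames = 2112; // e.g. AAC encoder delay, in audio frames
    int trimEndFrames = 288;    // e.g. encoder padding, in audio frames
    int channelCount = 2;
    int bytesPerFrame = Util.getPcmFrameSize(C.ENCODING_PCM_16BIT, channelCount); // 4 bytes
    // These mirror the assignments performed in onFlush() above.
    int pendingTrimStartBytes = trimStartFrames * bytesPerFrame; // 8448 bytes skipped at the start
    int endBufferLength = trimEndFrames * bytesPerFrame;         // 1152 bytes held back at the end
    System.out.println(pendingTrimStartBytes + " / " + endBufferLength);
  }
}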

View File

@ -23,39 +23,45 @@ import com.google.android.exoplayer2.util.Util;
public final class WavUtil { public final class WavUtil {
/** Four character code for "RIFF". */ /** Four character code for "RIFF". */
public static final int RIFF_FOURCC = Util.getIntegerCodeForString("RIFF"); public static final int RIFF_FOURCC = 0x52494646;
/** Four character code for "WAVE". */ /** Four character code for "WAVE". */
public static final int WAVE_FOURCC = Util.getIntegerCodeForString("WAVE"); public static final int WAVE_FOURCC = 0x57415645;
/** Four character code for "fmt ". */ /** Four character code for "fmt ". */
public static final int FMT_FOURCC = Util.getIntegerCodeForString("fmt "); public static final int FMT_FOURCC = 0x666d7420;
/** Four character code for "data". */ /** Four character code for "data". */
public static final int DATA_FOURCC = Util.getIntegerCodeForString("data"); public static final int DATA_FOURCC = 0x64617461;
/** WAVE type value for integer PCM audio data. */ /** WAVE type value for integer PCM audio data. */
private static final int TYPE_PCM = 0x0001; public static final int TYPE_PCM = 0x0001;
/** WAVE type value for float PCM audio data. */ /** WAVE type value for float PCM audio data. */
private static final int TYPE_FLOAT = 0x0003; public static final int TYPE_FLOAT = 0x0003;
/** WAVE type value for 8-bit ITU-T G.711 A-law audio data. */ /** WAVE type value for 8-bit ITU-T G.711 A-law audio data. */
private static final int TYPE_A_LAW = 0x0006; public static final int TYPE_ALAW = 0x0006;
/** WAVE type value for 8-bit ITU-T G.711 mu-law audio data. */ /** WAVE type value for 8-bit ITU-T G.711 mu-law audio data. */
private static final int TYPE_MU_LAW = 0x0007; public static final int TYPE_MLAW = 0x0007;
/** WAVE type value for IMA ADPCM audio data. */
public static final int TYPE_IMA_ADPCM = 0x0011;
/** WAVE type value for extended WAVE format. */ /** WAVE type value for extended WAVE format. */
private static final int TYPE_WAVE_FORMAT_EXTENSIBLE = 0xFFFE; public static final int TYPE_WAVE_FORMAT_EXTENSIBLE = 0xFFFE;
/** Returns the WAVE type value for the given {@code encoding}. */ /**
public static int getTypeForEncoding(@C.PcmEncoding int encoding) { * Returns the WAVE format type value for the given {@link C.PcmEncoding}.
switch (encoding) { *
* @param pcmEncoding The {@link C.PcmEncoding} value.
* @return The corresponding WAVE format type.
* @throws IllegalArgumentException If {@code pcmEncoding} is not a {@link C.PcmEncoding}, or if
* it's {@link C#ENCODING_INVALID} or {@link Format#NO_VALUE}.
*/
public static int getTypeForPcmEncoding(@C.PcmEncoding int pcmEncoding) {
switch (pcmEncoding) {
case C.ENCODING_PCM_8BIT: case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_16BIT: case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_24BIT: case C.ENCODING_PCM_24BIT:
case C.ENCODING_PCM_32BIT: case C.ENCODING_PCM_32BIT:
return TYPE_PCM; return TYPE_PCM;
case C.ENCODING_PCM_A_LAW:
return TYPE_A_LAW;
case C.ENCODING_PCM_MU_LAW:
return TYPE_MU_LAW;
case C.ENCODING_PCM_FLOAT: case C.ENCODING_PCM_FLOAT:
return TYPE_FLOAT; return TYPE_FLOAT;
case C.ENCODING_PCM_16BIT_BIG_ENDIAN: // Not TYPE_PCM, because TYPE_PCM is little endian.
case C.ENCODING_INVALID: case C.ENCODING_INVALID:
case Format.NO_VALUE: case Format.NO_VALUE:
default: default:
@ -63,18 +69,17 @@ public final class WavUtil {
} }
} }
/** Returns the PCM encoding for the given WAVE {@code type} value. */ /**
public static @C.PcmEncoding int getEncodingForType(int type, int bitsPerSample) { * Returns the {@link C.PcmEncoding} for the given WAVE format type value, or {@link
* C#ENCODING_INVALID} if the type is not a known PCM type.
*/
public static @C.PcmEncoding int getPcmEncodingForType(int type, int bitsPerSample) {
switch (type) { switch (type) {
case TYPE_PCM: case TYPE_PCM:
case TYPE_WAVE_FORMAT_EXTENSIBLE: case TYPE_WAVE_FORMAT_EXTENSIBLE:
return Util.getPcmEncoding(bitsPerSample); return Util.getPcmEncoding(bitsPerSample);
case TYPE_FLOAT: case TYPE_FLOAT:
return bitsPerSample == 32 ? C.ENCODING_PCM_FLOAT : C.ENCODING_INVALID; return bitsPerSample == 32 ? C.ENCODING_PCM_FLOAT : C.ENCODING_INVALID;
case TYPE_A_LAW:
return C.ENCODING_PCM_A_LAW;
case TYPE_MU_LAW:
return C.ENCODING_PCM_MU_LAW;
default: default:
return C.ENCODING_INVALID; return C.ENCODING_INVALID;
} }
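The FOURCC constants above are now plain integer literals; each is just the four ASCII character codes packed big-endian into an int. A small self-contained sketch that reproduces the values (the helper class is illustrative, not part of the library):

final class FourCcExample {
  // Packs four ASCII characters big-endian into an int, matching e.g. RIFF_FOURCC = 0x52494646.
  static int fourCc(String code) {
    return (code.charAt(0) << 24) | (code.charAt(1) << 16) | (code.charAt(2) << 8) | code.charAt(3);
  }

  public static void main(String[] args) {
    System.out.println(Integer.toHexString(fourCc("RIFF"))); // 52494646
    System.out.println(Integer.toHexString(fourCc("WAVE"))); // 57415645
    System.out.println(Integer.toHexString(fourCc("fmt "))); // 666d7420
    System.out.println(Integer.toHexString(fourCc("data"))); // 64617461
  }
}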

View File

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package com.google.android.exoplayer2.audio;
import com.google.android.exoplayer2.util.NonNullApi;

View File

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package com.google.android.exoplayer2.database;
import com.google.android.exoplayer2.util.NonNullApi;

View File

@ -53,6 +53,11 @@ public abstract class Buffer {
return getFlag(C.BUFFER_FLAG_KEY_FRAME); return getFlag(C.BUFFER_FLAG_KEY_FRAME);
} }
/** Returns whether the {@link C#BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA} flag is set. */
public final boolean hasSupplementalData() {
return getFlag(C.BUFFER_FLAG_HAS_SUPPLEMENTAL_DATA);
}
/** /**
* Replaces this buffer's flags with {@code flags}. * Replaces this buffer's flags with {@code flags}.
* *

View File

@ -25,27 +25,41 @@ import com.google.android.exoplayer2.util.Util;
public final class CryptoInfo { public final class CryptoInfo {
/** /**
* The 16 byte initialization vector. If the initialization vector of the content is shorter than
* 16 bytes, 0 byte padding is appended to extend the vector to the required 16 byte length.
*
* @see android.media.MediaCodec.CryptoInfo#iv * @see android.media.MediaCodec.CryptoInfo#iv
*/ */
public byte[] iv; public byte[] iv;
/** /**
* The 16 byte key id.
*
* @see android.media.MediaCodec.CryptoInfo#key * @see android.media.MediaCodec.CryptoInfo#key
*/ */
public byte[] key; public byte[] key;
/** /**
* The type of encryption that has been applied. Must be one of the {@link C.CryptoMode} values.
*
* @see android.media.MediaCodec.CryptoInfo#mode * @see android.media.MediaCodec.CryptoInfo#mode
*/ */
@C.CryptoMode @C.CryptoMode public int mode;
public int mode;
/** /**
* The number of leading unencrypted bytes in each sub-sample. If null, all bytes are treated as
* encrypted and {@link #numBytesOfEncryptedData} must be specified.
*
* @see android.media.MediaCodec.CryptoInfo#numBytesOfClearData * @see android.media.MediaCodec.CryptoInfo#numBytesOfClearData
*/ */
public int[] numBytesOfClearData; public int[] numBytesOfClearData;
/** /**
* The number of trailing encrypted bytes in each sub-sample. If null, all bytes are treated as
* clear and {@link #numBytesOfClearData} must be specified.
*
* @see android.media.MediaCodec.CryptoInfo#numBytesOfEncryptedData * @see android.media.MediaCodec.CryptoInfo#numBytesOfEncryptedData
*/ */
public int[] numBytesOfEncryptedData; public int[] numBytesOfEncryptedData;
/** /**
* The number of subSamples that make up the buffer's contents.
*
* @see android.media.MediaCodec.CryptoInfo#numSubSamples * @see android.media.MediaCodec.CryptoInfo#numSubSamples
*/ */
public int numSubSamples; public int numSubSamples;
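The expanded field documentation above spells out the per-sub-sample layout that MediaCodec expects. A sketch of populating a CryptoInfo for a single AES-CTR sub-sample with a clear header; the helper, the concrete sizes and the import path (assumed from the surrounding decoder-package files) are illustrative:

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.decoder.CryptoInfo;

final class CryptoInfoExample {
  static CryptoInfo singleSubSample(byte[] keyId16, byte[] iv16, int clearHeaderBytes, int encryptedBytes) {
    CryptoInfo info = new CryptoInfo();
    info.numSubSamples = 1;
    info.numBytesOfClearData = new int[] {clearHeaderBytes};   // leading unencrypted bytes
    info.numBytesOfEncryptedData = new int[] {encryptedBytes}; // trailing encrypted bytes
    info.key = keyId16; // 16 byte key id
    info.iv = iv16;     // 16 byte IV, zero-padded if the content IV is shorter
    info.mode = C.CRYPTO_MODE_AES_CTR;
    return info;
  }
}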

View File

@ -15,6 +15,8 @@
*/ */
package com.google.android.exoplayer2.decoder; package com.google.android.exoplayer2.decoder;
import androidx.annotation.Nullable;
/** /**
* A media decoder. * A media decoder.
* *
@ -37,6 +39,7 @@ public interface Decoder<I, O, E extends Exception> {
* @return The input buffer, which will have been cleared, or null if a buffer isn't available. * @return The input buffer, which will have been cleared, or null if a buffer isn't available.
* @throws E If a decoder error has occurred. * @throws E If a decoder error has occurred.
*/ */
@Nullable
I dequeueInputBuffer() throws E; I dequeueInputBuffer() throws E;
/** /**
@ -53,6 +56,7 @@ public interface Decoder<I, O, E extends Exception> {
* @return The output buffer, or null if an output buffer isn't available. * @return The output buffer, or null if an output buffer isn't available.
* @throws E If a decoder error has occurred. * @throws E If a decoder error has occurred.
*/ */
@Nullable
O dequeueOutputBuffer() throws E; O dequeueOutputBuffer() throws E;
/** /**

View File

@ -16,11 +16,13 @@
package com.google.android.exoplayer2.decoder; package com.google.android.exoplayer2.decoder;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import org.checkerframework.checker.nullness.qual.EnsuresNonNull;
/** /**
* Holds input for a decoder. * Holds input for a decoder.
@ -58,16 +60,28 @@ public class DecoderInputBuffer extends Buffer {
*/ */
public final CryptoInfo cryptoInfo; public final CryptoInfo cryptoInfo;
/** The buffer's data, or {@code null} if no data has been set. */
@Nullable public ByteBuffer data;
// TODO: Remove this temporary signaling once end-of-stream propagation for clips using content
// protection is fixed. See [Internal: b/153326944] for details.
/** /**
* The buffer's data, or {@code null} if no data has been set. * Whether the last attempt to read a sample into this buffer failed due to not yet having the DRM
* keys associated with the next sample.
*/ */
public ByteBuffer data; public boolean waitingForKeys;
/** /**
* The time at which the sample should be presented. * The time at which the sample should be presented.
*/ */
public long timeUs; public long timeUs;
/**
* Supplemental data related to the buffer, if {@link #hasSupplementalData()} returns true. If
* present, the buffer is populated with supplemental data from position 0 to its limit.
*/
@Nullable public ByteBuffer supplementalData;
@BufferReplacementMode private final int bufferReplacementMode; @BufferReplacementMode private final int bufferReplacementMode;
/** /**
@ -89,6 +103,21 @@ public class DecoderInputBuffer extends Buffer {
this.bufferReplacementMode = bufferReplacementMode; this.bufferReplacementMode = bufferReplacementMode;
} }
/**
* Clears {@link #supplementalData} and ensures that it's large enough to accommodate {@code
* length} bytes.
*
* @param length The length of the supplemental data that must be accommodated, in bytes.
*/
@EnsuresNonNull("supplementalData")
public void resetSupplementalData(int length) {
if (supplementalData == null || supplementalData.capacity() < length) {
supplementalData = ByteBuffer.allocate(length);
} else {
supplementalData.clear();
}
}
/** /**
* Ensures that {@link #data} is large enough to accommodate a write of a given length at its * Ensures that {@link #data} is large enough to accommodate a write of a given length at its
* current position. * current position.
@ -101,6 +130,7 @@ public class DecoderInputBuffer extends Buffer {
* @throws IllegalStateException If there is insufficient capacity to accommodate the write and * @throws IllegalStateException If there is insufficient capacity to accommodate the write and
* the buffer replacement mode of the holder is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}. * the buffer replacement mode of the holder is {@link #BUFFER_REPLACEMENT_MODE_DISABLED}.
*/ */
@EnsuresNonNull("data")
public void ensureSpaceForWrite(int length) { public void ensureSpaceForWrite(int length) {
if (data == null) { if (data == null) {
data = createReplacementByteBuffer(length); data = createReplacementByteBuffer(length);
@ -115,10 +145,10 @@ public class DecoderInputBuffer extends Buffer {
} }
// Instantiate a new buffer if possible. // Instantiate a new buffer if possible.
ByteBuffer newData = createReplacementByteBuffer(requiredCapacity); ByteBuffer newData = createReplacementByteBuffer(requiredCapacity);
newData.order(data.order());
// Copy data up to the current position from the old buffer to the new one. // Copy data up to the current position from the old buffer to the new one.
if (position > 0) { if (position > 0) {
data.position(0); data.flip();
data.limit(position);
newData.put(data); newData.put(data);
} }
// Set the new buffer. // Set the new buffer.
@ -141,12 +171,15 @@ public class DecoderInputBuffer extends Buffer {
} }
/** /**
* Flips {@link #data} in preparation for being queued to a decoder. * Flips {@link #data} and {@link #supplementalData} in preparation for being queued to a decoder.
* *
* @see java.nio.Buffer#flip() * @see java.nio.Buffer#flip()
*/ */
public final void flip() { public final void flip() {
data.flip(); data.flip();
if (supplementalData != null) {
supplementalData.flip();
}
} }
@Override @Override
@ -155,6 +188,10 @@ public class DecoderInputBuffer extends Buffer {
if (data != null) { if (data != null) {
data.clear(); data.clear();
} }
if (supplementalData != null) {
supplementalData.clear();
}
waitingForKeys = false;
} }
private ByteBuffer createReplacementByteBuffer(int requiredCapacity) { private ByteBuffer createReplacementByteBuffer(int requiredCapacity) {
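Both resetSupplementalData and ensureSpaceForWrite follow the same pattern above: reallocate only when the existing buffer is too small, otherwise clear and reuse it (ensureSpaceForWrite additionally preserves the byte order and any bytes already written). A usage sketch for writing one sample into a DecoderInputBuffer; the helper itself is illustrative:

import com.google.android.exoplayer2.decoder.DecoderInputBuffer;

final class InputBufferExample {
  static void writeSample(DecoderInputBuffer buffer, byte[] sampleData, long timeUs) {
    buffer.clear();
    // Grows (or allocates) buffer.data as needed; with BUFFER_REPLACEMENT_MODE_DISABLED this
    // would instead throw IllegalStateException when capacity is insufficient.
    buffer.ensureSpaceForWrite(sampleData.length);
    buffer.data.put(sampleData);
    buffer.timeUs = timeUs;
    buffer.flip(); // flips data (and supplementalData, if set) ready for the decoder
  }
}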

View File

@ -15,6 +15,7 @@
*/ */
package com.google.android.exoplayer2.decoder; package com.google.android.exoplayer2.decoder;
import androidx.annotation.CallSuper;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
@ -86,6 +87,7 @@ public abstract class SimpleDecoder<
} }
@Override @Override
@Nullable
public final I dequeueInputBuffer() throws E { public final I dequeueInputBuffer() throws E {
synchronized (lock) { synchronized (lock) {
maybeThrowException(); maybeThrowException();
@ -108,6 +110,7 @@ public abstract class SimpleDecoder<
} }
@Override @Override
@Nullable
public final O dequeueOutputBuffer() throws E { public final O dequeueOutputBuffer() throws E {
synchronized (lock) { synchronized (lock) {
maybeThrowException(); maybeThrowException();
@ -123,6 +126,7 @@ public abstract class SimpleDecoder<
* *
* @param outputBuffer The output buffer being released. * @param outputBuffer The output buffer being released.
*/ */
@CallSuper
protected void releaseOutputBuffer(O outputBuffer) { protected void releaseOutputBuffer(O outputBuffer) {
synchronized (lock) { synchronized (lock) {
releaseOutputBufferInternal(outputBuffer); releaseOutputBufferInternal(outputBuffer);
@ -145,9 +149,11 @@ public abstract class SimpleDecoder<
while (!queuedOutputBuffers.isEmpty()) { while (!queuedOutputBuffers.isEmpty()) {
queuedOutputBuffers.removeFirst().release(); queuedOutputBuffers.removeFirst().release();
} }
exception = null;
} }
} }
@CallSuper
@Override @Override
public void release() { public void release() {
synchronized (lock) { synchronized (lock) {
@ -220,6 +226,7 @@ public abstract class SimpleDecoder<
if (inputBuffer.isDecodeOnly()) { if (inputBuffer.isDecodeOnly()) {
outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY); outputBuffer.addFlag(C.BUFFER_FLAG_DECODE_ONLY);
} }
@Nullable E exception;
try { try {
exception = decode(inputBuffer, outputBuffer, resetDecoder); exception = decode(inputBuffer, outputBuffer, resetDecoder);
} catch (RuntimeException e) { } catch (RuntimeException e) {
@ -233,8 +240,9 @@ public abstract class SimpleDecoder<
exception = createUnexpectedDecodeException(e); exception = createUnexpectedDecodeException(e);
} }
if (exception != null) { if (exception != null) {
// Memory barrier to ensure that the decoder exception is visible from the playback thread. synchronized (lock) {
synchronized (lock) {} this.exception = exception;
}
return false; return false;
} }
} }
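With dequeueInputBuffer and dequeueOutputBuffer now @Nullable, and decode failures stored under the lock instead of relying on the old empty synchronized block as a memory barrier, callers have to cope with null returns and with the checked exception surfacing from the dequeue calls. A sketch of a typical drive loop around a SimpleDecoder; the helper is generic and illustrative, and assumes the Decoder interface's queueInputBuffer method:

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.decoder.SimpleDecoder;
import com.google.android.exoplayer2.decoder.SimpleOutputBuffer;
import java.nio.ByteBuffer;

final class DecodeLoopExample {
  /** Feeds one sample if possible and drains any ready output. Returns the output buffer, or null. */
  @Nullable
  static <E extends Exception> SimpleOutputBuffer pump(
      SimpleDecoder<DecoderInputBuffer, SimpleOutputBuffer, E> decoder,
      ByteBuffer sampleData,
      long timeUs)
      throws E {
    DecoderInputBuffer inputBuffer = decoder.dequeueInputBuffer(); // null if all buffers are in flight
    if (inputBuffer != null) {
      inputBuffer.ensureSpaceForWrite(sampleData.remaining());
      inputBuffer.data.put(sampleData);
      inputBuffer.timeUs = timeUs;
      inputBuffer.flip();
      decoder.queueInputBuffer(inputBuffer);
    }
    SimpleOutputBuffer outputBuffer = decoder.dequeueOutputBuffer(); // null if none; throws E on decode failure
    if (outputBuffer != null && outputBuffer.isEndOfStream()) {
      outputBuffer.release();
      return null;
    }
    return outputBuffer;
  }
}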

View File

@ -15,6 +15,7 @@
*/ */
package com.google.android.exoplayer2.decoder; package com.google.android.exoplayer2.decoder;
import androidx.annotation.Nullable;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
import java.nio.ByteOrder; import java.nio.ByteOrder;
@ -25,7 +26,7 @@ public class SimpleOutputBuffer extends OutputBuffer {
private final SimpleDecoder<?, SimpleOutputBuffer, ?> owner; private final SimpleDecoder<?, SimpleOutputBuffer, ?> owner;
public ByteBuffer data; @Nullable public ByteBuffer data;
public SimpleOutputBuffer(SimpleDecoder<?, SimpleOutputBuffer, ?> owner) { public SimpleOutputBuffer(SimpleDecoder<?, SimpleOutputBuffer, ?> owner) {
this.owner = owner; this.owner = owner;

View File

@ -0,0 +1,19 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
@NonNullApi
package com.google.android.exoplayer2.decoder;
import com.google.android.exoplayer2.util.NonNullApi;

View File

@ -1,72 +0,0 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.drm;
/**
* A reference-counted resource used in the decryption of media samples.
*
* @param <T> The reference type with which to make {@link Owner#onLastReferenceReleased} calls.
* Subclasses are expected to pass themselves.
*/
public abstract class DecryptionResource<T extends DecryptionResource<T>> {
/**
* Implemented by the class in charge of managing a {@link DecryptionResource resource's}
* lifecycle.
*/
public interface Owner<T extends DecryptionResource<T>> {
/**
* Called when the last reference to a {@link DecryptionResource} is {@link #releaseReference()
* released}.
*/
void onLastReferenceReleased(T resource);
}
// TODO: Consider adding a handler on which the owner should be called.
private final DecryptionResource.Owner<T> owner;
private int referenceCount;
/**
* Creates a new instance with reference count zero.
*
* @param owner The owner of this instance.
*/
public DecryptionResource(Owner<T> owner) {
this.owner = owner;
referenceCount = 0;
}
/** Increases by one the reference count for this resource. */
public void acquireReference() {
referenceCount++;
}
/**
* Decreases by one the reference count for this resource, and notifies the owner if said count
* reached zero as a result of this operation.
*
* <p>Must only be called as releasing counter-part of {@link #acquireReference()}.
*/
@SuppressWarnings("unchecked")
public void releaseReference() {
if (--referenceCount == 0) {
owner.onLastReferenceReleased((T) this);
} else if (referenceCount < 0) {
throw new IllegalStateException("Illegal release of resource.");
}
}
}

View File

@ -22,16 +22,19 @@ import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.Looper; import android.os.Looper;
import android.os.Message; import android.os.Message;
import androidx.annotation.Nullable; import android.os.SystemClock;
import android.util.Pair; import android.util.Pair;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest;
import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.EventDispatcher; import com.google.android.exoplayer2.util.EventDispatcher;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
import com.google.android.exoplayer2.util.Util; import com.google.android.exoplayer2.util.Util;
import java.io.IOException;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections; import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
@ -42,20 +45,24 @@ import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
import org.checkerframework.checker.nullness.qual.MonotonicNonNull; import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
import org.checkerframework.checker.nullness.qual.RequiresNonNull; import org.checkerframework.checker.nullness.qual.RequiresNonNull;
/** /** A {@link DrmSession} that supports playbacks using {@link ExoMediaDrm}. */
* A {@link DrmSession} that supports playbacks using {@link ExoMediaDrm}.
*/
@TargetApi(18) @TargetApi(18)
/* package */ class DefaultDrmSession<T extends ExoMediaCrypto> implements DrmSession<T> { /* package */ class DefaultDrmSession<T extends ExoMediaCrypto> implements DrmSession<T> {
/** /** Thrown when an unexpected exception or error is thrown during provisioning or key requests. */
* Manages provisioning requests. public static final class UnexpectedDrmSessionException extends IOException {
*/
public UnexpectedDrmSessionException(Throwable cause) {
super("Unexpected " + cause.getClass().getSimpleName() + ": " + cause.getMessage(), cause);
}
}
/** Manages provisioning requests. */
public interface ProvisioningManager<T extends ExoMediaCrypto> { public interface ProvisioningManager<T extends ExoMediaCrypto> {
/** /**
* Called when a session requires provisioning. The manager <em>may</em> call * Called when a session requires provisioning. The manager <em>may</em> call {@link
* {@link #provision()} to have this session perform the provisioning operation. The manager * #provision()} to have this session perform the provisioning operation. The manager
* <em>will</em> call {@link DefaultDrmSession#onProvisionCompleted()} when provisioning has * <em>will</em> call {@link DefaultDrmSession#onProvisionCompleted()} when provisioning has
* completed, or {@link DefaultDrmSession#onProvisionError} if provisioning fails. * completed, or {@link DefaultDrmSession#onProvisionError} if provisioning fails.
* *
@ -70,44 +77,55 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
*/ */
void onProvisionError(Exception error); void onProvisionError(Exception error);
/** /** Called by a session when it successfully completes a provisioning operation. */
* Called by a session when it successfully completes a provisioning operation.
*/
void onProvisionCompleted(); void onProvisionCompleted();
}
/** Callback to be notified when the session is released. */
public interface ReleaseCallback<T extends ExoMediaCrypto> {
/**
* Called immediately after releasing session resources.
*
* @param session The session.
*/
void onSessionReleased(DefaultDrmSession<T> session);
} }
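Session lifetime is now governed purely by reference counting: the first acquire() (shown further down) spins up the request handler thread, and the last release() tears everything down and notifies the ReleaseCallback declared above. A simplified sketch of that contract from a caller's perspective; the holder class is illustrative and assumes the no-argument acquire()/release() signatures used in this version:

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;

/* Illustrative holder showing the acquire-before-release pattern. */
final class DrmSessionHolder<T extends ExoMediaCrypto> {
  @Nullable private DrmSession<T> session;

  void set(@Nullable DrmSession<T> newSession) {
    if (newSession != null) {
      newSession.acquire(); // first reference starts opening the session
    }
    if (session != null) {
      session.release(); // last reference closes the underlying ExoMediaDrm session
    }
    session = newSession;
  }
}

Acquiring the replacement before releasing the current session is the same ordering the DrmSession.replaceSession helper used in SimpleDecoderAudioRenderer above relies on, so a shared session never transiently drops to zero references.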
private static final String TAG = "DefaultDrmSession"; private static final String TAG = "DefaultDrmSession";
private static final int MSG_PROVISION = 0; private static final int MSG_PROVISION = 0;
private static final int MSG_KEYS = 1; private static final int MSG_KEYS = 1;
private static final int MAX_LICENSE_DURATION_TO_RENEW = 60; private static final int MAX_LICENSE_DURATION_TO_RENEW_SECONDS = 60;
/** The DRM scheme datas, or null if this session uses offline keys. */ /** The DRM scheme datas, or null if this session uses offline keys. */
public final @Nullable List<SchemeData> schemeDatas; @Nullable public final List<SchemeData> schemeDatas;
private final ExoMediaDrm<T> mediaDrm; private final ExoMediaDrm<T> mediaDrm;
private final ProvisioningManager<T> provisioningManager; private final ProvisioningManager<T> provisioningManager;
private final ReleaseCallback<T> releaseCallback;
private final @DefaultDrmSessionManager.Mode int mode; private final @DefaultDrmSessionManager.Mode int mode;
private final @Nullable HashMap<String, String> optionalKeyRequestParameters; private final boolean playClearSamplesWithoutKeys;
private final boolean isPlaceholderSession;
private final HashMap<String, String> keyRequestParameters;
private final EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher; private final EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher;
private final int initialDrmRequestRetryCount; private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;
/* package */ final MediaDrmCallback callback; /* package */ final MediaDrmCallback callback;
/* package */ final UUID uuid; /* package */ final UUID uuid;
/* package */ final PostResponseHandler postResponseHandler; /* package */ final ResponseHandler responseHandler;
private @DrmSession.State int state; private @DrmSession.State int state;
private int openCount; private int referenceCount;
private HandlerThread requestHandlerThread; @Nullable private HandlerThread requestHandlerThread;
private PostRequestHandler postRequestHandler; @Nullable private RequestHandler requestHandler;
private @Nullable T mediaCrypto; @Nullable private T mediaCrypto;
private @Nullable DrmSessionException lastException; @Nullable private DrmSessionException lastException;
private byte @MonotonicNonNull [] sessionId; @Nullable private byte[] sessionId;
private byte @MonotonicNonNull [] offlineLicenseKeySetId; @MonotonicNonNull private byte[] offlineLicenseKeySetId;
private @Nullable KeyRequest currentKeyRequest; @Nullable private KeyRequest currentKeyRequest;
private @Nullable ProvisionRequest currentProvisionRequest; @Nullable private ProvisionRequest currentProvisionRequest;
/** /**
* Instantiates a new DRM session. * Instantiates a new DRM session.
@ -115,92 +133,60 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
* @param uuid The UUID of the drm scheme. * @param uuid The UUID of the drm scheme.
* @param mediaDrm The media DRM. * @param mediaDrm The media DRM.
* @param provisioningManager The manager for provisioning. * @param provisioningManager The manager for provisioning.
* @param releaseCallback The {@link ReleaseCallback}.
* @param schemeDatas DRM scheme datas for this session, or null if an {@code * @param schemeDatas DRM scheme datas for this session, or null if an {@code
* offlineLicenseKeySetId} is provided. * offlineLicenseKeySetId} is provided or if {@code isPlaceholderSession} is true.
* @param mode The DRM mode. * @param mode The DRM mode. Ignored if {@code isPlaceholderSession} is true.
* @param isPlaceholderSession Whether this session is not expected to acquire any keys.
* @param offlineLicenseKeySetId The offline license key set identifier, or null when not using * @param offlineLicenseKeySetId The offline license key set identifier, or null when not using
* offline keys. * offline keys.
* @param optionalKeyRequestParameters The optional key request parameters. * @param keyRequestParameters Key request parameters.
* @param callback The media DRM callback. * @param callback The media DRM callback.
* @param playbackLooper The playback looper. * @param playbackLooper The playback looper.
* @param eventDispatcher The dispatcher for DRM session manager events. * @param eventDispatcher The dispatcher for DRM session manager events.
* @param initialDrmRequestRetryCount The number of times to retry for initial provisioning and * @param loadErrorHandlingPolicy The {@link LoadErrorHandlingPolicy} for key and provisioning
* key request before reporting error. * requests.
*/ */
// the constructor does not initialize fields: sessionId
@SuppressWarnings("nullness:initialization.fields.uninitialized")
public DefaultDrmSession( public DefaultDrmSession(
UUID uuid, UUID uuid,
ExoMediaDrm<T> mediaDrm, ExoMediaDrm<T> mediaDrm,
ProvisioningManager<T> provisioningManager, ProvisioningManager<T> provisioningManager,
ReleaseCallback<T> releaseCallback,
@Nullable List<SchemeData> schemeDatas, @Nullable List<SchemeData> schemeDatas,
@DefaultDrmSessionManager.Mode int mode, @DefaultDrmSessionManager.Mode int mode,
boolean playClearSamplesWithoutKeys,
boolean isPlaceholderSession,
@Nullable byte[] offlineLicenseKeySetId, @Nullable byte[] offlineLicenseKeySetId,
@Nullable HashMap<String, String> optionalKeyRequestParameters, HashMap<String, String> keyRequestParameters,
MediaDrmCallback callback, MediaDrmCallback callback,
Looper playbackLooper, Looper playbackLooper,
EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher, EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher,
int initialDrmRequestRetryCount) { LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
if (mode == DefaultDrmSessionManager.MODE_QUERY if (mode == DefaultDrmSessionManager.MODE_QUERY
|| mode == DefaultDrmSessionManager.MODE_RELEASE) { || mode == DefaultDrmSessionManager.MODE_RELEASE) {
Assertions.checkNotNull(offlineLicenseKeySetId); Assertions.checkNotNull(offlineLicenseKeySetId);
} }
this.uuid = uuid; this.uuid = uuid;
this.provisioningManager = provisioningManager; this.provisioningManager = provisioningManager;
this.releaseCallback = releaseCallback;
this.mediaDrm = mediaDrm; this.mediaDrm = mediaDrm;
this.mode = mode; this.mode = mode;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
this.isPlaceholderSession = isPlaceholderSession;
if (offlineLicenseKeySetId != null) { if (offlineLicenseKeySetId != null) {
this.offlineLicenseKeySetId = offlineLicenseKeySetId; this.offlineLicenseKeySetId = offlineLicenseKeySetId;
this.schemeDatas = null; this.schemeDatas = null;
} else { } else {
this.schemeDatas = Collections.unmodifiableList(Assertions.checkNotNull(schemeDatas)); this.schemeDatas = Collections.unmodifiableList(Assertions.checkNotNull(schemeDatas));
} }
this.optionalKeyRequestParameters = optionalKeyRequestParameters; this.keyRequestParameters = keyRequestParameters;
this.callback = callback; this.callback = callback;
this.initialDrmRequestRetryCount = initialDrmRequestRetryCount;
this.eventDispatcher = eventDispatcher; this.eventDispatcher = eventDispatcher;
this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
state = STATE_OPENING; state = STATE_OPENING;
responseHandler = new ResponseHandler(playbackLooper);
postResponseHandler = new PostResponseHandler(playbackLooper);
requestHandlerThread = new HandlerThread("DrmRequestHandler");
requestHandlerThread.start();
postRequestHandler = new PostRequestHandler(requestHandlerThread.getLooper());
}
// Life cycle.
public void acquire() {
if (++openCount == 1) {
if (state == STATE_ERROR) {
return;
}
if (openInternal(true)) {
doLicense(true);
}
}
}
/** @return True if the session is closed and cleaned up, false otherwise. */
// Assigning null to various non-null variables for clean-up. Class won't be used after release.
@SuppressWarnings("assignment.type.incompatible")
public boolean release() {
if (--openCount == 0) {
state = STATE_RELEASED;
postResponseHandler.removeCallbacksAndMessages(null);
postRequestHandler.removeCallbacksAndMessages(null);
postRequestHandler = null;
requestHandlerThread.quit();
requestHandlerThread = null;
mediaCrypto = null;
lastException = null;
currentKeyRequest = null;
currentProvisionRequest = null;
if (sessionId != null) {
mediaDrm.closeSession(sessionId);
sessionId = null;
eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionReleased);
}
return true;
}
return false;
} }
public boolean hasSessionId(byte[] sessionId) { public boolean hasSessionId(byte[] sessionId) {
@ -221,7 +207,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
public void provision() { public void provision() {
currentProvisionRequest = mediaDrm.getProvisionRequest(); currentProvisionRequest = mediaDrm.getProvisionRequest();
postRequestHandler.post(MSG_PROVISION, currentProvisionRequest, /* allowRetry= */ true); Util.castNonNull(requestHandler)
.post(
MSG_PROVISION,
Assertions.checkNotNull(currentProvisionRequest),
/* allowRetry= */ true);
} }
public void onProvisionCompleted() { public void onProvisionCompleted() {
@ -242,6 +232,11 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
return state; return state;
} }
@Override
public boolean playClearSamplesWithoutKeys() {
return playClearSamplesWithoutKeys;
}
@Override @Override
public final @Nullable DrmSessionException getError() { public final @Nullable DrmSessionException getError() {
return state == STATE_ERROR ? lastException : null; return state == STATE_ERROR ? lastException : null;
@ -253,15 +248,54 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
@Override @Override
public @Nullable Map<String, String> queryKeyStatus() { @Nullable
public Map<String, String> queryKeyStatus() {
return sessionId == null ? null : mediaDrm.queryKeyStatus(sessionId); return sessionId == null ? null : mediaDrm.queryKeyStatus(sessionId);
} }
@Override @Override
public @Nullable byte[] getOfflineLicenseKeySetId() { @Nullable
public byte[] getOfflineLicenseKeySetId() {
return offlineLicenseKeySetId; return offlineLicenseKeySetId;
} }
@Override
public void acquire() {
Assertions.checkState(referenceCount >= 0);
if (++referenceCount == 1) {
Assertions.checkState(state == STATE_OPENING);
requestHandlerThread = new HandlerThread("DrmRequestHandler");
requestHandlerThread.start();
requestHandler = new RequestHandler(requestHandlerThread.getLooper());
if (openInternal(true)) {
doLicense(true);
}
}
}
@Override
public void release() {
if (--referenceCount == 0) {
// Assigning null to various non-null variables for clean-up.
state = STATE_RELEASED;
Util.castNonNull(responseHandler).removeCallbacksAndMessages(null);
Util.castNonNull(requestHandler).removeCallbacksAndMessages(null);
requestHandler = null;
Util.castNonNull(requestHandlerThread).quit();
requestHandlerThread = null;
mediaCrypto = null;
lastException = null;
currentKeyRequest = null;
currentProvisionRequest = null;
if (sessionId != null) {
mediaDrm.closeSession(sessionId);
sessionId = null;
eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionReleased);
}
releaseCallback.onSessionReleased(this);
}
}
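
The acquire/release pair above replaces the old boolean-returning release(): a session is now a plain reference-counted object, and the manager learns about the final release through releaseCallback rather than a return value. A minimal caller-side sketch of that contract, assuming a manager, playback looper and DrmInitData already exist (the holder class below is illustrative, not part of this change):

import android.os.Looper;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;

final class SessionRefCountSketch<T extends ExoMediaCrypto> {
  private final DrmSessionManager<T> manager;
  @Nullable private DrmSession<T> session;

  SessionRefCountSketch(DrmSessionManager<T> manager) {
    this.manager = manager;
  }

  void start(Looper playbackLooper, DrmInitData drmInitData) {
    // acquireSession() has already incremented the reference count for the caller.
    session = manager.acquireSession(playbackLooper, drmInitData);
  }

  void stop() {
    if (session != null) {
      // Balances the acquire performed by acquireSession(); the platform session
      // is closed once the count reaches zero.
      session.release();
      session = null;
    }
  }
}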
// Internal methods. // Internal methods.
/** /**
@ -280,9 +314,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
try { try {
sessionId = mediaDrm.openSession(); sessionId = mediaDrm.openSession();
eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionAcquired);
mediaCrypto = mediaDrm.createMediaCrypto(sessionId); mediaCrypto = mediaDrm.createMediaCrypto(sessionId);
eventDispatcher.dispatch(DefaultDrmSessionEventListener::onDrmSessionAcquired);
state = STATE_OPENED; state = STATE_OPENED;
Assertions.checkNotNull(sessionId);
return true; return true;
} catch (NotProvisionedException e) { } catch (NotProvisionedException e) {
if (allowProvisioning) { if (allowProvisioning) {
@ -321,6 +356,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@RequiresNonNull("sessionId") @RequiresNonNull("sessionId")
private void doLicense(boolean allowRetry) { private void doLicense(boolean allowRetry) {
if (isPlaceholderSession) {
return;
}
byte[] sessionId = Util.castNonNull(this.sessionId);
switch (mode) { switch (mode) {
case DefaultDrmSessionManager.MODE_PLAYBACK: case DefaultDrmSessionManager.MODE_PLAYBACK:
case DefaultDrmSessionManager.MODE_QUERY: case DefaultDrmSessionManager.MODE_QUERY:
@ -329,9 +368,12 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} else if (state == STATE_OPENED_WITH_KEYS || restoreKeys()) { } else if (state == STATE_OPENED_WITH_KEYS || restoreKeys()) {
long licenseDurationRemainingSec = getLicenseDurationRemainingSec(); long licenseDurationRemainingSec = getLicenseDurationRemainingSec();
if (mode == DefaultDrmSessionManager.MODE_PLAYBACK if (mode == DefaultDrmSessionManager.MODE_PLAYBACK
&& licenseDurationRemainingSec <= MAX_LICENSE_DURATION_TO_RENEW) { && licenseDurationRemainingSec <= MAX_LICENSE_DURATION_TO_RENEW_SECONDS) {
Log.d(TAG, "Offline license has expired or will expire soon. " Log.d(
+ "Remaining seconds: " + licenseDurationRemainingSec); TAG,
"Offline license has expired or will expire soon. "
+ "Remaining seconds: "
+ licenseDurationRemainingSec);
postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry); postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry);
} else if (licenseDurationRemainingSec <= 0) { } else if (licenseDurationRemainingSec <= 0) {
onError(new KeysExpiredException()); onError(new KeysExpiredException());
@ -342,17 +384,13 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} }
break; break;
case DefaultDrmSessionManager.MODE_DOWNLOAD: case DefaultDrmSessionManager.MODE_DOWNLOAD:
if (offlineLicenseKeySetId == null) { if (offlineLicenseKeySetId == null || restoreKeys()) {
postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry); postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry);
} else {
// Renew
if (restoreKeys()) {
postKeyRequest(sessionId, ExoMediaDrm.KEY_TYPE_OFFLINE, allowRetry);
}
} }
break; break;
case DefaultDrmSessionManager.MODE_RELEASE: case DefaultDrmSessionManager.MODE_RELEASE:
Assertions.checkNotNull(offlineLicenseKeySetId); Assertions.checkNotNull(offlineLicenseKeySetId);
Assertions.checkNotNull(this.sessionId);
// It's not necessary to restore the key (and open a session to do that) before releasing it // It's not necessary to restore the key (and open a session to do that) before releasing it
// but this serves as a good sanity/fast-failure check. // but this serves as a good sanity/fast-failure check.
if (restoreKeys()) { if (restoreKeys()) {
@ -370,7 +408,7 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
mediaDrm.restoreKeys(sessionId, offlineLicenseKeySetId); mediaDrm.restoreKeys(sessionId, offlineLicenseKeySetId);
return true; return true;
} catch (Exception e) { } catch (Exception e) {
Log.e(TAG, "Error trying to restore Widevine keys.", e); Log.e(TAG, "Error trying to restore keys.", e);
onError(e); onError(e);
} }
return false; return false;
@ -387,9 +425,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
private void postKeyRequest(byte[] scope, int type, boolean allowRetry) { private void postKeyRequest(byte[] scope, int type, boolean allowRetry) {
try { try {
currentKeyRequest = currentKeyRequest = mediaDrm.getKeyRequest(scope, schemeDatas, type, keyRequestParameters);
mediaDrm.getKeyRequest(scope, schemeDatas, type, optionalKeyRequestParameters); Util.castNonNull(requestHandler)
postRequestHandler.post(MSG_KEYS, currentKeyRequest, allowRetry); .post(MSG_KEYS, Assertions.checkNotNull(currentKeyRequest), allowRetry);
} catch (Exception e) { } catch (Exception e) {
onKeysError(e); onKeysError(e);
} }
@ -415,8 +453,10 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
} else { } else {
byte[] keySetId = mediaDrm.provideKeyResponse(sessionId, responseData); byte[] keySetId = mediaDrm.provideKeyResponse(sessionId, responseData);
if ((mode == DefaultDrmSessionManager.MODE_DOWNLOAD if ((mode == DefaultDrmSessionManager.MODE_DOWNLOAD
|| (mode == DefaultDrmSessionManager.MODE_PLAYBACK && offlineLicenseKeySetId != null)) || (mode == DefaultDrmSessionManager.MODE_PLAYBACK
&& keySetId != null && keySetId.length != 0) { && offlineLicenseKeySetId != null))
&& keySetId != null
&& keySetId.length != 0) {
offlineLicenseKeySetId = keySetId; offlineLicenseKeySetId = keySetId;
} }
state = STATE_OPENED_WITH_KEYS; state = STATE_OPENED_WITH_KEYS;
@ -459,9 +499,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
// Internal classes. // Internal classes.
@SuppressLint("HandlerLeak") @SuppressLint("HandlerLeak")
private class PostResponseHandler extends Handler { private class ResponseHandler extends Handler {
public PostResponseHandler(Looper looper) { public ResponseHandler(Looper looper) {
super(looper); super(looper);
} }
@ -480,68 +520,88 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
break; break;
default: default:
break; break;
} }
} }
} }
@SuppressLint("HandlerLeak") @SuppressLint("HandlerLeak")
private class PostRequestHandler extends Handler { private class RequestHandler extends Handler {
public PostRequestHandler(Looper backgroundLooper) { public RequestHandler(Looper backgroundLooper) {
super(backgroundLooper); super(backgroundLooper);
} }
void post(int what, Object request, boolean allowRetry) { void post(int what, Object request, boolean allowRetry) {
int allowRetryInt = allowRetry ? 1 : 0; RequestTask requestTask =
int errorCount = 0; new RequestTask(allowRetry, /* startTimeMs= */ SystemClock.elapsedRealtime(), request);
obtainMessage(what, allowRetryInt, errorCount, request).sendToTarget(); obtainMessage(what, requestTask).sendToTarget();
} }
@Override @Override
@SuppressWarnings("unchecked")
public void handleMessage(Message msg) { public void handleMessage(Message msg) {
Object request = msg.obj; RequestTask requestTask = (RequestTask) msg.obj;
Object response; Object response;
try { try {
switch (msg.what) { switch (msg.what) {
case MSG_PROVISION: case MSG_PROVISION:
response = callback.executeProvisionRequest(uuid, (ProvisionRequest) request); response =
callback.executeProvisionRequest(uuid, (ProvisionRequest) requestTask.request);
break; break;
case MSG_KEYS: case MSG_KEYS:
response = callback.executeKeyRequest(uuid, (KeyRequest) request); response = callback.executeKeyRequest(uuid, (KeyRequest) requestTask.request);
break; break;
default: default:
throw new RuntimeException(); throw new RuntimeException();
} }
} catch (Exception e) { } catch (Exception e) {
if (maybeRetryRequest(msg)) { if (maybeRetryRequest(msg, e)) {
return; return;
} }
response = e; response = e;
} }
postResponseHandler.obtainMessage(msg.what, Pair.create(request, response)).sendToTarget(); responseHandler
.obtainMessage(msg.what, Pair.create(requestTask.request, response))
.sendToTarget();
} }
private boolean maybeRetryRequest(Message originalMsg) { private boolean maybeRetryRequest(Message originalMsg, Exception e) {
boolean allowRetry = originalMsg.arg1 == 1; RequestTask requestTask = (RequestTask) originalMsg.obj;
if (!allowRetry) { if (!requestTask.allowRetry) {
return false; return false;
} }
int errorCount = originalMsg.arg2 + 1; requestTask.errorCount++;
if (errorCount > initialDrmRequestRetryCount) { if (requestTask.errorCount
> loadErrorHandlingPolicy.getMinimumLoadableRetryCount(C.DATA_TYPE_DRM)) {
return false; return false;
} }
Message retryMsg = Message.obtain(originalMsg); IOException ioException =
retryMsg.arg2 = errorCount; e instanceof IOException ? (IOException) e : new UnexpectedDrmSessionException(e);
sendMessageDelayed(retryMsg, getRetryDelayMillis(errorCount)); long retryDelayMs =
loadErrorHandlingPolicy.getRetryDelayMsFor(
C.DATA_TYPE_DRM,
/* loadDurationMs= */ SystemClock.elapsedRealtime() - requestTask.startTimeMs,
ioException,
requestTask.errorCount);
if (retryDelayMs == C.TIME_UNSET) {
// The error is fatal.
return false;
}
sendMessageDelayed(Message.obtain(originalMsg), retryDelayMs);
return true; return true;
} }
}
private long getRetryDelayMillis(int errorCount) { private static final class RequestTask {
return Math.min((errorCount - 1) * 1000, 5000);
public final boolean allowRetry;
public final long startTimeMs;
public final Object request;
public int errorCount;
public RequestTask(boolean allowRetry, long startTimeMs, Object request) {
this.allowRetry = allowRetry;
this.startTimeMs = startTimeMs;
this.request = request;
} }
} }
} }
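
The retry loop above now defers to the injected LoadErrorHandlingPolicy instead of the fixed initialDrmRequestRetryCount: getMinimumLoadableRetryCount(C.DATA_TYPE_DRM) bounds the number of attempts and getRetryDelayMsFor supplies the backoff, with C.TIME_UNSET marking the error as fatal. A sketch of a policy tuned for DRM requests, using the method names exercised by this diff (signatures may differ in other ExoPlayer versions):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy;
import java.io.IOException;

/** Retries DRM key and provisioning requests at most twice, with a flat two-second delay. */
final class DrmRetryPolicy extends DefaultLoadErrorHandlingPolicy {

  @Override
  public int getMinimumLoadableRetryCount(int dataType) {
    return dataType == C.DATA_TYPE_DRM ? 2 : super.getMinimumLoadableRetryCount(dataType);
  }

  @Override
  public long getRetryDelayMsFor(
      int dataType, long loadDurationMs, IOException exception, int errorCount) {
    return dataType == C.DATA_TYPE_DRM
        ? 2_000
        : super.getRetryDelayMsFor(dataType, loadDurationMs, exception, errorCount);
  }
}

Such a policy would be handed to the manager through the Builder's setLoadErrorHandlingPolicy, shown further down in this change.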

View File

@ -24,7 +24,7 @@ public interface DefaultDrmSessionEventListener {
default void onDrmSessionAcquired() {} default void onDrmSessionAcquired() {}
/** Called each time keys are loaded. */ /** Called each time keys are loaded. */
void onDrmKeysLoaded(); default void onDrmKeysLoaded() {}
/** /**
* Called when a drm error occurs. * Called when a drm error occurs.
@ -38,13 +38,13 @@ public interface DefaultDrmSessionEventListener {
* *
* @param error The corresponding exception. * @param error The corresponding exception.
*/ */
void onDrmSessionManagerError(Exception error); default void onDrmSessionManagerError(Exception error) {}
/** Called each time offline keys are restored. */ /** Called each time offline keys are restored. */
void onDrmKeysRestored(); default void onDrmKeysRestored() {}
/** Called each time offline keys are removed. */ /** Called each time offline keys are removed. */
void onDrmKeysRemoved(); default void onDrmKeysRemoved() {}
/** Called each time a drm session is released. */ /** Called each time a drm session is released. */
default void onDrmSessionReleased() {} default void onDrmSessionReleased() {}
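
With every callback now carrying a default no-op body, listener implementations only override what they actually care about. A small sketch (class name and log tag are illustrative):

import android.util.Log;
import com.google.android.exoplayer2.drm.DefaultDrmSessionEventListener;

/** Logs DRM errors and key loads; the remaining callbacks fall back to the new defaults. */
final class LoggingDrmEventListener implements DefaultDrmSessionEventListener {
  private static final String TAG = "DrmEvents";

  @Override
  public void onDrmKeysLoaded() {
    Log.d(TAG, "DRM keys loaded");
  }

  @Override
  public void onDrmSessionManagerError(Exception error) {
    Log.e(TAG, "DRM session manager error", error);
  }
}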

View File

@ -22,12 +22,12 @@ import android.os.Looper;
import android.os.Message; import android.os.Message;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import android.text.TextUtils;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DefaultDrmSession.ProvisioningManager;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException; import com.google.android.exoplayer2.drm.DrmSession.DrmSessionException;
import com.google.android.exoplayer2.drm.ExoMediaDrm.OnEventListener; import com.google.android.exoplayer2.drm.ExoMediaDrm.OnEventListener;
import com.google.android.exoplayer2.upstream.DefaultLoadErrorHandlingPolicy;
import com.google.android.exoplayer2.upstream.LoadErrorHandlingPolicy;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
import com.google.android.exoplayer2.util.EventDispatcher; import com.google.android.exoplayer2.util.EventDispatcher;
import com.google.android.exoplayer2.util.Log; import com.google.android.exoplayer2.util.Log;
@ -36,16 +36,162 @@ import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map;
import java.util.UUID; import java.util.UUID;
/** /** A {@link DrmSessionManager} that supports playbacks using {@link ExoMediaDrm}. */
* A {@link DrmSessionManager} that supports playbacks using {@link ExoMediaDrm}.
*/
@TargetApi(18) @TargetApi(18)
public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSessionManager<T>, public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSessionManager<T> {
ProvisioningManager<T> {
/**
* Builder for {@link DefaultDrmSessionManager} instances.
*
* <p>See {@link #Builder} for the list of default values.
*/
public static final class Builder {
private final HashMap<String, String> keyRequestParameters;
private UUID uuid;
private ExoMediaDrm.Provider<ExoMediaCrypto> exoMediaDrmProvider;
private boolean multiSession;
private int[] useDrmSessionsForClearContentTrackTypes;
private boolean playClearSamplesWithoutKeys;
private LoadErrorHandlingPolicy loadErrorHandlingPolicy;
/**
* Creates a builder with default values. The default values are:
*
* <ul>
* <li>{@link #setKeyRequestParameters keyRequestParameters}: An empty map.
* <li>{@link #setUuidAndExoMediaDrmProvider UUID}: {@link C#WIDEVINE_UUID}.
* <li>{@link #setUuidAndExoMediaDrmProvider ExoMediaDrm.Provider}: {@link
* FrameworkMediaDrm#DEFAULT_PROVIDER}.
* <li>{@link #setMultiSession multiSession}: {@code false}.
* <li>{@link #setUseDrmSessionsForClearContent useDrmSessionsForClearContent}: No tracks.
* <li>{@link #setPlayClearSamplesWithoutKeys playClearSamplesWithoutKeys}: {@code false}.
* <li>{@link #setLoadErrorHandlingPolicy LoadErrorHandlingPolicy}: {@link
* DefaultLoadErrorHandlingPolicy}.
* </ul>
*/
@SuppressWarnings("unchecked")
public Builder() {
keyRequestParameters = new HashMap<>();
uuid = C.WIDEVINE_UUID;
exoMediaDrmProvider = (ExoMediaDrm.Provider) FrameworkMediaDrm.DEFAULT_PROVIDER;
loadErrorHandlingPolicy = new DefaultLoadErrorHandlingPolicy();
useDrmSessionsForClearContentTrackTypes = new int[0];
}
/**
* Sets the key request parameters to pass as the last argument to {@link
* ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}.
*
* <p>Custom data for PlayReady should be set under {@link #PLAYREADY_CUSTOM_DATA_KEY}.
*
* @param keyRequestParameters A map with parameters.
* @return This builder.
*/
public Builder setKeyRequestParameters(Map<String, String> keyRequestParameters) {
this.keyRequestParameters.clear();
this.keyRequestParameters.putAll(Assertions.checkNotNull(keyRequestParameters));
return this;
}
/**
* Sets the UUID of the DRM scheme and the {@link ExoMediaDrm.Provider} to use.
*
* @param uuid The UUID of the DRM scheme.
* @param exoMediaDrmProvider The {@link ExoMediaDrm.Provider}.
* @return This builder.
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public Builder setUuidAndExoMediaDrmProvider(
UUID uuid, ExoMediaDrm.Provider exoMediaDrmProvider) {
this.uuid = Assertions.checkNotNull(uuid);
this.exoMediaDrmProvider = Assertions.checkNotNull(exoMediaDrmProvider);
return this;
}
/**
* Sets whether this session manager is allowed to acquire multiple simultaneous sessions.
*
* <p>Users should pass false when a single key request will obtain all keys required to decrypt
* the associated content. {@code multiSession} is required when content uses key rotation.
*
* @param multiSession Whether this session manager is allowed to acquire multiple simultaneous
* sessions.
* @return This builder.
*/
public Builder setMultiSession(boolean multiSession) {
this.multiSession = multiSession;
return this;
}
/**
* Sets whether this session manager should attach {@link DrmSession DrmSessions} to the clear
* sections of the media content.
*
* <p>Using {@link DrmSession DrmSessions} for clear content avoids the recreation of decoders
* when transitioning between clear and encrypted sections of content.
*
* @param useDrmSessionsForClearContentTrackTypes The track types ({@link C#TRACK_TYPE_AUDIO}
* and/or {@link C#TRACK_TYPE_VIDEO}) for which to use a {@link DrmSession} regardless of
* whether the content is clear or encrypted.
* @return This builder.
* @throws IllegalArgumentException If {@code useDrmSessionsForClearContentTrackTypes} contains
* track types other than {@link C#TRACK_TYPE_AUDIO} and {@link C#TRACK_TYPE_VIDEO}.
*/
public Builder setUseDrmSessionsForClearContent(
int... useDrmSessionsForClearContentTrackTypes) {
for (int trackType : useDrmSessionsForClearContentTrackTypes) {
Assertions.checkArgument(
trackType == C.TRACK_TYPE_VIDEO || trackType == C.TRACK_TYPE_AUDIO);
}
this.useDrmSessionsForClearContentTrackTypes =
useDrmSessionsForClearContentTrackTypes.clone();
return this;
}
/**
* Sets whether clear samples within protected content should be played when keys for the
* encrypted part of the content have yet to be loaded.
*
* @param playClearSamplesWithoutKeys Whether clear samples within protected content should be
* played when keys for the encrypted part of the content have yet to be loaded.
* @return This builder.
*/
public Builder setPlayClearSamplesWithoutKeys(boolean playClearSamplesWithoutKeys) {
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
return this;
}
/**
* Sets the {@link LoadErrorHandlingPolicy} for key and provisioning requests.
*
* @param loadErrorHandlingPolicy A {@link LoadErrorHandlingPolicy}.
* @return This builder.
*/
public Builder setLoadErrorHandlingPolicy(LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
this.loadErrorHandlingPolicy = Assertions.checkNotNull(loadErrorHandlingPolicy);
return this;
}
/** Builds a {@link DefaultDrmSessionManager} instance. */
public DefaultDrmSessionManager<ExoMediaCrypto> build(MediaDrmCallback mediaDrmCallback) {
return new DefaultDrmSessionManager<>(
uuid,
exoMediaDrmProvider,
mediaDrmCallback,
keyRequestParameters,
multiSession,
useDrmSessionsForClearContentTrackTypes,
playClearSamplesWithoutKeys,
loadErrorHandlingPolicy);
}
}
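
The Builder replaces the newWidevineInstance/newPlayReadyInstance/newFrameworkInstance factories removed further down in this file. A usage sketch, assuming the license server URL and user agent are supplied by the app (both are placeholders here):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DefaultDrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;
import com.google.android.exoplayer2.drm.FrameworkMediaDrm;
import com.google.android.exoplayer2.drm.HttpMediaDrmCallback;
import com.google.android.exoplayer2.drm.MediaDrmCallback;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;

final class DrmManagerBuilderSketch {

  static DefaultDrmSessionManager<ExoMediaCrypto> widevineManager(
      String licenseUrl, String userAgent) {
    MediaDrmCallback callback =
        new HttpMediaDrmCallback(licenseUrl, new DefaultHttpDataSourceFactory(userAgent));
    return new DefaultDrmSessionManager.Builder()
        .setUuidAndExoMediaDrmProvider(C.WIDEVINE_UUID, FrameworkMediaDrm.DEFAULT_PROVIDER)
        .setMultiSession(false)
        .setUseDrmSessionsForClearContent(C.TRACK_TYPE_VIDEO)
        .setPlayClearSamplesWithoutKeys(true)
        .build(callback);
  }
}

setUseDrmSessionsForClearContent and setPlayClearSamplesWithoutKeys are optional; they only matter for streams that mix clear and encrypted periods.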
/** /**
* Signals that the {@link DrmInitData} passed to {@link #acquireSession} does not contain
@ -59,7 +205,8 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
} }
/** /**
* The key to use when passing CustomData to a PlayReady instance in an optional parameter map. * A key for specifying PlayReady custom data in the key request parameters passed to {@link
* Builder#setKeyRequestParameters(Map)}.
*/ */
public static final String PLAYREADY_CUSTOM_DATA_KEY = "PRCustomData"; public static final String PLAYREADY_CUSTOM_DATA_KEY = "PRCustomData";
@ -76,9 +223,7 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
* licenses. * licenses.
*/ */
public static final int MODE_PLAYBACK = 0; public static final int MODE_PLAYBACK = 0;
/** /** Restores an offline license to allow its status to be queried. */
* Restores an offline license to allow its status to be queried.
*/
public static final int MODE_QUERY = 1; public static final int MODE_QUERY = 1;
/** Downloads an offline license or renews an existing one. */ /** Downloads an offline license or renews an existing one. */
public static final int MODE_DOWNLOAD = 2; public static final int MODE_DOWNLOAD = 2;
@ -90,165 +235,136 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
private static final String TAG = "DefaultDrmSessionMgr"; private static final String TAG = "DefaultDrmSessionMgr";
private final UUID uuid; private final UUID uuid;
private final ExoMediaDrm<T> mediaDrm; private final ExoMediaDrm.Provider<T> exoMediaDrmProvider;
private final MediaDrmCallback callback; private final MediaDrmCallback callback;
private final @Nullable HashMap<String, String> optionalKeyRequestParameters; private final HashMap<String, String> keyRequestParameters;
private final EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher; private final EventDispatcher<DefaultDrmSessionEventListener> eventDispatcher;
private final boolean multiSession; private final boolean multiSession;
private final int initialDrmRequestRetryCount; private final int[] useDrmSessionsForClearContentTrackTypes;
private final boolean playClearSamplesWithoutKeys;
private final ProvisioningManagerImpl provisioningManagerImpl;
private final LoadErrorHandlingPolicy loadErrorHandlingPolicy;
private final List<DefaultDrmSession<T>> sessions; private final List<DefaultDrmSession<T>> sessions;
private final List<DefaultDrmSession<T>> provisioningSessions; private final List<DefaultDrmSession<T>> provisioningSessions;
private @Nullable Looper playbackLooper; private int prepareCallsCount;
@Nullable private ExoMediaDrm<T> exoMediaDrm;
@Nullable private DefaultDrmSession<T> placeholderDrmSession;
@Nullable private DefaultDrmSession<T> noMultiSessionDrmSession;
@Nullable private Looper playbackLooper;
private int mode; private int mode;
private @Nullable byte[] offlineLicenseKeySetId; @Nullable private byte[] offlineLicenseKeySetId;
/* package */ volatile @Nullable MediaDrmHandler mediaDrmHandler; /* package */ volatile @Nullable MediaDrmHandler mediaDrmHandler;
/** /**
* Instantiates a new instance using the Widevine scheme.
*
* @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument
* to {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
public static DefaultDrmSessionManager<FrameworkMediaCrypto> newWidevineInstance(
MediaDrmCallback callback, @Nullable HashMap<String, String> optionalKeyRequestParameters)
throws UnsupportedDrmException {
return newFrameworkInstance(C.WIDEVINE_UUID, callback, optionalKeyRequestParameters);
}
/**
* Instantiates a new instance using the PlayReady scheme.
*
* <p>Note that PlayReady is unsupported by most Android devices, with the exception of Android TV
* devices, which do provide support.
*
* @param callback Performs key and provisioning requests.
* @param customData Optional custom data to include in requests generated by the instance.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported.
*/
public static DefaultDrmSessionManager<FrameworkMediaCrypto> newPlayReadyInstance(
MediaDrmCallback callback, @Nullable String customData) throws UnsupportedDrmException {
HashMap<String, String> optionalKeyRequestParameters;
if (!TextUtils.isEmpty(customData)) {
optionalKeyRequestParameters = new HashMap<>();
optionalKeyRequestParameters.put(PLAYREADY_CUSTOM_DATA_KEY, customData);
} else {
optionalKeyRequestParameters = null;
}
return newFrameworkInstance(C.PLAYREADY_UUID, callback, optionalKeyRequestParameters);
}
/**
* Instantiates a new instance.
*
* @param uuid The UUID of the drm scheme. * @param uuid The UUID of the drm scheme.
* @param exoMediaDrm An underlying {@link ExoMediaDrm} for use by the manager.
* @param callback Performs key and provisioning requests. * @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument * @param keyRequestParameters An optional map of parameters to pass as the last argument to
* to {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null. * {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null.
* @throws UnsupportedDrmException If the specified DRM scheme is not supported. * @deprecated Use {@link Builder} instead.
*/
public static DefaultDrmSessionManager<FrameworkMediaCrypto> newFrameworkInstance(
UUID uuid,
MediaDrmCallback callback,
@Nullable HashMap<String, String> optionalKeyRequestParameters)
throws UnsupportedDrmException {
return new DefaultDrmSessionManager<>(
uuid,
FrameworkMediaDrm.newInstance(uuid),
callback,
optionalKeyRequestParameters,
/* multiSession= */ false,
INITIAL_DRM_REQUEST_RETRY_COUNT);
}
/**
* @param uuid The UUID of the drm scheme.
* @param mediaDrm An underlying {@link ExoMediaDrm} for use by the manager.
* @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument
* to {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null.
*/ */
@SuppressWarnings("deprecation")
@Deprecated
public DefaultDrmSessionManager( public DefaultDrmSessionManager(
UUID uuid, UUID uuid,
ExoMediaDrm<T> mediaDrm, ExoMediaDrm<T> exoMediaDrm,
MediaDrmCallback callback, MediaDrmCallback callback,
@Nullable HashMap<String, String> optionalKeyRequestParameters) { @Nullable HashMap<String, String> keyRequestParameters) {
this( this(
uuid, uuid,
mediaDrm, exoMediaDrm,
callback, callback,
optionalKeyRequestParameters, keyRequestParameters == null ? new HashMap<>() : keyRequestParameters,
/* multiSession= */ false, /* multiSession= */ false,
INITIAL_DRM_REQUEST_RETRY_COUNT); INITIAL_DRM_REQUEST_RETRY_COUNT);
} }
/** /**
* @param uuid The UUID of the drm scheme. * @param uuid The UUID of the drm scheme.
* @param mediaDrm An underlying {@link ExoMediaDrm} for use by the manager. * @param exoMediaDrm An underlying {@link ExoMediaDrm} for use by the manager.
* @param callback Performs key and provisioning requests. * @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument * @param keyRequestParameters An optional map of parameters to pass as the last argument to
* to {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null. * {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null.
* @param multiSession A boolean that specifies whether multiple key session support is enabled. * @param multiSession A boolean that specifies whether multiple key session support is enabled.
* Default is false. * Default is false.
* @deprecated Use {@link Builder} instead.
*/ */
@Deprecated
public DefaultDrmSessionManager( public DefaultDrmSessionManager(
UUID uuid, UUID uuid,
ExoMediaDrm<T> mediaDrm, ExoMediaDrm<T> exoMediaDrm,
MediaDrmCallback callback, MediaDrmCallback callback,
@Nullable HashMap<String, String> optionalKeyRequestParameters, @Nullable HashMap<String, String> keyRequestParameters,
boolean multiSession) { boolean multiSession) {
this( this(
uuid, uuid,
mediaDrm, exoMediaDrm,
callback, callback,
optionalKeyRequestParameters, keyRequestParameters == null ? new HashMap<>() : keyRequestParameters,
multiSession, multiSession,
INITIAL_DRM_REQUEST_RETRY_COUNT); INITIAL_DRM_REQUEST_RETRY_COUNT);
} }
/** /**
* @param uuid The UUID of the drm scheme. * @param uuid The UUID of the drm scheme.
* @param mediaDrm An underlying {@link ExoMediaDrm} for use by the manager. * @param exoMediaDrm An underlying {@link ExoMediaDrm} for use by the manager.
* @param callback Performs key and provisioning requests. * @param callback Performs key and provisioning requests.
* @param optionalKeyRequestParameters An optional map of parameters to pass as the last argument * @param keyRequestParameters An optional map of parameters to pass as the last argument to
* to {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null. * {@link ExoMediaDrm#getKeyRequest(byte[], List, int, HashMap)}. May be null.
* @param multiSession A boolean that specifies whether multiple key session support is enabled. * @param multiSession A boolean that specifies whether multiple key session support is enabled.
* Default is false. * Default is false.
* @param initialDrmRequestRetryCount The number of times to retry for initial provisioning and * @param initialDrmRequestRetryCount The number of times to retry for initial provisioning and
* key request before reporting error. * key request before reporting error.
* @deprecated Use {@link Builder} instead.
*/ */
@Deprecated
public DefaultDrmSessionManager( public DefaultDrmSessionManager(
UUID uuid, UUID uuid,
ExoMediaDrm<T> mediaDrm, ExoMediaDrm<T> exoMediaDrm,
MediaDrmCallback callback, MediaDrmCallback callback,
@Nullable HashMap<String, String> optionalKeyRequestParameters, @Nullable HashMap<String, String> keyRequestParameters,
boolean multiSession, boolean multiSession,
int initialDrmRequestRetryCount) { int initialDrmRequestRetryCount) {
this(
uuid,
new ExoMediaDrm.AppManagedProvider<>(exoMediaDrm),
callback,
keyRequestParameters == null ? new HashMap<>() : keyRequestParameters,
multiSession,
/* useDrmSessionsForClearContentTrackTypes= */ new int[0],
/* playClearSamplesWithoutKeys= */ false,
new DefaultLoadErrorHandlingPolicy(initialDrmRequestRetryCount));
}
// the constructor does not initialize fields: offlineLicenseKeySetId
@SuppressWarnings("nullness:initialization.fields.uninitialized")
private DefaultDrmSessionManager(
UUID uuid,
ExoMediaDrm.Provider<T> exoMediaDrmProvider,
MediaDrmCallback callback,
HashMap<String, String> keyRequestParameters,
boolean multiSession,
int[] useDrmSessionsForClearContentTrackTypes,
boolean playClearSamplesWithoutKeys,
LoadErrorHandlingPolicy loadErrorHandlingPolicy) {
Assertions.checkNotNull(uuid); Assertions.checkNotNull(uuid);
Assertions.checkNotNull(mediaDrm);
Assertions.checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead"); Assertions.checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead");
this.uuid = uuid; this.uuid = uuid;
this.mediaDrm = mediaDrm; this.exoMediaDrmProvider = exoMediaDrmProvider;
this.callback = callback; this.callback = callback;
this.optionalKeyRequestParameters = optionalKeyRequestParameters; this.keyRequestParameters = keyRequestParameters;
this.eventDispatcher = new EventDispatcher<>(); this.eventDispatcher = new EventDispatcher<>();
this.multiSession = multiSession; this.multiSession = multiSession;
this.initialDrmRequestRetryCount = initialDrmRequestRetryCount; this.useDrmSessionsForClearContentTrackTypes = useDrmSessionsForClearContentTrackTypes;
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
this.loadErrorHandlingPolicy = loadErrorHandlingPolicy;
provisioningManagerImpl = new ProvisioningManagerImpl();
mode = MODE_PLAYBACK; mode = MODE_PLAYBACK;
sessions = new ArrayList<>(); sessions = new ArrayList<>();
provisioningSessions = new ArrayList<>(); provisioningSessions = new ArrayList<>();
if (multiSession && C.WIDEVINE_UUID.equals(uuid) && Util.SDK_INT >= 19) {
// TODO: Enabling session sharing probably doesn't do anything useful here. It would only be
// useful if DefaultDrmSession instances were aware of one another's state, which is not
// implemented. Or if custom renderers are being used that allow playback to proceed before
// keys, which seems unlikely to be true in practice.
mediaDrm.setPropertyString("sessionSharing", "enable");
}
mediaDrm.setOnEventListener(new MediaDrmEventListener());
} }
/** /**
@ -270,57 +386,10 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
eventDispatcher.removeListener(eventListener); eventDispatcher.removeListener(eventListener);
} }
/**
* Provides access to {@link ExoMediaDrm#getPropertyString(String)}.
* <p>
* This method may be called when the manager is in any state.
*
* @param key The key to request.
* @return The retrieved property.
*/
public final String getPropertyString(String key) {
return mediaDrm.getPropertyString(key);
}
/**
* Provides access to {@link ExoMediaDrm#setPropertyString(String, String)}.
* <p>
* This method may be called when the manager is in any state.
*
* @param key The property to write.
* @param value The value to write.
*/
public final void setPropertyString(String key, String value) {
mediaDrm.setPropertyString(key, value);
}
/**
* Provides access to {@link ExoMediaDrm#getPropertyByteArray(String)}.
* <p>
* This method may be called when the manager is in any state.
*
* @param key The key to request.
* @return The retrieved property.
*/
public final byte[] getPropertyByteArray(String key) {
return mediaDrm.getPropertyByteArray(key);
}
/**
* Provides access to {@link ExoMediaDrm#setPropertyByteArray(String, byte[])}.
* <p>
* This method may be called when the manager is in any state.
*
* @param key The property to write.
* @param value The value to write.
*/
public final void setPropertyByteArray(String key, byte[] value) {
mediaDrm.setPropertyByteArray(key, value);
}
/** /**
* Sets the mode, which determines the role of sessions acquired from the instance. This must be * Sets the mode, which determines the role of sessions acquired from the instance. This must be
* called before {@link #acquireSession(Looper, DrmInitData)} is called. * called before {@link #acquireSession(Looper, DrmInitData)} or {@link
* #acquirePlaceholderSession} is called.
* *
* <p>By default, the mode is {@link #MODE_PLAYBACK} and a streaming license is requested when * <p>By default, the mode is {@link #MODE_PLAYBACK} and a streaming license is requested when
* required. * required.
@ -352,6 +421,23 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
// DrmSessionManager implementation. // DrmSessionManager implementation.
@Override
public final void prepare() {
if (prepareCallsCount++ == 0) {
Assertions.checkState(exoMediaDrm == null);
exoMediaDrm = exoMediaDrmProvider.acquireExoMediaDrm(uuid);
exoMediaDrm.setOnEventListener(new MediaDrmEventListener());
}
}
@Override
public final void release() {
if (--prepareCallsCount == 0) {
Assertions.checkNotNull(exoMediaDrm).release();
exoMediaDrm = null;
}
}
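
prepare() and release() are reference-counted as well: the ExoMediaDrm is acquired on the first prepare() and dropped only when the count returns to zero, so every component sharing the manager must pair its calls. A tiny sketch of that pairing (player integrations normally do this internally):

import com.google.android.exoplayer2.drm.DrmSessionManager;

final class ManagerLifecycleSketch {

  static void withPreparedManager(DrmSessionManager<?> manager, Runnable playback) {
    manager.prepare(); // first call acquires the underlying ExoMediaDrm
    try {
      playback.run();
    } finally {
      manager.release(); // balanced call; the ExoMediaDrm goes away when the count hits zero
    }
  }
}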
@Override @Override
public boolean canAcquireSession(DrmInitData drmInitData) { public boolean canAcquireSession(DrmInitData drmInitData) {
if (offlineLicenseKeySetId != null) { if (offlineLicenseKeySetId != null) {
@ -373,7 +459,8 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
if (schemeType == null || C.CENC_TYPE_cenc.equals(schemeType)) { if (schemeType == null || C.CENC_TYPE_cenc.equals(schemeType)) {
// If there is no scheme information, assume patternless AES-CTR. // If there is no scheme information, assume patternless AES-CTR.
return true; return true;
} else if (C.CENC_TYPE_cbc1.equals(schemeType) || C.CENC_TYPE_cbcs.equals(schemeType) } else if (C.CENC_TYPE_cbc1.equals(schemeType)
|| C.CENC_TYPE_cbcs.equals(schemeType)
|| C.CENC_TYPE_cens.equals(schemeType)) { || C.CENC_TYPE_cens.equals(schemeType)) {
// API support for AES-CBC and pattern encryption was added in API 24. However, the // API support for AES-CBC and pattern encryption was added in API 24. However, the
// implementation was not stable until API 25. // implementation was not stable until API 25.
@ -384,16 +471,37 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
} }
@Override @Override
public DrmSession<T> acquireSession(Looper playbackLooper, DrmInitData drmInitData) { @Nullable
Assertions.checkState(this.playbackLooper == null || this.playbackLooper == playbackLooper); public DrmSession<T> acquirePlaceholderSession(Looper playbackLooper, int trackType) {
if (sessions.isEmpty()) { assertExpectedPlaybackLooper(playbackLooper);
this.playbackLooper = playbackLooper; ExoMediaDrm<T> exoMediaDrm = Assertions.checkNotNull(this.exoMediaDrm);
if (mediaDrmHandler == null) { boolean avoidPlaceholderDrmSessions =
mediaDrmHandler = new MediaDrmHandler(playbackLooper); FrameworkMediaCrypto.class.equals(exoMediaDrm.getExoMediaCryptoType())
} && FrameworkMediaCrypto.WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC;
// Avoid attaching a session to sparse formats.
if (avoidPlaceholderDrmSessions
|| Util.linearSearch(useDrmSessionsForClearContentTrackTypes, trackType) == C.INDEX_UNSET
|| exoMediaDrm.getExoMediaCryptoType() == null) {
return null;
} }
maybeCreateMediaDrmHandler(playbackLooper);
if (placeholderDrmSession == null) {
DefaultDrmSession<T> placeholderDrmSession =
createNewDefaultSession(
/* schemeDatas= */ Collections.emptyList(), /* isPlaceholderSession= */ true);
sessions.add(placeholderDrmSession);
this.placeholderDrmSession = placeholderDrmSession;
}
placeholderDrmSession.acquire();
return placeholderDrmSession;
}
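
acquirePlaceholderSession only hands out a session when the track type was opted in via setUseDrmSessionsForClearContent, the scheme's ExoMediaCrypto type is known, and the device does not need the keys-to-configure-codec workaround; otherwise it returns null and playback proceeds without a session. A caller-side sketch (the helper is hypothetical):

import android.os.Looper;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.DrmSessionManager;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;

final class PlaceholderSessionSketch {

  /** Returns a reference-counted placeholder session for clear video, or null if not configured. */
  @Nullable
  static <T extends ExoMediaCrypto> DrmSession<T> forClearVideo(
      DrmSessionManager<T> manager, Looper playbackLooper) {
    return manager.acquirePlaceholderSession(playbackLooper, C.TRACK_TYPE_VIDEO);
  }
}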
List<SchemeData> schemeDatas = null; @Override
public DrmSession<T> acquireSession(Looper playbackLooper, DrmInitData drmInitData) {
assertExpectedPlaybackLooper(playbackLooper);
maybeCreateMediaDrmHandler(playbackLooper);
@Nullable List<SchemeData> schemeDatas = null;
if (offlineLicenseKeySetId == null) { if (offlineLicenseKeySetId == null) {
schemeDatas = getSchemeDatas(drmInitData, uuid, false); schemeDatas = getSchemeDatas(drmInitData, uuid, false);
if (schemeDatas.isEmpty()) { if (schemeDatas.isEmpty()) {
@ -403,9 +511,9 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
} }
} }
DefaultDrmSession<T> session; @Nullable DefaultDrmSession<T> session;
if (!multiSession) { if (!multiSession) {
session = sessions.isEmpty() ? null : sessions.get(0); session = noMultiSessionDrmSession;
} else { } else {
// Only use an existing session if it has matching init data. // Only use an existing session if it has matching init data.
session = null; session = null;
@ -419,19 +527,10 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
if (session == null) { if (session == null) {
// Create a new session. // Create a new session.
session = session = createNewDefaultSession(schemeDatas, /* isPlaceholderSession= */ false);
new DefaultDrmSession<>( if (!multiSession) {
uuid, noMultiSessionDrmSession = session;
mediaDrm, }
this,
schemeDatas,
mode,
offlineLicenseKeySetId,
optionalKeyRequestParameters,
callback,
playbackLooper,
eventDispatcher,
initialDrmRequestRetryCount);
sessions.add(session); sessions.add(session);
} }
session.acquire(); session.acquire();
@ -439,57 +538,64 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
} }
@Override @Override
public void releaseSession(DrmSession<T> session) { @Nullable
if (session instanceof ErrorStateDrmSession) { public Class<T> getExoMediaCryptoType(DrmInitData drmInitData) {
// Do nothing. return canAcquireSession(drmInitData)
return; ? Assertions.checkNotNull(exoMediaDrm).getExoMediaCryptoType()
} : null;
DefaultDrmSession<T> drmSession = (DefaultDrmSession<T>) session;
if (drmSession.release()) {
sessions.remove(drmSession);
if (provisioningSessions.size() > 1 && provisioningSessions.get(0) == drmSession) {
// Other sessions were waiting for the released session to complete a provision operation.
// We need to have one of those sessions perform the provision operation instead.
provisioningSessions.get(1).provision();
}
provisioningSessions.remove(drmSession);
}
}
// ProvisioningManager implementation.
@Override
public void provisionRequired(DefaultDrmSession<T> session) {
if (provisioningSessions.contains(session)) {
// The session has already requested provisioning.
return;
}
provisioningSessions.add(session);
if (provisioningSessions.size() == 1) {
// This is the first session requesting provisioning, so have it perform the operation.
session.provision();
}
}
@Override
public void onProvisionCompleted() {
for (DefaultDrmSession<T> session : provisioningSessions) {
session.onProvisionCompleted();
}
provisioningSessions.clear();
}
@Override
public void onProvisionError(Exception error) {
for (DefaultDrmSession<T> session : provisioningSessions) {
session.onProvisionError(error);
}
provisioningSessions.clear();
} }
// Internal methods. // Internal methods.
private void assertExpectedPlaybackLooper(Looper playbackLooper) {
Assertions.checkState(this.playbackLooper == null || this.playbackLooper == playbackLooper);
this.playbackLooper = playbackLooper;
}
private void maybeCreateMediaDrmHandler(Looper playbackLooper) {
if (mediaDrmHandler == null) {
mediaDrmHandler = new MediaDrmHandler(playbackLooper);
}
}
private DefaultDrmSession<T> createNewDefaultSession(
@Nullable List<SchemeData> schemeDatas, boolean isPlaceholderSession) {
Assertions.checkNotNull(exoMediaDrm);
// Placeholder sessions should always play clear samples without keys.
boolean playClearSamplesWithoutKeys = this.playClearSamplesWithoutKeys | isPlaceholderSession;
return new DefaultDrmSession<>(
uuid,
exoMediaDrm,
/* provisioningManager= */ provisioningManagerImpl,
/* releaseCallback= */ this::onSessionReleased,
schemeDatas,
mode,
playClearSamplesWithoutKeys,
isPlaceholderSession,
offlineLicenseKeySetId,
keyRequestParameters,
callback,
Assertions.checkNotNull(playbackLooper),
eventDispatcher,
loadErrorHandlingPolicy);
}
private void onSessionReleased(DefaultDrmSession<T> drmSession) {
sessions.remove(drmSession);
if (placeholderDrmSession == drmSession) {
placeholderDrmSession = null;
}
if (noMultiSessionDrmSession == drmSession) {
noMultiSessionDrmSession = null;
}
if (provisioningSessions.size() > 1 && provisioningSessions.get(0) == drmSession) {
// Other sessions were waiting for the released session to complete a provision operation.
// We need to have one of those sessions perform the provision operation instead.
provisioningSessions.get(1).provision();
}
provisioningSessions.remove(drmSession);
}
/** /**
* Extracts {@link SchemeData} instances suitable for the given DRM scheme {@link UUID}. * Extracts {@link SchemeData} instances suitable for the given DRM scheme {@link UUID}.
* *
@ -506,8 +612,9 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
List<SchemeData> matchingSchemeDatas = new ArrayList<>(drmInitData.schemeDataCount); List<SchemeData> matchingSchemeDatas = new ArrayList<>(drmInitData.schemeDataCount);
for (int i = 0; i < drmInitData.schemeDataCount; i++) { for (int i = 0; i < drmInitData.schemeDataCount; i++) {
SchemeData schemeData = drmInitData.get(i); SchemeData schemeData = drmInitData.get(i);
boolean uuidMatches = schemeData.matches(uuid) boolean uuidMatches =
|| (C.CLEARKEY_UUID.equals(uuid) && schemeData.matches(C.COMMON_PSSH_UUID)); schemeData.matches(uuid)
|| (C.CLEARKEY_UUID.equals(uuid) && schemeData.matches(C.COMMON_PSSH_UUID));
if (uuidMatches && (schemeData.data != null || allowMissingData)) { if (uuidMatches && (schemeData.data != null || allowMissingData)) {
matchingSchemeDatas.add(schemeData); matchingSchemeDatas.add(schemeData);
} }
@ -536,7 +643,37 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
} }
} }
} }
}
private class ProvisioningManagerImpl implements DefaultDrmSession.ProvisioningManager<T> {
@Override
public void provisionRequired(DefaultDrmSession<T> session) {
if (provisioningSessions.contains(session)) {
// The session has already requested provisioning.
return;
}
provisioningSessions.add(session);
if (provisioningSessions.size() == 1) {
// This is the first session requesting provisioning, so have it perform the operation.
session.provision();
}
}
@Override
public void onProvisionCompleted() {
for (DefaultDrmSession<T> session : provisioningSessions) {
session.onProvisionCompleted();
}
provisioningSessions.clear();
}
@Override
public void onProvisionError(Exception error) {
for (DefaultDrmSession<T> session : provisioningSessions) {
session.onProvisionError(error);
}
provisioningSessions.clear();
}
} }
private class MediaDrmEventListener implements OnEventListener<T> { private class MediaDrmEventListener implements OnEventListener<T> {
@ -550,7 +687,5 @@ public class DefaultDrmSessionManager<T extends ExoMediaCrypto> implements DrmSe
@Nullable byte[] data) { @Nullable byte[] data) {
Assertions.checkNotNull(mediaDrmHandler).obtainMessage(event, sessionId).sendToTarget(); Assertions.checkNotNull(mediaDrmHandler).obtainMessage(event, sessionId).sendToTarget();
} }
} }
} }

View File

@ -17,8 +17,8 @@ package com.google.android.exoplayer2.drm;
import android.os.Parcel; import android.os.Parcel;
import android.os.Parcelable; import android.os.Parcelable;
import androidx.annotation.Nullable;
import android.text.TextUtils; import android.text.TextUtils;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import com.google.android.exoplayer2.util.Assertions; import com.google.android.exoplayer2.util.Assertions;
@ -87,7 +87,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
private int hashCode; private int hashCode;
/** The protection scheme type, or null if not applicable or unknown. */ /** The protection scheme type, or null if not applicable or unknown. */
public final @Nullable String schemeType; @Nullable public final String schemeType;
/** /**
* Number of {@link SchemeData}s. * Number of {@link SchemeData}s.
@ -152,7 +152,8 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* @return The initialization data for the scheme, or null if the scheme is not supported. * @return The initialization data for the scheme, or null if the scheme is not supported.
*/ */
@Deprecated @Deprecated
public @Nullable SchemeData get(UUID uuid) { @Nullable
public SchemeData get(UUID uuid) {
for (SchemeData schemeData : schemeDatas) { for (SchemeData schemeData : schemeDatas) {
if (schemeData.matches(uuid)) { if (schemeData.matches(uuid)) {
return schemeData; return schemeData;
@ -286,15 +287,11 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
*/ */
private final UUID uuid; private final UUID uuid;
/** The URL of the server to which license requests should be made. May be null if unknown. */ /** The URL of the server to which license requests should be made. May be null if unknown. */
public final @Nullable String licenseServerUrl; @Nullable public final String licenseServerUrl;
/** The mimeType of {@link #data}. */ /** The mimeType of {@link #data}. */
public final String mimeType; public final String mimeType;
/** The initialization data. May be null for scheme support checks only. */ /** The initialization data. May be null for scheme support checks only. */
public final @Nullable byte[] data; @Nullable public final byte[] data;
/**
* Whether secure decryption is required.
*/
public final boolean requiresSecureDecryption;
/** /**
* @param uuid The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is * @param uuid The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is
@ -303,19 +300,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* @param data See {@link #data}. * @param data See {@link #data}.
*/ */
public SchemeData(UUID uuid, String mimeType, @Nullable byte[] data) { public SchemeData(UUID uuid, String mimeType, @Nullable byte[] data) {
this(uuid, mimeType, data, false); this(uuid, /* licenseServerUrl= */ null, mimeType, data);
}
/**
* @param uuid The {@link UUID} of the DRM scheme, or {@link C#UUID_NIL} if the data is
* universal (i.e. applies to all schemes).
* @param mimeType See {@link #mimeType}.
* @param data See {@link #data}.
* @param requiresSecureDecryption See {@link #requiresSecureDecryption}.
*/
public SchemeData(
UUID uuid, String mimeType, @Nullable byte[] data, boolean requiresSecureDecryption) {
this(uuid, /* licenseServerUrl= */ null, mimeType, data, requiresSecureDecryption);
} }
/** /**
@ -324,19 +309,13 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* @param licenseServerUrl See {@link #licenseServerUrl}. * @param licenseServerUrl See {@link #licenseServerUrl}.
* @param mimeType See {@link #mimeType}. * @param mimeType See {@link #mimeType}.
* @param data See {@link #data}. * @param data See {@link #data}.
* @param requiresSecureDecryption See {@link #requiresSecureDecryption}.
*/ */
public SchemeData( public SchemeData(
UUID uuid, UUID uuid, @Nullable String licenseServerUrl, String mimeType, @Nullable byte[] data) {
@Nullable String licenseServerUrl,
String mimeType,
@Nullable byte[] data,
boolean requiresSecureDecryption) {
this.uuid = Assertions.checkNotNull(uuid); this.uuid = Assertions.checkNotNull(uuid);
this.licenseServerUrl = licenseServerUrl; this.licenseServerUrl = licenseServerUrl;
this.mimeType = Assertions.checkNotNull(mimeType); this.mimeType = Assertions.checkNotNull(mimeType);
this.data = data; this.data = data;
this.requiresSecureDecryption = requiresSecureDecryption;
} }
/* package */ SchemeData(Parcel in) { /* package */ SchemeData(Parcel in) {
@ -344,7 +323,6 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
licenseServerUrl = in.readString(); licenseServerUrl = in.readString();
mimeType = Util.castNonNull(in.readString()); mimeType = Util.castNonNull(in.readString());
data = in.createByteArray(); data = in.createByteArray();
requiresSecureDecryption = in.readByte() != 0;
} }
/** /**
@ -381,7 +359,7 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
* @return The new instance. * @return The new instance.
*/ */
public SchemeData copyWithData(@Nullable byte[] data) { public SchemeData copyWithData(@Nullable byte[] data) {
return new SchemeData(uuid, licenseServerUrl, mimeType, data, requiresSecureDecryption); return new SchemeData(uuid, licenseServerUrl, mimeType, data);
} }
@Override @Override
@ -425,10 +403,8 @@ public final class DrmInitData implements Comparator<SchemeData>, Parcelable {
dest.writeString(licenseServerUrl); dest.writeString(licenseServerUrl);
dest.writeString(mimeType); dest.writeString(mimeType);
dest.writeByteArray(data); dest.writeByteArray(data);
dest.writeByte((byte) (requiresSecureDecryption ? 1 : 0));
} }
@SuppressWarnings("hiding")
public static final Parcelable.Creator<SchemeData> CREATOR = public static final Parcelable.Creator<SchemeData> CREATOR =
new Parcelable.Creator<SchemeData>() { new Parcelable.Creator<SchemeData>() {
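
With requiresSecureDecryption removed, a SchemeData now carries at most a UUID, an optional license server URL, a MIME type and the raw data. A short construction sketch (the PSSH bytes are a placeholder):

import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmInitData;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import com.google.android.exoplayer2.util.MimeTypes;

final class SchemeDataSketch {

  static DrmInitData widevineInitData(byte[] psshBox) {
    SchemeData schemeData =
        new SchemeData(C.WIDEVINE_UUID, /* licenseServerUrl= */ null, MimeTypes.VIDEO_MP4, psshBox);
    return new DrmInitData(schemeData);
  }
}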

View File

@ -18,6 +18,7 @@ package com.google.android.exoplayer2.drm;
import android.media.MediaDrm; import android.media.MediaDrm;
import androidx.annotation.IntDef; import androidx.annotation.IntDef;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import java.io.IOException;
import java.lang.annotation.Documented; import java.lang.annotation.Documented;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
@ -29,9 +30,26 @@ import java.util.Map;
public interface DrmSession<T extends ExoMediaCrypto> { public interface DrmSession<T extends ExoMediaCrypto> {
/** /**
* Wraps the throwable which is the cause of the error state. * Invokes {@code newSession's} {@link #acquire()} and {@code previousSession's} {@link
* #release()} in that order. Null arguments are ignored. Does nothing if {@code previousSession}
* and {@code newSession} are the same session.
*/ */
class DrmSessionException extends Exception { static <T extends ExoMediaCrypto> void replaceSession(
@Nullable DrmSession<T> previousSession, @Nullable DrmSession<T> newSession) {
if (previousSession == newSession) {
// Do nothing.
return;
}
if (newSession != null) {
newSession.acquire();
}
if (previousSession != null) {
previousSession.release();
}
}
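
replaceSession is a convenience for holders that keep exactly one reference per field: the new session is acquired before the previous one is released, so resources shared by both never see the count drop to zero during the handover. A sketch of such a holder (the class itself is hypothetical):

import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmSession;
import com.google.android.exoplayer2.drm.ExoMediaCrypto;

final class SessionHolderSketch<T extends ExoMediaCrypto> {
  @Nullable private DrmSession<T> session;

  void set(@Nullable DrmSession<T> newSession) {
    // Acquires newSession, then releases the previously held session; nulls are ignored.
    DrmSession.replaceSession(session, newSession);
    session = newSession;
  }
}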
/** Wraps the throwable which is the cause of the error state. */
class DrmSessionException extends IOException {
public DrmSessionException(Throwable cause) { public DrmSessionException(Throwable cause) {
super(cause); super(cause);
@ -59,13 +77,9 @@ public interface DrmSession<T extends ExoMediaCrypto> {
* The session is being opened. * The session is being opened.
*/ */
int STATE_OPENING = 2; int STATE_OPENING = 2;
/** /** The session is open, but does not have keys required for decryption. */
* The session is open, but does not yet have the keys required for decryption.
*/
int STATE_OPENED = 3; int STATE_OPENED = 3;
/** /** The session is open and has keys required for decryption. */
* The session is open and has the keys required for decryption.
*/
int STATE_OPENED_WITH_KEYS = 4; int STATE_OPENED_WITH_KEYS = 4;
/** /**
@ -75,6 +89,11 @@ public interface DrmSession<T extends ExoMediaCrypto> {
*/ */
@State int getState(); @State int getState();
/** Returns whether this session allows playback of clear samples prior to keys being loaded. */
default boolean playClearSamplesWithoutKeys() {
return false;
}
/** /**
* Returns the cause of the error state, or null if {@link #getState()} is not {@link * Returns the cause of the error state, or null if {@link #getState()} is not {@link
* #STATE_ERROR}. * #STATE_ERROR}.
@ -110,4 +129,16 @@ public interface DrmSession<T extends ExoMediaCrypto> {
*/ */
@Nullable @Nullable
byte[] getOfflineLicenseKeySetId(); byte[] getOfflineLicenseKeySetId();
/**
* Increments the reference count. When the caller no longer needs to use the instance, it must
* call {@link #release()} to decrement the reference count.
*/
void acquire();
/**
* Decrements the reference count. If the reference count drops to 0 underlying resources are
* released, and the instance cannot be re-used.
*/
void release();
} }
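Note (illustrative, not part of this diff): with the reference-counting methods above, a caller balances every acquired reference with release(). A hedged sketch, assuming a DrmSessionManager, playback Looper and DrmInitData are available in scope:

// acquireSession() returns a session whose reference count is already
// incremented, so only the matching release() is needed here.
DrmSession<ExoMediaCrypto> session =
    drmSessionManager.acquireSession(playbackLooper, drmInitData);
try {
  if (session.getState() == DrmSession.STATE_OPENED_WITH_KEYS) {
    ExoMediaCrypto mediaCrypto = session.getMediaCrypto();
    // ... configure a secure decoder with mediaCrypto ...
  }
} finally {
  session.release(); // balances the reference taken by acquireSession()
}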

View File

@ -16,6 +16,8 @@
package com.google.android.exoplayer2.drm; package com.google.android.exoplayer2.drm;
import android.os.Looper; import android.os.Looper;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
/** /**
@ -23,6 +25,51 @@ import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
*/ */
public interface DrmSessionManager<T extends ExoMediaCrypto> { public interface DrmSessionManager<T extends ExoMediaCrypto> {
/** Returns {@link #DUMMY}. */
@SuppressWarnings("unchecked")
static <T extends ExoMediaCrypto> DrmSessionManager<T> getDummyDrmSessionManager() {
return (DrmSessionManager<T>) DUMMY;
}
/** {@link DrmSessionManager} that supports no DRM schemes. */
DrmSessionManager<ExoMediaCrypto> DUMMY =
new DrmSessionManager<ExoMediaCrypto>() {
@Override
public boolean canAcquireSession(DrmInitData drmInitData) {
return false;
}
@Override
public DrmSession<ExoMediaCrypto> acquireSession(
Looper playbackLooper, DrmInitData drmInitData) {
return new ErrorStateDrmSession<>(
new DrmSession.DrmSessionException(
new UnsupportedDrmException(UnsupportedDrmException.REASON_UNSUPPORTED_SCHEME)));
}
@Override
@Nullable
public Class<ExoMediaCrypto> getExoMediaCryptoType(DrmInitData drmInitData) {
return null;
}
};
/**
* Acquires any required resources.
*
* <p>{@link #release()} must be called to ensure the acquired resources are released. After
* releasing, an instance may be re-prepared.
*/
default void prepare() {
// Do nothing.
}
/** Releases any acquired resources. */
default void release() {
// Do nothing.
}
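Note (illustrative, not part of this diff): taken together, getDummyDrmSessionManager() and the prepare()/release() defaults give clear-content callers a no-op manager with the same lifecycle as a real one. A sketch, with playback details assumed:

// A no-op manager for unencrypted content; it never acquires real sessions.
DrmSessionManager<ExoMediaCrypto> manager = DrmSessionManager.getDummyDrmSessionManager();
manager.prepare(); // the default implementation does nothing
try {
  // canAcquireSession() always returns false for the dummy manager, so no
  // DRM sessions are ever created during playback.
  // ... run playback ...
} finally {
  manager.release(); // the manager may be prepared again afterwards
}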
/** /**
* Returns whether the manager is capable of acquiring a session for the given * Returns whether the manager is capable of acquiring a session for the given
* {@link DrmInitData}. * {@link DrmInitData}.
@ -34,8 +81,29 @@ public interface DrmSessionManager<T extends ExoMediaCrypto> {
boolean canAcquireSession(DrmInitData drmInitData); boolean canAcquireSession(DrmInitData drmInitData);
/** /**
* Acquires a {@link DrmSession} for the specified {@link DrmInitData}. The {@link DrmSession} * Returns a {@link DrmSession} that does not execute key requests, with an incremented reference
* must be returned to {@link #releaseSession(DrmSession)} when it is no longer required. * count. When the caller no longer needs to use the instance, it must call {@link
* DrmSession#release()} to decrement the reference count.
*
* <p>Placeholder {@link DrmSession DrmSessions} may be used to configure secure decoders for
* playback of clear content periods. This can reduce the cost of transitioning between clear and
* encrypted content periods.
*
* @param playbackLooper The looper associated with the media playback thread.
* @param trackType The type of the track to acquire a placeholder session for. Must be one of the
* {@link C}{@code .TRACK_TYPE_*} constants.
* @return The placeholder DRM session, or null if this DRM session manager does not support
* placeholder sessions.
*/
@Nullable
default DrmSession<T> acquirePlaceholderSession(Looper playbackLooper, int trackType) {
return null;
}
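Note (illustrative, not part of this diff): a caller might use the placeholder hook to keep a secure decoder configured across clear periods. A hedged sketch, assuming the manager and Looper are in scope:

// Placeholder sessions carry no keys but let secure decoders be configured
// before the first encrypted period arrives.
DrmSession<ExoMediaCrypto> placeholder =
    drmSessionManager.acquirePlaceholderSession(playbackLooper, C.TRACK_TYPE_VIDEO);
if (placeholder != null) {
  // ... reuse the decoder across the clear-to-encrypted transition ...
  placeholder.release(); // placeholders are reference counted like any session
}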
/**
* Returns a {@link DrmSession} for the specified {@link DrmInitData}, with an incremented
* reference count. When the caller no longer needs to use the instance, it must call {@link
* DrmSession#release()} to decrement the reference count.
* *
* @param playbackLooper The looper associated with the media playback thread. * @param playbackLooper The looper associated with the media playback thread.
* @param drmInitData DRM initialization data. All contained {@link SchemeData}s must contain * @param drmInitData DRM initialization data. All contained {@link SchemeData}s must contain
@ -45,8 +113,9 @@ public interface DrmSessionManager<T extends ExoMediaCrypto> {
DrmSession<T> acquireSession(Looper playbackLooper, DrmInitData drmInitData); DrmSession<T> acquireSession(Looper playbackLooper, DrmInitData drmInitData);
/** /**
* Releases a {@link DrmSession}. * Returns the {@link ExoMediaCrypto} type returned by sessions acquired using the given {@link
* DrmInitData}, or null if a session cannot be acquired with the given {@link DrmInitData}.
*/ */
void releaseSession(DrmSession<T> drmSession); @Nullable
Class<? extends ExoMediaCrypto> getExoMediaCryptoType(DrmInitData drmInitData);
} }

View File

@ -0,0 +1,146 @@
/*
* Copyright (C) 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.drm;
import android.media.MediaDrmException;
import android.os.PersistableBundle;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.util.Util;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/** An {@link ExoMediaDrm} that does not support any protection schemes. */
@RequiresApi(18)
public final class DummyExoMediaDrm<T extends ExoMediaCrypto> implements ExoMediaDrm<T> {
/** Returns a new instance. */
@SuppressWarnings("unchecked")
public static <T extends ExoMediaCrypto> DummyExoMediaDrm<T> getInstance() {
return (DummyExoMediaDrm<T>) new DummyExoMediaDrm<>();
}
@Override
public void setOnEventListener(OnEventListener<? super T> listener) {
// Do nothing.
}
@Override
public void setOnKeyStatusChangeListener(OnKeyStatusChangeListener<? super T> listener) {
// Do nothing.
}
@Override
public byte[] openSession() throws MediaDrmException {
throw new MediaDrmException("Attempting to open a session using a dummy ExoMediaDrm.");
}
@Override
public void closeSession(byte[] sessionId) {
// Do nothing.
}
@Override
public KeyRequest getKeyRequest(
byte[] scope,
@Nullable List<DrmInitData.SchemeData> schemeDatas,
int keyType,
@Nullable HashMap<String, String> optionalParameters) {
// Should not be invoked. No session should exist.
throw new IllegalStateException();
}
@Nullable
@Override
public byte[] provideKeyResponse(byte[] scope, byte[] response) {
// Should not be invoked. No session should exist.
throw new IllegalStateException();
}
@Override
public ProvisionRequest getProvisionRequest() {
// Should not be invoked. No provision should be required.
throw new IllegalStateException();
}
@Override
public void provideProvisionResponse(byte[] response) {
// Should not be invoked. No provision should be required.
throw new IllegalStateException();
}
@Override
public Map<String, String> queryKeyStatus(byte[] sessionId) {
// Should not be invoked. No session should exist.
throw new IllegalStateException();
}
@Override
public void acquire() {
// Do nothing.
}
@Override
public void release() {
// Do nothing.
}
@Override
public void restoreKeys(byte[] sessionId, byte[] keySetId) {
// Should not be invoked. No session should exist.
throw new IllegalStateException();
}
@Override
@Nullable
public PersistableBundle getMetrics() {
return null;
}
@Override
public String getPropertyString(String propertyName) {
return "";
}
@Override
public byte[] getPropertyByteArray(String propertyName) {
return Util.EMPTY_BYTE_ARRAY;
}
@Override
public void setPropertyString(String propertyName, String value) {
// Do nothing.
}
@Override
public void setPropertyByteArray(String propertyName, byte[] value) {
// Do nothing.
}
@Override
public T createMediaCrypto(byte[] sessionId) {
// Should not be invoked. No session should exist.
throw new IllegalStateException();
}
@Override
@Nullable
public Class<T> getExoMediaCryptoType() {
// No ExoMediaCrypto type is supported.
return null;
}
}
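Note (illustrative, not part of this diff): the dummy implementation can stand in wherever an ExoMediaDrm is required but no protection scheme is supported; opening a session fails as documented. A brief sketch:

ExoMediaDrm<FrameworkMediaCrypto> noOpDrm = DummyExoMediaDrm.getInstance();
try {
  noOpDrm.openSession(); // always throws: the dummy supports no scheme
} catch (MediaDrmException e) {
  // Expected for DummyExoMediaDrm.
} finally {
  noOpDrm.release(); // a no-op here, but keeps the reference-counting contract
}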

View File

@ -34,23 +34,41 @@ public final class ErrorStateDrmSession<T extends ExoMediaCrypto> implements Drm
} }
@Override @Override
public @Nullable DrmSessionException getError() { public boolean playClearSamplesWithoutKeys() {
return false;
}
@Override
@Nullable
public DrmSessionException getError() {
return error; return error;
} }
@Override @Override
public @Nullable T getMediaCrypto() { @Nullable
public T getMediaCrypto() {
return null; return null;
} }
@Override @Override
public @Nullable Map<String, String> queryKeyStatus() { @Nullable
public Map<String, String> queryKeyStatus() {
return null; return null;
} }
@Override @Override
public @Nullable byte[] getOfflineLicenseKeySetId() { @Nullable
public byte[] getOfflineLicenseKeySetId() {
return null; return null;
} }
@Override
public void acquire() {
// Do nothing.
}
@Override
public void release() {
// Do nothing.
}
} }

View File

@ -21,6 +21,7 @@ import android.media.MediaDrm;
import android.media.MediaDrmException; import android.media.MediaDrmException;
import android.media.NotProvisionedException; import android.media.NotProvisionedException;
import android.os.Handler; import android.os.Handler;
import android.os.PersistableBundle;
import androidx.annotation.Nullable; import androidx.annotation.Nullable;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import java.util.HashMap; import java.util.HashMap;
@ -30,12 +31,54 @@ import java.util.UUID;
/** /**
* Used to obtain keys for decrypting protected media streams. See {@link android.media.MediaDrm}. * Used to obtain keys for decrypting protected media streams. See {@link android.media.MediaDrm}.
*
* <h3>Reference counting</h3>
*
* <p>Access to an instance is managed by reference counting, where {@link #acquire()} increments
* the reference count and {@link #release()} decrements it. When the reference count drops to 0
* underlying resources are released, and the instance cannot be re-used.
*
* <p>Each new instance has an initial reference count of 1. Hence application code that creates a
* new instance does not normally need to call {@link #acquire()}, and must call {@link #release()}
* when the instance is no longer required.
*/ */
public interface ExoMediaDrm<T extends ExoMediaCrypto> { public interface ExoMediaDrm<T extends ExoMediaCrypto> {
/** {@link ExoMediaDrm} instances provider. */
interface Provider<T extends ExoMediaCrypto> {
/**
* Returns an {@link ExoMediaDrm} instance with an incremented reference count. When the caller
* no longer needs to use the instance, it must call {@link ExoMediaDrm#release()} to decrement
* the reference count.
*/
ExoMediaDrm<T> acquireExoMediaDrm(UUID uuid);
}
/** /**
* @see MediaDrm#EVENT_KEY_REQUIRED * Provides an {@link ExoMediaDrm} instance owned by the app.
*
* <p>Note that when using this provider the app will have instantiated the {@link ExoMediaDrm}
* instance, and remains responsible for calling {@link ExoMediaDrm#release()} on the instance
* when it's no longer being used.
*/ */
final class AppManagedProvider<T extends ExoMediaCrypto> implements Provider<T> {
private final ExoMediaDrm<T> exoMediaDrm;
/** Creates an instance that provides the given {@link ExoMediaDrm}. */
public AppManagedProvider(ExoMediaDrm<T> exoMediaDrm) {
this.exoMediaDrm = exoMediaDrm;
}
@Override
public ExoMediaDrm<T> acquireExoMediaDrm(UUID uuid) {
exoMediaDrm.acquire();
return exoMediaDrm;
}
}
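Note (illustrative, not part of this diff): a sketch of the app-managed ownership model enabled by AppManagedProvider; appOwnedDrm is assumed to have been created by the app beforehand (for example via FrameworkMediaDrm.newInstance) and stays under the app's control:

ExoMediaDrm.Provider<FrameworkMediaCrypto> provider =
    new ExoMediaDrm.AppManagedProvider<>(appOwnedDrm);

// Each acquireExoMediaDrm() call increments the reference count...
ExoMediaDrm<FrameworkMediaCrypto> acquired = provider.acquireExoMediaDrm(C.WIDEVINE_UUID);
// ...and is balanced by a release() from the component that acquired it.
acquired.release();

// The app keeps its original reference and releases it when finished.
appOwnedDrm.release();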
/** @see MediaDrm#EVENT_KEY_REQUIRED */
@SuppressWarnings("InlinedApi") @SuppressWarnings("InlinedApi")
int EVENT_KEY_REQUIRED = MediaDrm.EVENT_KEY_REQUIRED; int EVENT_KEY_REQUIRED = MediaDrm.EVENT_KEY_REQUIRED;
/** /**
@ -235,7 +278,17 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> {
Map<String, String> queryKeyStatus(byte[] sessionId); Map<String, String> queryKeyStatus(byte[] sessionId);
/** /**
* @see MediaDrm#release() * Increments the reference count. When the caller no longer needs to use the instance, it must
* call {@link #release()} to decrement the reference count.
*
* <p>A new instance will have an initial reference count of 1, and therefore it is not normally
* necessary for application code to call this method.
*/
void acquire();
/**
* Decrements the reference count. If the reference count drops to 0 underlying resources are
* released, and the instance cannot be re-used.
*/ */
void release(); void release();
@ -244,6 +297,14 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> {
*/ */
void restoreKeys(byte[] sessionId, byte[] keySetId); void restoreKeys(byte[] sessionId, byte[] keySetId);
/**
* Returns drm metrics. May be null if unavailable.
*
* @see MediaDrm#getMetrics()
*/
@Nullable
PersistableBundle getMetrics();
/** /**
* @see MediaDrm#getPropertyString(String) * @see MediaDrm#getPropertyString(String)
*/ */
@ -271,4 +332,11 @@ public interface ExoMediaDrm<T extends ExoMediaCrypto> {
* @throws MediaCryptoException If the instance can't be created. * @throws MediaCryptoException If the instance can't be created.
*/ */
T createMediaCrypto(byte[] sessionId) throws MediaCryptoException; T createMediaCrypto(byte[] sessionId) throws MediaCryptoException;
/**
* Returns the {@link ExoMediaCrypto} type created by {@link #createMediaCrypto(byte[])}, or null
* if this instance cannot create any {@link ExoMediaCrypto} instances.
*/
@Nullable
Class<T> getExoMediaCryptoType();
} }

View File

@ -16,6 +16,7 @@
package com.google.android.exoplayer2.drm; package com.google.android.exoplayer2.drm;
import android.media.MediaCrypto; import android.media.MediaCrypto;
import com.google.android.exoplayer2.util.Util;
import java.util.UUID; import java.util.UUID;
/** /**
@ -24,6 +25,15 @@ import java.util.UUID;
*/ */
public final class FrameworkMediaCrypto implements ExoMediaCrypto { public final class FrameworkMediaCrypto implements ExoMediaCrypto {
/**
* Whether the device needs keys to have been loaded into the {@link DrmSession} before codec
* configuration.
*/
public static final boolean WORKAROUND_DEVICE_NEEDS_KEYS_TO_CONFIGURE_CODEC =
"Amazon".equals(Util.MANUFACTURER)
&& ("AFTM".equals(Util.MODEL) // Fire TV Stick Gen 1
|| "AFTB".equals(Util.MODEL)); // Fire TV Gen 1
/** The DRM scheme UUID. */ /** The DRM scheme UUID. */
public final UUID uuid; public final UUID uuid;
/** The DRM session id. */ /** The DRM session id. */

View File

@ -23,8 +23,10 @@ import android.media.MediaDrm;
import android.media.MediaDrmException; import android.media.MediaDrmException;
import android.media.NotProvisionedException; import android.media.NotProvisionedException;
import android.media.UnsupportedSchemeException; import android.media.UnsupportedSchemeException;
import androidx.annotation.Nullable; import android.os.PersistableBundle;
import android.text.TextUtils; import android.text.TextUtils;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.DrmInitData.SchemeData; import com.google.android.exoplayer2.drm.DrmInitData.SchemeData;
import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil; import com.google.android.exoplayer2.extractor.mp4.PsshAtomUtil;
@ -42,23 +44,40 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.UUID; import java.util.UUID;
/** /** An {@link ExoMediaDrm} implementation that wraps the framework {@link MediaDrm}. */
* An {@link ExoMediaDrm} implementation that wraps the framework {@link MediaDrm}.
*/
@TargetApi(23) @TargetApi(23)
@RequiresApi(18)
public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto> { public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto> {
private static final String TAG = "FrameworkMediaDrm";
/**
* {@link ExoMediaDrm.Provider} that returns a new {@link FrameworkMediaDrm} for the requested
* UUID. Returns a {@link DummyExoMediaDrm} if the protection scheme identified by the given UUID
* is not supported by the device.
*/
public static final Provider<FrameworkMediaCrypto> DEFAULT_PROVIDER =
uuid -> {
try {
return newInstance(uuid);
} catch (UnsupportedDrmException e) {
Log.e(TAG, "Failed to instantiate a FrameworkMediaDrm for uuid: " + uuid + ".");
return new DummyExoMediaDrm<>();
}
};
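Note (illustrative, not part of this diff): unlike newInstance, DEFAULT_PROVIDER never throws; it falls back to DummyExoMediaDrm when the scheme is unsupported, which callers can detect via getExoMediaCryptoType(). A sketch:

ExoMediaDrm<FrameworkMediaCrypto> exoMediaDrm =
    FrameworkMediaDrm.DEFAULT_PROVIDER.acquireExoMediaDrm(C.WIDEVINE_UUID);
try {
  // Null means the provider fell back to the dummy implementation.
  boolean widevineAvailable = exoMediaDrm.getExoMediaCryptoType() != null;
  // ... use exoMediaDrm if widevineAvailable ...
} finally {
  exoMediaDrm.release(); // balances the reference taken by acquireExoMediaDrm()
}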
private static final String CENC_SCHEME_MIME_TYPE = "cenc"; private static final String CENC_SCHEME_MIME_TYPE = "cenc";
private static final String MOCK_LA_URL_VALUE = "https://x"; private static final String MOCK_LA_URL_VALUE = "https://x";
private static final String MOCK_LA_URL = "<LA_URL>" + MOCK_LA_URL_VALUE + "</LA_URL>"; private static final String MOCK_LA_URL = "<LA_URL>" + MOCK_LA_URL_VALUE + "</LA_URL>";
private static final int UTF_16_BYTES_PER_CHARACTER = 2; private static final int UTF_16_BYTES_PER_CHARACTER = 2;
private static final String TAG = "FrameworkMediaDrm";
private final UUID uuid; private final UUID uuid;
private final MediaDrm mediaDrm; private final MediaDrm mediaDrm;
private int referenceCount;
/** /**
* Creates an instance for the specified scheme UUID. * Creates an instance with an initial reference count of 1. {@link #release()} must be called on
* the instance when it's no longer required.
* *
* @param uuid The scheme uuid. * @param uuid The scheme uuid.
* @return The created instance. * @return The created instance.
@ -79,6 +98,8 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
Assertions.checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead"); Assertions.checkArgument(!C.COMMON_PSSH_UUID.equals(uuid), "Use C.CLEARKEY_UUID instead");
this.uuid = uuid; this.uuid = uuid;
this.mediaDrm = new MediaDrm(adjustUuid(uuid)); this.mediaDrm = new MediaDrm(adjustUuid(uuid));
// Creators of an instance automatically acquire ownership of the created instance.
referenceCount = 1;
if (C.WIDEVINE_UUID.equals(uuid) && needsForceWidevineL3Workaround()) { if (C.WIDEVINE_UUID.equals(uuid) && needsForceWidevineL3Workaround()) {
forceWidevineL3(mediaDrm); forceWidevineL3(mediaDrm);
} }
@ -186,8 +207,16 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
} }
@Override @Override
public void release() { public synchronized void acquire() {
mediaDrm.release(); Assertions.checkState(referenceCount > 0);
referenceCount++;
}
@Override
public synchronized void release() {
if (--referenceCount == 0) {
mediaDrm.release();
}
} }
@Override @Override
@ -195,6 +224,16 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
mediaDrm.restoreKeys(sessionId, keySetId); mediaDrm.restoreKeys(sessionId, keySetId);
} }
@Override
@Nullable
@TargetApi(28)
public PersistableBundle getMetrics() {
if (Util.SDK_INT < 28) {
return null;
}
return mediaDrm.getMetrics();
}
@Override @Override
public String getPropertyString(String propertyName) { public String getPropertyString(String propertyName) {
return mediaDrm.getPropertyString(propertyName); return mediaDrm.getPropertyString(propertyName);
@ -225,6 +264,11 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
adjustUuid(uuid), initData, forceAllowInsecureDecoderComponents); adjustUuid(uuid), initData, forceAllowInsecureDecoderComponents);
} }
@Override
public Class<FrameworkMediaCrypto> getExoMediaCryptoType() {
return FrameworkMediaCrypto.class;
}
private static SchemeData getSchemeData(UUID uuid, List<SchemeData> schemeDatas) { private static SchemeData getSchemeData(UUID uuid, List<SchemeData> schemeDatas) {
if (!C.WIDEVINE_UUID.equals(uuid)) { if (!C.WIDEVINE_UUID.equals(uuid)) {
// For non-Widevine CDMs always use the first scheme data. // For non-Widevine CDMs always use the first scheme data.
@ -239,8 +283,7 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
for (int i = 0; i < schemeDatas.size(); i++) { for (int i = 0; i < schemeDatas.size(); i++) {
SchemeData schemeData = schemeDatas.get(i); SchemeData schemeData = schemeDatas.get(i);
byte[] schemeDataData = Util.castNonNull(schemeData.data); byte[] schemeDataData = Util.castNonNull(schemeData.data);
if (schemeData.requiresSecureDecryption == firstSchemeData.requiresSecureDecryption if (Util.areEqual(schemeData.mimeType, firstSchemeData.mimeType)
&& Util.areEqual(schemeData.mimeType, firstSchemeData.mimeType)
&& Util.areEqual(schemeData.licenseServerUrl, firstSchemeData.licenseServerUrl) && Util.areEqual(schemeData.licenseServerUrl, firstSchemeData.licenseServerUrl)
&& PsshAtomUtil.isPsshAtom(schemeDataData)) { && PsshAtomUtil.isPsshAtom(schemeDataData)) {
concatenatedDataLength += schemeDataData.length; concatenatedDataLength += schemeDataData.length;
@ -298,14 +341,20 @@ public final class FrameworkMediaDrm implements ExoMediaDrm<FrameworkMediaCrypto
C.PLAYREADY_UUID, addLaUrlAttributeIfMissing(schemeSpecificData)); C.PLAYREADY_UUID, addLaUrlAttributeIfMissing(schemeSpecificData));
} }
// Prior to L the Widevine CDM required data to be extracted from the PSSH atom. Some Amazon // Prior to API level 21, the Widevine CDM required scheme specific data to be extracted from
// devices also required data to be extracted from the PSSH atom for PlayReady. // the PSSH atom. We also extract the data on API levels 21 and 22 because these API levels
if ((Util.SDK_INT < 21 && C.WIDEVINE_UUID.equals(uuid)) // don't handle V1 PSSH atoms, but do handle scheme specific data regardless of whether it's
// extracted from a V0 or a V1 PSSH atom. Hence extracting the data allows us to support content
// that only provides V1 PSSH atoms. API levels 23 and above understand V0 and V1 PSSH atoms,
// and so we do not extract the data.
// Some Amazon devices also require data to be extracted from the PSSH atom for PlayReady.
if ((Util.SDK_INT < 23 && C.WIDEVINE_UUID.equals(uuid))
|| (C.PLAYREADY_UUID.equals(uuid) || (C.PLAYREADY_UUID.equals(uuid)
&& "Amazon".equals(Util.MANUFACTURER) && "Amazon".equals(Util.MANUFACTURER)
&& ("AFTB".equals(Util.MODEL) // Fire TV Gen 1 && ("AFTB".equals(Util.MODEL) // Fire TV Gen 1
|| "AFTS".equals(Util.MODEL) // Fire TV Gen 2 || "AFTS".equals(Util.MODEL) // Fire TV Gen 2
|| "AFTM".equals(Util.MODEL)))) { // Fire TV Stick Gen 1 || "AFTM".equals(Util.MODEL) // Fire TV Stick Gen 1
|| "AFTT".equals(Util.MODEL)))) { // Fire TV Stick Gen 2
byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(initData, uuid); byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(initData, uuid);
if (psshData != null) { if (psshData != null) {
// Extraction succeeded, so return the extracted data. // Extraction succeeded, so return the extracted data.
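Note (illustrative, not part of this diff): the extraction path described in the comment above boils down to the helpers already visible in this hunk; initData and uuid are assumed to be the method's inputs:

// Try to pull scheme-specific data out of a (possibly V1) PSSH atom.
byte[] psshData = PsshAtomUtil.parseSchemeSpecificData(initData, uuid);
if (psshData != null) {
  // Extraction succeeded: hand the raw scheme data to the platform MediaDrm.
  initData = psshData;
}
// Otherwise fall through and pass initData to the CDM unmodified.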

View File

@ -17,8 +17,8 @@ package com.google.android.exoplayer2.drm;
import android.annotation.TargetApi; import android.annotation.TargetApi;
import android.net.Uri; import android.net.Uri;
import androidx.annotation.Nullable;
import android.text.TextUtils; import android.text.TextUtils;
import androidx.annotation.Nullable;
import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.KeyRequest;
import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest; import com.google.android.exoplayer2.drm.ExoMediaDrm.ProvisionRequest;
@ -111,7 +111,7 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException { public byte[] executeProvisionRequest(UUID uuid, ProvisionRequest request) throws IOException {
String url = String url =
request.getDefaultUrl() + "&signedRequest=" + Util.fromUtf8Bytes(request.getData()); request.getDefaultUrl() + "&signedRequest=" + Util.fromUtf8Bytes(request.getData());
return executePost(dataSourceFactory, url, Util.EMPTY_BYTE_ARRAY, null); return executePost(dataSourceFactory, url, /* httpBody= */ null, /* requestProperties= */ null);
} }
@Override @Override
@ -139,7 +139,7 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
private static byte[] executePost( private static byte[] executePost(
HttpDataSource.Factory dataSourceFactory, HttpDataSource.Factory dataSourceFactory,
String url, String url,
byte[] data, @Nullable byte[] httpBody,
@Nullable Map<String, String> requestProperties) @Nullable Map<String, String> requestProperties)
throws IOException { throws IOException {
HttpDataSource dataSource = dataSourceFactory.createDataSource(); HttpDataSource dataSource = dataSourceFactory.createDataSource();
@ -154,7 +154,8 @@ public final class HttpMediaDrmCallback implements MediaDrmCallback {
DataSpec dataSpec = DataSpec dataSpec =
new DataSpec( new DataSpec(
Uri.parse(url), Uri.parse(url),
data, DataSpec.HTTP_METHOD_POST,
httpBody,
/* absoluteStreamPosition= */ 0, /* absoluteStreamPosition= */ 0,
/* position= */ 0, /* position= */ 0,
/* length= */ C.LENGTH_UNSET, /* length= */ C.LENGTH_UNSET,

Some files were not shown because too many files have changed in this diff