Use an enum to make it clear which source type is used.

This commit also allows a seamless transition for livestreams.
TiA4f8R 2022-01-06 22:00:34 +01:00
parent 3db37166b4
commit ba804c7d4a
2 changed files with 93 additions and 76 deletions
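
As a quick illustration of the new API, here is a minimal sketch (not code from this commit): the SourceType values and getStreamSourceType() come from the diff below, while the surrounding class and helper method are made up for the example.

// Illustrative sketch only: SourceType and getStreamSourceType() are introduced by
// this commit; SourceTypeExample and describe() are hypothetical.
import java.util.Optional;

import org.schabi.newpipe.player.resolver.VideoPlaybackResolver;
import org.schabi.newpipe.player.resolver.VideoPlaybackResolver.SourceType;

final class SourceTypeExample {
    static String describe(final VideoPlaybackResolver videoResolver) {
        final Optional<SourceType> sourceType = videoResolver.getStreamSourceType();
        if (!sourceType.isPresent()) {
            // Mirrors the old nullable Boolean: empty until resolve() has run once.
            return "nothing has been resolved yet";
        }
        switch (sourceType.get()) {
            case LIVE_STREAM:
                return "live media source";
            case VIDEO_WITH_SEPARATED_AUDIO:
                return "video-only stream merged with a separate audio stream";
            case VIDEO_WITH_AUDIO_OR_AUDIO_ONLY:
            default:
                return "muxed video+audio stream, or audio-only stream";
        }
    }
}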


@@ -178,6 +178,7 @@ import org.schabi.newpipe.player.playqueue.PlayQueueItemTouchCallback;
 import org.schabi.newpipe.player.resolver.AudioPlaybackResolver;
 import org.schabi.newpipe.player.resolver.MediaSourceTag;
 import org.schabi.newpipe.player.resolver.VideoPlaybackResolver;
+import org.schabi.newpipe.player.resolver.VideoPlaybackResolver.SourceType;
 import org.schabi.newpipe.player.seekbarpreview.SeekbarPreviewThumbnailHelper;
 import org.schabi.newpipe.player.seekbarpreview.SeekbarPreviewThumbnailHolder;
 import org.schabi.newpipe.util.DeviceUtils;
@@ -3293,8 +3294,9 @@ public final class Player implements
         if (audioPlayerSelected()) {
             return audioResolver.resolve(info);
         } else {
-            if (isAudioOnly
-                    && !videoResolver.wasLastResolvedVideoAndAudioSeparated().orElse(false)) {
+            if (isAudioOnly && videoResolver.getStreamSourceType().orElse(
+                    SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY)
+                    == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY) {
                 // If the current info has only video streams with audio and if the stream is
                 // played as audio, we need to use the audio resolver, otherwise the video stream
                 // will be played in background.
@@ -4196,18 +4198,30 @@ public final class Player implements
         stream will be fetched and the video stream will be fetched again when the user return to a
         video player.
-        For audio streams: nothing is done, it's not needed to reload the player with the same
-        audio stream.
+        For audio streams and audio live streams: nothing is done, it's not needed to reload the
+        player with the same audio stream.
+        For video live streams: the play queue manager is not reloaded if the stream source is a
+        live source (see VideoPlaybackResolver#resolve()) and if that's not the case, the
+        requirements for video streams is applied.
         In the case where we don't know the index of the video renderer, the play queue manager
         is also reloaded. */
         final StreamType streamType = info.getStreamType();
+        final SourceType sourceType = videoResolver.getStreamSourceType()
+                .orElse(SourceType.VIDEO_WITH_SEPARATED_AUDIO);
+        final boolean isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams =
+                sourceType == SourceType.VIDEO_WITH_SEPARATED_AUDIO
+                || (sourceType == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
+                && isNullOrEmpty(info.getAudioStreams()));
         final boolean isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable =
-                (streamType == StreamType.VIDEO_STREAM || streamType == StreamType.LIVE_STREAM)
-                && (videoResolver.wasLastResolvedVideoAndAudioSeparated().orElse(false)
-                || isNullOrEmpty(info.getAudioStreams()));
+                streamType == StreamType.VIDEO_STREAM
+                && isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams
+                || (streamType == StreamType.LIVE_STREAM
+                && (sourceType == SourceType.LIVE_STREAM
+                || isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams));
         if (videoRenderIndex != RENDERER_UNAVAILABLE
                 && isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable) {
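
For readability, here are the two booleans introduced by the hunk above without the diff markers; the code is taken from the added lines, only the comments are added here.

// The resolved source type, defaulting to a separated-audio source when nothing
// has been resolved yet.
final SourceType sourceType = videoResolver.getStreamSourceType()
        .orElse(SourceType.VIDEO_WITH_SEPARATED_AUDIO);

// True when the source was resolved as video with separated audio, or when it was a
// muxed/audio-only source and the stream info exposes no separate audio streams.
final boolean isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams =
        sourceType == SourceType.VIDEO_WITH_SEPARATED_AUDIO
                || (sourceType == SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
                        && isNullOrEmpty(info.getAudioStreams()));

// Video streams reuse the check above; live streams additionally count sources that
// were resolved as live sources.
final boolean isVideoStreamTypeAndIsVideoOnlyStreamOrNoAudioStreamsAvailable =
        streamType == StreamType.VIDEO_STREAM
                && isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams
                || (streamType == StreamType.LIVE_STREAM
                        && (sourceType == SourceType.LIVE_STREAM
                                || isVideoWithSeparatedAudioOrVideoWithNoSeparatedAudioStreams));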


@@ -32,11 +32,16 @@ public class VideoPlaybackResolver implements PlaybackResolver {
     private final PlayerDataSource dataSource;
     @NonNull
     private final QualityResolver qualityResolver;
+    private SourceType streamSourceType;

     @Nullable
     private String playbackQuality;
-    private Boolean wasLastResolvedVideoAndAudioSeparated;
+
+    public enum SourceType {
+        LIVE_STREAM,
+        VIDEO_WITH_SEPARATED_AUDIO,
+        VIDEO_WITH_AUDIO_OR_AUDIO_ONLY
+    }

     public VideoPlaybackResolver(@NonNull final Context context,
                                  @NonNull final PlayerDataSource dataSource,
@@ -49,81 +54,79 @@ public class VideoPlaybackResolver implements PlaybackResolver {
     @Override
     @Nullable
     public MediaSource resolve(@NonNull final StreamInfo info) {
-        boolean isVideoAndAudioSeparated = false;
-        try {
-            final MediaSource liveSource = maybeBuildLiveMediaSource(dataSource, info);
-            if (liveSource != null) {
-                return liveSource;
-            }
+        final MediaSource liveSource = maybeBuildLiveMediaSource(dataSource, info);
+        if (liveSource != null) {
+            streamSourceType = SourceType.LIVE_STREAM;
+            return liveSource;
+        }

         final List<MediaSource> mediaSources = new ArrayList<>();

         // Create video stream source
         final List<VideoStream> videos = ListHelper.getSortedStreamVideosList(context,
                 info.getVideoStreams(), info.getVideoOnlyStreams(), false, true);
         final int index;
         if (videos.isEmpty()) {
             index = -1;
         } else if (playbackQuality == null) {
             index = qualityResolver.getDefaultResolutionIndex(videos);
         } else {
             index = qualityResolver.getOverrideResolutionIndex(videos, getPlaybackQuality());
         }
         final MediaSourceTag tag = new MediaSourceTag(info, videos, index);
         @Nullable final VideoStream video = tag.getSelectedVideoStream();

         if (video != null) {
             final MediaSource streamSource = buildMediaSource(dataSource, video.getUrl(),
                     PlayerHelper.cacheKeyOf(info, video),
                     MediaFormat.getSuffixById(video.getFormatId()), tag);
             mediaSources.add(streamSource);
         }

         // Create optional audio stream source
         final List<AudioStream> audioStreams = info.getAudioStreams();
         final AudioStream audio = audioStreams.isEmpty() ? null : audioStreams.get(
                 ListHelper.getDefaultAudioFormat(context, audioStreams));
         // Use the audio stream if there is no video stream, or
         // Merge with audio stream in case if video does not contain audio
         if (audio != null && (video == null || video.isVideoOnly)) {
             final MediaSource audioSource = buildMediaSource(dataSource, audio.getUrl(),
                     PlayerHelper.cacheKeyOf(info, audio),
                     MediaFormat.getSuffixById(audio.getFormatId()), tag);
             mediaSources.add(audioSource);
-            isVideoAndAudioSeparated = true;
+            streamSourceType = SourceType.VIDEO_WITH_SEPARATED_AUDIO;
+        } else {
+            streamSourceType = SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY;
         }

         // If there is no audio or video sources, then this media source cannot be played back
         if (mediaSources.isEmpty()) {
             return null;
         }
         // Below are auxiliary media sources

         // Create subtitle sources
         if (info.getSubtitles() != null) {
             for (final SubtitlesStream subtitle : info.getSubtitles()) {
                 final String mimeType = PlayerHelper.subtitleMimeTypesOf(subtitle.getFormat());
                 if (mimeType == null) {
                     continue;
                 }
                 final MediaSource textSource = dataSource.getSampleMediaSourceFactory()
                         .createMediaSource(
                                 new MediaItem.Subtitle(Uri.parse(subtitle.getUrl()),
                                         mimeType,
                                         PlayerHelper.captionLanguageOf(context, subtitle)),
                                 TIME_UNSET);
                 mediaSources.add(textSource);
             }
         }

         if (mediaSources.size() == 1) {
             return mediaSources.get(0);
         } else {
             return new MergingMediaSource(mediaSources.toArray(
                     new MediaSource[0]));
         }
-        } finally {
-            wasLastResolvedVideoAndAudioSeparated = isVideoAndAudioSeparated;
-        }
     }
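
Condensed, the new resolve() records the source type along each of its three paths. The following rough skeleton of the flow above elides the unchanged bodies; the comments are added here.

// Skeleton of the new resolve() flow shown above; "..." stands for unchanged code.
public MediaSource resolve(@NonNull final StreamInfo info) {
    final MediaSource liveSource = maybeBuildLiveMediaSource(dataSource, info);
    if (liveSource != null) {
        // Live sources are returned directly and never merged with anything else.
        streamSourceType = SourceType.LIVE_STREAM;
        return liveSource;
    }
    // ... build the video source and pick the quality (unchanged) ...
    if (audio != null && (video == null || video.isVideoOnly)) {
        // A separate audio stream is added to the media source.
        streamSourceType = SourceType.VIDEO_WITH_SEPARATED_AUDIO;
    } else {
        // No separate audio stream is used.
        streamSourceType = SourceType.VIDEO_WITH_AUDIO_OR_AUDIO_ONLY;
    }
    // ... subtitles, merging and the final return (unchanged) ...
}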
@@ -134,8 +137,8 @@ public class VideoPlaybackResolver implements PlaybackResolver {
      * @return {@link Optional#empty()} if nothing was resolved, otherwise {@code true} or
      *         {@code false}
      */
-    public Optional<Boolean> wasLastResolvedVideoAndAudioSeparated() {
-        return Optional.ofNullable(wasLastResolvedVideoAndAudioSeparated);
+    public Optional<SourceType> getStreamSourceType() {
+        return Optional.ofNullable(streamSourceType);
     }

     @Nullable
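
For callers migrating from the removed query, the Boolean it used to return corresponds to one specific SourceType value. A hedged sketch of the equivalence (illustrative only, not code from the commit):

// Old query:
//     videoResolver.wasLastResolvedVideoAndAudioSeparated().orElse(false)
// Roughly equivalent new query, assuming the same "default to false" behaviour:
final boolean wasSeparated = videoResolver.getStreamSourceType()
        .map(type -> type == VideoPlaybackResolver.SourceType.VIDEO_WITH_SEPARATED_AUDIO)
        .orElse(false);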