Merge remote-tracking branch 'upstream/dev' into youtube_search_filters
This commit is contained in:
commit
2b771cb284
11
README.md
11
README.md
|
@ -13,6 +13,17 @@ If you're using Gradle, you could add NewPipe Extractor as a dependency with the
|
|||
1. Add `maven { url 'https://jitpack.io' }` to the `repositories` in your `build.gradle`.
|
||||
2. Add `compile 'com.github.TeamNewPipe:NewPipeExtractor:v0.11.0'`the `dependencies` in your `build.gradle`. Replace `v0.11.0` with the latest release.
|
||||
|
||||
### Testing changes
|
||||
|
||||
To test changes quickly you can build the library locally. Using the local Maven repository is a good approach, here's a gist of how to use it:
|
||||
|
||||
1. Add `mavenLocal()` in your project `repositories` list (usually as the first entry to give priority above the others).
|
||||
2. It's _recommended_ that you change the `version` of this library (e.g. `LOCAL_SNAPSHOT`).
|
||||
3. Run gradle's `ìnstall` task to deploy this library to your local repository (using the wrapper, present in the root of this project: `./gradlew install`)
|
||||
4. Change the dependency version used in your project to match the one you chose in step 2 (`implementation 'com.github.TeamNewPipe:NewPipeExtractor:LOCAL_SNAPSHOT'`)
|
||||
|
||||
> Tip for Android Studio users: After you make changes and run the `install` task, use the menu option `File → "Sync with File System"` to refresh the library in your project.
|
||||
|
||||
## Supported sites
|
||||
|
||||
The following sites are currently supported:
|
||||
|
|
|
@ -1,15 +1,23 @@
|
|||
allprojects {
|
||||
apply plugin: 'java-library'
|
||||
apply plugin: 'maven'
|
||||
|
||||
sourceCompatibility = 1.7
|
||||
targetCompatibility = 1.7
|
||||
|
||||
version 'v0.13.0'
|
||||
group 'com.github.TeamNewPipe'
|
||||
|
||||
repositories {
|
||||
jcenter()
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation project(':extractor')
|
||||
implementation project(':timeago-parser')
|
||||
}
|
||||
|
||||
subprojects {
|
||||
task sourcesJar(type: Jar, dependsOn: classes) {
|
||||
classifier = 'sources'
|
||||
|
|
|
@ -1,44 +0,0 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class DownloadRequest {
|
||||
|
||||
private final String requestBody;
|
||||
private final Map<String, List<String>> requestHeaders;
|
||||
public static final DownloadRequest emptyRequest = new DownloadRequest(null, null);
|
||||
|
||||
public DownloadRequest(String requestBody, Map<String, List<String>> headers) {
|
||||
super();
|
||||
this.requestBody = requestBody;
|
||||
if(null != headers) {
|
||||
this.requestHeaders = headers;
|
||||
}else {
|
||||
this.requestHeaders = Collections.emptyMap();
|
||||
}
|
||||
}
|
||||
|
||||
public String getRequestBody() {
|
||||
return requestBody;
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getRequestHeaders() {
|
||||
return requestHeaders;
|
||||
}
|
||||
|
||||
public void setRequestCookies(List<String> cookies){
|
||||
requestHeaders.put("Cookie", cookies);
|
||||
}
|
||||
|
||||
public List<String> getRequestCookies(){
|
||||
if(null == requestHeaders) return Collections.emptyList();
|
||||
List<String> cookies = requestHeaders.get("Cookie");
|
||||
if(null == cookies)
|
||||
return Collections.emptyList();
|
||||
else
|
||||
return cookies;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,37 +0,0 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public class DownloadResponse {
|
||||
private final String responseBody;
|
||||
private final Map<String, List<String>> responseHeaders;
|
||||
|
||||
public DownloadResponse(String responseBody, Map<String, List<String>> headers) {
|
||||
super();
|
||||
this.responseBody = responseBody;
|
||||
this.responseHeaders = headers;
|
||||
}
|
||||
|
||||
public String getResponseBody() {
|
||||
return responseBody;
|
||||
}
|
||||
|
||||
public Map<String, List<String>> getResponseHeaders() {
|
||||
return responseHeaders;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public List<String> getResponseCookies(){
|
||||
if(null == responseHeaders) return Collections.emptyList();
|
||||
List<String> cookies = responseHeaders.get("Set-Cookie");
|
||||
if(null == cookies)
|
||||
return Collections.emptyList();
|
||||
else
|
||||
return cookies;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,70 +0,0 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 28.01.16.
|
||||
*
|
||||
* Copyright (C) Christian Schabesberger 2016 <chris.schabesberger@mailbox.org>
|
||||
* Downloader.java is part of NewPipe.
|
||||
*
|
||||
* NewPipe is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* NewPipe is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
public interface Downloader {
|
||||
|
||||
/**
|
||||
* Download the text file at the supplied URL as in download(String), but set
|
||||
* the HTTP header field "Accept-Language" to the supplied string.
|
||||
*
|
||||
* @param siteUrl the URL of the text file to return the contents of
|
||||
* @param localization the language and country (usually a 2-character code for each)
|
||||
* @return the contents of the specified text file
|
||||
* @throws IOException
|
||||
*/
|
||||
String download(String siteUrl, Localization localization) throws IOException, ReCaptchaException;
|
||||
|
||||
/**
|
||||
* Download the text file at the supplied URL as in download(String), but set
|
||||
* the HTTP header field "Accept-Language" to the supplied string.
|
||||
*
|
||||
* @param siteUrl the URL of the text file to return the contents of
|
||||
* @param customProperties set request header properties
|
||||
* @return the contents of the specified text file
|
||||
* @throws IOException
|
||||
*/
|
||||
String download(String siteUrl, Map<String, String> customProperties) throws IOException, ReCaptchaException;
|
||||
|
||||
/**
|
||||
* Download (via HTTP) the text file located at the supplied URL, and return its
|
||||
* contents. Primarily intended for downloading web pages.
|
||||
*
|
||||
* @param siteUrl the URL of the text file to download
|
||||
* @return the contents of the specified text file
|
||||
* @throws IOException
|
||||
*/
|
||||
String download(String siteUrl) throws IOException, ReCaptchaException;
|
||||
|
||||
DownloadResponse get(String siteUrl, DownloadRequest request)
|
||||
throws IOException, ReCaptchaException;
|
||||
|
||||
DownloadResponse get(String siteUrl) throws IOException, ReCaptchaException;
|
||||
|
||||
DownloadResponse post(String siteUrl, DownloadRequest request)
|
||||
throws IOException, ReCaptchaException;
|
||||
}
|
|
@ -1,14 +1,17 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import java.io.IOException;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
|
||||
public abstract class Extractor{
|
||||
/**
|
||||
|
@ -16,21 +19,20 @@ public abstract class Extractor{
|
|||
* Useful for getting other things from a service (like the url handlers for cleaning/accepting/get id from urls).
|
||||
*/
|
||||
private final StreamingService service;
|
||||
|
||||
private final LinkHandler linkHandler;
|
||||
private final Localization localization;
|
||||
|
||||
@Nullable
|
||||
@Nullable private Localization forcedLocalization = null;
|
||||
@Nullable private ContentCountry forcedContentCountry = null;
|
||||
|
||||
private boolean pageFetched = false;
|
||||
private final Downloader downloader;
|
||||
|
||||
public Extractor(final StreamingService service, final LinkHandler linkHandler, final Localization localization) {
|
||||
public Extractor(final StreamingService service, final LinkHandler linkHandler) {
|
||||
if(service == null) throw new NullPointerException("service is null");
|
||||
if(linkHandler == null) throw new NullPointerException("LinkHandler is null");
|
||||
this.service = service;
|
||||
this.linkHandler = linkHandler;
|
||||
this.downloader = NewPipe.getDownloader();
|
||||
this.localization = localization;
|
||||
if(downloader == null) throw new NullPointerException("downloader is null");
|
||||
}
|
||||
|
||||
|
@ -91,6 +93,11 @@ public abstract class Extractor{
|
|||
public String getUrl() throws ParsingException {
|
||||
return linkHandler.getUrl();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String getBaseUrl() throws ParsingException {
|
||||
return linkHandler.getBaseUrl();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public StreamingService getService() {
|
||||
|
@ -105,8 +112,30 @@ public abstract class Extractor{
|
|||
return downloader;
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Localization
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public void forceLocalization(Localization localization) {
|
||||
this.forcedLocalization = localization;
|
||||
}
|
||||
|
||||
public void forceContentCountry(ContentCountry contentCountry) {
|
||||
this.forcedContentCountry = contentCountry;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public Localization getLocalization() {
|
||||
return localization;
|
||||
public Localization getExtractorLocalization() {
|
||||
return forcedLocalization == null ? getService().getLocalization() : forcedLocalization;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public ContentCountry getExtractorContentCountry() {
|
||||
return forcedContentCountry == null ? getService().getContentCountry() : forcedContentCountry;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public TimeAgoParser getTimeAgoParser() {
|
||||
return getService().getTimeAgoParser(getExtractorLocalization());
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ package org.schabi.newpipe.extractor;
|
|||
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -14,8 +13,8 @@ import java.util.List;
|
|||
*/
|
||||
public abstract class ListExtractor<R extends InfoItem> extends Extractor {
|
||||
|
||||
public ListExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public ListExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -38,6 +38,7 @@ public enum MediaFormat {
|
|||
MP3 (0x300, "MP3", "mp3", "audio/mpeg"),
|
||||
OPUS (0x400, "opus", "opus", "audio/opus"),
|
||||
OGG (0x500, "ogg", "ogg", "audio/ogg"),
|
||||
WEBMA_OPUS (0x200, "WebM Opus", "webm", "audio/webm"),
|
||||
// subtitles formats
|
||||
VTT (0x1000, "WebVTT", "vtt", "text/vtt"),
|
||||
TTML (0x2000, "Timed Text Markup Language", "ttml", "application/ttml+xml"),
|
||||
|
@ -125,6 +126,13 @@ public enum MediaFormat {
|
|||
return null;
|
||||
}
|
||||
|
||||
public static MediaFormat getFromSuffix(String suffix) {
|
||||
for (MediaFormat vf: values()) {
|
||||
if (vf.suffix.equals(suffix)) return vf;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the name of the format
|
||||
* @return the name of the format
|
||||
|
@ -148,4 +156,5 @@ public enum MediaFormat {
|
|||
public String getMimeType() {
|
||||
return mimeType;
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,24 +20,42 @@ package org.schabi.newpipe.extractor;
|
|||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Provides access to streaming services supported by NewPipe.
|
||||
*/
|
||||
public class NewPipe {
|
||||
private static Downloader downloader = null;
|
||||
private static Localization localization = null;
|
||||
private static Downloader downloader;
|
||||
private static Localization preferredLocalization;
|
||||
private static ContentCountry preferredContentCountry;
|
||||
|
||||
private NewPipe() {
|
||||
}
|
||||
|
||||
public static void init(Downloader d) {
|
||||
downloader = d;
|
||||
preferredLocalization = Localization.DEFAULT;
|
||||
preferredContentCountry = ContentCountry.DEFAULT;
|
||||
}
|
||||
|
||||
public static void init(Downloader d, Localization l) {
|
||||
downloader = d;
|
||||
localization = l;
|
||||
preferredLocalization = l;
|
||||
preferredContentCountry = l.getCountryCode().isEmpty() ? ContentCountry.DEFAULT : new ContentCountry(l.getCountryCode());
|
||||
}
|
||||
|
||||
public static void init(Downloader d, Localization l, ContentCountry c) {
|
||||
downloader = d;
|
||||
preferredLocalization = l;
|
||||
preferredContentCountry = c;
|
||||
}
|
||||
|
||||
public static Downloader getDownloader() {
|
||||
|
@ -99,11 +117,41 @@ public class NewPipe {
|
|||
}
|
||||
}
|
||||
|
||||
public static void setLocalization(Localization localization) {
|
||||
NewPipe.localization = localization;
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Localization
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public static void setupLocalization(Localization preferredLocalization) {
|
||||
setupLocalization(preferredLocalization, null);
|
||||
}
|
||||
|
||||
public static void setupLocalization(Localization preferredLocalization, @Nullable ContentCountry preferredContentCountry) {
|
||||
NewPipe.preferredLocalization = preferredLocalization;
|
||||
|
||||
if (preferredContentCountry != null) {
|
||||
NewPipe.preferredContentCountry = preferredContentCountry;
|
||||
} else {
|
||||
NewPipe.preferredContentCountry = preferredLocalization.getCountryCode().isEmpty()
|
||||
? ContentCountry.DEFAULT
|
||||
: new ContentCountry(preferredLocalization.getCountryCode());
|
||||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static Localization getPreferredLocalization() {
|
||||
return localization;
|
||||
return preferredLocalization == null ? Localization.DEFAULT : preferredLocalization;
|
||||
}
|
||||
|
||||
public static void setPreferredLocalization(Localization preferredLocalization) {
|
||||
NewPipe.preferredLocalization = preferredLocalization;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static ContentCountry getPreferredContentCountry() {
|
||||
return preferredContentCountry == null ? ContentCountry.DEFAULT : preferredContentCountry;
|
||||
}
|
||||
|
||||
public static void setPreferredContentCountry(ContentCountry preferredContentCountry) {
|
||||
NewPipe.preferredContentCountry = preferredContentCountry;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,13 +1,13 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.MediaCCCService;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudService;
|
||||
import org.schabi.newpipe.extractor.services.youtube.YoutubeService;
|
||||
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static java.util.Collections.unmodifiableList;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.MediaCCCService;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeService;
|
||||
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudService;
|
||||
import org.schabi.newpipe.extractor.services.youtube.YoutubeService;
|
||||
|
||||
/*
|
||||
* Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
|
||||
|
@ -38,16 +38,18 @@ public final class ServiceList {
|
|||
public static final YoutubeService YouTube;
|
||||
public static final SoundcloudService SoundCloud;
|
||||
public static final MediaCCCService MediaCCC;
|
||||
public static final PeertubeService PeerTube;
|
||||
|
||||
/**
|
||||
* When creating a new service, put this service in the end of this list,
|
||||
* and give it the next free id.
|
||||
*/
|
||||
private static final List<StreamingService> SERVICES = unmodifiableList(
|
||||
asList(
|
||||
private static final List<StreamingService> SERVICES = Collections.unmodifiableList(
|
||||
Arrays.asList(
|
||||
YouTube = new YoutubeService(0),
|
||||
SoundCloud = new SoundcloudService(1),
|
||||
MediaCCC = new MediaCCCService(2)
|
||||
MediaCCC = new MediaCCCService(2),
|
||||
PeerTube = new PeertubeService(3)
|
||||
));
|
||||
|
||||
/**
|
||||
|
|
|
@ -14,11 +14,15 @@ import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
|||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoPatternsManager;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
/*
|
||||
* Copyright (C) Christian Schabesberger 2018 <chris.schabesberger@mailbox.org>
|
||||
|
@ -45,6 +49,7 @@ public abstract class StreamingService {
|
|||
*/
|
||||
public static class ServiceInfo {
|
||||
private final String name;
|
||||
|
||||
private final List<MediaCapability> mediaCapabilities;
|
||||
|
||||
/**
|
||||
|
@ -60,7 +65,7 @@ public abstract class StreamingService {
|
|||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
|
||||
public List<MediaCapability> getMediaCapabilities() {
|
||||
return mediaCapabilities;
|
||||
}
|
||||
|
@ -111,10 +116,12 @@ public abstract class StreamingService {
|
|||
public String toString() {
|
||||
return serviceId + ":" + serviceInfo.getName();
|
||||
}
|
||||
|
||||
public abstract String getBaseUrl();
|
||||
|
||||
////////////////////////////////////////////
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Url Id handler
|
||||
////////////////////////////////////////////
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
/**
|
||||
* Must return a new instance of an implementation of LinkHandlerFactory for streams.
|
||||
|
@ -143,25 +150,22 @@ public abstract class StreamingService {
|
|||
public abstract SearchQueryHandlerFactory getSearchQHFactory();
|
||||
public abstract ListLinkHandlerFactory getCommentsLHFactory();
|
||||
|
||||
|
||||
////////////////////////////////////////////
|
||||
// Extractor
|
||||
////////////////////////////////////////////
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Extractors
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
/**
|
||||
* Must create a new instance of a SearchExtractor implementation.
|
||||
* @param queryHandler specifies the keyword lock for, and the filters which should be applied.
|
||||
* @param localization specifies the language/country for the extractor.
|
||||
* @return a new SearchExtractor instance
|
||||
*/
|
||||
public abstract SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler, Localization localization);
|
||||
public abstract SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler);
|
||||
|
||||
/**
|
||||
* Must create a new instance of a SuggestionExtractor implementation.
|
||||
* @param localization specifies the language/country for the extractor.
|
||||
* @return a new SuggestionExtractor instance
|
||||
*/
|
||||
public abstract SuggestionExtractor getSuggestionExtractor(Localization localization);
|
||||
public abstract SuggestionExtractor getSuggestionExtractor();
|
||||
|
||||
/**
|
||||
* Outdated or obsolete. null can be returned.
|
||||
|
@ -179,123 +183,85 @@ public abstract class StreamingService {
|
|||
/**
|
||||
* Must create a new instance of a ChannelExtractor implementation.
|
||||
* @param linkHandler is pointing to the channel which should be handled by this new instance.
|
||||
* @param localization specifies the language used for the request.
|
||||
* @return a new ChannelExtractor
|
||||
* @throws ExtractionException
|
||||
*/
|
||||
public abstract ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler,
|
||||
Localization localization) throws ExtractionException;
|
||||
public abstract ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) throws ExtractionException;
|
||||
|
||||
/**
|
||||
* Must crete a new instance of a PlaylistExtractor implementation.
|
||||
* @param linkHandler is pointing to the playlist which should be handled by this new instance.
|
||||
* @param localization specifies the language used for the request.
|
||||
* @return a new PlaylistExtractor
|
||||
* @throws ExtractionException
|
||||
*/
|
||||
public abstract PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler,
|
||||
Localization localization) throws ExtractionException;
|
||||
public abstract PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) throws ExtractionException;
|
||||
|
||||
/**
|
||||
* Must create a new instance of a StreamExtractor implementation.
|
||||
* @param linkHandler is pointing to the stream which should be handled by this new instance.
|
||||
* @param localization specifies the language used for the request.
|
||||
* @return a new StreamExtractor
|
||||
* @throws ExtractionException
|
||||
*/
|
||||
public abstract StreamExtractor getStreamExtractor(LinkHandler linkHandler,
|
||||
Localization localization) throws ExtractionException;
|
||||
public abstract CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler,
|
||||
Localization localization) throws ExtractionException;
|
||||
////////////////////////////////////////////
|
||||
// Extractor with default localization
|
||||
////////////////////////////////////////////
|
||||
public abstract StreamExtractor getStreamExtractor(LinkHandler linkHandler) throws ExtractionException;
|
||||
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler) {
|
||||
return getSearchExtractor(queryHandler, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
public abstract CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler) throws ExtractionException;
|
||||
|
||||
public SuggestionExtractor getSuggestionExtractor() {
|
||||
return getSuggestionExtractor(NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) throws ExtractionException {
|
||||
return getChannelExtractor(linkHandler, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) throws ExtractionException {
|
||||
return getPlaylistExtractor(linkHandler, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler) throws ExtractionException {
|
||||
return getStreamExtractor(linkHandler, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler urlIdHandler) throws ExtractionException {
|
||||
return getCommentsExtractor(urlIdHandler, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
////////////////////////////////////////////
|
||||
// Extractor without link handler
|
||||
////////////////////////////////////////////
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Extractors without link handler
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public SearchExtractor getSearchExtractor(String query,
|
||||
List<String> contentFilter,
|
||||
String sortFilter,
|
||||
Localization localization) throws ExtractionException {
|
||||
String sortFilter) throws ExtractionException {
|
||||
return getSearchExtractor(getSearchQHFactory()
|
||||
.fromQuery(query,
|
||||
contentFilter,
|
||||
sortFilter),
|
||||
localization);
|
||||
.fromQuery(query, contentFilter, sortFilter));
|
||||
}
|
||||
|
||||
public ChannelExtractor getChannelExtractor(String id,
|
||||
List<String> contentFilter,
|
||||
String sortFilter,
|
||||
Localization localization) throws ExtractionException {
|
||||
return getChannelExtractor(getChannelLHFactory().fromQuery(id, contentFilter, sortFilter), localization);
|
||||
String sortFilter) throws ExtractionException {
|
||||
return getChannelExtractor(getChannelLHFactory()
|
||||
.fromQuery(id, contentFilter, sortFilter));
|
||||
}
|
||||
|
||||
public PlaylistExtractor getPlaylistExtractor(String id,
|
||||
List<String> contentFilter,
|
||||
String sortFilter,
|
||||
Localization localization) throws ExtractionException {
|
||||
String sortFilter) throws ExtractionException {
|
||||
return getPlaylistExtractor(getPlaylistLHFactory()
|
||||
.fromQuery(id,
|
||||
contentFilter,
|
||||
sortFilter),
|
||||
localization);
|
||||
.fromQuery(id, contentFilter, sortFilter));
|
||||
}
|
||||
|
||||
////////////////////////////////////////////
|
||||
// Short extractor without localization
|
||||
////////////////////////////////////////////
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Short extractors overloads
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public SearchExtractor getSearchExtractor(String query) throws ExtractionException {
|
||||
return getSearchExtractor(getSearchQHFactory().fromQuery(query), NewPipe.getPreferredLocalization());
|
||||
return getSearchExtractor(getSearchQHFactory().fromQuery(query));
|
||||
}
|
||||
|
||||
public ChannelExtractor getChannelExtractor(String url) throws ExtractionException {
|
||||
return getChannelExtractor(getChannelLHFactory().fromUrl(url), NewPipe.getPreferredLocalization());
|
||||
return getChannelExtractor(getChannelLHFactory().fromUrl(url));
|
||||
}
|
||||
|
||||
public PlaylistExtractor getPlaylistExtractor(String url) throws ExtractionException {
|
||||
return getPlaylistExtractor(getPlaylistLHFactory().fromUrl(url), NewPipe.getPreferredLocalization());
|
||||
return getPlaylistExtractor(getPlaylistLHFactory().fromUrl(url));
|
||||
}
|
||||
|
||||
public StreamExtractor getStreamExtractor(String url) throws ExtractionException {
|
||||
return getStreamExtractor(getStreamLHFactory().fromUrl(url), NewPipe.getPreferredLocalization());
|
||||
return getStreamExtractor(getStreamLHFactory().fromUrl(url));
|
||||
}
|
||||
|
||||
|
||||
public CommentsExtractor getCommentsExtractor(String url) throws ExtractionException {
|
||||
ListLinkHandlerFactory llhf = getCommentsLHFactory();
|
||||
if(null == llhf) {
|
||||
return null;
|
||||
}
|
||||
return getCommentsExtractor(llhf.fromUrl(url), NewPipe.getPreferredLocalization());
|
||||
return getCommentsExtractor(llhf.fromUrl(url));
|
||||
}
|
||||
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
/**
|
||||
* Figures out where the link is pointing to (a channel, a video, a playlist, etc.)
|
||||
|
@ -318,4 +284,95 @@ public abstract class StreamingService {
|
|||
return LinkType.NONE;
|
||||
}
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Localization
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
/**
|
||||
* Returns a list of localizations that this service supports.
|
||||
*/
|
||||
public List<Localization> getSupportedLocalizations() {
|
||||
return Collections.singletonList(Localization.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a list of countries that this service supports.<br>
|
||||
*/
|
||||
public List<ContentCountry> getSupportedCountries() {
|
||||
return Collections.singletonList(ContentCountry.DEFAULT);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the localization that should be used in this service. It will get which localization
|
||||
* the user prefer (using {@link NewPipe#getPreferredLocalization()}), then it will:
|
||||
* <ul>
|
||||
* <li>Check if the exactly localization is supported by this service.</li>
|
||||
* <li>If not, check if a less specific localization is available, using only the language code.</li>
|
||||
* <li>Fallback to the {@link Localization#DEFAULT default} localization.</li>
|
||||
* </ul>
|
||||
*/
|
||||
public Localization getLocalization() {
|
||||
final Localization preferredLocalization = NewPipe.getPreferredLocalization();
|
||||
|
||||
// Check the localization's language and country
|
||||
if (getSupportedLocalizations().contains(preferredLocalization)) {
|
||||
return preferredLocalization;
|
||||
}
|
||||
|
||||
// Fallback to the first supported language that matches the preferred language
|
||||
for (Localization supportedLanguage : getSupportedLocalizations()) {
|
||||
if (supportedLanguage.getLanguageCode().equals(preferredLocalization.getLanguageCode())) {
|
||||
return supportedLanguage;
|
||||
}
|
||||
}
|
||||
|
||||
return Localization.DEFAULT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the country that should be used to fetch content in this service. It will get which country
|
||||
* the user prefer (using {@link NewPipe#getPreferredContentCountry()}), then it will:
|
||||
* <ul>
|
||||
* <li>Check if the country is supported by this service.</li>
|
||||
* <li>If not, fallback to the {@link ContentCountry#DEFAULT default} country.</li>
|
||||
* </ul>
|
||||
*/
|
||||
public ContentCountry getContentCountry() {
|
||||
final ContentCountry preferredContentCountry = NewPipe.getPreferredContentCountry();
|
||||
|
||||
if (getSupportedCountries().contains(preferredContentCountry)) {
|
||||
return preferredContentCountry;
|
||||
}
|
||||
|
||||
return ContentCountry.DEFAULT;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get an instance of the time ago parser using the patterns related to the passed localization.<br>
|
||||
* <br>
|
||||
* Just like {@link #getLocalization()}, it will also try to fallback to a less specific localization if
|
||||
* the exact one is not available/supported.
|
||||
*
|
||||
* @throws IllegalArgumentException if the localization is not supported (parsing patterns are not present).
|
||||
*/
|
||||
public TimeAgoParser getTimeAgoParser(Localization localization) {
|
||||
final TimeAgoParser targetParser = TimeAgoPatternsManager.getTimeAgoParserFor(localization);
|
||||
|
||||
if (targetParser != null) {
|
||||
return targetParser;
|
||||
}
|
||||
|
||||
if (!localization.getCountryCode().isEmpty()) {
|
||||
final Localization lessSpecificLocalization = new Localization(localization.getLanguageCode());
|
||||
final TimeAgoParser lessSpecificParser = TimeAgoPatternsManager.getTimeAgoParserFor(lessSpecificLocalization);
|
||||
|
||||
if (lessSpecificParser != null) {
|
||||
return lessSpecificParser;
|
||||
}
|
||||
}
|
||||
|
||||
throw new IllegalArgumentException("Localization is not supported (\"" + localization.toString() + "\")");
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,48 +0,0 @@
|
|||
package org.schabi.newpipe.extractor;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 28.09.16.
|
||||
*
|
||||
* Copyright (C) Christian Schabesberger 2016 <chris.schabesberger@mailbox.org>
|
||||
* SuggestionExtractor.java is part of NewPipe.
|
||||
*
|
||||
* NewPipe is free software: you can redistribute it and/or modify
|
||||
* it under the terms of the GNU General Public License as published by
|
||||
* the Free Software Foundation, either version 3 of the License, or
|
||||
* (at your option) any later version.
|
||||
*
|
||||
* NewPipe is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
* GNU General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License
|
||||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
public abstract class SuggestionExtractor {
|
||||
|
||||
private final int serviceId;
|
||||
private final Localization localization;
|
||||
|
||||
public SuggestionExtractor(int serviceId, Localization localization) {
|
||||
this.serviceId = serviceId;
|
||||
this.localization = localization;
|
||||
}
|
||||
|
||||
public abstract List<String> suggestionList(String query) throws IOException, ExtractionException;
|
||||
|
||||
public int getServiceId() {
|
||||
return serviceId;
|
||||
}
|
||||
|
||||
protected Localization getLocalization() {
|
||||
return localization;
|
||||
}
|
||||
}
|
|
@ -3,9 +3,8 @@ package org.schabi.newpipe.extractor.channel;
|
|||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 25.07.16.
|
||||
|
@ -29,8 +28,8 @@ import org.schabi.newpipe.extractor.utils.Localization;
|
|||
|
||||
public abstract class ChannelExtractor extends ListExtractor<StreamInfoItem> {
|
||||
|
||||
public ChannelExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public ChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
public abstract String getAvatarUrl() throws ParsingException;
|
||||
|
|
|
@ -6,10 +6,10 @@ import org.schabi.newpipe.extractor.NewPipe;
|
|||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -3,13 +3,12 @@ package org.schabi.newpipe.extractor.comments;
|
|||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
public abstract class CommentsExtractor extends ListExtractor<CommentsInfoItem> {
|
||||
|
||||
public CommentsExtractor(StreamingService service, ListLinkHandler uiHandler, Localization localization) {
|
||||
super(service, uiHandler, localization);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
public CommentsExtractor(StreamingService service, ListLinkHandler uiHandler) {
|
||||
super(service, uiHandler);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,76 +1,87 @@
|
|||
package org.schabi.newpipe.extractor.comments;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
|
||||
public class CommentsInfoItem extends InfoItem{
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
private String commentId;
|
||||
private String commentText;
|
||||
private String authorName;
|
||||
private String authorThumbnail;
|
||||
private String authorEndpoint;
|
||||
private String publishedTime;
|
||||
private Integer likeCount;
|
||||
|
||||
public CommentsInfoItem(int serviceId, String url, String name) {
|
||||
super(InfoType.COMMENT, serviceId, url, name);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
public String getCommentText() {
|
||||
return commentText;
|
||||
}
|
||||
public class CommentsInfoItem extends InfoItem {
|
||||
|
||||
public void setCommentText(String contentText) {
|
||||
this.commentText = contentText;
|
||||
}
|
||||
private String commentId;
|
||||
private String commentText;
|
||||
private String authorName;
|
||||
private String authorThumbnail;
|
||||
private String authorEndpoint;
|
||||
private String textualPublishedTime;
|
||||
@Nullable private DateWrapper publishedTime;
|
||||
private int likeCount;
|
||||
|
||||
public String getAuthorName() {
|
||||
return authorName;
|
||||
}
|
||||
public CommentsInfoItem(int serviceId, String url, String name) {
|
||||
super(InfoType.COMMENT, serviceId, url, name);
|
||||
}
|
||||
|
||||
public void setAuthorName(String authorName) {
|
||||
this.authorName = authorName;
|
||||
}
|
||||
public String getCommentId() {
|
||||
return commentId;
|
||||
}
|
||||
|
||||
public String getAuthorThumbnail() {
|
||||
return authorThumbnail;
|
||||
}
|
||||
public void setCommentId(String commentId) {
|
||||
this.commentId = commentId;
|
||||
}
|
||||
|
||||
public void setAuthorThumbnail(String authorThumbnail) {
|
||||
this.authorThumbnail = authorThumbnail;
|
||||
}
|
||||
public String getCommentText() {
|
||||
return commentText;
|
||||
}
|
||||
|
||||
public String getAuthorEndpoint() {
|
||||
return authorEndpoint;
|
||||
}
|
||||
public void setCommentText(String commentText) {
|
||||
this.commentText = commentText;
|
||||
}
|
||||
|
||||
public void setAuthorEndpoint(String authorEndpoint) {
|
||||
this.authorEndpoint = authorEndpoint;
|
||||
}
|
||||
public String getAuthorName() {
|
||||
return authorName;
|
||||
}
|
||||
|
||||
public String getPublishedTime() {
|
||||
return publishedTime;
|
||||
}
|
||||
public void setAuthorName(String authorName) {
|
||||
this.authorName = authorName;
|
||||
}
|
||||
|
||||
public void setPublishedTime(String publishedTime) {
|
||||
this.publishedTime = publishedTime;
|
||||
}
|
||||
public String getAuthorThumbnail() {
|
||||
return authorThumbnail;
|
||||
}
|
||||
|
||||
public Integer getLikeCount() {
|
||||
return likeCount;
|
||||
}
|
||||
public void setAuthorThumbnail(String authorThumbnail) {
|
||||
this.authorThumbnail = authorThumbnail;
|
||||
}
|
||||
|
||||
public void setLikeCount(Integer likeCount) {
|
||||
this.likeCount = likeCount;
|
||||
}
|
||||
public String getAuthorEndpoint() {
|
||||
return authorEndpoint;
|
||||
}
|
||||
|
||||
public String getCommentId() {
|
||||
return commentId;
|
||||
}
|
||||
public void setAuthorEndpoint(String authorEndpoint) {
|
||||
this.authorEndpoint = authorEndpoint;
|
||||
}
|
||||
|
||||
public void setCommentId(String commentId) {
|
||||
this.commentId = commentId;
|
||||
}
|
||||
public String getTextualPublishedTime() {
|
||||
return textualPublishedTime;
|
||||
}
|
||||
|
||||
public void setTextualPublishedTime(String textualPublishedTime) {
|
||||
this.textualPublishedTime = textualPublishedTime;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public DateWrapper getPublishedTime() {
|
||||
return publishedTime;
|
||||
}
|
||||
|
||||
public void setPublishedTime(@Nullable DateWrapper publishedTime) {
|
||||
this.publishedTime = publishedTime;
|
||||
}
|
||||
|
||||
public int getLikeCount() {
|
||||
return likeCount;
|
||||
}
|
||||
|
||||
public void setLikeCount(int likeCount) {
|
||||
this.likeCount = likeCount;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,9 @@ package org.schabi.newpipe.extractor.comments;
|
|||
|
||||
import org.schabi.newpipe.extractor.InfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public interface CommentsInfoItemExtractor extends InfoItemExtractor {
|
||||
String getCommentId() throws ParsingException;
|
||||
|
@ -9,6 +12,8 @@ public interface CommentsInfoItemExtractor extends InfoItemExtractor {
|
|||
String getAuthorName() throws ParsingException;
|
||||
String getAuthorThumbnail() throws ParsingException;
|
||||
String getAuthorEndpoint() throws ParsingException;
|
||||
String getPublishedTime() throws ParsingException;
|
||||
Integer getLikeCount() throws ParsingException;
|
||||
String getTextualPublishedTime() throws ParsingException;
|
||||
@Nullable
|
||||
DateWrapper getPublishedTime() throws ParsingException;
|
||||
int getLikeCount() throws ParsingException;
|
||||
}
|
||||
|
|
|
@ -1,12 +1,12 @@
|
|||
package org.schabi.newpipe.extractor.comments;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Vector;
|
||||
|
||||
public class CommentsInfoItemsCollector extends InfoItemsCollector<CommentsInfoItem, CommentsInfoItemExtractor> {
|
||||
|
||||
public CommentsInfoItemsCollector(int serviceId) {
|
||||
|
@ -49,6 +49,11 @@ public class CommentsInfoItemsCollector extends InfoItemsCollector<CommentsInfoI
|
|||
} catch (Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setTextualPublishedTime(extractor.getTextualPublishedTime());
|
||||
} catch (Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setPublishedTime(extractor.getPublishedTime());
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -0,0 +1,144 @@
|
|||
package org.schabi.newpipe.extractor.downloader;
|
||||
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A base for downloader implementations that NewPipe will use
|
||||
* to download needed resources during extraction.
|
||||
*/
|
||||
public abstract class Downloader {
|
||||
|
||||
/**
|
||||
* Do a GET request to get the resource that the url is pointing to.<br>
|
||||
* <br>
|
||||
* This method calls {@link #get(String, Map, Localization)} with the default preferred localization. It should only be
|
||||
* used when the resource that will be fetched won't be affected by the localization.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url) throws IOException, ReCaptchaException {
|
||||
return get(url, null, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a GET request to get the resource that the url is pointing to.<br>
|
||||
* <br>
|
||||
* It will set the {@code Accept-Language} header to the language of the localization parameter.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Localization localization) throws IOException, ReCaptchaException {
|
||||
return get(url, null, localization);
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a GET request with the specified headers.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param headers a list of headers that will be used in the request.
|
||||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Map<String, List<String>> headers) throws IOException, ReCaptchaException {
|
||||
return get(url, headers, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a GET request with the specified headers.<br>
|
||||
* <br>
|
||||
* It will set the {@code Accept-Language} header to the language of the localization parameter.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param headers a list of headers that will be used in the request.
|
||||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response get(String url, @Nullable Map<String, List<String>> headers, @Nullable Localization localization)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.get(url)
|
||||
.headers(headers)
|
||||
.localization(localization)
|
||||
.build());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a HEAD request.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @return the result of the HEAD request
|
||||
*/
|
||||
public Response head(String url) throws IOException, ReCaptchaException {
|
||||
return head(url, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a HEAD request with the specified headers.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param headers a list of headers that will be used in the request.
|
||||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @return the result of the HEAD request
|
||||
*/
|
||||
public Response head(String url, @Nullable Map<String, List<String>> headers)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.head(url)
|
||||
.headers(headers)
|
||||
.build());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a POST request with the specified headers, sending the data array.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param headers a list of headers that will be used in the request.
|
||||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @param dataToSend byte array that will be sent when doing the request.
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response post(String url, @Nullable Map<String, List<String>> headers, @Nullable byte[] dataToSend)
|
||||
throws IOException, ReCaptchaException {
|
||||
return post(url, headers, dataToSend, NewPipe.getPreferredLocalization());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a POST request with the specified headers, sending the data array.
|
||||
* <br>
|
||||
* It will set the {@code Accept-Language} header to the language of the localization parameter.
|
||||
*
|
||||
* @param url the URL that is pointing to the wanted resource
|
||||
* @param headers a list of headers that will be used in the request.
|
||||
* Any default headers <b>should</b> be overridden by these.
|
||||
* @param dataToSend byte array that will be sent when doing the request.
|
||||
* @param localization the source of the value of the {@code Accept-Language} header
|
||||
* @return the result of the GET request
|
||||
*/
|
||||
public Response post(String url, @Nullable Map<String, List<String>> headers, @Nullable byte[] dataToSend, @Nullable Localization localization)
|
||||
throws IOException, ReCaptchaException {
|
||||
return execute(Request.newBuilder()
|
||||
.post(url, dataToSend)
|
||||
.headers(headers)
|
||||
.localization(localization)
|
||||
.build());
|
||||
}
|
||||
|
||||
/**
|
||||
* Do a request using the specified {@link Request} object.
|
||||
*
|
||||
* @return the result of the request
|
||||
*/
|
||||
public abstract Response execute(@Nonnull Request request) throws IOException, ReCaptchaException;
|
||||
}
|
|
@ -0,0 +1,244 @@
|
|||
package org.schabi.newpipe.extractor.downloader;
|
||||
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.*;
|
||||
|
||||
/**
|
||||
* An object that holds request information used when {@link Downloader#execute(Request) executing} a request.
|
||||
*/
|
||||
public class Request {
|
||||
private final String httpMethod;
|
||||
private final String url;
|
||||
private final Map<String, List<String>> headers;
|
||||
@Nullable private final byte[] dataToSend;
|
||||
@Nullable private final Localization localization;
|
||||
|
||||
public Request(String httpMethod, String url, Map<String, List<String>> headers, @Nullable byte[] dataToSend,
|
||||
@Nullable Localization localization, boolean automaticLocalizationHeader) {
|
||||
if (httpMethod == null) throw new IllegalArgumentException("Request's httpMethod is null");
|
||||
if (url == null) throw new IllegalArgumentException("Request's url is null");
|
||||
|
||||
this.httpMethod = httpMethod;
|
||||
this.url = url;
|
||||
this.dataToSend = dataToSend;
|
||||
this.localization = localization;
|
||||
|
||||
Map<String, List<String>> headersToSet = null;
|
||||
if (headers == null) headers = Collections.emptyMap();
|
||||
|
||||
if (automaticLocalizationHeader && localization != null) {
|
||||
headersToSet = new LinkedHashMap<>(headersFromLocalization(localization));
|
||||
headersToSet.putAll(headers);
|
||||
}
|
||||
|
||||
this.headers = Collections.unmodifiableMap(headersToSet == null ? headers : headersToSet);
|
||||
}
|
||||
|
||||
private Request(Builder builder) {
|
||||
this(builder.httpMethod, builder.url, builder.headers, builder.dataToSend,
|
||||
builder.localization, builder.automaticLocalizationHeader);
|
||||
}
|
||||
|
||||
/**
|
||||
* A http method (i.e. {@code GET, POST, HEAD}).
|
||||
*/
|
||||
public String httpMethod() {
|
||||
return httpMethod;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL that is pointing to the wanted resource.
|
||||
*/
|
||||
public String url() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* A list of headers that will be used in the request.<br>
|
||||
* Any default headers that the implementation may have, <b>should</b> be overridden by these.
|
||||
*/
|
||||
public Map<String, List<String>> headers() {
|
||||
return headers;
|
||||
}
|
||||
|
||||
/**
|
||||
* An optional byte array that will be sent when doing the request, very commonly used in
|
||||
* {@code POST} requests.<br>
|
||||
* <br>
|
||||
* The implementation should make note of some recommended headers
|
||||
* (for example, {@code Content-Length} in a post request).
|
||||
*/
|
||||
@Nullable
|
||||
public byte[] dataToSend() {
|
||||
return dataToSend;
|
||||
}
|
||||
|
||||
/**
|
||||
* A localization object that should be used when executing a request.<br>
|
||||
* <br>
|
||||
* Usually the {@code Accept-Language} will be set to this value (a helper
|
||||
* method to do this easily: {@link Request#headersFromLocalization(Localization)}).
|
||||
*/
|
||||
@Nullable
|
||||
public Localization localization() {
|
||||
return localization;
|
||||
}
|
||||
|
||||
public static Builder newBuilder() {
|
||||
return new Builder();
|
||||
}
|
||||
|
||||
public static final class Builder {
|
||||
private String httpMethod;
|
||||
private String url;
|
||||
private Map<String, List<String>> headers = new LinkedHashMap<>();
|
||||
private byte[] dataToSend;
|
||||
private Localization localization;
|
||||
private boolean automaticLocalizationHeader = true;
|
||||
|
||||
public Builder() {
|
||||
}
|
||||
|
||||
/**
|
||||
* A http method (i.e. {@code GET, POST, HEAD}).
|
||||
*/
|
||||
public Builder httpMethod(String httpMethod) {
|
||||
this.httpMethod = httpMethod;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* The URL that is pointing to the wanted resource.
|
||||
*/
|
||||
public Builder url(String url) {
|
||||
this.url = url;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* A list of headers that will be used in the request.<br>
|
||||
* Any default headers that the implementation may have, <b>should</b> be overridden by these.
|
||||
*/
|
||||
public Builder headers(@Nullable Map<String, List<String>> headers) {
|
||||
if (headers == null) {
|
||||
this.headers.clear();
|
||||
return this;
|
||||
}
|
||||
this.headers.clear();
|
||||
this.headers.putAll(headers);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* An optional byte array that will be sent when doing the request, very commonly used in
|
||||
* {@code POST} requests.<br>
|
||||
* <br>
|
||||
* The implementation should make note of some recommended headers
|
||||
* (for example, {@code Content-Length} in a post request).
|
||||
*/
|
||||
public Builder dataToSend(byte[] dataToSend) {
|
||||
this.dataToSend = dataToSend;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* A localization object that should be used when executing a request.<br>
|
||||
* <br>
|
||||
* Usually the {@code Accept-Language} will be set to this value (a helper
|
||||
* method to do this easily: {@link Request#headersFromLocalization(Localization)}).
|
||||
*/
|
||||
public Builder localization(Localization localization) {
|
||||
this.localization = localization;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* If localization headers should automatically be included in the request.
|
||||
*/
|
||||
public Builder automaticLocalizationHeader(boolean automaticLocalizationHeader) {
|
||||
this.automaticLocalizationHeader = automaticLocalizationHeader;
|
||||
return this;
|
||||
}
|
||||
|
||||
|
||||
public Request build() {
|
||||
return new Request(this);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Http Methods Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public Builder get(String url) {
|
||||
this.httpMethod = "GET";
|
||||
this.url = url;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder head(String url) {
|
||||
this.httpMethod = "HEAD";
|
||||
this.url = url;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder post(String url, @Nullable byte[] dataToSend) {
|
||||
this.httpMethod = "POST";
|
||||
this.url = url;
|
||||
this.dataToSend = dataToSend;
|
||||
return this;
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Additional Headers Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
public Builder setHeaders(String headerName, List<String> headerValueList) {
|
||||
this.headers.remove(headerName);
|
||||
this.headers.put(headerName, headerValueList);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder addHeaders(String headerName, List<String> headerValueList) {
|
||||
@Nullable List<String> currentHeaderValueList = this.headers.get(headerName);
|
||||
if (currentHeaderValueList == null) {
|
||||
currentHeaderValueList = new ArrayList<>();
|
||||
}
|
||||
|
||||
currentHeaderValueList.addAll(headerValueList);
|
||||
this.headers.put(headerName, headerValueList);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder setHeader(String headerName, String headerValue) {
|
||||
return setHeaders(headerName, Collections.singletonList(headerValue));
|
||||
}
|
||||
|
||||
public Builder addHeader(String headerName, String headerValue) {
|
||||
return addHeaders(headerName, Collections.singletonList(headerValue));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
@Nonnull
|
||||
public static Map<String, List<String>> headersFromLocalization(@Nullable Localization localization) {
|
||||
if (localization == null) return Collections.emptyMap();
|
||||
|
||||
final Map<String, List<String>> headers = new LinkedHashMap<>();
|
||||
if (!localization.getCountryCode().isEmpty()) {
|
||||
headers.put("Accept-Language", Collections.singletonList(localization.getLocalizationCode() +
|
||||
", " + localization.getLanguageCode() + ";q=0.9"));
|
||||
} else {
|
||||
headers.put("Accept-Language", Collections.singletonList(localization.getLanguageCode()));
|
||||
}
|
||||
|
||||
return headers;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,66 @@
|
|||
package org.schabi.newpipe.extractor.downloader;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* A Data class used to hold the results from requests made by the Downloader implementation.
|
||||
*/
|
||||
public class Response {
|
||||
private final int responseCode;
|
||||
private final String responseMessage;
|
||||
private final Map<String, List<String>> responseHeaders;
|
||||
private final String responseBody;
|
||||
|
||||
public Response(int responseCode, String responseMessage, Map<String, List<String>> responseHeaders, @Nullable String responseBody) {
|
||||
this.responseCode = responseCode;
|
||||
this.responseMessage = responseMessage;
|
||||
this.responseHeaders = responseHeaders != null ? responseHeaders : Collections.<String, List<String>>emptyMap();
|
||||
|
||||
this.responseBody = responseBody == null ? "" : responseBody;
|
||||
}
|
||||
|
||||
public int responseCode() {
|
||||
return responseCode;
|
||||
}
|
||||
|
||||
public String responseMessage() {
|
||||
return responseMessage;
|
||||
}
|
||||
|
||||
public Map<String, List<String>> responseHeaders() {
|
||||
return responseHeaders;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String responseBody() {
|
||||
return responseBody;
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Utils
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
/**
|
||||
* For easy access to some header value that (usually) don't repeat itself.
|
||||
* <p>For getting all the values associated to the header, use {@link #responseHeaders()} (e.g. {@code Set-Cookie}).
|
||||
*
|
||||
* @param name the name of the header
|
||||
* @return the first value assigned to this header
|
||||
*/
|
||||
@Nullable
|
||||
public String getHeader(String name) {
|
||||
for (Map.Entry<String, List<String>> headerEntry : responseHeaders.entrySet()) {
|
||||
if (headerEntry.getKey().equalsIgnoreCase(name)) {
|
||||
if (headerEntry.getValue().size() > 0) {
|
||||
return headerEntry.getValue().get(0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
|
@ -21,7 +21,14 @@ package org.schabi.newpipe.extractor.exceptions;
|
|||
*/
|
||||
|
||||
public class ReCaptchaException extends ExtractionException {
|
||||
public ReCaptchaException(String message) {
|
||||
private String url;
|
||||
|
||||
public ReCaptchaException(String message, String url) {
|
||||
super(message);
|
||||
this.url = url;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,9 +24,8 @@ import org.schabi.newpipe.extractor.InfoItem;
|
|||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
|
@ -35,9 +34,8 @@ public abstract class KioskExtractor<T extends InfoItem> extends ListExtractor<T
|
|||
|
||||
public KioskExtractor(StreamingService streamingService,
|
||||
ListLinkHandler linkHandler,
|
||||
String kioskId,
|
||||
Localization localization) {
|
||||
super(streamingService, linkHandler, localization);
|
||||
String kioskId) {
|
||||
super(streamingService, linkHandler);
|
||||
this.id = kioskId;
|
||||
}
|
||||
|
||||
|
|
|
@ -20,11 +20,14 @@ package org.schabi.newpipe.extractor.kiosk;
|
|||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.ListExtractor;
|
||||
import org.schabi.newpipe.extractor.ListInfo;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
|
@ -37,7 +40,8 @@ public class KioskInfo extends ListInfo<StreamInfoItem> {
|
|||
|
||||
public static ListExtractor.InfoItemsPage<StreamInfoItem> getMoreItems(StreamingService service,
|
||||
String url,
|
||||
String pageUrl) throws IOException, ExtractionException {
|
||||
String pageUrl)
|
||||
throws IOException, ExtractionException {
|
||||
KioskList kl = service.getKioskList();
|
||||
KioskExtractor extractor = kl.getExtractorByUrl(url, pageUrl);
|
||||
return extractor.getPage(pageUrl);
|
||||
|
@ -47,8 +51,7 @@ public class KioskInfo extends ListInfo<StreamInfoItem> {
|
|||
return getInfo(NewPipe.getServiceByUrl(url), url);
|
||||
}
|
||||
|
||||
public static KioskInfo getInfo(StreamingService service,
|
||||
String url) throws IOException, ExtractionException {
|
||||
public static KioskInfo getInfo(StreamingService service, String url) throws IOException, ExtractionException {
|
||||
KioskList kl = service.getKioskList();
|
||||
KioskExtractor extractor = kl.getExtractorByUrl(url, null);
|
||||
extractor.fetchPage();
|
||||
|
|
|
@ -2,28 +2,33 @@ package org.schabi.newpipe.extractor.kiosk;
|
|||
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public class KioskList {
|
||||
|
||||
public interface KioskExtractorFactory {
|
||||
KioskExtractor createNewKiosk(final StreamingService streamingService,
|
||||
final String url,
|
||||
final String kioskId,
|
||||
final Localization localization)
|
||||
final String kioskId)
|
||||
throws ExtractionException, IOException;
|
||||
}
|
||||
|
||||
private final int service_id;
|
||||
private final StreamingService service;
|
||||
private final HashMap<String, KioskEntry> kioskList = new HashMap<>();
|
||||
private String defaultKiosk = null;
|
||||
|
||||
@Nullable private Localization forcedLocalization;
|
||||
@Nullable private ContentCountry forcedContentCountry;
|
||||
|
||||
private class KioskEntry {
|
||||
public KioskEntry(KioskExtractorFactory ef, ListLinkHandlerFactory h) {
|
||||
extractorFactory = ef;
|
||||
|
@ -33,8 +38,8 @@ public class KioskList {
|
|||
final ListLinkHandlerFactory handlerFactory;
|
||||
}
|
||||
|
||||
public KioskList(int service_id) {
|
||||
this.service_id = service_id;
|
||||
public KioskList(StreamingService service) {
|
||||
this.service = service;
|
||||
}
|
||||
|
||||
public void addKioskEntry(KioskExtractorFactory extractorFactory, ListLinkHandlerFactory handlerFactory, String id)
|
||||
|
@ -89,8 +94,13 @@ public class KioskList {
|
|||
if(ke == null) {
|
||||
throw new ExtractionException("No kiosk found with the type: " + kioskId);
|
||||
} else {
|
||||
return ke.extractorFactory.createNewKiosk(NewPipe.getService(service_id),
|
||||
ke.handlerFactory.fromId(kioskId).getUrl(), kioskId, localization);
|
||||
final KioskExtractor kioskExtractor = ke.extractorFactory.createNewKiosk(service,
|
||||
ke.handlerFactory.fromId(kioskId).getUrl(), kioskId);
|
||||
|
||||
if (forcedLocalization != null) kioskExtractor.forceLocalization(forcedLocalization);
|
||||
if (forcedContentCountry != null) kioskExtractor.forceContentCountry(forcedContentCountry);
|
||||
|
||||
return kioskExtractor;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -108,7 +118,7 @@ public class KioskList {
|
|||
for(Map.Entry<String, KioskEntry> e : kioskList.entrySet()) {
|
||||
KioskEntry ke = e.getValue();
|
||||
if(ke.handlerFactory.acceptUrl(url)) {
|
||||
return getExtractorById(e.getKey(), nextPageUrl, localization);
|
||||
return getExtractorById(ke.handlerFactory.getId(url), nextPageUrl, localization);
|
||||
}
|
||||
}
|
||||
throw new ExtractionException("Could not find a kiosk that fits to the url: " + url);
|
||||
|
@ -117,4 +127,12 @@ public class KioskList {
|
|||
public ListLinkHandlerFactory getListLinkHandlerFactoryByType(String type) {
|
||||
return kioskList.get(type).handlerFactory;
|
||||
}
|
||||
|
||||
public void forceLocalization(@Nullable Localization localization) {
|
||||
this.forcedLocalization = localization;
|
||||
}
|
||||
|
||||
public void forceContentCountry(@Nullable ContentCountry contentCountry) {
|
||||
this.forcedContentCountry = contentCountry;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,9 @@ package org.schabi.newpipe.extractor.linkhandler;
|
|||
|
||||
import java.io.Serializable;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
public class LinkHandler implements Serializable {
|
||||
protected final String originalUrl;
|
||||
protected final String url;
|
||||
|
@ -28,4 +31,8 @@ public class LinkHandler implements Serializable {
|
|||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public String getBaseUrl() throws ParsingException {
|
||||
return Utils.getBaseUrl(url);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.schabi.newpipe.extractor.linkhandler;
|
|||
|
||||
import org.schabi.newpipe.extractor.exceptions.FoundAdException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 26.07.16.
|
||||
|
@ -33,26 +34,41 @@ public abstract class LinkHandlerFactory {
|
|||
public abstract String getUrl(String id) throws ParsingException;
|
||||
public abstract boolean onAcceptUrl(final String url) throws ParsingException;
|
||||
|
||||
public String getUrl(String id, String baseUrl) throws ParsingException{
|
||||
return getUrl(id);
|
||||
}
|
||||
|
||||
///////////////////////////////////
|
||||
// Logic
|
||||
///////////////////////////////////
|
||||
|
||||
public LinkHandler fromUrl(String url) throws ParsingException {
|
||||
final String baseUrl = Utils.getBaseUrl(url);
|
||||
return fromUrl(url, baseUrl);
|
||||
}
|
||||
|
||||
public LinkHandler fromUrl(String url, String baseUrl) throws ParsingException {
|
||||
if(url == null) throw new IllegalArgumentException("url can not be null");
|
||||
if(!acceptUrl(url)) {
|
||||
throw new ParsingException("Malformed unacceptable url: " + url);
|
||||
}
|
||||
|
||||
final String id = getId(url);
|
||||
return new LinkHandler(url, getUrl(id), id);
|
||||
return new LinkHandler(url, getUrl(id,baseUrl), id);
|
||||
}
|
||||
|
||||
|
||||
public LinkHandler fromId(String id) throws ParsingException {
|
||||
if(id == null) throw new IllegalArgumentException("id can not be null");
|
||||
final String url = getUrl(id);
|
||||
return new LinkHandler(url, url, id);
|
||||
}
|
||||
|
||||
public LinkHandler fromId(String id, String baseUrl) throws ParsingException {
|
||||
if(id == null) throw new IllegalArgumentException("id can not be null");
|
||||
final String url = getUrl(id, baseUrl);
|
||||
return new LinkHandler(url, url, id);
|
||||
}
|
||||
|
||||
/**
|
||||
* When a VIEW_ACTION is caught this function will test if the url delivered within the calling
|
||||
* Intent was meant to be watched with this Service.
|
||||
|
@ -65,4 +81,5 @@ public abstract class LinkHandlerFactory {
|
|||
throw fe;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,10 +1,11 @@
|
|||
package org.schabi.newpipe.extractor.linkhandler;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
||||
|
||||
///////////////////////////////////
|
||||
|
@ -14,23 +15,37 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
public List<String> getContentFilter(String url) throws ParsingException { return new ArrayList<>(0);}
|
||||
public String getSortFilter(String url) throws ParsingException {return ""; }
|
||||
public abstract String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException;
|
||||
|
||||
public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl) throws ParsingException {
|
||||
return getUrl(id, contentFilter, sortFilter);
|
||||
}
|
||||
|
||||
///////////////////////////////////
|
||||
// Logic
|
||||
///////////////////////////////////
|
||||
|
||||
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromUrl(String url) throws ParsingException {
|
||||
String baseUrl = Utils.getBaseUrl(url);
|
||||
return fromUrl(url, baseUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromUrl(String url, String baseUrl) throws ParsingException {
|
||||
if(url == null) throw new IllegalArgumentException("url may not be null");
|
||||
|
||||
return new ListLinkHandler(super.fromUrl(url), getContentFilter(url), getSortFilter(url));
|
||||
return new ListLinkHandler(super.fromUrl(url, baseUrl), getContentFilter(url), getSortFilter(url));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromId(String id) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id), new ArrayList<String>(0), "");
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandler fromId(String id, String baseUrl) throws ParsingException {
|
||||
return new ListLinkHandler(super.fromId(id, baseUrl), new ArrayList<String>(0), "");
|
||||
}
|
||||
|
||||
public ListLinkHandler fromQuery(String id,
|
||||
List<String> contentFilters,
|
||||
|
@ -38,8 +53,15 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
final String url = getUrl(id, contentFilters, sortFilter);
|
||||
return new ListLinkHandler(url, url, id, contentFilters, sortFilter);
|
||||
}
|
||||
|
||||
public ListLinkHandler fromQuery(String id,
|
||||
List<String> contentFilters,
|
||||
String sortFilter, String baseUrl) throws ParsingException {
|
||||
final String url = getUrl(id, contentFilters, sortFilter, baseUrl);
|
||||
return new ListLinkHandler(url, url, id, contentFilters, sortFilter);
|
||||
}
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* For makeing ListLinkHandlerFactory compatible with LinkHandlerFactory we need to override this,
|
||||
* however it should not be overridden by the actual implementation.
|
||||
|
@ -50,6 +72,11 @@ public abstract class ListLinkHandlerFactory extends LinkHandlerFactory {
|
|||
return getUrl(id, new ArrayList<String>(0), "");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl(String id, String baseUrl) throws ParsingException {
|
||||
return getUrl(id, new ArrayList<String>(0), "", baseUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Will returns content filter the corresponding extractor can handle like "channels", "videos", "music", etc.
|
||||
*
|
||||
|
|
|
@ -0,0 +1,56 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.Serializable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Represents a country that should be used when fetching content.
|
||||
* <p>
|
||||
* YouTube, for example, give different results in their feed depending on which country is selected.
|
||||
* </p>
|
||||
*/
|
||||
public class ContentCountry implements Serializable {
|
||||
public static final ContentCountry DEFAULT = new ContentCountry(Localization.DEFAULT.getCountryCode());
|
||||
|
||||
@Nonnull private final String countryCode;
|
||||
|
||||
public static List<ContentCountry> listFrom(String... countryCodeList) {
|
||||
final List<ContentCountry> toReturn = new ArrayList<>();
|
||||
for (String countryCode : countryCodeList) {
|
||||
toReturn.add(new ContentCountry(countryCode));
|
||||
}
|
||||
return Collections.unmodifiableList(toReturn);
|
||||
}
|
||||
|
||||
public ContentCountry(@Nonnull String countryCode) {
|
||||
this.countryCode = countryCode;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String getCountryCode() {
|
||||
return countryCode;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return getCountryCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof ContentCountry)) return false;
|
||||
|
||||
ContentCountry that = (ContentCountry) o;
|
||||
|
||||
return countryCode.equals(that.countryCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return countryCode.hashCode();
|
||||
}
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import edu.umd.cs.findbugs.annotations.NonNull;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.Calendar;
|
||||
|
||||
/**
|
||||
* A wrapper class that provides a field to describe if the date is precise or just an approximation.
|
||||
*/
|
||||
public class DateWrapper implements Serializable {
|
||||
@NonNull private final Calendar date;
|
||||
private final boolean isApproximation;
|
||||
|
||||
public DateWrapper(@NonNull Calendar date) {
|
||||
this(date, false);
|
||||
}
|
||||
|
||||
public DateWrapper(@NonNull Calendar date, boolean isApproximation) {
|
||||
this.date = date;
|
||||
this.isApproximation = isApproximation;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return the wrapped date.
|
||||
*/
|
||||
@NonNull
|
||||
public Calendar date() {
|
||||
return date;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return if the date is considered is precise or just an approximation (e.g. service only returns an approximation
|
||||
* like 2 weeks ago instead of a precise date).
|
||||
*/
|
||||
public boolean isApproximation() {
|
||||
return isApproximation;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,99 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.Serializable;
|
||||
import java.util.*;
|
||||
|
||||
public class Localization implements Serializable {
|
||||
public static final Localization DEFAULT = new Localization("en", "GB");
|
||||
|
||||
@Nonnull private final String languageCode;
|
||||
@Nullable private final String countryCode;
|
||||
|
||||
/**
|
||||
* @param localizationCodeList a list of localization code, formatted like {@link #getLocalizationCode()}
|
||||
*/
|
||||
public static List<Localization> listFrom(String... localizationCodeList) {
|
||||
final List<Localization> toReturn = new ArrayList<>();
|
||||
for (String localizationCode : localizationCodeList) {
|
||||
toReturn.add(fromLocalizationCode(localizationCode));
|
||||
}
|
||||
return Collections.unmodifiableList(toReturn);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param localizationCode a localization code, formatted like {@link #getLocalizationCode()}
|
||||
*/
|
||||
public static Localization fromLocalizationCode(String localizationCode) {
|
||||
final int indexSeparator = localizationCode.indexOf("-");
|
||||
|
||||
final String languageCode, countryCode;
|
||||
if (indexSeparator != -1) {
|
||||
languageCode = localizationCode.substring(0, indexSeparator);
|
||||
countryCode = localizationCode.substring(indexSeparator + 1);
|
||||
} else {
|
||||
languageCode = localizationCode;
|
||||
countryCode = null;
|
||||
}
|
||||
|
||||
return new Localization(languageCode, countryCode);
|
||||
}
|
||||
|
||||
public Localization(@Nonnull String languageCode, @Nullable String countryCode) {
|
||||
this.languageCode = languageCode;
|
||||
this.countryCode = countryCode;
|
||||
}
|
||||
|
||||
public Localization(@Nonnull String languageCode) {
|
||||
this(languageCode, null);
|
||||
}
|
||||
|
||||
public String getLanguageCode() {
|
||||
return languageCode;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String getCountryCode() {
|
||||
return countryCode == null ? "" : countryCode;
|
||||
}
|
||||
|
||||
public Locale asLocale() {
|
||||
return new Locale(getLanguageCode(), getCountryCode());
|
||||
}
|
||||
|
||||
public static Localization fromLocale(@Nonnull Locale locale) {
|
||||
return new Localization(locale.getLanguage(), locale.getCountry());
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a formatted string in the form of: {@code language-Country}, or
|
||||
* just {@code language} if country is {@code null}.
|
||||
*/
|
||||
public String getLocalizationCode() {
|
||||
return languageCode + (countryCode == null ? "" : "-" + countryCode);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "Localization[" + getLocalizationCode() + "]";
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof Localization)) return false;
|
||||
|
||||
Localization that = (Localization) o;
|
||||
|
||||
if (!languageCode.equals(that.languageCode)) return false;
|
||||
return countryCode != null ? countryCode.equals(that.countryCode) : that.countryCode == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
int result = languageCode.hashCode();
|
||||
result = 31 * result + (countryCode != null ? countryCode.hashCode() : 0);
|
||||
return result;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,174 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.timeago.PatternsHolder;
|
||||
import org.schabi.newpipe.extractor.timeago.TimeAgoUnit;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import java.util.Calendar;
|
||||
import java.util.Collection;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/**
|
||||
* A helper class that is meant to be used by services that need to parse upload dates in the
|
||||
* format '2 days ago' or similar.
|
||||
*/
|
||||
public class TimeAgoParser {
|
||||
private final PatternsHolder patternsHolder;
|
||||
private final Calendar consistentNow;
|
||||
|
||||
/**
|
||||
* Creates a helper to parse upload dates in the format '2 days ago'.
|
||||
* <p>
|
||||
* Instantiate a new {@link TimeAgoParser} every time you extract a new batch of items.
|
||||
* </p>
|
||||
* @param patternsHolder An object that holds the "time ago" patterns, special cases, and the language word separator.
|
||||
*/
|
||||
public TimeAgoParser(PatternsHolder patternsHolder) {
|
||||
this.patternsHolder = patternsHolder;
|
||||
consistentNow = Calendar.getInstance();
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a textual date in the format '2 days ago' into a Calendar representation which is then wrapped in a
|
||||
* {@link DateWrapper} object.
|
||||
* <p>
|
||||
* Beginning with days ago, the date is considered as an approximation.
|
||||
*
|
||||
* @param textualDate The original date as provided by the streaming service
|
||||
* @return The parsed time (can be approximated)
|
||||
* @throws ParsingException if the time unit could not be recognized
|
||||
*/
|
||||
public DateWrapper parse(String textualDate) throws ParsingException {
|
||||
for (Map.Entry<TimeAgoUnit, Map<String, Integer>> caseUnitEntry : patternsHolder.specialCases().entrySet()) {
|
||||
final TimeAgoUnit timeAgoUnit = caseUnitEntry.getKey();
|
||||
for (Map.Entry<String, Integer> caseMapToAmountEntry : caseUnitEntry.getValue().entrySet()) {
|
||||
final String caseText = caseMapToAmountEntry.getKey();
|
||||
final Integer caseAmount = caseMapToAmountEntry.getValue();
|
||||
|
||||
if (textualDateMatches(textualDate, caseText)) {
|
||||
return getResultFor(caseAmount, timeAgoUnit);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
int timeAgoAmount;
|
||||
try {
|
||||
timeAgoAmount = parseTimeAgoAmount(textualDate);
|
||||
} catch (NumberFormatException e) {
|
||||
// If there is no valid number in the textual date,
|
||||
// assume it is 1 (as in 'a second ago').
|
||||
timeAgoAmount = 1;
|
||||
}
|
||||
|
||||
final TimeAgoUnit timeAgoUnit = parseTimeAgoUnit(textualDate);
|
||||
return getResultFor(timeAgoAmount, timeAgoUnit);
|
||||
}
|
||||
|
||||
private int parseTimeAgoAmount(String textualDate) throws NumberFormatException {
|
||||
String timeValueStr = textualDate.replaceAll("\\D+", "");
|
||||
return Integer.parseInt(timeValueStr);
|
||||
}
|
||||
|
||||
private TimeAgoUnit parseTimeAgoUnit(String textualDate) throws ParsingException {
|
||||
for (Map.Entry<TimeAgoUnit, Collection<String>> entry : patternsHolder.asMap().entrySet()) {
|
||||
final TimeAgoUnit timeAgoUnit = entry.getKey();
|
||||
|
||||
for (String agoPhrase : entry.getValue()) {
|
||||
if (textualDateMatches(textualDate, agoPhrase)) {
|
||||
return timeAgoUnit;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
throw new ParsingException("Unable to parse the date: " + textualDate);
|
||||
}
|
||||
|
||||
private boolean textualDateMatches(String textualDate, String agoPhrase) {
|
||||
if (textualDate.equals(agoPhrase)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (patternsHolder.wordSeparator().isEmpty()) {
|
||||
return textualDate.toLowerCase().contains(agoPhrase.toLowerCase());
|
||||
} else {
|
||||
final String escapedPhrase = Pattern.quote(agoPhrase.toLowerCase());
|
||||
final String escapedSeparator;
|
||||
if (patternsHolder.wordSeparator().equals(" ")) {
|
||||
// From JDK8 → \h - Treat horizontal spaces as a normal one (non-breaking space, thin space, etc.)
|
||||
escapedSeparator = "[ \\t\\xA0\\u1680\\u180e\\u2000-\\u200a\\u202f\\u205f\\u3000]";
|
||||
} else {
|
||||
escapedSeparator = Pattern.quote(patternsHolder.wordSeparator());
|
||||
}
|
||||
|
||||
// (^|separator)pattern($|separator)
|
||||
// Check if the pattern is surrounded by separators or start/end of the string.
|
||||
final String pattern =
|
||||
"(^|" + escapedSeparator + ")" + escapedPhrase + "($|" + escapedSeparator + ")";
|
||||
|
||||
return Parser.isMatch(pattern, textualDate.toLowerCase());
|
||||
}
|
||||
}
|
||||
|
||||
private DateWrapper getResultFor(int timeAgoAmount, TimeAgoUnit timeAgoUnit) {
|
||||
final Calendar calendarTime = getNow();
|
||||
boolean isApproximation = false;
|
||||
|
||||
switch (timeAgoUnit) {
|
||||
case SECONDS:
|
||||
calendarTime.add(Calendar.SECOND, -timeAgoAmount);
|
||||
break;
|
||||
|
||||
case MINUTES:
|
||||
calendarTime.add(Calendar.MINUTE, -timeAgoAmount);
|
||||
break;
|
||||
|
||||
case HOURS:
|
||||
calendarTime.add(Calendar.HOUR_OF_DAY, -timeAgoAmount);
|
||||
break;
|
||||
|
||||
case DAYS:
|
||||
calendarTime.add(Calendar.DAY_OF_MONTH, -timeAgoAmount);
|
||||
isApproximation = true;
|
||||
break;
|
||||
|
||||
case WEEKS:
|
||||
calendarTime.add(Calendar.WEEK_OF_YEAR, -timeAgoAmount);
|
||||
isApproximation = true;
|
||||
break;
|
||||
|
||||
case MONTHS:
|
||||
calendarTime.add(Calendar.MONTH, -timeAgoAmount);
|
||||
isApproximation = true;
|
||||
break;
|
||||
|
||||
case YEARS:
|
||||
calendarTime.add(Calendar.YEAR, -timeAgoAmount);
|
||||
// Prevent `PrettyTime` from showing '12 months ago'.
|
||||
calendarTime.add(Calendar.DAY_OF_MONTH, -1);
|
||||
isApproximation = true;
|
||||
break;
|
||||
}
|
||||
|
||||
if (isApproximation) {
|
||||
markApproximatedTime(calendarTime);
|
||||
}
|
||||
|
||||
return new DateWrapper(calendarTime, isApproximation);
|
||||
}
|
||||
|
||||
private Calendar getNow() {
|
||||
return (Calendar) consistentNow.clone();
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the time as approximated by setting minutes, seconds and milliseconds to 0.
|
||||
* @param calendarTime Time to be marked as approximated
|
||||
*/
|
||||
private void markApproximatedTime(Calendar calendarTime) {
|
||||
calendarTime.set(Calendar.MINUTE, 0);
|
||||
calendarTime.set(Calendar.SECOND, 0);
|
||||
calendarTime.set(Calendar.MILLISECOND, 0);
|
||||
}
|
||||
}
|
|
@ -0,0 +1,25 @@
|
|||
package org.schabi.newpipe.extractor.localization;
|
||||
|
||||
import org.schabi.newpipe.extractor.timeago.PatternsHolder;
|
||||
import org.schabi.newpipe.extractor.timeago.PatternsManager;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public class TimeAgoPatternsManager {
|
||||
@Nullable
|
||||
private static PatternsHolder getPatternsFor(@Nonnull Localization localization) {
|
||||
return PatternsManager.getPatterns(localization.getLanguageCode(), localization.getCountryCode());
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public static TimeAgoParser getTimeAgoParserFor(@Nonnull Localization localization) {
|
||||
final PatternsHolder holder = getPatternsFor(localization);
|
||||
|
||||
if (holder == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new TimeAgoParser(holder);
|
||||
}
|
||||
}
|
|
@ -5,12 +5,11 @@ import org.schabi.newpipe.extractor.StreamingService;
|
|||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
public abstract class PlaylistExtractor extends ListExtractor<StreamInfoItem> {
|
||||
|
||||
public PlaylistExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public PlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
public abstract String getThumbnailUrl() throws ParsingException;
|
||||
|
|
|
@ -9,9 +9,10 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
|||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
||||
|
||||
|
@ -46,6 +47,9 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
extractor.getServiceId(),
|
||||
extractor.getLinkHandler(),
|
||||
extractor.getName());
|
||||
// collect uploader extraction failures until we are sure this is not
|
||||
// just a playlist without an uploader
|
||||
List<Throwable> uploaderParsingErrors = new ArrayList<Throwable>(3);
|
||||
|
||||
try {
|
||||
info.setOriginalUrl(extractor.getOriginalUrl());
|
||||
|
@ -65,23 +69,31 @@ public class PlaylistInfo extends ListInfo<StreamInfoItem> {
|
|||
try {
|
||||
info.setUploaderUrl(extractor.getUploaderUrl());
|
||||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
info.setUploaderUrl("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setUploaderName(extractor.getUploaderName());
|
||||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
info.setUploaderName("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setUploaderAvatarUrl(extractor.getUploaderAvatarUrl());
|
||||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
info.setUploaderAvatarUrl("");
|
||||
uploaderParsingErrors.add(e);
|
||||
}
|
||||
try {
|
||||
info.setBannerUrl(extractor.getBannerUrl());
|
||||
} catch (Exception e) {
|
||||
info.addError(e);
|
||||
}
|
||||
// do not fail if everything but the uploader infos could be collected
|
||||
if (uploaderParsingErrors.size() > 0 &&
|
||||
(!info.getErrors().isEmpty() || uploaderParsingErrors.size() < 3)) {
|
||||
info.addAllErrors(uploaderParsingErrors);
|
||||
}
|
||||
|
||||
final InfoItemsPage<StreamInfoItem> itemsPage = ExtractorHelper.getItemsPageOrLogError(info, extractor);
|
||||
info.setRelatedItems(itemsPage.getItems());
|
||||
|
|
|
@ -6,7 +6,8 @@ import org.schabi.newpipe.extractor.StreamingService;
|
|||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
||||
|
||||
|
@ -18,10 +19,8 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
|||
|
||||
private final InfoItemsSearchCollector collector;
|
||||
|
||||
public SearchExtractor(StreamingService service,
|
||||
SearchQueryHandler linkHandler,
|
||||
Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public SearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
collector = new InfoItemsSearchCollector(service.getServiceId());
|
||||
}
|
||||
|
||||
|
@ -40,6 +39,7 @@ public abstract class SearchExtractor extends ListExtractor<InfoItem> {
|
|||
return (SearchQueryHandler) super.getLinkHandler();
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getName() {
|
||||
return getLinkHandler().getSearchString();
|
||||
|
|
|
@ -7,7 +7,6 @@ import org.schabi.newpipe.extractor.StreamingService;
|
|||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
|
|
@ -1,24 +1,12 @@
|
|||
package org.schabi.newpipe.extractor.services.media_ccc;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.*;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCConferenceExtractor;
|
||||
|
@ -31,7 +19,13 @@ import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearc
|
|||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCStreamLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
|
||||
|
||||
public class MediaCCCService extends StreamingService {
|
||||
public MediaCCCService(int id) {
|
||||
|
@ -39,8 +33,8 @@ public class MediaCCCService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler query, Localization localization) {
|
||||
return new MediaCCCSearchExtractor(this, query, localization);
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler query) {
|
||||
return new MediaCCCSearchExtractor(this, query);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -64,28 +58,28 @@ public class MediaCCCService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler, Localization localization) {
|
||||
return new MediaCCCStreamExtractor(this, linkHandler, localization);
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler) {
|
||||
return new MediaCCCStreamExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
return new MediaCCCConferenceExtractor(this, linkHandler, localization);
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) {
|
||||
return new MediaCCCConferenceExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionExtractor getSuggestionExtractor(Localization localization) {
|
||||
public SuggestionExtractor getSuggestionExtractor() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public KioskList getKioskList() throws ExtractionException {
|
||||
KioskList list = new KioskList(getServiceId());
|
||||
KioskList list = new KioskList(this);
|
||||
|
||||
// add kiosks here e.g.:
|
||||
try {
|
||||
|
@ -93,10 +87,9 @@ public class MediaCCCService extends StreamingService {
|
|||
@Override
|
||||
public KioskExtractor createNewKiosk(StreamingService streamingService,
|
||||
String url,
|
||||
String kioskId,
|
||||
Localization localization) throws ExtractionException, IOException {
|
||||
String kioskId) throws ExtractionException, IOException {
|
||||
return new MediaCCCConferenceKiosk(MediaCCCService.this,
|
||||
new MediaCCCConferencesListLinkHandlerFactory().fromUrl(url), kioskId, localization);
|
||||
new MediaCCCConferencesListLinkHandlerFactory().fromUrl(url), kioskId);
|
||||
}
|
||||
}, new MediaCCCConferencesListLinkHandlerFactory(), "conferences");
|
||||
list.setDefaultKiosk("conferences");
|
||||
|
@ -118,9 +111,14 @@ public class MediaCCCService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler, Localization localization)
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getBaseUrl() {
|
||||
return "https://media.ccc.de";
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -4,16 +4,15 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems.MediaCCCStreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -22,8 +21,8 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
|
|||
|
||||
private JsonObject conferenceData;
|
||||
|
||||
public MediaCCCConferenceExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public MediaCCCConferenceExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -75,7 +74,7 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
|
|||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
try {
|
||||
conferenceData = JsonParser.object().from(downloader.download(getUrl()));
|
||||
conferenceData = JsonParser.object().from(downloader.get(getUrl()).responseBody());
|
||||
} catch (JsonParserException jpe) {
|
||||
throw new ExtractionException("Could not parse json returnd by url: " + getUrl());
|
||||
}
|
||||
|
@ -87,6 +86,7 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
|
|||
return conferenceData.getString("title");
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return "https://media.ccc.de/c/" + conferenceData.getString("acronym");
|
||||
|
|
|
@ -4,16 +4,15 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems.MediaCCCConferenceInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -24,9 +23,8 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
|
|||
|
||||
public MediaCCCConferenceKiosk(StreamingService streamingService,
|
||||
ListLinkHandler linkHandler,
|
||||
String kioskId,
|
||||
Localization localization) {
|
||||
super(streamingService, linkHandler, kioskId, localization);
|
||||
String kioskId) {
|
||||
super(streamingService, linkHandler, kioskId);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
|
@ -53,7 +51,7 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
|
|||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
String site = downloader.download(getLinkHandler().getUrl());
|
||||
String site = downloader.get(getLinkHandler().getUrl(), getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
doc = JsonParser.object().from(site);
|
||||
} catch (JsonParserException jpe) {
|
||||
|
|
|
@ -0,0 +1,27 @@
|
|||
package org.schabi.newpipe.extractor.services.media_ccc.extractors;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
|
||||
public class MediaCCCParsingHelper {
|
||||
private MediaCCCParsingHelper() {
|
||||
}
|
||||
|
||||
public static Calendar parseDateFrom(String textualUploadDate) throws ParsingException {
|
||||
Date date;
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy-MM-dd").parse(textualUploadDate);
|
||||
} catch (ParseException e) {
|
||||
throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"", e);
|
||||
}
|
||||
|
||||
final Calendar uploadDate = Calendar.getInstance();
|
||||
uploadDate.setTime(date);
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
}
|
|
@ -4,11 +4,11 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItem;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
|
@ -16,26 +16,24 @@ import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
|||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems.MediaCCCStreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCConferencesListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.CONFERENCES;
|
||||
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.EVENTS;
|
||||
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.ALL;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
|
||||
import static org.schabi.newpipe.extractor.services.media_ccc.linkHandler.MediaCCCSearchQueryHandlerFactory.*;
|
||||
|
||||
public class MediaCCCSearchExtractor extends SearchExtractor {
|
||||
|
||||
private JsonObject doc;
|
||||
private MediaCCCConferenceKiosk conferenceKiosk;
|
||||
|
||||
public MediaCCCSearchExtractor(StreamingService service, SearchQueryHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public MediaCCCSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
try {
|
||||
conferenceKiosk = new MediaCCCConferenceKiosk(service,
|
||||
new MediaCCCConferencesListLinkHandlerFactory().fromId("conferences"),
|
||||
"conferences",
|
||||
localization);
|
||||
"conferences");
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
@ -88,7 +86,7 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
|
|||
|| getLinkHandler().getContentFilters().isEmpty()) {
|
||||
final String site;
|
||||
final String url = getUrl();
|
||||
site = downloader.download(url, getLocalization());
|
||||
site = downloader.get(url, getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
doc = JsonParser.object().from(site);
|
||||
} catch (JsonParserException jpe) {
|
||||
|
|
|
@ -4,15 +4,14 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.stream.*;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -24,16 +23,22 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
|
|||
private JsonObject data;
|
||||
private JsonObject conferenceData;
|
||||
|
||||
public MediaCCCStreamExtractor(StreamingService service, LinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public MediaCCCStreamExtractor(StreamingService service, LinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return data.getString("release_date");
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
return new DateWrapper(MediaCCCParsingHelper.parseDateFrom(getTextualUploadDate()));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
|
@ -200,9 +205,9 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
|
|||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
try {
|
||||
data = JsonParser.object().from(
|
||||
downloader.download(getLinkHandler().getUrl()));
|
||||
downloader.get(getLinkHandler().getUrl()).responseBody());
|
||||
conferenceData = JsonParser.object()
|
||||
.from(downloader.download(getUploaderUrl()));
|
||||
.from(downloader.get(getUploaderUrl()).responseBody());
|
||||
} catch (JsonParserException jpe) {
|
||||
throw new ExtractionException("Could not parse json returned by url: " + getLinkHandler().getUrl(), jpe);
|
||||
}
|
||||
|
@ -215,6 +220,7 @@ public class MediaCCCStreamExtractor extends StreamExtractor {
|
|||
return data.getString("title");
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return data.getString("frontend_link");
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
package org.schabi.newpipe.extractor.services.media_ccc.extractors;
|
||||
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -10,8 +10,8 @@ import java.util.List;
|
|||
|
||||
public class MediaCCCSuggestionExtractor extends SuggestionExtractor {
|
||||
|
||||
public MediaCCCSuggestionExtractor(int serviceId, Localization localization) {
|
||||
super(serviceId, localization);
|
||||
public MediaCCCSuggestionExtractor(StreamingService service) {
|
||||
super(service);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -2,9 +2,13 @@ package org.schabi.newpipe.extractor.services.media_ccc.extractors.infoItems;
|
|||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.media_ccc.extractors.MediaCCCParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public class MediaCCCStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
||||
|
||||
JsonObject event;
|
||||
|
@ -44,11 +48,18 @@ public class MediaCCCStreamInfoItemExtractor implements StreamInfoItemExtractor
|
|||
return event.getString("conference_url");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return event.getString("release_date");
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
return new DateWrapper(MediaCCCParsingHelper.parseDateFrom(getTextualUploadDate()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return event.getString("title");
|
||||
|
|
|
@ -0,0 +1,63 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
public class PeertubeInstance {
|
||||
|
||||
private final String url;
|
||||
private String name;
|
||||
public static final PeertubeInstance defaultInstance = new PeertubeInstance("https://framatube.org", "FramaTube");
|
||||
|
||||
public PeertubeInstance(String url) {
|
||||
this.url = url;
|
||||
this.name = "PeerTube";
|
||||
}
|
||||
|
||||
public PeertubeInstance(String url , String name) {
|
||||
this.url = url;
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
public void fetchInstanceMetaData() throws Exception {
|
||||
Downloader downloader = NewPipe.getDownloader();
|
||||
Response response = null;
|
||||
|
||||
try {
|
||||
response = downloader.get(url + "/api/v1/config");
|
||||
} catch (ReCaptchaException | IOException e) {
|
||||
throw new Exception("unable to configure instance " + url, e);
|
||||
}
|
||||
|
||||
if(null == response || StringUtil.isBlank(response.responseBody())) {
|
||||
throw new Exception("unable to configure instance " + url);
|
||||
}
|
||||
|
||||
try {
|
||||
JsonObject json = JsonParser.object().from(response.responseBody());
|
||||
this.name = JsonUtils.getString(json, "instance.name");
|
||||
} catch (JsonParserException | ParsingException e) {
|
||||
throw new Exception("unable to parse instance config", e);
|
||||
}
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,39 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube;
|
||||
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
|
||||
public class PeertubeParsingHelper {
|
||||
|
||||
private PeertubeParsingHelper() {
|
||||
}
|
||||
|
||||
public static void validate(JsonObject json) throws ContentNotAvailableException {
|
||||
String error = json.getString("error");
|
||||
if(!StringUtil.isBlank(error)) {
|
||||
throw new ContentNotAvailableException(error);
|
||||
}
|
||||
}
|
||||
|
||||
public static Calendar parseDateFrom(String textualUploadDate) throws ParsingException {
|
||||
Date date;
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.S'Z'").parse(textualUploadDate);
|
||||
} catch (ParseException e) {
|
||||
throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"", e);
|
||||
}
|
||||
|
||||
final Calendar uploadDate = Calendar.getInstance();
|
||||
uploadDate.setTime(date);
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,160 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube;
|
||||
|
||||
import static java.util.Arrays.asList;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.COMMENTS;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskList;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeCommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeStreamExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeSuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeTrendingExtractor;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeChannelLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeCommentsLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeStreamLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeTrendingLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
public class PeertubeService extends StreamingService {
|
||||
|
||||
private PeertubeInstance instance;
|
||||
|
||||
public PeertubeService(int id) {
|
||||
this(id, PeertubeInstance.defaultInstance);
|
||||
}
|
||||
|
||||
public PeertubeService(int id, PeertubeInstance instance) {
|
||||
super(id, "PeerTube", asList(VIDEO, COMMENTS));
|
||||
this.instance = instance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LinkHandlerFactory getStreamLHFactory() {
|
||||
return PeertubeStreamLinkHandlerFactory.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandlerFactory getChannelLHFactory() {
|
||||
return PeertubeChannelLinkHandlerFactory.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandlerFactory getPlaylistLHFactory() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchQueryHandlerFactory getSearchQHFactory() {
|
||||
return PeertubeSearchQueryHandlerFactory.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public ListLinkHandlerFactory getCommentsLHFactory() {
|
||||
return PeertubeCommentsLinkHandlerFactory.getInstance();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler) {
|
||||
return new PeertubeSearchExtractor(this, queryHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionExtractor getSuggestionExtractor() {
|
||||
return new PeertubeSuggestionExtractor(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SubscriptionExtractor getSubscriptionExtractor() {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return new PeertubeChannelExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return new PeertubeStreamExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return new PeertubeCommentsExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getBaseUrl() {
|
||||
return instance.getUrl();
|
||||
}
|
||||
|
||||
public PeertubeInstance getInstance() {
|
||||
return this.instance;
|
||||
}
|
||||
|
||||
public void setInstance(PeertubeInstance instance) {
|
||||
this.instance = instance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public KioskList getKioskList() throws ExtractionException {
|
||||
KioskList.KioskExtractorFactory kioskFactory = new KioskList.KioskExtractorFactory() {
|
||||
@Override
|
||||
public KioskExtractor createNewKiosk(StreamingService streamingService,
|
||||
String url,
|
||||
String id)
|
||||
throws ExtractionException {
|
||||
return new PeertubeTrendingExtractor(PeertubeService.this,
|
||||
new PeertubeTrendingLinkHandlerFactory().fromId(id), id);
|
||||
}
|
||||
};
|
||||
|
||||
KioskList list = new KioskList(this);
|
||||
|
||||
// add kiosks here e.g.:
|
||||
final PeertubeTrendingLinkHandlerFactory h = new PeertubeTrendingLinkHandlerFactory();
|
||||
try {
|
||||
list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_TRENDING);
|
||||
list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_MOST_LIKED);
|
||||
list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_RECENT);
|
||||
list.addKioskEntry(kioskFactory, h, PeertubeTrendingLinkHandlerFactory.KIOSK_LOCAL);
|
||||
list.setDefaultKiosk(PeertubeTrendingLinkHandlerFactory.KIOSK_TRENDING);
|
||||
} catch (Exception e) {
|
||||
throw new ExtractionException(e);
|
||||
}
|
||||
|
||||
return list;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,188 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
public class PeertubeChannelExtractor extends ChannelExtractor {
|
||||
|
||||
private static final String START_KEY = "start";
|
||||
private static final String COUNT_KEY = "count";
|
||||
private static final int ITEMS_PER_PAGE = 12;
|
||||
private static final String START_PATTERN = "start=(\\d*)";
|
||||
|
||||
private InfoItemsPage<StreamInfoItem> initPage;
|
||||
private long total;
|
||||
|
||||
private JsonObject json;
|
||||
private final String baseUrl;
|
||||
|
||||
public PeertubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler) throws ParsingException {
|
||||
super(service, linkHandler);
|
||||
this.baseUrl = getBaseUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAvatarUrl() throws ParsingException {
|
||||
String value;
|
||||
try {
|
||||
value = JsonUtils.getString(json, "avatar.path");
|
||||
}catch(Exception e) {
|
||||
value = "/client/assets/images/default-avatar.png";
|
||||
}
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getBannerUrl() throws ParsingException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getFeedUrl() throws ParsingException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSubscriberCount() throws ParsingException {
|
||||
Number number = JsonUtils.getNumber(json, "followersCount");
|
||||
return number.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() throws ParsingException {
|
||||
try {
|
||||
return JsonUtils.getString(json, "description");
|
||||
}catch(ParsingException e) {
|
||||
return "No description";
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage;
|
||||
}
|
||||
|
||||
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
|
||||
JsonArray contents;
|
||||
try {
|
||||
contents = (JsonArray) JsonUtils.getValue(json, "data");
|
||||
}catch(Exception e) {
|
||||
throw new ParsingException("unable to extract channel streams", e);
|
||||
}
|
||||
|
||||
for(Object c: contents) {
|
||||
if(c instanceof JsonObject) {
|
||||
final JsonObject item = (JsonObject) c;
|
||||
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
|
||||
collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage.getNextPageUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
Response response = getDownloader().get(pageUrl);
|
||||
JsonObject json = null;
|
||||
if(null != response && !StringUtil.isBlank(response.responseBody())) {
|
||||
try {
|
||||
json = JsonParser.object().from(response.responseBody());
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse json data for kiosk info", e);
|
||||
}
|
||||
}
|
||||
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
if(json != null) {
|
||||
PeertubeParsingHelper.validate(json);
|
||||
Number number = JsonUtils.getNumber(json, "total");
|
||||
if(number != null) this.total = number.longValue();
|
||||
collectStreamsFrom(collector, json, pageUrl);
|
||||
} else {
|
||||
throw new ExtractionException("Unable to get peertube kiosk info");
|
||||
}
|
||||
return new InfoItemsPage<>(collector, getNextPageUrl(pageUrl));
|
||||
}
|
||||
|
||||
|
||||
private String getNextPageUrl(String prevPageUrl) {
|
||||
String prevStart;
|
||||
try {
|
||||
prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
|
||||
} catch (RegexException e) {
|
||||
return "";
|
||||
}
|
||||
if(StringUtil.isBlank(prevStart)) return "";
|
||||
long nextStart = 0;
|
||||
try {
|
||||
nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
|
||||
} catch (NumberFormatException e) {
|
||||
return "";
|
||||
}
|
||||
|
||||
if(nextStart >= total) {
|
||||
return "";
|
||||
}else {
|
||||
return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
Response response = downloader.get(getUrl());
|
||||
if(null != response && null != response.responseBody()) {
|
||||
setInitialData(response.responseBody());
|
||||
}else {
|
||||
throw new ExtractionException("Unable to extract peertube channel data");
|
||||
}
|
||||
|
||||
String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
|
||||
this.initPage = getPage(pageUrl);
|
||||
}
|
||||
|
||||
private void setInitialData(String responseBody) throws ExtractionException {
|
||||
try {
|
||||
json = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ExtractionException("Unable to extract peertube channel data", e);
|
||||
}
|
||||
if(null == json) throw new ExtractionException("Unable to extract peertube channel data");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return JsonUtils.getString(json, "displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return baseUrl + "/accounts/" + getId();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,123 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItem;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
|
||||
public class PeertubeCommentsExtractor extends CommentsExtractor {
|
||||
|
||||
private static final String START_KEY = "start";
|
||||
private static final String COUNT_KEY = "count";
|
||||
private static final int ITEMS_PER_PAGE = 12;
|
||||
private static final String START_PATTERN = "start=(\\d*)";
|
||||
|
||||
private InfoItemsPage<CommentsInfoItem> initPage;
|
||||
private long total;
|
||||
|
||||
public PeertubeCommentsExtractor(StreamingService service, ListLinkHandler uiHandler) {
|
||||
super(service, uiHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return "Comments";
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<CommentsInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage;
|
||||
}
|
||||
|
||||
private void collectStreamsFrom(CommentsInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
|
||||
JsonArray contents;
|
||||
try {
|
||||
contents = (JsonArray) JsonUtils.getValue(json, "data");
|
||||
}catch(Exception e) {
|
||||
throw new ParsingException("unable to extract comments info", e);
|
||||
}
|
||||
|
||||
for(Object c: contents) {
|
||||
if(c instanceof JsonObject) {
|
||||
final JsonObject item = (JsonObject) c;
|
||||
PeertubeCommentsInfoItemExtractor extractor = new PeertubeCommentsInfoItemExtractor(item, this);
|
||||
collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage.getNextPageUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<CommentsInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
Response response = getDownloader().get(pageUrl);
|
||||
JsonObject json = null;
|
||||
if(null != response && !StringUtil.isBlank(response.responseBody())) {
|
||||
try {
|
||||
json = JsonParser.object().from(response.responseBody());
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse json data for comments info", e);
|
||||
}
|
||||
}
|
||||
|
||||
CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
|
||||
if(json != null) {
|
||||
Number number = JsonUtils.getNumber(json, "total");
|
||||
if(number != null) this.total = number.longValue();
|
||||
collectStreamsFrom(collector, json, pageUrl);
|
||||
} else {
|
||||
throw new ExtractionException("Unable to get peertube comments info");
|
||||
}
|
||||
return new InfoItemsPage<>(collector, getNextPageUrl(pageUrl));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
String pageUrl = getUrl() + "?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
|
||||
this.initPage = getPage(pageUrl);
|
||||
}
|
||||
|
||||
private String getNextPageUrl(String prevPageUrl) {
|
||||
String prevStart;
|
||||
try {
|
||||
prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
|
||||
} catch (RegexException e) {
|
||||
return "";
|
||||
}
|
||||
if(StringUtil.isBlank(prevStart)) return "";
|
||||
long nextStart = 0;
|
||||
try {
|
||||
nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
|
||||
} catch (NumberFormatException e) {
|
||||
return "";
|
||||
}
|
||||
|
||||
if(nextStart >= total) {
|
||||
return "";
|
||||
}else {
|
||||
return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,104 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.schabi.newpipe.extractor.ServiceList;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
|
||||
|
||||
public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
|
||||
|
||||
private final JsonObject item;
|
||||
private final String url;
|
||||
private final String baseUrl;
|
||||
|
||||
public PeertubeCommentsInfoItemExtractor(JsonObject item, PeertubeCommentsExtractor extractor) throws ParsingException {
|
||||
this.item = item;
|
||||
this.url = extractor.getUrl();
|
||||
this.baseUrl = extractor.getBaseUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
return url;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
String value;
|
||||
try {
|
||||
value = JsonUtils.getString(item, "account.avatar.path");
|
||||
}catch(Exception e) {
|
||||
value = "/client/assets/images/default-avatar.png";
|
||||
}
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return JsonUtils.getString(item, "account.displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTextualPublishedTime() throws ParsingException {
|
||||
return JsonUtils.getString(item, "createdAt");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getPublishedTime() throws ParsingException {
|
||||
String textualUploadDate = getTextualPublishedTime();
|
||||
return new DateWrapper(PeertubeParsingHelper.parseDateFrom(textualUploadDate));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getLikeCount() throws ParsingException {
|
||||
return -1;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCommentText() throws ParsingException {
|
||||
String htmlText = JsonUtils.getString(item, "text");
|
||||
try {
|
||||
Document doc = Jsoup.parse(htmlText);
|
||||
return doc.body().text();
|
||||
}catch(Exception e) {
|
||||
return htmlText.replaceAll("(?s)<[^>]*>(\\s*<[^>]*>)*", "");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getCommentId() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(item, "id");
|
||||
return value.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorThumbnail() throws ParsingException {
|
||||
String value;
|
||||
try {
|
||||
value = JsonUtils.getString(item, "account.avatar.path");
|
||||
}catch(Exception e) {
|
||||
value = "/client/assets/images/default-avatar.png";
|
||||
}
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorName() throws ParsingException {
|
||||
return JsonUtils.getString(item, "account.displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getAuthorEndpoint() throws ParsingException {
|
||||
String name = JsonUtils.getString(item, "account.name");
|
||||
String host = JsonUtils.getString(item, "account.host");
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,86 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
|
||||
public class PeertubePlaylistExtractor extends PlaylistExtractor{
|
||||
|
||||
public PeertubePlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
// TODO Auto-generated constructor stub
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getBannerUrl() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderAvatarUrl() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getStreamCount() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,130 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.InfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.InfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
|
||||
public class PeertubeSearchExtractor extends SearchExtractor {
|
||||
|
||||
private static final String START_KEY = "start";
|
||||
private static final String COUNT_KEY = "count";
|
||||
private static final int ITEMS_PER_PAGE = 12;
|
||||
private static final String START_PATTERN = "start=(\\d*)";
|
||||
|
||||
private InfoItemsPage<InfoItem> initPage;
|
||||
private long total;
|
||||
|
||||
public PeertubeSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSearchSuggestion() throws ParsingException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage;
|
||||
}
|
||||
|
||||
private InfoItemsCollector<InfoItem, InfoItemExtractor> collectStreamsFrom(JsonObject json) throws ParsingException {
|
||||
|
||||
final InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
|
||||
JsonArray contents;
|
||||
try {
|
||||
contents = (JsonArray) JsonUtils.getValue(json, "data");
|
||||
}catch(Exception e) {
|
||||
throw new ParsingException("unable to extract search info", e);
|
||||
}
|
||||
|
||||
String baseUrl = getBaseUrl();
|
||||
for(Object c: contents) {
|
||||
if(c instanceof JsonObject) {
|
||||
final JsonObject item = (JsonObject) c;
|
||||
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
|
||||
collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
||||
return collector;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getNextPageUrl() throws IOException, ExtractionException {
|
||||
super.fetchPage();
|
||||
return initPage.getNextPageUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
Response response = getDownloader().get(pageUrl);
|
||||
JsonObject json = null;
|
||||
if(null != response && !StringUtil.isBlank(response.responseBody())) {
|
||||
try {
|
||||
json = JsonParser.object().from(response.responseBody());
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse json data for search info", e);
|
||||
}
|
||||
}
|
||||
|
||||
if(json != null) {
|
||||
Number number = JsonUtils.getNumber(json, "total");
|
||||
if(number != null) this.total = number.longValue();
|
||||
return new InfoItemsPage<>(collectStreamsFrom(json), getNextPageUrl(pageUrl));
|
||||
} else {
|
||||
throw new ExtractionException("Unable to get peertube search info");
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
String pageUrl = getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
|
||||
this.initPage = getPage(pageUrl);
|
||||
}
|
||||
|
||||
private String getNextPageUrl(String prevPageUrl) {
|
||||
String prevStart;
|
||||
try {
|
||||
prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
|
||||
} catch (RegexException e) {
|
||||
return "";
|
||||
}
|
||||
if(StringUtil.isBlank(prevStart)) return "";
|
||||
long nextStart = 0;
|
||||
try {
|
||||
nextStart = Long.valueOf(prevStart) + ITEMS_PER_PAGE;
|
||||
} catch (NumberFormatException e) {
|
||||
return "";
|
||||
}
|
||||
|
||||
if(nextStart >= total) {
|
||||
return "";
|
||||
}else {
|
||||
return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + String.valueOf(nextStart));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,342 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import org.jsoup.helper.StringUtil;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.ServiceList;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.services.peertube.linkHandler.PeertubeSearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.AudioStream;
|
||||
import org.schabi.newpipe.extractor.stream.Stream;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
import org.schabi.newpipe.extractor.stream.SubtitlesStream;
|
||||
import org.schabi.newpipe.extractor.stream.VideoStream;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
|
||||
public class PeertubeStreamExtractor extends StreamExtractor {
|
||||
|
||||
|
||||
private JsonObject json;
|
||||
private List<SubtitlesStream> subtitles = new ArrayList<>();
|
||||
private final String baseUrl;
|
||||
|
||||
public PeertubeStreamExtractor(StreamingService service, LinkHandler linkHandler) throws ParsingException {
|
||||
super(service, linkHandler);
|
||||
this.baseUrl = getBaseUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return JsonUtils.getString(json, "publishedAt");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
final String textualUploadDate = getTextualUploadDate();
|
||||
|
||||
if (textualUploadDate == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new DateWrapper(PeertubeParsingHelper.parseDateFrom(textualUploadDate));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
return baseUrl + JsonUtils.getString(json, "thumbnailPath");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() throws ParsingException {
|
||||
try {
|
||||
return JsonUtils.getString(json, "description");
|
||||
}catch(ParsingException e) {
|
||||
return "No description";
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getAgeLimit() throws ParsingException {
|
||||
return NO_AGE_LIMIT;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLength() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(json, "duration");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getTimeStamp() throws ParsingException {
|
||||
//TODO fetch timestamp from url if present;
|
||||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getViewCount() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(json, "views");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLikeCount() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(json, "likes");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getDislikeCount() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(json, "dislikes");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
String name = JsonUtils.getString(json, "account.name");
|
||||
String host = JsonUtils.getString(json, "account.host");
|
||||
return getService().getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
return JsonUtils.getString(json, "account.displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderAvatarUrl() throws ParsingException {
|
||||
String value;
|
||||
try {
|
||||
value = JsonUtils.getString(json, "account.avatar.path");
|
||||
}catch(Exception e) {
|
||||
value = "/client/assets/images/default-avatar.png";
|
||||
}
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDashMpdUrl() throws ParsingException {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getHlsUrl() throws ParsingException {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AudioStream> getAudioStreams() throws IOException, ExtractionException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<VideoStream> getVideoStreams() throws IOException, ExtractionException {
|
||||
assertPageFetched();
|
||||
List<VideoStream> videoStreams = new ArrayList<>();
|
||||
try {
|
||||
JsonArray streams = json.getArray("files", new JsonArray());
|
||||
for(Object s: streams) {
|
||||
if(!(s instanceof JsonObject)) continue;
|
||||
JsonObject stream = (JsonObject) s;
|
||||
String url = JsonUtils.getString(stream, "fileUrl");
|
||||
String torrentUrl = JsonUtils.getString(stream, "torrentUrl");
|
||||
String resolution = JsonUtils.getString(stream, "resolution.label");
|
||||
String extension = url.substring(url.lastIndexOf(".") + 1);
|
||||
MediaFormat format = MediaFormat.getFromSuffix(extension);
|
||||
VideoStream videoStream = new VideoStream(url, torrentUrl, format, resolution);
|
||||
if (!Stream.containSimilarStream(videoStream, videoStreams)) {
|
||||
videoStreams.add(videoStream);
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get video streams", e);
|
||||
}
|
||||
|
||||
return videoStreams;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public List<VideoStream> getVideoOnlyStreams() throws IOException, ExtractionException {
|
||||
// TODO Auto-generated method stub
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SubtitlesStream> getSubtitlesDefault() throws IOException, ExtractionException {
|
||||
return subtitles;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<SubtitlesStream> getSubtitles(final MediaFormat format) throws IOException, ExtractionException {
|
||||
List<SubtitlesStream> filteredSubs = new ArrayList<>();
|
||||
for(SubtitlesStream sub: subtitles) {
|
||||
if(sub.getFormat() == format) {
|
||||
filteredSubs.add(sub);
|
||||
}
|
||||
}
|
||||
return filteredSubs;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamType getStreamType() throws ParsingException {
|
||||
return StreamType.VIDEO_STREAM;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamInfoItem getNextStream() throws IOException, ExtractionException {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamInfoItemsCollector getRelatedStreams() throws IOException, ExtractionException {
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
List<String> tags = getTags();
|
||||
String apiUrl = null;
|
||||
if(!tags.isEmpty()) {
|
||||
apiUrl = getRelatedStreamsUrl(tags);
|
||||
|
||||
}else {
|
||||
apiUrl = getUploaderUrl() + "/videos?start=0&count=8";
|
||||
}
|
||||
if(!StringUtil.isBlank(apiUrl)) getStreamsFromApi(collector, apiUrl);
|
||||
return collector;
|
||||
}
|
||||
|
||||
private List<String> getTags(){
|
||||
try {
|
||||
return (List) JsonUtils.getArray(json, "tags");
|
||||
} catch (Exception e) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
}
|
||||
|
||||
private String getRelatedStreamsUrl(List<String> tags) throws UnsupportedEncodingException {
|
||||
String url = baseUrl + PeertubeSearchQueryHandlerFactory.SEARCH_ENDPOINT;
|
||||
StringBuilder params = new StringBuilder();
|
||||
params.append("start=0&count=8&sort=-createdAt");
|
||||
for(String tag : tags) {
|
||||
params.append("&tagsOneOf=");
|
||||
params.append(URLEncoder.encode(tag, "UTF-8"));
|
||||
}
|
||||
return url + "?" + params.toString();
|
||||
}
|
||||
|
||||
private void getStreamsFromApi(StreamInfoItemsCollector collector, String apiUrl) throws ReCaptchaException, IOException, ParsingException {
|
||||
Response response = getDownloader().get(apiUrl);
|
||||
JsonObject relatedVideosJson = null;
|
||||
if(null != response && !StringUtil.isBlank(response.responseBody())) {
|
||||
try {
|
||||
relatedVideosJson = JsonParser.object().from(response.responseBody());
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json data for related videos", e);
|
||||
}
|
||||
}
|
||||
|
||||
if(relatedVideosJson != null) {
|
||||
collectStreamsFrom(collector, relatedVideosJson);
|
||||
}
|
||||
}
|
||||
|
||||
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json) throws ParsingException {
|
||||
JsonArray contents;
|
||||
try {
|
||||
contents = (JsonArray) JsonUtils.getValue(json, "data");
|
||||
}catch(Exception e) {
|
||||
throw new ParsingException("unable to extract related videos", e);
|
||||
}
|
||||
|
||||
for(Object c: contents) {
|
||||
if(c instanceof JsonObject) {
|
||||
final JsonObject item = (JsonObject) c;
|
||||
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
|
||||
//do not add the same stream in related streams
|
||||
if(!extractor.getUrl().equals(getUrl())) collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getErrorMessage() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
Response response = downloader.get(getUrl());
|
||||
if(null != response && null != response.responseBody()) {
|
||||
setInitialData(response.responseBody());
|
||||
}else {
|
||||
throw new ExtractionException("Unable to extract peertube channel data");
|
||||
}
|
||||
|
||||
loadSubtitles();
|
||||
}
|
||||
|
||||
private void setInitialData(String responseBody) throws ExtractionException {
|
||||
try {
|
||||
json = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ExtractionException("Unable to extract peertube stream data", e);
|
||||
}
|
||||
if(null == json) throw new ExtractionException("Unable to extract peertube stream data");
|
||||
PeertubeParsingHelper.validate(json);
|
||||
}
|
||||
|
||||
private void loadSubtitles() {
|
||||
if (subtitles.isEmpty()) {
|
||||
try {
|
||||
Response response = getDownloader().get(getUrl() + "/captions");
|
||||
JsonObject captionsJson = JsonParser.object().from(response.responseBody());
|
||||
JsonArray captions = JsonUtils.getArray(captionsJson, "data");
|
||||
for(Object c: captions) {
|
||||
if(c instanceof JsonObject) {
|
||||
JsonObject caption = (JsonObject)c;
|
||||
String url = baseUrl + JsonUtils.getString(caption, "captionPath");
|
||||
String languageCode = JsonUtils.getString(caption, "language.id");
|
||||
String ext = url.substring(url.lastIndexOf(".") + 1);
|
||||
MediaFormat fmt = MediaFormat.getFromSuffix(ext);
|
||||
if(fmt != null && languageCode != null) subtitles.add(new SubtitlesStream(fmt, languageCode, url, false));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// ignore all exceptions
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return JsonUtils.getString(json, "name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalUrl() throws ParsingException {
|
||||
return baseUrl + "/videos/watch/" + getId();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,90 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;
|
||||
|
||||
import org.schabi.newpipe.extractor.ServiceList;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
|
||||
public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
||||
|
||||
protected final JsonObject item;
|
||||
private final String baseUrl;
|
||||
|
||||
public PeertubeStreamInfoItemExtractor(JsonObject item, String baseUrl) {
|
||||
this.item = item;
|
||||
this.baseUrl = baseUrl;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
String uuid = JsonUtils.getString(item, "uuid");
|
||||
return ServiceList.PeerTube.getStreamLHFactory().fromId(uuid, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
String value = JsonUtils.getString(item, "thumbnailPath");
|
||||
return baseUrl + value;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return JsonUtils.getString(item, "name");
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isAd() throws ParsingException {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getViewCount() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(item, "views");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
String name = JsonUtils.getString(item, "account.name");
|
||||
String host = JsonUtils.getString(item, "account.host");
|
||||
return ServiceList.PeerTube.getChannelLHFactory().fromId(name + "@" + host, baseUrl).getUrl();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploaderName() throws ParsingException {
|
||||
return JsonUtils.getString(item, "account.displayName");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return JsonUtils.getString(item, "publishedAt");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
final String textualUploadDate = getTextualUploadDate();
|
||||
|
||||
if (textualUploadDate == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new DateWrapper(PeertubeParsingHelper.parseDateFrom(textualUploadDate));
|
||||
}
|
||||
|
||||
@Override
|
||||
public StreamType getStreamType() throws ParsingException {
|
||||
return StreamType.VIDEO_STREAM;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getDuration() throws ParsingException {
|
||||
Number value = JsonUtils.getNumber(item, "duration");
|
||||
return value.longValue();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,21 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;

import java.util.List;

import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;

/**
 * Placeholder subscription extractor for PeerTube: no import source is
 * implemented yet, so there is no related URL to expose.
 */
public class PeertubeSubscriptionExtractor extends SubscriptionExtractor {

    public PeertubeSubscriptionExtractor(StreamingService service, List<ContentSource> supportedSources) {
        super(service, supportedSources);
    }

    @Override
    public String getRelatedUrl() {
        // Not implemented: there is no subscription-import page to point at.
        return null;
    }
}
|
|
@ -0,0 +1,22 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;

import java.io.IOException;
import java.util.Collections;
import java.util.List;

import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;

/**
 * Suggestion extractor for PeerTube. The service offers no search-suggestion
 * endpoint, so every query yields an empty list.
 */
public class PeertubeSuggestionExtractor extends SuggestionExtractor {

    public PeertubeSuggestionExtractor(StreamingService service) {
        super(service);
    }

    @Override
    public List<String> suggestionList(String query) throws IOException, ExtractionException {
        return Collections.emptyList();
    }
}
|
|
@ -0,0 +1,124 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.extractors;

import java.io.IOException;

import org.jsoup.helper.StringUtil;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.downloader.Downloader;
import org.schabi.newpipe.extractor.downloader.Response;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Parser;
import org.schabi.newpipe.extractor.utils.Parser.RegexException;

import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;

/**
 * Kiosk extractor for PeerTube video listings (trending, most liked, ...).
 * Results are paged through the API's "start"/"count" query parameters.
 */
public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {

    private static final String START_KEY = "start";
    private static final String COUNT_KEY = "count";
    private static final int ITEMS_PER_PAGE = 12;
    private static final String START_PATTERN = "start=(\\d*)";

    // First page, fetched eagerly in onFetchPage().
    private InfoItemsPage<StreamInfoItem> initPage;
    // Total number of items reported by the API; used to stop paging.
    private long total;

    public PeertubeTrendingExtractor(StreamingService streamingService, ListLinkHandler linkHandler, String kioskId) {
        super(streamingService, linkHandler, kioskId);
    }

    @Override
    public String getName() throws ParsingException {
        return getId();
    }

    @Override
    public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
        super.fetchPage();
        return initPage;
    }

    /**
     * Commits every video object of the response's "data" array to the collector.
     */
    private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
        final JsonArray contents;
        try {
            contents = (JsonArray) JsonUtils.getValue(json, "data");
        } catch (Exception e) {
            throw new ParsingException("unable to extract kiosk info", e);
        }

        final String baseUrl = getBaseUrl();
        for (final Object c : contents) {
            if (c instanceof JsonObject) {
                collector.commit(new PeertubeStreamInfoItemExtractor((JsonObject) c, baseUrl));
            }
        }
    }

    @Override
    public String getNextPageUrl() throws IOException, ExtractionException {
        super.fetchPage();
        return initPage.getNextPageUrl();
    }

    @Override
    public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
        final Response response = getDownloader().get(pageUrl);
        JsonObject json = null;
        if (response != null && !StringUtil.isBlank(response.responseBody())) {
            try {
                json = JsonParser.object().from(response.responseBody());
            } catch (Exception e) {
                throw new ParsingException("Could not parse json data for kiosk info", e);
            }
        }

        if (json == null) {
            throw new ExtractionException("Unable to get peertube kiosk info");
        }

        // Remember the server-reported total so getNextPageUrl(String) can stop paging.
        final Number number = JsonUtils.getNumber(json, "total");
        if (number != null) {
            this.total = number.longValue();
        }

        final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
        collectStreamsFrom(collector, json, pageUrl);
        return new InfoItemsPage<>(collector, getNextPageUrl(pageUrl));
    }

    @Override
    public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
        // Kiosk URLs already contain "?sort=...", so paging parameters are appended with '&'.
        final String pageUrl = getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
        this.initPage = getPage(pageUrl);
    }

    /**
     * Derives the next page URL from the previous one by advancing the "start"
     * offset; returns "" when there is no further page.
     */
    private String getNextPageUrl(String prevPageUrl) {
        final String prevStart;
        try {
            prevStart = Parser.matchGroup1(START_PATTERN, prevPageUrl);
        } catch (RegexException e) {
            return "";
        }
        if (StringUtil.isBlank(prevStart)) {
            return "";
        }

        final long nextStart;
        try {
            // parseLong avoids the needless boxing of Long.valueOf.
            nextStart = Long.parseLong(prevStart) + ITEMS_PER_PAGE;
        } catch (NumberFormatException e) {
            return "";
        }

        if (nextStart >= total) {
            // Reached the reported total: no further pages.
            return "";
        }
        return prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + nextStart);
    }
}
|
|
@ -0,0 +1,41 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;

import java.util.List;

import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
import org.schabi.newpipe.extractor.utils.Parser;

/**
 * Builds and parses PeerTube account (channel) URLs.
 */
public class PeertubeChannelLinkHandlerFactory extends ListLinkHandlerFactory {

    private static final PeertubeChannelLinkHandlerFactory instance = new PeertubeChannelLinkHandlerFactory();
    private static final String ID_PATTERN = "/accounts/([^/?&#]*)";
    private static final String ACCOUNTS_ENDPOINT = "/api/v1/accounts/";

    public static PeertubeChannelLinkHandlerFactory getInstance() {
        return instance;
    }

    @Override
    public String getId(String url) throws ParsingException {
        // The account name is the path segment right after "/accounts/".
        return Parser.matchGroup1(ID_PATTERN, url);
    }

    @Override
    public String getUrl(String id, List<String> contentFilters, String searchFilter) throws ParsingException {
        // Default to the currently selected PeerTube instance.
        return getUrl(id, contentFilters, searchFilter, ServiceList.PeerTube.getBaseUrl());
    }

    @Override
    public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl)
            throws ParsingException {
        return baseUrl + ACCOUNTS_ENDPOINT + id;
    }

    @Override
    public boolean onAcceptUrl(String url) {
        return url.contains("/accounts/");
    }
}
|
|
@ -0,0 +1,42 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;

import java.util.List;

import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.FoundAdException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
import org.schabi.newpipe.extractor.utils.Parser;

/**
 * Builds and parses PeerTube comment-thread URLs for a given video.
 */
public class PeertubeCommentsLinkHandlerFactory extends ListLinkHandlerFactory {

    private static final PeertubeCommentsLinkHandlerFactory instance = new PeertubeCommentsLinkHandlerFactory();
    private static final String ID_PATTERN = "/videos/(watch/)?([^/?&#]*)";
    private static final String COMMENTS_ENDPOINT = "/api/v1/videos/%s/comment-threads";

    public static PeertubeCommentsLinkHandlerFactory getInstance() {
        return instance;
    }

    @Override
    public String getId(String url) throws ParsingException, IllegalArgumentException {
        // Group 2 holds the video id; group 1 only swallows an optional "watch/".
        return Parser.matchGroup(ID_PATTERN, url, 2);
    }

    @Override
    public boolean onAcceptUrl(final String url) throws FoundAdException {
        return url.contains("/videos/");
    }

    @Override
    public String getUrl(String id, List<String> contentFilter, String sortFilter) throws ParsingException {
        // Default to the currently selected PeerTube instance.
        return getUrl(id, contentFilter, sortFilter, ServiceList.PeerTube.getBaseUrl());
    }

    @Override
    public String getUrl(String id, List<String> contentFilter, String sortFilter, String baseUrl) throws ParsingException {
        return baseUrl + String.format(COMMENTS_ENDPOINT, id);
    }
}
|
|
@ -0,0 +1,42 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;

import java.util.List;

import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
import org.schabi.newpipe.extractor.utils.Parser;

/**
 * Builds and parses PeerTube video-channel URLs (used as "playlists").
 */
public class PeertubePlaylistLinkHandlerFactory extends ListLinkHandlerFactory {

    private static final PeertubePlaylistLinkHandlerFactory instance = new PeertubePlaylistLinkHandlerFactory();
    private static final String ID_PATTERN = "/video-channels/([^/?&#]*)";
    private static final String VIDEO_CHANNELS_ENDPOINT = "/api/v1/video-channels/";

    public static PeertubePlaylistLinkHandlerFactory getInstance() {
        return instance;
    }

    @Override
    public String getUrl(String id, List<String> contentFilters, String sortFilter) {
        // Default to the currently selected PeerTube instance.
        return getUrl(id, contentFilters, sortFilter, ServiceList.PeerTube.getBaseUrl());
    }

    @Override
    public String getUrl(String id, List<String> contentFilters, String sortFilter, String baseUrl) {
        return baseUrl + VIDEO_CHANNELS_ENDPOINT + id;
    }

    @Override
    public String getId(String url) throws ParsingException {
        // The channel name is the path segment right after "/video-channels/".
        return Parser.matchGroup1(ID_PATTERN, url);
    }

    @Override
    public boolean onAcceptUrl(final String url) {
        return url.contains("/video-channels/");
    }
}
|
|
@ -0,0 +1,43 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.List;

import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;

/**
 * Builds PeerTube search URLs ("/api/v1/search/videos?search=...").
 */
public class PeertubeSearchQueryHandlerFactory extends SearchQueryHandlerFactory {

    public static final String CHARSET_UTF_8 = "UTF-8";
    public static final String VIDEOS = "videos";
    public static final String SEARCH_ENDPOINT = "/api/v1/search/videos";

    // Stateless factory: share one instance like the other PeerTube
    // link-handler factories instead of allocating a new one per call.
    private static final PeertubeSearchQueryHandlerFactory instance = new PeertubeSearchQueryHandlerFactory();

    public static PeertubeSearchQueryHandlerFactory getInstance() {
        return instance;
    }

    @Override
    public String getUrl(String searchString, List<String> contentFilters, String sortFilter) throws ParsingException {
        // Default to the currently selected PeerTube instance.
        return getUrl(searchString, contentFilters, sortFilter, ServiceList.PeerTube.getBaseUrl());
    }

    @Override
    public String getUrl(String searchString, List<String> contentFilters, String sortFilter, String baseUrl) throws ParsingException {
        try {
            return baseUrl + SEARCH_ENDPOINT + "?search=" + URLEncoder.encode(searchString, CHARSET_UTF_8);
        } catch (UnsupportedEncodingException e) {
            throw new ParsingException("Could not encode query", e);
        }
    }

    @Override
    public String[] getAvailableContentFilter() {
        return new String[] { VIDEOS };
    }
}
|
|
@ -0,0 +1,42 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;
|
||||
|
||||
import org.schabi.newpipe.extractor.ServiceList;
|
||||
import org.schabi.newpipe.extractor.exceptions.FoundAdException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
public class PeertubeStreamLinkHandlerFactory extends LinkHandlerFactory {
|
||||
|
||||
private static final PeertubeStreamLinkHandlerFactory instance = new PeertubeStreamLinkHandlerFactory();
|
||||
private static final String ID_PATTERN = "/videos/(watch/)?([^/?&#]*)";
|
||||
private static final String VIDEO_ENDPOINT = "/api/v1/videos/";
|
||||
|
||||
private PeertubeStreamLinkHandlerFactory() {
|
||||
}
|
||||
|
||||
public static PeertubeStreamLinkHandlerFactory getInstance() {
|
||||
return instance;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl(String id) {
|
||||
String baseUrl = ServiceList.PeerTube.getBaseUrl();
|
||||
return getUrl(id, baseUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUrl(String id, String baseUrl) {
|
||||
return baseUrl + VIDEO_ENDPOINT + id;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getId(String url) throws ParsingException, IllegalArgumentException {
|
||||
return Parser.matchGroup(ID_PATTERN, url, 2);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean onAcceptUrl(final String url) throws FoundAdException {
|
||||
return url.contains("/videos/");
|
||||
}
|
||||
}
|
|
@ -0,0 +1,77 @@
|
|||
package org.schabi.newpipe.extractor.services.peertube.linkHandler;

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;

/**
 * Maps PeerTube kiosk names (trending, most liked, ...) to API listing URL
 * templates and back.
 */
public class PeertubeTrendingLinkHandlerFactory extends ListLinkHandlerFactory {

    private static final PeertubeTrendingLinkHandlerFactory instance = new PeertubeTrendingLinkHandlerFactory();

    public static final Map<String, String> KIOSK_MAP;
    public static final Map<String, String> REVERSE_KIOSK_MAP;
    public static final String KIOSK_TRENDING = "Trending";
    public static final String KIOSK_MOST_LIKED = "Most liked";
    public static final String KIOSK_RECENT = "Recently added";
    public static final String KIOSK_LOCAL = "Local";

    static {
        final Map<String, String> map = new HashMap<>();
        map.put(KIOSK_TRENDING, "%s/api/v1/videos?sort=-trending");
        map.put(KIOSK_MOST_LIKED, "%s/api/v1/videos?sort=-likes");
        map.put(KIOSK_RECENT, "%s/api/v1/videos?sort=-publishedAt");
        map.put(KIOSK_LOCAL, "%s/api/v1/videos?sort=-publishedAt&filter=local");
        KIOSK_MAP = Collections.unmodifiableMap(map);

        // Invert KIOSK_MAP so a URL template can be resolved back to its kiosk id.
        final Map<String, String> reverseMap = new HashMap<>();
        for (final Map.Entry<String, String> entry : KIOSK_MAP.entrySet()) {
            reverseMap.put(entry.getValue(), entry.getKey());
        }
        REVERSE_KIOSK_MAP = Collections.unmodifiableMap(reverseMap);
    }

    public static PeertubeTrendingLinkHandlerFactory getInstance() {
        return instance;
    }

    @Override
    public String getUrl(String id, List<String> contentFilters, String sortFilter) {
        // Default to the currently selected PeerTube instance.
        return getUrl(id, contentFilters, sortFilter, ServiceList.PeerTube.getBaseUrl());
    }

    @Override
    public String getUrl(String id, List<String> contentFilters, String sortFilter, String baseUrl) {
        return String.format(KIOSK_MAP.get(id), baseUrl);
    }

    @Override
    public String getId(String url) throws ParsingException {
        // Substitute the instance base URL with the template placeholder so the
        // reverse map can match full API URLs as well.
        url = url.replace(ServiceList.PeerTube.getBaseUrl(), "%s");
        if (url.contains("/videos/trending")) {
            return KIOSK_TRENDING;
        } else if (url.contains("/videos/most-liked")) {
            return KIOSK_MOST_LIKED;
        } else if (url.contains("/videos/recently-added")) {
            return KIOSK_RECENT;
        } else if (url.contains("/videos/local")) {
            return KIOSK_LOCAL;
        } else if (REVERSE_KIOSK_MAP.containsKey(url)) {
            return REVERSE_KIOSK_MAP.get(url);
        }
        throw new ParsingException("no id found for this url");
    }

    @Override
    public boolean onAcceptUrl(final String url) {
        return url.contains("/videos?")
                || url.contains("/videos/trending")
                || url.contains("/videos/most-liked")
                || url.contains("/videos/recently-added")
                || url.contains("/videos/local");
    }
}
|
|
@ -4,15 +4,14 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -25,8 +24,8 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
|
|||
private StreamInfoItemsCollector streamInfoItemsCollector = null;
|
||||
private String nextPageUrl = null;
|
||||
|
||||
public SoundcloudChannelExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public SoundcloudChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -36,7 +35,7 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
|
|||
String apiUrl = "https://api-v2.soundcloud.com/users/" + userId +
|
||||
"?client_id=" + SoundcloudParsingHelper.clientId();
|
||||
|
||||
String response = downloader.download(apiUrl);
|
||||
String response = downloader.get(apiUrl, getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
user = JsonParser.object().from(response);
|
||||
} catch (JsonParserException e) {
|
||||
|
|
|
@ -24,7 +24,9 @@ public class SoundcloudChannelInfoItemExtractor implements ChannelInfoItemExtrac
|
|||
|
||||
@Override
|
||||
public String getThumbnailUrl() {
|
||||
return itemObject.getString("avatar_url", "");
|
||||
String avatarUrl = itemObject.getString("avatar_url", "");
|
||||
String avatarUrlBetterResolution = avatarUrl.replace("large.jpg", "crop.jpg");
|
||||
return avatarUrlBetterResolution;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -1,18 +1,15 @@
|
|||
package org.schabi.newpipe.extractor.services.soundcloud;
|
||||
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
||||
private StreamInfoItemsCollector collector = null;
|
||||
|
@ -20,9 +17,8 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
|
|||
|
||||
public SoundcloudChartsExtractor(StreamingService service,
|
||||
ListLinkHandler linkHandler,
|
||||
String kioskId,
|
||||
Localization localization) {
|
||||
super(service, linkHandler, kioskId, localization);
|
||||
String kioskId) {
|
||||
super(service, linkHandler, kioskId);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -7,9 +7,12 @@ import com.grack.nanojson.JsonParserException;
|
|||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.jsoup.select.Elements;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
|
@ -21,71 +24,96 @@ import java.io.IOException;
|
|||
import java.net.URLEncoder;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Date;
|
||||
import java.util.HashMap;
|
||||
import java.util.*;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
|
||||
import static org.schabi.newpipe.extractor.utils.Utils.replaceHttpWithHttps;
|
||||
|
||||
public class SoundcloudParsingHelper {
|
||||
private static final String HARDCODED_CLIENT_ID = "bkcJLoXNaiFlsLaKBQXOxO5FhW0NJVnu"; // Updated on 29/11/19
|
||||
private static String clientId;
|
||||
|
||||
private SoundcloudParsingHelper() {
|
||||
}
|
||||
|
||||
public static String clientId() throws ReCaptchaException, IOException, RegexException {
|
||||
public static String clientId() throws ExtractionException, IOException {
|
||||
if (clientId != null && !clientId.isEmpty()) return clientId;
|
||||
|
||||
Downloader dl = NewPipe.getDownloader();
|
||||
String response = dl.download("https://soundcloud.com");
|
||||
|
||||
Document doc = Jsoup.parse(response);
|
||||
Element jsElement = doc.select("script[src^=https://a-v2.sndcdn.com/assets/app]").first();
|
||||
clientId = HARDCODED_CLIENT_ID;
|
||||
if (checkIfHardcodedClientIdIsValid(dl)) {
|
||||
return clientId;
|
||||
}
|
||||
|
||||
final Response download = dl.get("https://soundcloud.com");
|
||||
final String responseBody = download.responseBody();
|
||||
final String clientIdPattern = ",client_id:\"(.*?)\"";
|
||||
|
||||
try {
|
||||
final HashMap<String, String> headers = new HashMap<>();
|
||||
headers.put("Range", "bytes=0-16384");
|
||||
String js = dl.download(jsElement.attr("src"), headers);
|
||||
Document doc = Jsoup.parse(responseBody);
|
||||
final Elements possibleScripts = doc.select("script[src*=\"sndcdn.com/assets/\"][src$=\".js\"]");
|
||||
// The one containing the client id will likely be the last one
|
||||
Collections.reverse(possibleScripts);
|
||||
|
||||
return clientId = Parser.matchGroup1(clientIdPattern, js);
|
||||
} catch (IOException | RegexException ignored) {
|
||||
// Ignore it and proceed to download the whole js file
|
||||
final HashMap<String, List<String>> headers = new HashMap<>();
|
||||
headers.put("Range", singletonList("bytes=0-16384"));
|
||||
|
||||
for (Element element : possibleScripts) {
|
||||
final String srcUrl = element.attr("src");
|
||||
if (srcUrl != null && !srcUrl.isEmpty()) {
|
||||
try {
|
||||
return clientId = Parser.matchGroup1(clientIdPattern, dl.get(srcUrl, headers).responseBody());
|
||||
} catch (RegexException ignored) {
|
||||
// Ignore it and proceed to try searching other script
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
String js = dl.download(jsElement.attr("src"));
|
||||
return clientId = Parser.matchGroup1(clientIdPattern, js);
|
||||
// Officially give up
|
||||
throw new ExtractionException("Couldn't extract client id");
|
||||
}
|
||||
|
||||
public static String toDateString(String time) throws ParsingException {
|
||||
static boolean checkIfHardcodedClientIdIsValid(Downloader dl) {
|
||||
final String apiUrl = "https://api.soundcloud.com/connect?client_id=" + HARDCODED_CLIENT_ID;
|
||||
try {
|
||||
Date date;
|
||||
// Have two date formats, one for the 'api.soundc...' and the other 'api-v2.soundc...'.
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").parse(time);
|
||||
} catch (Exception e) {
|
||||
date = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss +0000").parse(time);
|
||||
}
|
||||
|
||||
SimpleDateFormat newDateFormat = new SimpleDateFormat("yyyy-MM-dd");
|
||||
return newDateFormat.format(date);
|
||||
} catch (ParseException e) {
|
||||
throw new ParsingException(e.getMessage(), e);
|
||||
// Should return 200 to indicate that the client id is valid, a 401 is returned otherwise.
|
||||
return dl.head(apiUrl).responseCode() == 200;
|
||||
} catch (Exception ignored) {
|
||||
// No need to throw an exception here. If something went wrong, the client_id is wrong
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static Calendar parseDate(String textualUploadDate) throws ParsingException {
|
||||
Date date;
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'").parse(textualUploadDate);
|
||||
} catch (ParseException e1) {
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy/MM/dd HH:mm:ss +0000").parse(textualUploadDate);
|
||||
} catch (ParseException e2) {
|
||||
throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"" + ", " + e1.getMessage(), e2);
|
||||
}
|
||||
}
|
||||
|
||||
final Calendar uploadDate = Calendar.getInstance();
|
||||
uploadDate.setTime(date);
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
/**
|
||||
* Call the endpoint "/resolve" of the api.<p>
|
||||
*
|
||||
* See https://developers.soundcloud.com/docs/api/reference#resolve
|
||||
*/
|
||||
public static JsonObject resolveFor(Downloader downloader, String url) throws IOException, ReCaptchaException, ParsingException {
|
||||
public static JsonObject resolveFor(Downloader downloader, String url) throws IOException, ExtractionException {
|
||||
String apiUrl = "https://api.soundcloud.com/resolve"
|
||||
+ "?url=" + URLEncoder.encode(url, "UTF-8")
|
||||
+ "&client_id=" + clientId();
|
||||
|
||||
try {
|
||||
return JsonParser.object().from(downloader.download(apiUrl));
|
||||
final String response = downloader.get(apiUrl, SoundCloud.getLocalization()).responseBody();
|
||||
return JsonParser.object().from(response);
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
@ -98,8 +126,8 @@ public class SoundcloudParsingHelper {
|
|||
*/
|
||||
public static String resolveUrlWithEmbedPlayer(String apiUrl) throws IOException, ReCaptchaException, ParsingException {
|
||||
|
||||
String response = NewPipe.getDownloader().download("https://w.soundcloud.com/player/?url="
|
||||
+ URLEncoder.encode(apiUrl, "UTF-8"));
|
||||
String response = NewPipe.getDownloader().get("https://w.soundcloud.com/player/?url="
|
||||
+ URLEncoder.encode(apiUrl, "UTF-8"), SoundCloud.getLocalization()).responseBody();
|
||||
|
||||
return Jsoup.parse(response).select("link[rel=\"canonical\"]").first().attr("abs:href");
|
||||
}
|
||||
|
@ -111,9 +139,12 @@ public class SoundcloudParsingHelper {
|
|||
*/
|
||||
public static String resolveIdWithEmbedPlayer(String url) throws IOException, ReCaptchaException, ParsingException {
|
||||
|
||||
String response = NewPipe.getDownloader().download("https://w.soundcloud.com/player/?url="
|
||||
+ URLEncoder.encode(url, "UTF-8"));
|
||||
return Parser.matchGroup1(",\"id\":(.*?),", response);
|
||||
String response = NewPipe.getDownloader().get("https://w.soundcloud.com/player/?url="
|
||||
+ URLEncoder.encode(url, "UTF-8"), SoundCloud.getLocalization()).responseBody();
|
||||
// handle playlists / sets different and get playlist id via uir field in JSON
|
||||
if (url.contains("sets") && !url.endsWith("sets") && !url.endsWith("sets/"))
|
||||
return Parser.matchGroup1("\"uri\":\\s*\"https:\\/\\/api\\.soundcloud\\.com\\/playlists\\/((\\d)*?)\"", response);
|
||||
return Parser.matchGroup1(",\"id\":(([^}\\n])*?),", response);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -140,7 +171,7 @@ public class SoundcloudParsingHelper {
|
|||
* @return the next streams url, empty if don't have
|
||||
*/
|
||||
public static String getUsersFromApi(ChannelInfoItemsCollector collector, String apiUrl) throws IOException, ReCaptchaException, ParsingException {
|
||||
String response = NewPipe.getDownloader().download(apiUrl);
|
||||
String response = NewPipe.getDownloader().get(apiUrl, SoundCloud.getLocalization()).responseBody();
|
||||
JsonObject responseObject;
|
||||
try {
|
||||
responseObject = JsonParser.object().from(response);
|
||||
|
@ -191,7 +222,7 @@ public class SoundcloudParsingHelper {
|
|||
* @return the next streams url, empty if don't have
|
||||
*/
|
||||
public static String getStreamsFromApi(StreamInfoItemsCollector collector, String apiUrl, boolean charts) throws IOException, ReCaptchaException, ParsingException {
|
||||
String response = NewPipe.getDownloader().download(apiUrl);
|
||||
String response = NewPipe.getDownloader().get(apiUrl, SoundCloud.getLocalization()).responseBody();
|
||||
JsonObject responseObject;
|
||||
try {
|
||||
responseObject = JsonParser.object().from(response);
|
||||
|
|
|
@ -3,15 +3,14 @@ package org.schabi.newpipe.extractor.services.soundcloud;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -24,8 +23,8 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
|||
private StreamInfoItemsCollector streamInfoItemsCollector = null;
|
||||
private String nextPageUrl = null;
|
||||
|
||||
public SoundcloudPlaylistExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public SoundcloudPlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -36,7 +35,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
|||
"?client_id=" + SoundcloudParsingHelper.clientId() +
|
||||
"&representation=compact";
|
||||
|
||||
String response = downloader.download(apiUrl);
|
||||
String response = downloader.get(apiUrl, getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
playlist = JsonParser.object().from(response);
|
||||
} catch (JsonParserException e) {
|
||||
|
@ -71,13 +70,15 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
|
|||
final String thumbnailUrl = item.getThumbnailUrl();
|
||||
if (thumbnailUrl == null || thumbnailUrl.isEmpty()) continue;
|
||||
|
||||
return thumbnailUrl;
|
||||
String thumbnailUrlBetterResolution = thumbnailUrl.replace("large.jpg", "crop.jpg");
|
||||
return thumbnailUrlBetterResolution;
|
||||
}
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
return artworkUrl;
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -32,7 +32,10 @@ public class SoundcloudPlaylistInfoItemExtractor implements PlaylistInfoItemExtr
|
|||
// Over-engineering at its finest
|
||||
if (itemObject.isString(ARTWORK_URL_KEY)) {
|
||||
final String artworkUrl = itemObject.getString(ARTWORK_URL_KEY, "");
|
||||
if (!artworkUrl.isEmpty()) return artworkUrl;
|
||||
if (!artworkUrl.isEmpty()) {
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
}
|
||||
}
|
||||
|
||||
try {
|
||||
|
@ -42,8 +45,11 @@ public class SoundcloudPlaylistInfoItemExtractor implements PlaylistInfoItemExtr
|
|||
|
||||
// First look for track artwork url
|
||||
if (trackObject.isString(ARTWORK_URL_KEY)) {
|
||||
final String url = trackObject.getString(ARTWORK_URL_KEY, "");
|
||||
if (!url.isEmpty()) return url;
|
||||
String artworkUrl = trackObject.getString(ARTWORK_URL_KEY, "");
|
||||
if (!artworkUrl.isEmpty()) {
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
}
|
||||
}
|
||||
|
||||
// Then look for track creator avatar url
|
||||
|
|
|
@ -5,12 +5,12 @@ import com.grack.nanojson.JsonObject;
|
|||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
@ -25,10 +25,8 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
|
|||
|
||||
private JsonArray searchCollection;
|
||||
|
||||
public SoundcloudSearchExtractor(StreamingService service,
|
||||
SearchQueryHandler linkHandler,
|
||||
Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public SoundcloudSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -51,7 +49,8 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
|
|||
public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
final Downloader dl = getDownloader();
|
||||
try {
|
||||
searchCollection = JsonParser.object().from(dl.download(pageUrl)).getArray("collection");
|
||||
final String response = dl.get(pageUrl, getExtractorLocalization()).responseBody();
|
||||
searchCollection = JsonParser.object().from(response).getArray("collection");
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
@ -64,7 +63,8 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
|
|||
final Downloader dl = getDownloader();
|
||||
final String url = getUrl();
|
||||
try {
|
||||
searchCollection = JsonParser.object().from(dl.download(url)).getArray("collection");
|
||||
final String response = dl.get(url, getExtractorLocalization()).responseBody();
|
||||
searchCollection = JsonParser.object().from(response).getArray("collection");
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
package org.schabi.newpipe.extractor.services.soundcloud;
|
||||
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
|
@ -48,10 +49,10 @@ public class SoundcloudSearchQueryHandlerFactory extends SearchQueryHandlerFacto
|
|||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
throw new ParsingException("Could not encode query", e);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException("Could not get client id", e);
|
||||
} catch (ReCaptchaException e) {
|
||||
throw new ParsingException("ReCaptcha required", e);
|
||||
} catch (IOException | ExtractionException e) {
|
||||
throw new ParsingException("Could not get client id", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -4,7 +4,6 @@ import static java.util.Collections.singletonList;
|
|||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.AUDIO;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
|
@ -20,17 +19,16 @@ import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
|||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
public class SoundcloudService extends StreamingService {
|
||||
|
||||
public SoundcloudService(int id) {
|
||||
super(id, "SoundCloud", singletonList(AUDIO));
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler, Localization localization) {
|
||||
return new SoundcloudSearchExtractor(this, queryHandler, localization);
|
||||
public String getBaseUrl() {
|
||||
return "https://soundcloud.com";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -55,23 +53,28 @@ public class SoundcloudService extends StreamingService {
|
|||
|
||||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(LinkHandler LinkHandler, Localization localization) {
|
||||
return new SoundcloudStreamExtractor(this, LinkHandler, localization);
|
||||
public StreamExtractor getStreamExtractor(LinkHandler LinkHandler) {
|
||||
return new SoundcloudStreamExtractor(this, LinkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
return new SoundcloudChannelExtractor(this, linkHandler, localization);
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) {
|
||||
return new SoundcloudChannelExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
return new SoundcloudPlaylistExtractor(this, linkHandler, localization);
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) {
|
||||
return new SoundcloudPlaylistExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionExtractor getSuggestionExtractor(Localization localization) {
|
||||
return new SoundcloudSuggestionExtractor(getServiceId(), localization);
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler queryHandler) {
|
||||
return new SoundcloudSearchExtractor(this, queryHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SoundcloudSuggestionExtractor getSuggestionExtractor() {
|
||||
return new SoundcloudSuggestionExtractor(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -80,15 +83,14 @@ public class SoundcloudService extends StreamingService {
|
|||
@Override
|
||||
public KioskExtractor createNewKiosk(StreamingService streamingService,
|
||||
String url,
|
||||
String id,
|
||||
Localization local)
|
||||
String id)
|
||||
throws ExtractionException {
|
||||
return new SoundcloudChartsExtractor(SoundcloudService.this,
|
||||
new SoundcloudChartsLinkHandlerFactory().fromUrl(url), id, local);
|
||||
new SoundcloudChartsLinkHandlerFactory().fromUrl(url), id);
|
||||
}
|
||||
};
|
||||
|
||||
KioskList list = new KioskList(getServiceId());
|
||||
KioskList list = new KioskList(this);
|
||||
|
||||
// add kiosks here e.g.:
|
||||
final SoundcloudChartsLinkHandlerFactory h = new SoundcloudChartsLinkHandlerFactory();
|
||||
|
@ -103,7 +105,6 @@ public class SoundcloudService extends StreamingService {
|
|||
return list;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public SubscriptionExtractor getSubscriptionExtractor() {
|
||||
return new SoundcloudSubscriptionExtractor(this);
|
||||
|
@ -115,9 +116,9 @@ public class SoundcloudService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler, Localization localization)
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler linkHandler)
|
||||
throws ExtractionException {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -1,15 +1,17 @@
|
|||
package org.schabi.newpipe.extractor.services.soundcloud;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.stream.*;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -22,8 +24,8 @@ import java.util.List;
|
|||
public class SoundcloudStreamExtractor extends StreamExtractor {
|
||||
private JsonObject track;
|
||||
|
||||
public SoundcloudStreamExtractor(StreamingService service, LinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public SoundcloudStreamExtractor(StreamingService service, LinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -50,14 +52,25 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
|||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
return SoundcloudParsingHelper.toDateString(track.getString("created_at"));
|
||||
public String getTextualUploadDate() {
|
||||
return track.getString("created_at");
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
return new DateWrapper(SoundcloudParsingHelper.parseDate(getTextualUploadDate()));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getThumbnailUrl() {
|
||||
return track.getString("artwork_url", "");
|
||||
String artworkUrl = track.getString("artwork_url", "");
|
||||
if (artworkUrl.isEmpty()) {
|
||||
artworkUrl = track.getObject("user").getString("avatar_url", "");
|
||||
}
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
|
@ -131,10 +144,10 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
|||
List<AudioStream> audioStreams = new ArrayList<>();
|
||||
Downloader dl = NewPipe.getDownloader();
|
||||
|
||||
String apiUrl = "https://api.soundcloud.com/i1/tracks/" + urlEncode(getId()) + "/streams"
|
||||
String apiUrl = "https://api-v2.soundcloud.com/tracks/" + urlEncode(getId())
|
||||
+ "?client_id=" + urlEncode(SoundcloudParsingHelper.clientId());
|
||||
|
||||
String response = dl.download(apiUrl);
|
||||
String response = dl.get(apiUrl, getExtractorLocalization()).responseBody();
|
||||
JsonObject responseObject;
|
||||
try {
|
||||
responseObject = JsonParser.object().from(response);
|
||||
|
@ -142,11 +155,45 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
|
|||
throw new ParsingException("Could not parse json response", e);
|
||||
}
|
||||
|
||||
String mp3Url = responseObject.getString("http_mp3_128_url");
|
||||
if (mp3Url != null && !mp3Url.isEmpty()) {
|
||||
audioStreams.add(new AudioStream(mp3Url, MediaFormat.MP3, 128));
|
||||
} else {
|
||||
throw new ExtractionException("Could not get SoundCloud's track audio url");
|
||||
// Streams can be streamable and downloadable - or explicitly not.
|
||||
// For playing the track, it is only necessary to have a streamable track.
|
||||
// If this is not the case, this track might not be published yet.
|
||||
if (!responseObject.getBoolean("streamable")) return audioStreams;
|
||||
|
||||
try {
|
||||
JsonArray transcodings = responseObject.getObject("media").getArray("transcodings");
|
||||
|
||||
// get information about what stream formats are available
|
||||
for (Object transcoding : transcodings) {
|
||||
|
||||
JsonObject t = (JsonObject) transcoding;
|
||||
String url = t.getString("url");
|
||||
|
||||
if (url != null && !url.isEmpty()) {
|
||||
|
||||
// We can only play the mp3 format, but not handle m3u playlists / streams.
|
||||
// what about Opus?
|
||||
if (t.getString("preset").contains("mp3")
|
||||
&& t.getObject("format").getString("protocol").equals("progressive")) {
|
||||
// This url points to the endpoint which generates a unique and short living url to the stream.
|
||||
// TODO: move this to a separate method to generate valid urls when needed (e.g. resuming a paused stream)
|
||||
url += "?client_id=" + SoundcloudParsingHelper.clientId();
|
||||
String res = dl.get(url).responseBody();
|
||||
|
||||
try {
|
||||
JsonObject mp3UrlObject = JsonParser.object().from(res);
|
||||
// Links in this file are also only valid for a short period.
|
||||
audioStreams.add(new AudioStream(mp3UrlObject.getString("url"),
|
||||
MediaFormat.MP3, 128));
|
||||
} catch (JsonParserException e) {
|
||||
throw new ParsingException("Could not parse streamable url", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} catch (NullPointerException e) {
|
||||
throw new ExtractionException("Could not get SoundCloud's track audio url", e);
|
||||
}
|
||||
|
||||
return audioStreams;
|
||||
|
|
|
@ -2,6 +2,7 @@ package org.schabi.newpipe.extractor.services.soundcloud;
|
|||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
|
||||
|
@ -41,8 +42,17 @@ public class SoundcloudStreamInfoItemExtractor implements StreamInfoItemExtracto
|
|||
}
|
||||
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
return SoundcloudParsingHelper.toDateString(itemObject.getString("created_at"));
|
||||
public String getTextualUploadDate() {
|
||||
return itemObject.getString("created_at");
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
return new DateWrapper(SoundcloudParsingHelper.parseDate(getTextualUploadDate()));
|
||||
}
|
||||
|
||||
private String getCreatedAt() {
|
||||
return itemObject.getString("created_at");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -52,7 +62,12 @@ public class SoundcloudStreamInfoItemExtractor implements StreamInfoItemExtracto
|
|||
|
||||
@Override
|
||||
public String getThumbnailUrl() {
|
||||
return itemObject.getString("artwork_url");
|
||||
String artworkUrl = itemObject.getString("artwork_url", "");
|
||||
if (artworkUrl.isEmpty()) {
|
||||
artworkUrl = itemObject.getObject("user").getString("avatar_url");
|
||||
}
|
||||
String artworkUrlBetterResolution = artworkUrl.replace("large.jpg", "crop.jpg");
|
||||
return artworkUrlBetterResolution;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -4,12 +4,12 @@ import com.grack.nanojson.JsonArray;
|
|||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URLEncoder;
|
||||
|
@ -20,8 +20,8 @@ public class SoundcloudSuggestionExtractor extends SuggestionExtractor {
|
|||
|
||||
public static final String CHARSET_UTF_8 = "UTF-8";
|
||||
|
||||
public SoundcloudSuggestionExtractor(int serviceId, Localization localization) {
|
||||
super(serviceId, localization);
|
||||
public SoundcloudSuggestionExtractor(StreamingService service) {
|
||||
super(service);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -35,7 +35,7 @@ public class SoundcloudSuggestionExtractor extends SuggestionExtractor {
|
|||
+ "&client_id=" + SoundcloudParsingHelper.clientId()
|
||||
+ "&limit=10";
|
||||
|
||||
String response = dl.download(url);
|
||||
String response = dl.get(url, getExtractorLocalization()).responseBody();
|
||||
try {
|
||||
JsonArray collection = JsonParser.object().from(response).getArray("collection");
|
||||
for (Object suggestion : collection) {
|
||||
|
|
|
@ -39,9 +39,9 @@ public class ItagItem {
|
|||
new ItagItem(139, AUDIO, M4A, 48),
|
||||
new ItagItem(140, AUDIO, M4A, 128),
|
||||
new ItagItem(141, AUDIO, M4A, 256),
|
||||
new ItagItem(249, AUDIO, OPUS, 50),
|
||||
new ItagItem(250, AUDIO, OPUS, 70),
|
||||
new ItagItem(251, AUDIO, OPUS, 160),
|
||||
new ItagItem(249, AUDIO, WEBMA_OPUS, 50),
|
||||
new ItagItem(250, AUDIO, WEBMA_OPUS, 70),
|
||||
new ItagItem(251, AUDIO, WEBMA_OPUS, 160),
|
||||
|
||||
/// VIDEO ONLY ////////////////////////////////////////////
|
||||
// ID Type Format Resolution FPS ///
|
||||
|
|
|
@ -6,8 +6,9 @@ import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCap
|
|||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.LIVE;
|
||||
import static org.schabi.newpipe.extractor.StreamingService.ServiceInfo.MediaCapability.VIDEO;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
|
@ -19,6 +20,8 @@ import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
|||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.localization.ContentCountry;
|
||||
import org.schabi.newpipe.extractor.localization.Localization;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.extractors.YoutubeChannelExtractor;
|
||||
|
@ -37,7 +40,7 @@ import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeStreamLi
|
|||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeTrendingLinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.stream.StreamExtractor;
|
||||
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 23.08.15.
|
||||
|
@ -66,10 +69,10 @@ public class YoutubeService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler query, Localization localization) {
|
||||
return new YoutubeSearchExtractor(this, query, localization);
|
||||
public String getBaseUrl() {
|
||||
return "https://youtube.com";
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public LinkHandlerFactory getStreamLHFactory() {
|
||||
return YoutubeStreamLinkHandlerFactory.getInstance();
|
||||
|
@ -91,28 +94,33 @@ public class YoutubeService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler, Localization localization) {
|
||||
return new YoutubeStreamExtractor(this, linkHandler, localization);
|
||||
public StreamExtractor getStreamExtractor(LinkHandler linkHandler) {
|
||||
return new YoutubeStreamExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
return new YoutubeChannelExtractor(this, linkHandler, localization);
|
||||
public ChannelExtractor getChannelExtractor(ListLinkHandler linkHandler) {
|
||||
return new YoutubeChannelExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler, Localization localization) {
|
||||
return new YoutubePlaylistExtractor(this, linkHandler, localization);
|
||||
public PlaylistExtractor getPlaylistExtractor(ListLinkHandler linkHandler) {
|
||||
return new YoutubePlaylistExtractor(this, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionExtractor getSuggestionExtractor(Localization localization) {
|
||||
return new YoutubeSuggestionExtractor(getServiceId(), localization);
|
||||
public SearchExtractor getSearchExtractor(SearchQueryHandler query) {
|
||||
return new YoutubeSearchExtractor(this, query);
|
||||
}
|
||||
|
||||
@Override
|
||||
public SuggestionExtractor getSuggestionExtractor() {
|
||||
return new YoutubeSuggestionExtractor(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public KioskList getKioskList() throws ExtractionException {
|
||||
KioskList list = new KioskList(getServiceId());
|
||||
KioskList list = new KioskList(this);
|
||||
|
||||
// add kiosks here e.g.:
|
||||
try {
|
||||
|
@ -120,11 +128,10 @@ public class YoutubeService extends StreamingService {
|
|||
@Override
|
||||
public KioskExtractor createNewKiosk(StreamingService streamingService,
|
||||
String url,
|
||||
String id,
|
||||
Localization local)
|
||||
String id)
|
||||
throws ExtractionException {
|
||||
return new YoutubeTrendingExtractor(YoutubeService.this,
|
||||
new YoutubeTrendingLinkHandlerFactory().fromUrl(url), id, local);
|
||||
new YoutubeTrendingLinkHandlerFactory().fromUrl(url), id);
|
||||
}
|
||||
}, new YoutubeTrendingLinkHandlerFactory(), "Trending");
|
||||
list.setDefaultKiosk("Trending");
|
||||
|
@ -146,9 +153,52 @@ public class YoutubeService extends StreamingService {
|
|||
}
|
||||
|
||||
@Override
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler urlIdHandler, Localization localization)
|
||||
public CommentsExtractor getCommentsExtractor(ListLinkHandler urlIdHandler)
|
||||
throws ExtractionException {
|
||||
return new YoutubeCommentsExtractor(this, urlIdHandler, localization);
|
||||
return new YoutubeCommentsExtractor(this, urlIdHandler);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
// Localization
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
// https://www.youtube.com/picker_ajax?action_language_json=1
|
||||
private static final List<Localization> SUPPORTED_LANGUAGES = Localization.listFrom(
|
||||
"en-GB"
|
||||
/*"af", "am", "ar", "az", "be", "bg", "bn", "bs", "ca", "cs", "da", "de",
|
||||
"el", "en", "en-GB", "es", "es-419", "es-US", "et", "eu", "fa", "fi", "fil", "fr",
|
||||
"fr-CA", "gl", "gu", "hi", "hr", "hu", "hy", "id", "is", "it", "iw", "ja",
|
||||
"ka", "kk", "km", "kn", "ko", "ky", "lo", "lt", "lv", "mk", "ml", "mn",
|
||||
"mr", "ms", "my", "ne", "nl", "no", "pa", "pl", "pt", "pt-PT", "ro", "ru",
|
||||
"si", "sk", "sl", "sq", "sr", "sr-Latn", "sv", "sw", "ta", "te", "th", "tr",
|
||||
"uk", "ur", "uz", "vi", "zh-CN", "zh-HK", "zh-TW", "zu"*/
|
||||
);
|
||||
|
||||
// https://www.youtube.com/picker_ajax?action_country_json=1
|
||||
private static final List<ContentCountry> SUPPORTED_COUNTRIES = ContentCountry.listFrom(
|
||||
"AD", "AE", "AF", "AG", "AI", "AL", "AM", "AO", "AQ", "AR", "AS", "AT", "AU", "AW", "AX", "AZ", "BA",
|
||||
"BB", "BD", "BE", "BF", "BG", "BH", "BI", "BJ", "BL", "BM", "BN", "BO", "BQ", "BR", "BS", "BT", "BV",
|
||||
"BW", "BY", "BZ", "CA", "CC", "CD", "CF", "CG", "CH", "CI", "CK", "CL", "CM", "CN", "CO", "CR", "CU",
|
||||
"CV", "CW", "CX", "CY", "CZ", "DE", "DJ", "DK", "DM", "DO", "DZ", "EC", "EE", "EG", "EH", "ER", "ES",
|
||||
"ET", "FI", "FJ", "FK", "FM", "FO", "FR", "GA", "GB", "GD", "GE", "GF", "GG", "GH", "GI", "GL", "GM",
|
||||
"GN", "GP", "GQ", "GR", "GS", "GT", "GU", "GW", "GY", "HK", "HM", "HN", "HR", "HT", "HU", "ID", "IE",
|
||||
"IL", "IM", "IN", "IO", "IQ", "IR", "IS", "IT", "JE", "JM", "JO", "JP", "KE", "KG", "KH", "KI", "KM",
|
||||
"KN", "KP", "KR", "KW", "KY", "KZ", "LA", "LB", "LC", "LI", "LK", "LR", "LS", "LT", "LU", "LV", "LY",
|
||||
"MA", "MC", "MD", "ME", "MF", "MG", "MH", "MK", "ML", "MM", "MN", "MO", "MP", "MQ", "MR", "MS", "MT",
|
||||
"MU", "MV", "MW", "MX", "MY", "MZ", "NA", "NC", "NE", "NF", "NG", "NI", "NL", "NO", "NP", "NR", "NU",
|
||||
"NZ", "OM", "PA", "PE", "PF", "PG", "PH", "PK", "PL", "PM", "PN", "PR", "PS", "PT", "PW", "PY", "QA",
|
||||
"RE", "RO", "RS", "RU", "RW", "SA", "SB", "SC", "SD", "SE", "SG", "SH", "SI", "SJ", "SK", "SL", "SM",
|
||||
"SN", "SO", "SR", "SS", "ST", "SV", "SX", "SY", "SZ", "TC", "TD", "TF", "TG", "TH", "TJ", "TK", "TL",
|
||||
"TM", "TN", "TO", "TR", "TT", "TV", "TW", "TZ", "UA", "UG", "UM", "US", "UY", "UZ", "VA", "VC", "VE",
|
||||
"VG", "VI", "VN", "VU", "WF", "WS", "YE", "YT", "ZA", "ZM", "ZW"
|
||||
);
|
||||
|
||||
@Override
|
||||
public List<Localization> getSupportedLocalizations() {
|
||||
return SUPPORTED_LANGUAGES;
|
||||
}
|
||||
|
||||
public List<ContentCountry> getSupportedCountries() {
|
||||
return SUPPORTED_COUNTRIES;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,29 +1,27 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.channel.ChannelExtractor;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.DonationLinkHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 25.07.16.
|
||||
|
@ -47,20 +45,21 @@ import java.util.ArrayList;
|
|||
|
||||
@SuppressWarnings("WeakerAccess")
|
||||
public class YoutubeChannelExtractor extends ChannelExtractor {
|
||||
/*package-private*/ static final String CHANNEL_URL_BASE = "https://www.youtube.com/channel/";
|
||||
private static final String CHANNEL_FEED_BASE = "https://www.youtube.com/feeds/videos.xml?channel_id=";
|
||||
private static final String CHANNEL_URL_PARAMETERS = "/videos?view=0&flow=list&sort=dd&live_view=10000";
|
||||
|
||||
private Document doc;
|
||||
|
||||
public YoutubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public YoutubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
String channelUrl = super.getUrl() + CHANNEL_URL_PARAMETERS;
|
||||
String pageContent = downloader.download(channelUrl);
|
||||
doc = Jsoup.parse(pageContent, channelUrl);
|
||||
final Response response = downloader.get(channelUrl, getExtractorLocalization());
|
||||
doc = YoutubeParsingHelper.parseAndCheckPage(channelUrl, response);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -72,7 +71,7 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
|||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
try {
|
||||
return "https://www.youtube.com/channel/" + getId();
|
||||
return CHANNEL_URL_BASE + getId();
|
||||
} catch (ParsingException e) {
|
||||
return super.getUrl();
|
||||
}
|
||||
|
@ -81,6 +80,11 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
|||
@Nonnull
|
||||
@Override
|
||||
public String getId() throws ParsingException {
|
||||
try {
|
||||
return doc.select("meta[itemprop=\"channelId\"]").first().attr("content");
|
||||
} catch (Exception ignored) {}
|
||||
|
||||
// fallback method; does not work with channels that have no "Subscribe" button (e.g. EminemVEVO)
|
||||
try {
|
||||
Element element = doc.getElementsByClass("yt-uix-subscription-button").first();
|
||||
if (element == null) element = doc.getElementsByClass("yt-uix-subscription-preferences-button").first();
|
||||
|
@ -134,10 +138,12 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
|||
|
||||
@Override
|
||||
public long getSubscriberCount() throws ParsingException {
|
||||
|
||||
final Element el = doc.select("span[class*=\"yt-subscription-button-subscriber-count\"]").first();
|
||||
if (el != null) {
|
||||
String elTitle = el.attr("title");
|
||||
try {
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(el.text()));
|
||||
return Utils.mixedNumberWordToLong(elTitle);
|
||||
} catch (NumberFormatException e) {
|
||||
throw new ParsingException("Could not get subscriber count", e);
|
||||
}
|
||||
|
@ -178,7 +184,8 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
|||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
JsonObject ajaxJson;
|
||||
try {
|
||||
ajaxJson = JsonParser.object().from(NewPipe.getDownloader().download(pageUrl));
|
||||
final String response = getDownloader().get(pageUrl, getExtractorLocalization()).responseBody();
|
||||
ajaxJson = JsonParser.object().from(response);
|
||||
} catch (JsonParserException pe) {
|
||||
throw new ParsingException("Could not parse json data for next streams", pe);
|
||||
}
|
||||
|
@ -218,9 +225,11 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
|
|||
|
||||
final String uploaderName = getName();
|
||||
final String uploaderUrl = getUrl();
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for (final Element li : element.children()) {
|
||||
if (li.select("div[class=\"feed-item-dismissable\"]").first() != null) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
try {
|
||||
|
|
|
@ -5,6 +5,9 @@ import org.schabi.newpipe.extractor.channel.ChannelInfoItemExtractor;
|
|||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 12.02.17.
|
||||
*
|
||||
|
@ -53,8 +56,26 @@ public class YoutubeChannelInfoItemExtractor implements ChannelInfoItemExtractor
|
|||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
return el.select("a[class*=\"yt-uix-tile-link\"]").first()
|
||||
.attr("abs:href");
|
||||
try {
|
||||
String buttonTrackingUrl = el.select("button[class*=\"yt-uix-button\"]").first()
|
||||
.attr("abs:data-href");
|
||||
|
||||
Pattern channelIdPattern = Pattern.compile("(?:.*?)\\%252Fchannel\\%252F([A-Za-z0-9\\-\\_]+)(?:.*)");
|
||||
Matcher match = channelIdPattern.matcher(buttonTrackingUrl);
|
||||
|
||||
if (match.matches()) {
|
||||
return YoutubeChannelExtractor.CHANNEL_URL_BASE + match.group(1);
|
||||
}
|
||||
} catch(Exception ignored) {}
|
||||
|
||||
// fallback method for channels without "Subscribe" button (or just in case yt changes things)
|
||||
// provides an url with "/user/NAME", inconsistent with stream and channel extractor: tests will fail
|
||||
try {
|
||||
return el.select("a[class*=\"yt-uix-tile-link\"]").first()
|
||||
.attr("abs:href");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get channel url", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -1,36 +1,31 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.Arrays;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
import org.schabi.newpipe.extractor.DownloadRequest;
|
||||
import org.schabi.newpipe.extractor.DownloadResponse;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItem;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import static java.util.Collections.singletonList;
|
||||
|
||||
|
||||
public class YoutubeCommentsExtractor extends CommentsExtractor {
|
||||
|
@ -44,8 +39,8 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||
private String title;
|
||||
private InfoItemsPage<CommentsInfoItem> initPage;
|
||||
|
||||
public YoutubeCommentsExtractor(StreamingService service, ListLinkHandler uiHandler, Localization localization) {
|
||||
super(service, uiHandler, localization);
|
||||
public YoutubeCommentsExtractor(StreamingService service, ListLinkHandler uiHandler) {
|
||||
super(service, uiHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -130,7 +125,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||
|
||||
for(Object c: comments) {
|
||||
if(c instanceof JsonObject) {
|
||||
CommentsInfoItemExtractor extractor = new YoutubeCommentsInfoItemExtractor((JsonObject) c, getUrl());
|
||||
CommentsInfoItemExtractor extractor = new YoutubeCommentsInfoItemExtractor((JsonObject) c, getUrl(), getTimeAgoParser());
|
||||
collector.commit(extractor);
|
||||
}
|
||||
}
|
||||
|
@ -147,12 +142,11 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
|
||||
Map<String, List<String>> requestHeaders = new HashMap<>();
|
||||
requestHeaders.put("User-Agent", Arrays.asList(USER_AGENT));
|
||||
DownloadRequest request = new DownloadRequest(null, requestHeaders);
|
||||
DownloadResponse response = downloader.get(getUrl(), request);
|
||||
String responseBody = response.getResponseBody();
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
final Map<String, List<String>> requestHeaders = new HashMap<>();
|
||||
requestHeaders.put("User-Agent", singletonList(USER_AGENT));
|
||||
final Response response = downloader.get(getUrl(), requestHeaders, getExtractorLocalization());
|
||||
String responseBody = response.responseBody();
|
||||
ytClientVersion = findValue(responseBody, "INNERTUBE_CONTEXT_CLIENT_VERSION\":\"", "\"");
|
||||
ytClientName = Parser.matchGroup1(YT_CLIENT_NAME_PATTERN, responseBody);
|
||||
String commentsTokenInside = findValue(responseBody, "commentSectionRenderer", "}");
|
||||
|
@ -160,6 +154,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||
initPage = getPage(getNextPageUrl(commentsToken));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
return title;
|
||||
|
@ -168,13 +163,11 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
|
|||
private String makeAjaxRequest(String siteUrl) throws IOException, ReCaptchaException {
|
||||
|
||||
Map<String, List<String>> requestHeaders = new HashMap<>();
|
||||
requestHeaders.put("Accept", Arrays.asList("*/*"));
|
||||
requestHeaders.put("User-Agent", Arrays.asList(USER_AGENT));
|
||||
requestHeaders.put("X-YouTube-Client-Version", Arrays.asList(ytClientVersion));
|
||||
requestHeaders.put("X-YouTube-Client-Name", Arrays.asList(ytClientName));
|
||||
DownloadRequest request = new DownloadRequest(null, requestHeaders);
|
||||
|
||||
return NewPipe.getDownloader().get(siteUrl, request).getResponseBody();
|
||||
requestHeaders.put("Accept", singletonList("*/*"));
|
||||
requestHeaders.put("User-Agent", singletonList(USER_AGENT));
|
||||
requestHeaders.put("X-YouTube-Client-Version", singletonList(ytClientVersion));
|
||||
requestHeaders.put("X-YouTube-Client-Name", singletonList(ytClientName));
|
||||
return getDownloader().get(siteUrl, requestHeaders, getExtractorLocalization()).responseBody();
|
||||
}
|
||||
|
||||
private String getDataString(Map<String, String> params) throws UnsupportedEncodingException {
|
||||
|
|
|
@ -1,21 +1,26 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonObject;
|
||||
import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
public class YoutubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
|
||||
|
||||
private final JsonObject json;
|
||||
private final String url;
|
||||
private final TimeAgoParser timeAgoParser;
|
||||
|
||||
public YoutubeCommentsInfoItemExtractor(JsonObject json, String url) {
|
||||
public YoutubeCommentsInfoItemExtractor(JsonObject json, String url, TimeAgoParser timeAgoParser) {
|
||||
this.json = json;
|
||||
this.url = url;
|
||||
this.timeAgoParser = timeAgoParser;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -43,7 +48,7 @@ public class YoutubeCommentsInfoItemExtractor implements CommentsInfoItemExtract
|
|||
}
|
||||
|
||||
@Override
|
||||
public String getPublishedTime() throws ParsingException {
|
||||
public String getTextualPublishedTime() throws ParsingException {
|
||||
try {
|
||||
return YoutubeCommentsExtractor.getYoutubeText(JsonUtils.getObject(json, "publishedTimeText"));
|
||||
} catch (Exception e) {
|
||||
|
@ -51,8 +56,19 @@ public class YoutubeCommentsInfoItemExtractor implements CommentsInfoItemExtract
|
|||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Integer getLikeCount() throws ParsingException {
|
||||
public DateWrapper getPublishedTime() throws ParsingException {
|
||||
String textualPublishedTime = getTextualPublishedTime();
|
||||
if (timeAgoParser != null && textualPublishedTime != null && !textualPublishedTime.isEmpty()) {
|
||||
return timeAgoParser.parse(textualPublishedTime);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getLikeCount() throws ParsingException {
|
||||
try {
|
||||
return JsonUtils.getNumber(json, "likeCount").intValue();
|
||||
} catch (Exception e) {
|
||||
|
|
|
@ -6,18 +6,19 @@ import com.grack.nanojson.JsonParserException;
|
|||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandlerFactory;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
@ -29,14 +30,15 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
|
||||
private Document doc;
|
||||
|
||||
public YoutubePlaylistExtractor(StreamingService service, ListLinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public YoutubePlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
String pageContent = downloader.download(getUrl());
|
||||
doc = Jsoup.parse(pageContent, getUrl());
|
||||
final String url = getUrl();
|
||||
final Response response = downloader.get(url, getExtractorLocalization());
|
||||
doc = YoutubeParsingHelper.parseAndCheckPage(url, response);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -50,7 +52,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
try {
|
||||
return doc.select("div[id=pl-header] h1[class=pl-header-title]").first().text();
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get playlist name");
|
||||
throw new ParsingException("Could not get playlist name", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -59,7 +61,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
try {
|
||||
return doc.select("div[id=pl-header] div[class=pl-header-thumb] img").first().attr("abs:src");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get playlist thumbnail");
|
||||
throw new ParsingException("Could not get playlist thumbnail", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -72,9 +74,11 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
try {
|
||||
return doc.select("ul[class=\"pl-header-details\"] li").first().select("a").first().attr("abs:href");
|
||||
return YoutubeChannelExtractor.CHANNEL_URL_BASE +
|
||||
doc.select("button[class*=\"yt-uix-subscription-button\"]")
|
||||
.first().attr("data-channel-external-id");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get playlist uploader name");
|
||||
throw new ParsingException("Could not get playlist uploader url", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -83,7 +87,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
try {
|
||||
return doc.select("span[class=\"qualified-channel-title-text\"]").first().select("a").first().text();
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get playlist uploader name");
|
||||
throw new ParsingException("Could not get playlist uploader name", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -92,7 +96,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
try {
|
||||
return doc.select("div[id=gh-banner] img[class=channel-header-profile-image]").first().attr("abs:src");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get playlist uploader avatar");
|
||||
throw new ParsingException("Could not get playlist uploader avatar", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -137,7 +141,8 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
JsonObject pageJson;
|
||||
try {
|
||||
pageJson = JsonParser.object().from(getDownloader().download(pageUrl));
|
||||
final String responseBody = getDownloader().get(pageUrl, getExtractorLocalization()).responseBody();
|
||||
pageJson = JsonParser.object().from(responseBody);
|
||||
} catch (JsonParserException pe) {
|
||||
throw new ParsingException("Could not parse ajax json", pe);
|
||||
}
|
||||
|
@ -183,12 +188,14 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
}
|
||||
|
||||
final LinkHandlerFactory streamLinkHandlerFactory = getService().getStreamLHFactory();
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for (final Element li : element.children()) {
|
||||
if(isDeletedItem(li)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
||||
public Element uploaderLink;
|
||||
|
||||
@Override
|
||||
|
@ -248,11 +255,13 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
|
|||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
// this url is not always in the form "/channel/..."
|
||||
// sometimes Youtube provides urls in the from "/user/..."
|
||||
return getUploaderLink().attr("abs:href");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return "";
|
||||
}
|
||||
|
||||
|
|
|
@ -49,10 +49,11 @@ public class YoutubePlaylistInfoItemExtractor implements PlaylistInfoItemExtract
|
|||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
try {
|
||||
final Element div = el.select("div[class=\"yt-lockup-meta\"]").first();
|
||||
final Element a = el.select("div[class=\"yt-lockup-meta\"]")
|
||||
.select("ul[class=\"yt-lockup-meta-info\"]")
|
||||
.select("li").select("a").first();
|
||||
|
||||
if(div != null) {
|
||||
final Element a = div.select("a").first();
|
||||
if(a != null) {
|
||||
return a.attr("abs:href");
|
||||
}
|
||||
|
||||
|
|
|
@ -3,15 +3,17 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
|
||||
import org.schabi.newpipe.extractor.search.SearchExtractor;
|
||||
import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
@ -44,26 +46,21 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
|||
|
||||
private Document doc;
|
||||
|
||||
public YoutubeSearchExtractor(StreamingService service,
|
||||
SearchQueryHandler linkHandler,
|
||||
Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public YoutubeSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
final String site;
|
||||
final String url = getUrl();
|
||||
//String url = builder.build().toString();
|
||||
//if we've been passed a valid language code, append it to the URL
|
||||
site = downloader.download(url, getLocalization());
|
||||
|
||||
doc = Jsoup.parse(site, url);
|
||||
final Response response = downloader.get(url, getExtractorLocalization());
|
||||
doc = YoutubeParsingHelper.parseAndCheckPage(url, response);
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
return super.getUrl() + "&gl="+ getLocalization().getCountry();
|
||||
return super.getUrl() + "&gl=" + getExtractorContentCountry().getCountryCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -89,8 +86,8 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
|||
|
||||
@Override
|
||||
public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
|
||||
String site = getDownloader().download(pageUrl);
|
||||
doc = Jsoup.parse(site, pageUrl);
|
||||
final String response = getDownloader().get(pageUrl, getExtractorLocalization()).responseBody();
|
||||
doc = Jsoup.parse(response, pageUrl);
|
||||
|
||||
return new InfoItemsPage<>(collectItems(doc), getNextPageUrlFromCurrentUrl(pageUrl));
|
||||
}
|
||||
|
@ -111,6 +108,7 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
|||
InfoItemsSearchCollector collector = getInfoItemSearchCollector();
|
||||
|
||||
Element list = doc.select("ol[class=\"item-section\"]").first();
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for (Element item : list.children()) {
|
||||
/* First we need to determine which kind of item we are working with.
|
||||
|
@ -131,7 +129,7 @@ public class YoutubeSearchExtractor extends SearchExtractor {
|
|||
|
||||
// video item type
|
||||
} else if ((el = item.select("div[class*=\"yt-lockup-video\"]").first()) != null) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(el));
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(el, timeAgoParser));
|
||||
} else if ((el = item.select("div[class*=\"yt-lockup-channel\"]").first()) != null) {
|
||||
collector.commit(new YoutubeChannelInfoItemExtractor(el));
|
||||
} else if ((el = item.select("div[class*=\"yt-lockup-playlist\"]").first()) != null &&
|
||||
|
|
|
@ -11,15 +11,23 @@ import org.jsoup.select.Elements;
|
|||
import org.mozilla.javascript.Context;
|
||||
import org.mozilla.javascript.Function;
|
||||
import org.mozilla.javascript.ScriptableObject;
|
||||
import org.schabi.newpipe.extractor.*;
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Request;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.youtube.ItagItem;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.*;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.utils.JsonUtils;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
|
@ -30,6 +38,8 @@ import java.io.UnsupportedEncodingException;
|
|||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.*;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 06.08.15.
|
||||
|
@ -64,12 +74,6 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
public class GemaException extends ContentNotAvailableException {
|
||||
GemaException(String message) {
|
||||
super(message);
|
||||
}
|
||||
}
|
||||
|
||||
public class SubtitlesException extends ContentNotAvailableException {
|
||||
SubtitlesException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
|
@ -83,14 +87,15 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
private JsonObject playerArgs;
|
||||
@Nonnull
|
||||
private final Map<String, String> videoInfoPage = new HashMap<>();
|
||||
private JsonObject playerResponse;
|
||||
|
||||
@Nonnull
|
||||
private List<SubtitlesInfo> subtitlesInfos = new ArrayList<>();
|
||||
|
||||
private boolean isAgeRestricted;
|
||||
|
||||
public YoutubeStreamExtractor(StreamingService service, LinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public YoutubeStreamExtractor(StreamingService service, LinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
/*//////////////////////////////////////////////////////////////////////////
|
||||
|
@ -116,10 +121,12 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
return name;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
assertPageFetched();
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
if (getStreamType().equals(StreamType.LIVE_STREAM)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
return doc.select("meta[itemprop=datePublished]").attr(CONTENT);
|
||||
} catch (Exception e) {//todo: add fallback method
|
||||
|
@ -127,6 +134,17 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
final String textualUploadDate = getTextualUploadDate();
|
||||
|
||||
if (textualUploadDate == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return new DateWrapper(YoutubeParsingHelper.parseDateFrom(textualUploadDate));
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
|
@ -162,14 +180,54 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
// onclick="yt.www.watch.player.seekTo(0*3600+00*60+00);return false;"
|
||||
// :00 is NOT recognized as a timestamp in description or comments.
|
||||
// 0:00 is recognized in both description and comments.
|
||||
// https://www.youtube.com/watch?v=4cccfDXu1vA
|
||||
private final static Pattern DESCRIPTION_TIMESTAMP_ONCLICK_REGEX = Pattern.compile(
|
||||
"seekTo\\("
|
||||
+ "(?:(\\d+)\\*3600\\+)?" // hours?
|
||||
+ "(\\d+)\\*60\\+" // minutes
|
||||
+ "(\\d+)" // seconds
|
||||
+ "\\)");
|
||||
|
||||
@SafeVarargs
|
||||
private static <T> T coalesce(T... args) {
|
||||
for (T arg : args) {
|
||||
if (arg != null) return arg;
|
||||
}
|
||||
throw new IllegalArgumentException("all arguments to coalesce() were null");
|
||||
}
|
||||
|
||||
private String parseHtmlAndGetFullLinks(String descriptionHtml)
|
||||
throws MalformedURLException, UnsupportedEncodingException, ParsingException {
|
||||
final Document description = Jsoup.parse(descriptionHtml, getUrl());
|
||||
for(Element a : description.select("a")) {
|
||||
final URL redirectLink = new URL(
|
||||
a.attr("abs:href"));
|
||||
final String queryString = redirectLink.getQuery();
|
||||
if(queryString != null) {
|
||||
final String rawUrl = a.attr("abs:href");
|
||||
final URL redirectLink = new URL(rawUrl);
|
||||
|
||||
final Matcher onClickTimestamp;
|
||||
final String queryString;
|
||||
if ((onClickTimestamp = DESCRIPTION_TIMESTAMP_ONCLICK_REGEX.matcher(a.attr("onclick")))
|
||||
.find()) {
|
||||
a.removeAttr("onclick");
|
||||
|
||||
String hours = coalesce(onClickTimestamp.group(1), "0");
|
||||
String minutes = onClickTimestamp.group(2);
|
||||
String seconds = onClickTimestamp.group(3);
|
||||
|
||||
int timestamp = 0;
|
||||
timestamp += Integer.parseInt(hours) * 3600;
|
||||
timestamp += Integer.parseInt(minutes) * 60;
|
||||
timestamp += Integer.parseInt(seconds);
|
||||
|
||||
String setTimestamp = "&t=" + timestamp;
|
||||
|
||||
// Even after clicking https://youtu.be/...?t=6,
|
||||
// getUrl() is https://www.youtube.com/watch?v=..., never youtu.be, never &t=.
|
||||
a.attr("href", getUrl() + setTimestamp);
|
||||
|
||||
} else if((queryString = redirectLink.getQuery()) != null) {
|
||||
// if the query string is null we are not dealing with a redirect link,
|
||||
// so we don't need to override it.
|
||||
final String link =
|
||||
|
@ -179,11 +237,15 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
// if link is null the a tag is a hashtag.
|
||||
// They refer to the youtube search. We do not handle them.
|
||||
a.text(link);
|
||||
a.attr("href", link);
|
||||
} else if(redirectLink.toString().contains("https://www.youtube.com/")) {
|
||||
a.text(redirectLink.toString());
|
||||
a.attr("href", redirectLink.toString());
|
||||
}
|
||||
} else if(redirectLink.toString().contains("https://www.youtube.com/")) {
|
||||
descriptionHtml = descriptionHtml.replace(rawUrl, redirectLink.toString());
|
||||
a.text(redirectLink.toString());
|
||||
a.attr("href", redirectLink.toString());
|
||||
}
|
||||
}
|
||||
return description.select("body").first().html();
|
||||
|
@ -206,29 +268,26 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
@Override
|
||||
public long getLength() throws ParsingException {
|
||||
assertPageFetched();
|
||||
if(playerArgs != null) {
|
||||
try {
|
||||
long returnValue = Long.parseLong(playerArgs.get("length_seconds") + "");
|
||||
if (returnValue >= 0) return returnValue;
|
||||
} catch (Exception ignored) {
|
||||
// Try other method...
|
||||
}
|
||||
}
|
||||
|
||||
String lengthString = videoInfoPage.get("length_seconds");
|
||||
// try getting duration from playerargs
|
||||
try {
|
||||
return Long.parseLong(lengthString);
|
||||
} catch (Exception ignored) {
|
||||
// Try other method...
|
||||
}
|
||||
|
||||
// TODO: 25.11.17 Implement a way to get the length for age restricted videos #44
|
||||
try {
|
||||
// Fallback to HTML method
|
||||
return Long.parseLong(doc.select("div[class~=\"ytp-progress-bar\"][role=\"slider\"]").first()
|
||||
.attr("aria-valuemax"));
|
||||
String durationMs = playerResponse
|
||||
.getObject("streamingData")
|
||||
.getArray("formats")
|
||||
.getObject(0)
|
||||
.getString("approxDurationMs");
|
||||
return Long.parseLong(durationMs)/1000;
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get video length", e);
|
||||
}
|
||||
|
||||
//try getting value from age gated video
|
||||
try {
|
||||
String duration = playerResponse
|
||||
.getObject("videoDetails")
|
||||
.getString("lengthSeconds");
|
||||
return Long.parseLong(duration);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Every methode to get the duration has failed: ", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -246,12 +305,65 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
public long getViewCount() throws ParsingException {
|
||||
assertPageFetched();
|
||||
try {
|
||||
if (getStreamType().equals(StreamType.LIVE_STREAM)) {
|
||||
return getLiveStreamWatchingCount();
|
||||
}
|
||||
|
||||
return Long.parseLong(doc.select("meta[itemprop=interactionCount]").attr(CONTENT));
|
||||
} catch (Exception e) {//todo: find fallback method
|
||||
throw new ParsingException("Could not get number of views", e);
|
||||
}
|
||||
}
|
||||
|
||||
private long getLiveStreamWatchingCount() throws ExtractionException, IOException, JsonParserException {
|
||||
// https://www.youtube.com/youtubei/v1/updated_metadata?alt=json&key=
|
||||
String innerTubeKey = null, clientVersion = null;
|
||||
if (playerArgs != null && !playerArgs.isEmpty()) {
|
||||
innerTubeKey = playerArgs.getString("innertube_api_key");
|
||||
clientVersion = playerArgs.getString("innertube_context_client_version");
|
||||
} else if (!videoInfoPage.isEmpty()) {
|
||||
innerTubeKey = videoInfoPage.get("innertube_api_key");
|
||||
clientVersion = videoInfoPage.get("innertube_context_client_version");
|
||||
}
|
||||
|
||||
if (innerTubeKey == null || innerTubeKey.isEmpty()) {
|
||||
throw new ExtractionException("Couldn't get innerTube key");
|
||||
}
|
||||
|
||||
if (clientVersion == null || clientVersion.isEmpty()) {
|
||||
throw new ExtractionException("Couldn't get innerTube client version");
|
||||
}
|
||||
|
||||
final String metadataUrl = "https://www.youtube.com/youtubei/v1/updated_metadata?alt=json&key=" + innerTubeKey;
|
||||
final byte[] dataBody = ("{\"context\":{\"client\":{\"clientName\":1,\"clientVersion\":\"" + clientVersion + "\"}}" +
|
||||
",\"videoId\":\"" + getId() + "\"}").getBytes("UTF-8");
|
||||
final Response response = getDownloader().execute(Request.newBuilder()
|
||||
.post(metadataUrl, dataBody)
|
||||
.addHeader("Content-Type", "application/json")
|
||||
.build());
|
||||
final JsonObject jsonObject = JsonParser.object().from(response.responseBody());
|
||||
|
||||
for (Object actionEntry : jsonObject.getArray("actions")) {
|
||||
if (!(actionEntry instanceof JsonObject)) continue;
|
||||
final JsonObject entry = (JsonObject) actionEntry;
|
||||
|
||||
final JsonObject updateViewershipAction = entry.getObject("updateViewershipAction", null);
|
||||
if (updateViewershipAction == null) continue;
|
||||
|
||||
final JsonArray viewCountRuns = JsonUtils.getArray(updateViewershipAction, "viewership.videoViewCountRenderer.viewCount.runs");
|
||||
if (viewCountRuns.isEmpty()) continue;
|
||||
|
||||
final JsonObject textObject = viewCountRuns.getObject(0);
|
||||
if (!textObject.has("text")) {
|
||||
throw new ExtractionException("Response don't have \"text\" element");
|
||||
}
|
||||
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(textObject.getString("text")));
|
||||
}
|
||||
|
||||
throw new ExtractionException("Could not find correct results in response");
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getLikeCount() throws ParsingException {
|
||||
assertPageFetched();
|
||||
|
@ -385,31 +497,24 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
@Override
|
||||
public String getHlsUrl() throws ParsingException {
|
||||
assertPageFetched();
|
||||
try {
|
||||
String hlsvp = "";
|
||||
if (playerArgs != null) {
|
||||
if( playerArgs.isString("hlsvp") ) {
|
||||
hlsvp = playerArgs.getString("hlsvp", "");
|
||||
}else {
|
||||
hlsvp = JsonParser.object()
|
||||
.from(playerArgs.getString("player_response", "{}"))
|
||||
.getObject("streamingData", new JsonObject())
|
||||
.getString("hlsManifestUrl", "");
|
||||
}
|
||||
}
|
||||
|
||||
return hlsvp;
|
||||
try {
|
||||
return playerResponse.getObject("streamingData").getString("hlsManifestUrl");
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get hls manifest url", e);
|
||||
if (playerArgs != null && playerArgs.isString("hlsvp")) {
|
||||
return playerArgs.getString("hlsvp");
|
||||
} else {
|
||||
throw new ParsingException("Could not get hls manifest url", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<AudioStream> getAudioStreams() throws IOException, ExtractionException {
|
||||
public List<AudioStream> getAudioStreams() throws ExtractionException {
|
||||
assertPageFetched();
|
||||
List<AudioStream> audioStreams = new ArrayList<>();
|
||||
try {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(ADAPTIVE_FMTS, ItagItem.ItagType.AUDIO).entrySet()) {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(ADAPTIVE_FORMATS, ItagItem.ItagType.AUDIO).entrySet()) {
|
||||
ItagItem itag = entry.getValue();
|
||||
|
||||
AudioStream audioStream = new AudioStream(entry.getKey(), itag.getMediaFormat(), itag.avgBitrate);
|
||||
|
@ -425,11 +530,11 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public List<VideoStream> getVideoStreams() throws IOException, ExtractionException {
|
||||
public List<VideoStream> getVideoStreams() throws ExtractionException {
|
||||
assertPageFetched();
|
||||
List<VideoStream> videoStreams = new ArrayList<>();
|
||||
try {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(URL_ENCODED_FMT_STREAM_MAP, ItagItem.ItagType.VIDEO).entrySet()) {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(FORMATS, ItagItem.ItagType.VIDEO).entrySet()) {
|
||||
ItagItem itag = entry.getValue();
|
||||
|
||||
VideoStream videoStream = new VideoStream(entry.getKey(), itag.getMediaFormat(), itag.resolutionString);
|
||||
|
@ -449,7 +554,7 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
assertPageFetched();
|
||||
List<VideoStream> videoOnlyStreams = new ArrayList<>();
|
||||
try {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(ADAPTIVE_FMTS, ItagItem.ItagType.VIDEO_ONLY).entrySet()) {
|
||||
for (Map.Entry<String, ItagItem> entry : getItags(ADAPTIVE_FORMATS, ItagItem.ItagType.VIDEO_ONLY).entrySet()) {
|
||||
ItagItem itag = entry.getValue();
|
||||
|
||||
VideoStream videoStream = new VideoStream(entry.getKey(), itag.getMediaFormat(), itag.resolutionString, true);
|
||||
|
@ -486,7 +591,7 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
assertPageFetched();
|
||||
try {
|
||||
if (playerArgs != null && (playerArgs.has("ps") && playerArgs.get("ps").toString().equals("live") ||
|
||||
playerArgs.get(URL_ENCODED_FMT_STREAM_MAP).toString().isEmpty())) {
|
||||
(!playerResponse.getObject("streamingData").has(FORMATS)))) {
|
||||
return StreamType.LIVE_STREAM;
|
||||
}
|
||||
} catch (Exception e) {
|
||||
|
@ -500,13 +605,14 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
assertPageFetched();
|
||||
try {
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
Elements watch = doc.select("div[class=\"watch-sidebar-section\"]");
|
||||
if (watch.size() < 1) {
|
||||
return null;// prevent the snackbar notification "report error" on age-restricted videos
|
||||
}
|
||||
|
||||
collector.commit(extractVideoPreviewInfo(watch.first().select("li").first()));
|
||||
|
||||
collector.commit(extractVideoPreviewInfo(watch.first().select("li").first(), timeAgoParser));
|
||||
return collector.getItems().get(0);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get next video", e);
|
||||
|
@ -518,12 +624,14 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
assertPageFetched();
|
||||
try {
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
Element ul = doc.select("ul[id=\"watch-related\"]").first();
|
||||
if (ul != null) {
|
||||
for (Element li : ul.children()) {
|
||||
// first check if we have a playlist. If so leave them out
|
||||
if (li.select("a[class*=\"content-link\"]").first() != null) {
|
||||
collector.commit(extractVideoPreviewInfo(li));
|
||||
collector.commit(extractVideoPreviewInfo(li, timeAgoParser));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -538,21 +646,20 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
*/
|
||||
@Override
|
||||
public String getErrorMessage() {
|
||||
String errorMessage = doc.select("h1[id=\"unavailable-message\"]").first().text();
|
||||
StringBuilder errorReason;
|
||||
Element errorElement = doc.select("h1[id=\"unavailable-message\"]").first();
|
||||
|
||||
if (errorMessage == null || errorMessage.isEmpty()) {
|
||||
if (errorElement == null) {
|
||||
errorReason = null;
|
||||
} else if (errorMessage.contains("GEMA")) {
|
||||
// Gema sometimes blocks youtube music content in germany:
|
||||
// https://www.gema.de/en/
|
||||
// Detailed description:
|
||||
// https://en.wikipedia.org/wiki/GEMA_%28German_organization%29
|
||||
errorReason = new StringBuilder("GEMA");
|
||||
} else {
|
||||
errorReason = new StringBuilder(errorMessage);
|
||||
errorReason.append(" ");
|
||||
errorReason.append(doc.select("[id=\"unavailable-submessage\"]").first().text());
|
||||
String errorMessage = errorElement.text();
|
||||
if (errorMessage == null || errorMessage.isEmpty()) {
|
||||
errorReason = null;
|
||||
} else {
|
||||
errorReason = new StringBuilder(errorMessage);
|
||||
errorReason.append(" ");
|
||||
errorReason.append(doc.select("[id=\"unavailable-submessage\"]").first().text());
|
||||
}
|
||||
}
|
||||
|
||||
return errorReason != null ? errorReason.toString() : null;
|
||||
|
@ -562,8 +669,8 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
// Fetch page
|
||||
//////////////////////////////////////////////////////////////////////////*/
|
||||
|
||||
private static final String URL_ENCODED_FMT_STREAM_MAP = "url_encoded_fmt_stream_map";
|
||||
private static final String ADAPTIVE_FMTS = "adaptive_fmts";
|
||||
private static final String FORMATS = "formats";
|
||||
private static final String ADAPTIVE_FORMATS = "adaptiveFormats";
|
||||
private static final String HTTPS = "https:";
|
||||
private static final String CONTENT = "content";
|
||||
private static final String DECRYPTION_FUNC_NAME = "decrypt";
|
||||
|
@ -581,34 +688,29 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
|
||||
private String pageHtml = null;
|
||||
|
||||
private String getPageHtml(Downloader downloader) throws IOException, ExtractionException {
|
||||
final String verifiedUrl = getUrl() + VERIFIED_URL_PARAMS;
|
||||
if (pageHtml == null) {
|
||||
pageHtml = downloader.download(verifiedUrl);
|
||||
}
|
||||
return pageHtml;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
final String pageContent = getPageHtml(downloader);
|
||||
doc = Jsoup.parse(pageContent, getUrl());
|
||||
final String verifiedUrl = getUrl() + VERIFIED_URL_PARAMS;
|
||||
final Response response = downloader.get(verifiedUrl, getExtractorLocalization());
|
||||
pageHtml = response.responseBody();
|
||||
doc = YoutubeParsingHelper.parseAndCheckPage(verifiedUrl, response);
|
||||
|
||||
final String playerUrl;
|
||||
// Check if the video is age restricted
|
||||
if (pageContent.contains("<meta property=\"og:restrictions:age")) {
|
||||
if (!doc.select("meta[property=\"og:restrictions:age\"]").isEmpty()) {
|
||||
final EmbeddedInfo info = getEmbeddedInfo();
|
||||
final String videoInfoUrl = getVideoInfoUrl(getId(), info.sts);
|
||||
final String infoPageResponse = downloader.download(videoInfoUrl);
|
||||
final String infoPageResponse = downloader.get(videoInfoUrl, getExtractorLocalization()).responseBody();
|
||||
videoInfoPage.putAll(Parser.compatParseMap(infoPageResponse));
|
||||
playerUrl = info.url;
|
||||
isAgeRestricted = true;
|
||||
} else {
|
||||
final JsonObject ytPlayerConfig = getPlayerConfig(pageContent);
|
||||
final JsonObject ytPlayerConfig = getPlayerConfig();
|
||||
playerArgs = getPlayerArgs(ytPlayerConfig);
|
||||
playerUrl = getPlayerUrl(ytPlayerConfig);
|
||||
isAgeRestricted = false;
|
||||
}
|
||||
playerResponse = getPlayerResponse();
|
||||
|
||||
if (decryptionCode.isEmpty()) {
|
||||
decryptionCode = loadDecryptionCode(playerUrl);
|
||||
|
@ -619,15 +721,13 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
private JsonObject getPlayerConfig(String pageContent) throws ParsingException {
|
||||
private JsonObject getPlayerConfig() throws ParsingException {
|
||||
try {
|
||||
String ytPlayerConfigRaw = Parser.matchGroup1("ytplayer.config\\s*=\\s*(\\{.*?\\});", pageContent);
|
||||
String ytPlayerConfigRaw = Parser.matchGroup1("ytplayer.config\\s*=\\s*(\\{.*?\\});", pageHtml);
|
||||
return JsonParser.object().from(ytPlayerConfigRaw);
|
||||
} catch (Parser.RegexException e) {
|
||||
String errorReason = getErrorMessage();
|
||||
switch (errorReason) {
|
||||
case "GEMA":
|
||||
throw new GemaException(errorReason);
|
||||
case "":
|
||||
throw new ContentNotAvailableException("Content not available: player config empty", e);
|
||||
default:
|
||||
|
@ -670,12 +770,26 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
private JsonObject getPlayerResponse() throws ParsingException {
|
||||
try {
|
||||
String playerResponseStr;
|
||||
if(playerArgs != null) {
|
||||
playerResponseStr = playerArgs.getString("player_response");
|
||||
} else {
|
||||
playerResponseStr = videoInfoPage.get("player_response");
|
||||
}
|
||||
return JsonParser.object().from(playerResponseStr);
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse yt player response", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
private EmbeddedInfo getEmbeddedInfo() throws ParsingException, ReCaptchaException {
|
||||
try {
|
||||
final Downloader downloader = NewPipe.getDownloader();
|
||||
final String embedUrl = "https://www.youtube.com/embed/" + getId();
|
||||
final String embedPageContent = downloader.download(embedUrl);
|
||||
final String embedPageContent = downloader.get(embedUrl, getExtractorLocalization()).responseBody();
|
||||
|
||||
// Get player url
|
||||
final String assetsPattern = "\"assets\":.+?\"js\":\\s*(\"[^\"]+\")";
|
||||
|
@ -685,16 +799,19 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
playerUrl = HTTPS + playerUrl;
|
||||
}
|
||||
|
||||
// Get embed sts
|
||||
final String stsPattern = "\"sts\"\\s*:\\s*(\\d+)";
|
||||
final String sts = Parser.matchGroup1(stsPattern, embedPageContent);
|
||||
try {
|
||||
// Get embed sts
|
||||
final String stsPattern = "\"sts\"\\s*:\\s*(\\d+)";
|
||||
final String sts = Parser.matchGroup1(stsPattern, embedPageContent);
|
||||
return new EmbeddedInfo(playerUrl, sts);
|
||||
} catch (Exception i) {
|
||||
// if it failes we simply reply with no sts as then it does not seem to be necessary
|
||||
return new EmbeddedInfo(playerUrl, "");
|
||||
}
|
||||
|
||||
return new EmbeddedInfo(playerUrl, sts);
|
||||
} catch (IOException e) {
|
||||
throw new ParsingException(
|
||||
"Could load decryption code form restricted video for the Youtube service.", e);
|
||||
} catch (ReCaptchaException e) {
|
||||
throw new ReCaptchaException("reCaptcha Challenge requested");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -707,7 +824,7 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
playerUrl = "https://youtube.com" + playerUrl;
|
||||
}
|
||||
|
||||
final String playerCode = downloader.download(playerUrl);
|
||||
final String playerCode = downloader.get(playerUrl, getExtractorLocalization()).responseBody();
|
||||
final String decryptionFunctionName = getDecryptionFuncName(playerCode);
|
||||
|
||||
final String functionPattern = "("
|
||||
|
@ -776,25 +893,12 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
// If the video is age restricted getPlayerConfig will fail
|
||||
if(isAgeRestricted) return Collections.emptyList();
|
||||
|
||||
final JsonObject playerConfig;
|
||||
try {
|
||||
playerConfig = getPlayerConfig(getPageHtml(NewPipe.getDownloader()));
|
||||
} catch (IOException | ExtractionException e) {
|
||||
throw new SubtitlesException("Unable to download player configs", e);
|
||||
}
|
||||
final String playerResponse = playerConfig.getObject("args", new JsonObject())
|
||||
.getString("player_response");
|
||||
|
||||
final JsonObject captions;
|
||||
try {
|
||||
if (playerResponse == null || !JsonParser.object().from(playerResponse).has("captions")) {
|
||||
// Captions does not exist
|
||||
return Collections.emptyList();
|
||||
}
|
||||
captions = JsonParser.object().from(playerResponse).getObject("captions");
|
||||
} catch (JsonParserException e) {
|
||||
throw new SubtitlesException("Unable to parse subtitles listing", e);
|
||||
if (!playerResponse.has("captions")) {
|
||||
// Captions does not exist
|
||||
return Collections.emptyList();
|
||||
}
|
||||
captions = playerResponse.getObject("captions");
|
||||
|
||||
final JsonObject renderer = captions.getObject("playerCaptionsTracklistRenderer", new JsonObject());
|
||||
final JsonArray captionsArray = renderer.getArray("captionTracks", new JsonArray());
|
||||
|
@ -863,39 +967,36 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
"&sts=" + sts + "&ps=default&gl=US&hl=en";
|
||||
}
|
||||
|
||||
private Map<String, ItagItem> getItags(String encodedUrlMapKey, ItagItem.ItagType itagTypeWanted) throws ParsingException {
|
||||
private Map<String, ItagItem> getItags(String streamingDataKey, ItagItem.ItagType itagTypeWanted) throws ParsingException {
|
||||
Map<String, ItagItem> urlAndItags = new LinkedHashMap<>();
|
||||
|
||||
String encodedUrlMap = "";
|
||||
if (playerArgs != null && playerArgs.isString(encodedUrlMapKey)) {
|
||||
encodedUrlMap = playerArgs.getString(encodedUrlMapKey, "");
|
||||
} else if (videoInfoPage.containsKey(encodedUrlMapKey)) {
|
||||
encodedUrlMap = videoInfoPage.get(encodedUrlMapKey);
|
||||
JsonObject streamingData = playerResponse.getObject("streamingData");
|
||||
if (!streamingData.has(streamingDataKey)) {
|
||||
return urlAndItags;
|
||||
}
|
||||
|
||||
for (String url_data_str : encodedUrlMap.split(",")) {
|
||||
try {
|
||||
// This loop iterates through multiple streams, therefore tags
|
||||
// is related to one and the same stream at a time.
|
||||
Map<String, String> tags = Parser.compatParseMap(
|
||||
org.jsoup.parser.Parser.unescapeEntities(url_data_str, true));
|
||||
JsonArray formats = streamingData.getArray(streamingDataKey);
|
||||
for (int i = 0; i != formats.size(); ++i) {
|
||||
JsonObject formatData = formats.getObject(i);
|
||||
int itag = formatData.getInt("itag");
|
||||
|
||||
int itag = Integer.parseInt(tags.get("itag"));
|
||||
|
||||
if (ItagItem.isSupported(itag)) {
|
||||
if (ItagItem.isSupported(itag)) {
|
||||
try {
|
||||
ItagItem itagItem = ItagItem.getItag(itag);
|
||||
if (itagItem.itagType == itagTypeWanted) {
|
||||
String streamUrl = tags.get("url");
|
||||
// if video has a signature: decrypt it and add it to the url
|
||||
if (tags.get("s") != null) {
|
||||
streamUrl = streamUrl + "&signature=" + decryptSignature(tags.get("s"), decryptionCode);
|
||||
String streamUrl;
|
||||
if (formatData.has("url")) {
|
||||
streamUrl = formatData.getString("url");
|
||||
} else {
|
||||
// this url has an encrypted signature
|
||||
Map<String, String> cipher = Parser.compatParseMap(formatData.getString("cipher"));
|
||||
streamUrl = cipher.get("url") + "&" + cipher.get("sp") + "=" + decryptSignature(cipher.get("s"), decryptionCode);
|
||||
}
|
||||
|
||||
urlAndItags.put(streamUrl, itagItem);
|
||||
}
|
||||
} catch (UnsupportedEncodingException ignored) {
|
||||
|
||||
}
|
||||
} catch (DecryptException e) {
|
||||
throw e;
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -906,8 +1007,8 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
* Provides information about links to other videos on the video page, such as related videos.
|
||||
* This is encapsulated in a StreamInfoItem object, which is a subset of the fields in a full StreamInfo.
|
||||
*/
|
||||
private StreamInfoItemExtractor extractVideoPreviewInfo(final Element li) {
|
||||
return new YoutubeStreamInfoItemExtractor(li) {
|
||||
private StreamInfoItemExtractor extractVideoPreviewInfo(final Element li, final TimeAgoParser timeAgoParser) {
|
||||
return new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
||||
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
|
@ -934,23 +1035,10 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
return "";
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getViewCount() throws ParsingException {
|
||||
try {
|
||||
if (getStreamType() == StreamType.LIVE_STREAM) return -1;
|
||||
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(
|
||||
li.select("span.view-count").first().text()));
|
||||
} catch (Exception e) {
|
||||
//related videos sometimes have no view count
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getThumbnailUrl() throws ParsingException {
|
||||
Element img = li.select("img").first();
|
||||
|
@ -968,4 +1056,61 @@ public class YoutubeStreamExtractor extends StreamExtractor {
|
|||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
@Override
|
||||
public List<Frameset> getFrames() throws ExtractionException {
|
||||
try {
|
||||
final String script = doc.select("#player-api").first().siblingElements().select("script").html();
|
||||
int p = script.indexOf("ytplayer.config");
|
||||
if (p == -1) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
p = script.indexOf('{', p);
|
||||
int e = script.indexOf("ytplayer.load", p);
|
||||
if (e == -1) {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
JsonObject jo = JsonParser.object().from(script.substring(p, e - 1));
|
||||
final String resp = jo.getObject("args").getString("player_response");
|
||||
jo = JsonParser.object().from(resp);
|
||||
final String[] spec = jo.getObject("storyboards").getObject("playerStoryboardSpecRenderer").getString("spec").split("\\|");
|
||||
final String url = spec[0];
|
||||
final ArrayList<Frameset> result = new ArrayList<>(spec.length - 1);
|
||||
for (int i = 1; i < spec.length; ++i) {
|
||||
final String[] parts = spec[i].split("#");
|
||||
if (parts.length != 8) {
|
||||
continue;
|
||||
}
|
||||
final int frameWidth = Integer.parseInt(parts[0]);
|
||||
final int frameHeight = Integer.parseInt(parts[1]);
|
||||
final int totalCount = Integer.parseInt(parts[2]);
|
||||
final int framesPerPageX = Integer.parseInt(parts[3]);
|
||||
final int framesPerPageY = Integer.parseInt(parts[4]);
|
||||
final String baseUrl = url.replace("$L", String.valueOf(i - 1)).replace("$N", parts[6]) + "&sigh=" + parts[7];
|
||||
final List<String> urls;
|
||||
if (baseUrl.contains("$M")) {
|
||||
final int totalPages = (int) Math.ceil(totalCount / (double) (framesPerPageX * framesPerPageY));
|
||||
urls = new ArrayList<>(totalPages);
|
||||
for (int j = 0; j < totalPages; j++) {
|
||||
urls.add(baseUrl.replace("$M", String.valueOf(j)));
|
||||
}
|
||||
} else {
|
||||
urls = Collections.singletonList(baseUrl);
|
||||
}
|
||||
result.add(new Frameset(
|
||||
urls,
|
||||
frameWidth,
|
||||
frameHeight,
|
||||
totalCount,
|
||||
framesPerPageX,
|
||||
framesPerPageY
|
||||
));
|
||||
}
|
||||
result.trimToSize();
|
||||
return result;
|
||||
} catch (Exception e) {
|
||||
throw new ExtractionException(e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,12 +1,20 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube.extractors;
|
||||
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.jsoup.select.Elements;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.stream.StreamType;
|
||||
import org.schabi.newpipe.extractor.utils.Utils;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
|
||||
/*
|
||||
* Copyright (C) Christian Schabesberger 2016 <chris.schabesberger@mailbox.org>
|
||||
* YoutubeStreamInfoItemExtractor.java is part of NewPipe.
|
||||
|
@ -28,9 +36,18 @@ import org.schabi.newpipe.extractor.utils.Utils;
|
|||
public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
||||
|
||||
private final Element item;
|
||||
private final TimeAgoParser timeAgoParser;
|
||||
|
||||
public YoutubeStreamInfoItemExtractor(Element item) {
|
||||
private String cachedUploadDate;
|
||||
|
||||
/**
|
||||
* Creates an extractor of StreamInfoItems from a YouTube page.
|
||||
* @param item The page element
|
||||
* @param timeAgoParser A parser of the textual dates or {@code null}.
|
||||
*/
|
||||
public YoutubeStreamInfoItemExtractor(Element item, @Nullable TimeAgoParser timeAgoParser) {
|
||||
this.item = item;
|
||||
this.timeAgoParser = timeAgoParser;
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -61,7 +78,7 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
try {
|
||||
Element el = item.select("div[class*=\"yt-lockup-video\"").first();
|
||||
Element el = item.select("div[class*=\"yt-lockup-video\"]").first();
|
||||
Element dl = el.select("h3").first().select("a").first();
|
||||
return dl.attr("abs:href");
|
||||
} catch (Exception e) {
|
||||
|
@ -72,7 +89,7 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||
@Override
|
||||
public String getName() throws ParsingException {
|
||||
try {
|
||||
Element el = item.select("div[class*=\"yt-lockup-video\"").first();
|
||||
Element el = item.select("div[class*=\"yt-lockup-video\"]").first();
|
||||
Element dl = el.select("h3").first().select("a").first();
|
||||
return dl.text();
|
||||
} catch (Exception e) {
|
||||
|
@ -107,6 +124,8 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||
|
||||
@Override
|
||||
public String getUploaderUrl() throws ParsingException {
|
||||
// this url is not always in the form "/channel/..."
|
||||
// sometimes Youtube provides urls in the from "/user/..."
|
||||
try {
|
||||
try {
|
||||
return item.select("div[class=\"yt-lockup-byline\"]").first()
|
||||
|
@ -119,46 +138,98 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||
.text().split(" - ")[0];
|
||||
} catch (Exception e) {
|
||||
System.out.println(item.html());
|
||||
throw new ParsingException("Could not get uploader", e);
|
||||
throw new ParsingException("Could not get uploader url", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public String getUploadDate() throws ParsingException {
|
||||
public String getTextualUploadDate() throws ParsingException {
|
||||
if (getStreamType().equals(StreamType.LIVE_STREAM)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (cachedUploadDate != null) {
|
||||
return cachedUploadDate;
|
||||
}
|
||||
|
||||
try {
|
||||
if (isVideoReminder()) {
|
||||
final Calendar calendar = getDateFromReminder();
|
||||
if (calendar != null) {
|
||||
return cachedUploadDate = new SimpleDateFormat("yyyy-MM-dd HH:mm")
|
||||
.format(calendar.getTime());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Element meta = item.select("div[class=\"yt-lockup-meta\"]").first();
|
||||
if (meta == null) return "";
|
||||
|
||||
Element li = meta.select("li").first();
|
||||
if(li == null) return "";
|
||||
final Elements li = meta.select("li");
|
||||
if (li.isEmpty()) return "";
|
||||
|
||||
return meta.select("li").first().text();
|
||||
return cachedUploadDate = li.first().text();
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not get upload date", e);
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public DateWrapper getUploadDate() throws ParsingException {
|
||||
if (getStreamType().equals(StreamType.LIVE_STREAM)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (isVideoReminder()) {
|
||||
return new DateWrapper(getDateFromReminder());
|
||||
}
|
||||
|
||||
String textualUploadDate = getTextualUploadDate();
|
||||
if (timeAgoParser != null && textualUploadDate != null && !textualUploadDate.isEmpty()) {
|
||||
return timeAgoParser.parse(textualUploadDate);
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getViewCount() throws ParsingException {
|
||||
String input;
|
||||
try {
|
||||
// TODO: Return the actual live stream's watcher count
|
||||
// -1 for no view count
|
||||
if (getStreamType() == StreamType.LIVE_STREAM) return -1;
|
||||
|
||||
Element meta = item.select("div[class=\"yt-lockup-meta\"]").first();
|
||||
if (meta == null) return -1;
|
||||
final Element spanViewCount = item.select("span.view-count").first();
|
||||
if (spanViewCount != null) {
|
||||
input = spanViewCount.text();
|
||||
|
||||
// This case can happen if google releases a special video
|
||||
if(meta.select("li").size() < 2) return -1;
|
||||
} else if (getStreamType().equals(StreamType.LIVE_STREAM)) {
|
||||
Element meta = item.select("ul.yt-lockup-meta-info").first();
|
||||
if (meta == null) return 0;
|
||||
|
||||
input = meta.select("li").get(1).text();
|
||||
final Elements li = meta.select("li");
|
||||
if (li.isEmpty()) return 0;
|
||||
|
||||
} catch (IndexOutOfBoundsException e) {
|
||||
throw new ParsingException("Could not parse yt-lockup-meta although available: " + getUrl(), e);
|
||||
input = li.first().text();
|
||||
} else {
|
||||
try {
|
||||
Element meta = item.select("div.yt-lockup-meta").first();
|
||||
if (meta == null) return -1;
|
||||
|
||||
// This case can happen if google releases a special video
|
||||
if (meta.select("li").size() < 2) return -1;
|
||||
|
||||
input = meta.select("li").get(1).text();
|
||||
} catch (IndexOutOfBoundsException e) {
|
||||
throw new ParsingException("Could not parse yt-lockup-meta although available: " + getUrl(), e);
|
||||
}
|
||||
}
|
||||
|
||||
if (input == null) {
|
||||
throw new ParsingException("Input is null");
|
||||
}
|
||||
|
||||
try {
|
||||
|
||||
return Long.parseLong(Utils.removeNonDigitCharacters(input));
|
||||
} catch (NumberFormatException e) {
|
||||
// if this happens the video probably has no views
|
||||
|
@ -189,6 +260,32 @@ public class YoutubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean isVideoReminder() {
|
||||
return !item.select("span.yt-uix-livereminder").isEmpty();
|
||||
}
|
||||
|
||||
private Calendar getDateFromReminder() throws ParsingException {
|
||||
final Element timeFuture = item.select("span.yt-badge.localized-date").first();
|
||||
|
||||
if (timeFuture == null) {
|
||||
throw new ParsingException("Span timeFuture is null");
|
||||
}
|
||||
|
||||
final String timestamp = timeFuture.attr("data-timestamp");
|
||||
if (!timestamp.isEmpty()) {
|
||||
try {
|
||||
final Calendar calendar = Calendar.getInstance();
|
||||
calendar.setTime(new Date(Long.parseLong(timestamp) * 1000L));
|
||||
return calendar;
|
||||
} catch (Exception e) {
|
||||
throw new ParsingException("Could not parse = \"" + timestamp + "\"");
|
||||
}
|
||||
}
|
||||
|
||||
throw new ParsingException("Could not parse date from reminder element: \"" + timeFuture + "\"");
|
||||
}
|
||||
|
||||
/**
|
||||
* Generic method that checks if the element contains any clues that it's a livestream item
|
||||
*/
|
||||
|
|
|
@ -63,16 +63,10 @@ public class YoutubeSubscriptionExtractor extends SubscriptionExtractor {
|
|||
String title = outline.attr("title");
|
||||
String xmlUrl = outline.attr("abs:xmlUrl");
|
||||
|
||||
if (title.isEmpty() || xmlUrl.isEmpty()) {
|
||||
throw new InvalidSourceException("document has invalid entries");
|
||||
}
|
||||
|
||||
try {
|
||||
String id = Parser.matchGroup1(ID_PATTERN, xmlUrl);
|
||||
result.add(new SubscriptionItem(service.getServiceId(), BASE_CHANNEL_URL + id, title));
|
||||
} catch (Parser.RegexException e) {
|
||||
throw new InvalidSourceException("document has invalid entries", e);
|
||||
}
|
||||
} catch (Parser.RegexException ignored) { /* ignore invalid subscriptions */ }
|
||||
}
|
||||
|
||||
return result;
|
||||
|
|
|
@ -3,12 +3,12 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||
import com.grack.nanojson.JsonArray;
|
||||
import com.grack.nanojson.JsonParser;
|
||||
import com.grack.nanojson.JsonParserException;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.SuggestionExtractor;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.net.URLEncoder;
|
||||
|
@ -39,8 +39,8 @@ public class YoutubeSuggestionExtractor extends SuggestionExtractor {
|
|||
|
||||
public static final String CHARSET_UTF_8 = "UTF-8";
|
||||
|
||||
public YoutubeSuggestionExtractor(int serviceId, Localization localization) {
|
||||
super(serviceId, localization);
|
||||
public YoutubeSuggestionExtractor(StreamingService service) {
|
||||
super(service);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -52,10 +52,10 @@ public class YoutubeSuggestionExtractor extends SuggestionExtractor {
|
|||
+ "?client=" + "youtube" //"firefox" for JSON, 'toolbar' for xml
|
||||
+ "&jsonp=" + "JP"
|
||||
+ "&ds=" + "yt"
|
||||
+ "&hl=" + URLEncoder.encode(getLocalization().getCountry(), CHARSET_UTF_8)
|
||||
+ "&gl=" + URLEncoder.encode(getExtractorContentCountry().getCountryCode(), CHARSET_UTF_8)
|
||||
+ "&q=" + URLEncoder.encode(query, CHARSET_UTF_8);
|
||||
|
||||
String response = dl.download(url);
|
||||
String response = dl.get(url, getExtractorLocalization()).responseBody();
|
||||
// trim JSONP part "JP(...)"
|
||||
response = response.substring(3, response.length()-1);
|
||||
try {
|
||||
|
|
|
@ -20,19 +20,20 @@ package org.schabi.newpipe.extractor.services.youtube.extractors;
|
|||
* along with NewPipe. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.jsoup.select.Elements;
|
||||
import org.schabi.newpipe.extractor.Downloader;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.downloader.Downloader;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.kiosk.KioskExtractor;
|
||||
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
|
||||
import org.schabi.newpipe.extractor.localization.TimeAgoParser;
|
||||
import org.schabi.newpipe.extractor.services.youtube.linkHandler.YoutubeParsingHelper;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
|
||||
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.io.IOException;
|
||||
|
@ -43,21 +44,17 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
|
|||
|
||||
public YoutubeTrendingExtractor(StreamingService service,
|
||||
ListLinkHandler linkHandler,
|
||||
String kioskId,
|
||||
Localization localization) {
|
||||
super(service, linkHandler, kioskId, localization);
|
||||
String kioskId) {
|
||||
super(service, linkHandler, kioskId);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
|
||||
final String contentCountry = getLocalization().getCountry();
|
||||
String url = getUrl();
|
||||
if(contentCountry != null && !contentCountry.isEmpty()) {
|
||||
url += "?gl=" + contentCountry;
|
||||
}
|
||||
final String url = getUrl() +
|
||||
"?gl=" + getExtractorContentCountry().getCountryCode();
|
||||
|
||||
String pageContent = downloader.download(url);
|
||||
doc = Jsoup.parse(pageContent, url);
|
||||
final Response response = downloader.get(url, getExtractorLocalization());
|
||||
doc = YoutubeParsingHelper.parseAndCheckPage(url, response);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -88,10 +85,13 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
|
|||
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ParsingException {
|
||||
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
|
||||
Elements uls = doc.select("ul[class*=\"expanded-shelf-content-list\"]");
|
||||
|
||||
final TimeAgoParser timeAgoParser = getTimeAgoParser();
|
||||
|
||||
for(Element ul : uls) {
|
||||
for(final Element li : ul.children()) {
|
||||
final Element el = li.select("div[class*=\"yt-lockup-dismissable\"]").first();
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li) {
|
||||
collector.commit(new YoutubeStreamInfoItemExtractor(li, timeAgoParser) {
|
||||
@Override
|
||||
public String getUrl() throws ParsingException {
|
||||
try {
|
||||
|
@ -126,6 +126,8 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
|
|||
}
|
||||
|
||||
private Element getUploaderLink() {
|
||||
// this url is not always in the form "/channel/..."
|
||||
// sometimes Youtube provides urls in the from "/user/..."
|
||||
Element uploaderEl = el.select("div[class*=\"yt-lockup-byline \"]").first();
|
||||
return uploaderEl.select("a").first();
|
||||
}
|
||||
|
|
|
@ -1,9 +1,17 @@
|
|||
package org.schabi.newpipe.extractor.services.youtube.linkHandler;
|
||||
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.schabi.newpipe.extractor.downloader.Response;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
|
||||
|
||||
import java.net.URL;
|
||||
import java.text.ParseException;
|
||||
import java.text.SimpleDateFormat;
|
||||
import java.util.Calendar;
|
||||
import java.util.Date;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 02.03.16.
|
||||
|
@ -30,10 +38,27 @@ public class YoutubeParsingHelper {
|
|||
private YoutubeParsingHelper() {
|
||||
}
|
||||
|
||||
private static final String[] RECAPTCHA_DETECTION_SELECTORS = {
|
||||
"form[action*=\"/das_captcha\"]",
|
||||
"input[name*=\"action_recaptcha_verify\"]"
|
||||
};
|
||||
|
||||
public static Document parseAndCheckPage(final String url, final Response response) throws ReCaptchaException {
|
||||
final Document document = Jsoup.parse(response.responseBody(), url);
|
||||
|
||||
for (String detectionSelector : RECAPTCHA_DETECTION_SELECTORS) {
|
||||
if (!document.select(detectionSelector).isEmpty()) {
|
||||
throw new ReCaptchaException("reCAPTCHA challenge requested (detected with selector: \"" + detectionSelector + "\")", url);
|
||||
}
|
||||
}
|
||||
|
||||
return document;
|
||||
}
|
||||
|
||||
public static boolean isYoutubeURL(URL url) {
|
||||
String host = url.getHost();
|
||||
return host.equalsIgnoreCase("youtube.com") || host.equalsIgnoreCase("www.youtube.com")
|
||||
|| host.equalsIgnoreCase("m.youtube.com");
|
||||
|| host.equalsIgnoreCase("m.youtube.com") || host.equalsIgnoreCase("music.youtube.com");
|
||||
}
|
||||
|
||||
public static boolean isYoutubeServiceURL(URL url) {
|
||||
|
@ -48,7 +73,7 @@ public class YoutubeParsingHelper {
|
|||
|
||||
public static boolean isInvidioURL(URL url) {
|
||||
String host = url.getHost();
|
||||
return host.equalsIgnoreCase("invidio.us") || host.equalsIgnoreCase("www.invidio.us");
|
||||
return host.equalsIgnoreCase("invidio.us") || host.equalsIgnoreCase("dev.invidio.us") || host.equalsIgnoreCase("www.invidio.us") || host.equalsIgnoreCase("invidious.snopyta.org") || host.equalsIgnoreCase("de.invidious.snopyta.org") || host.equalsIgnoreCase("fi.invidious.snopyta.org") || host.equalsIgnoreCase("vid.wxzm.sx") || host.equalsIgnoreCase("invidious.kabi.tk") || host.equalsIgnoreCase("invidiou.sh") || host.equalsIgnoreCase("www.invidiou.sh") || host.equalsIgnoreCase("no.invidiou.sh") || host.equalsIgnoreCase("invidious.enkirton.net") || host.equalsIgnoreCase("tube.poal.co") || host.equalsIgnoreCase("invidious.13ad.de") || host.equalsIgnoreCase("yt.elukerio.org");
|
||||
}
|
||||
|
||||
public static long parseDurationString(String input)
|
||||
|
@ -92,4 +117,17 @@ public class YoutubeParsingHelper {
|
|||
+ Long.parseLong(minutes)) * 60)
|
||||
+ Long.parseLong(seconds);
|
||||
}
|
||||
|
||||
public static Calendar parseDateFrom(String textualUploadDate) throws ParsingException {
|
||||
Date date;
|
||||
try {
|
||||
date = new SimpleDateFormat("yyyy-MM-dd").parse(textualUploadDate);
|
||||
} catch (ParseException e) {
|
||||
throw new ParsingException("Could not parse date: \"" + textualUploadDate + "\"", e);
|
||||
}
|
||||
|
||||
final Calendar uploadDate = Calendar.getInstance();
|
||||
uploadDate.setTime(date);
|
||||
return uploadDate;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -114,7 +114,8 @@ public class YoutubeStreamLinkHandlerFactory extends LinkHandlerFactory {
|
|||
|
||||
case "YOUTUBE.COM":
|
||||
case "WWW.YOUTUBE.COM":
|
||||
case "M.YOUTUBE.COM": {
|
||||
case "M.YOUTUBE.COM":
|
||||
case "MUSIC.YOUTUBE.COM": {
|
||||
if (path.equals("attribution_link")) {
|
||||
String uQueryValue = Utils.getQueryValue(url, "u");
|
||||
|
||||
|
@ -163,7 +164,20 @@ public class YoutubeStreamLinkHandlerFactory extends LinkHandlerFactory {
|
|||
}
|
||||
|
||||
case "WWW.INVIDIO.US":
|
||||
case "INVIDIO.US": { // code-block for hooktube.com and invidio.us
|
||||
case "DEV.INVIDIO.US":
|
||||
case "INVIDIO.US":
|
||||
case "INVIDIOUS.SNOPYTA.ORG":
|
||||
case "DE.INVIDIOUS.SNOPYTA.ORG":
|
||||
case "FI.INVIDIOUS.SNOPYTA.ORG":
|
||||
case "VID.WXZM.SX":
|
||||
case "INVIDIOUS.KABI.TK":
|
||||
case "INVIDIOU.SH":
|
||||
case "WWW.INVIDIOU.SH":
|
||||
case "NO.INVIDIOU.SH":
|
||||
case "INVIDIOUS.ENKIRTON.NET":
|
||||
case "TUBE.POAL.CO":
|
||||
case "INVIDIOUS.13AD.DE":
|
||||
case "YT.ELUKERIO.ORG": { // code-block for hooktube.com and Invidious instances
|
||||
if (path.equals("watch")) {
|
||||
String viewQueryValue = Utils.getQueryValue(url, "v");
|
||||
if (viewQueryValue != null) {
|
||||
|
|
|
@ -0,0 +1,66 @@
|
|||
package org.schabi.newpipe.extractor.stream;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.util.Collection;
|
||||
import java.util.List;
|
||||
|
||||
public final class Frameset {
|
||||
|
||||
private List<String> urls;
|
||||
private int frameWidth;
|
||||
private int frameHeight;
|
||||
private int totalCount;
|
||||
private int framesPerPageX;
|
||||
private int framesPerPageY;
|
||||
|
||||
public Frameset(List<String> urls, int frameWidth, int frameHeight, int totalCount, int framesPerPageX, int framesPerPageY) {
|
||||
this.urls = urls;
|
||||
this.totalCount = totalCount;
|
||||
this.frameWidth = frameWidth;
|
||||
this.frameHeight = frameHeight;
|
||||
this.framesPerPageX = framesPerPageX;
|
||||
this.framesPerPageY = framesPerPageY;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return list of urls to images with frames
|
||||
*/
|
||||
public List<String> getUrls() {
|
||||
return urls;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return total count of frames
|
||||
*/
|
||||
public int getTotalCount() {
|
||||
return totalCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return maximum frames count by x
|
||||
*/
|
||||
public int getFramesPerPageX() {
|
||||
return framesPerPageX;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return maximum frames count by y
|
||||
*/
|
||||
public int getFramesPerPageY() {
|
||||
return framesPerPageY;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return width of a one frame, in pixels
|
||||
*/
|
||||
public int getFrameWidth() {
|
||||
return frameWidth;
|
||||
}
|
||||
|
||||
/**
|
||||
* @return height of a one frame, in pixels
|
||||
*/
|
||||
public int getFrameHeight() {
|
||||
return frameHeight;
|
||||
}
|
||||
}
|
|
@ -1,13 +1,17 @@
|
|||
package org.schabi.newpipe.extractor.stream;
|
||||
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
|
||||
import java.io.Serializable;
|
||||
import java.util.List;
|
||||
|
||||
import org.schabi.newpipe.extractor.MediaFormat;
|
||||
|
||||
/**
|
||||
* Creates a stream object from url, format and optional torrent url
|
||||
*/
|
||||
public abstract class Stream implements Serializable {
|
||||
private final MediaFormat mediaFormat;
|
||||
public final String url;
|
||||
public final String torrentUrl;
|
||||
|
||||
/**
|
||||
* @deprecated Use {@link #getFormat()} or {@link #getFormatId()}
|
||||
|
@ -15,8 +19,26 @@ public abstract class Stream implements Serializable {
|
|||
@Deprecated
|
||||
public final int format;
|
||||
|
||||
/**
|
||||
* Instantiates a new stream object.
|
||||
*
|
||||
* @param url the url
|
||||
* @param format the format
|
||||
*/
|
||||
public Stream(String url, MediaFormat format) {
|
||||
this(url, null, format);
|
||||
}
|
||||
|
||||
/**
|
||||
* Instantiates a new stream object.
|
||||
*
|
||||
* @param url the url
|
||||
* @param torrentUrl the url to torrent file, example https://webtorrent.io/torrents/big-buck-bunny.torrent
|
||||
* @param format the format
|
||||
*/
|
||||
public Stream(String url, String torrentUrl, MediaFormat format) {
|
||||
this.url = url;
|
||||
this.torrentUrl = torrentUrl;
|
||||
this.format = format.id;
|
||||
this.mediaFormat = format;
|
||||
}
|
||||
|
@ -46,14 +68,38 @@ public abstract class Stream implements Serializable {
|
|||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the url.
|
||||
*
|
||||
* @return the url
|
||||
*/
|
||||
public String getUrl() {
|
||||
return url;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the torrent url.
|
||||
*
|
||||
* @return the torrent url, example https://webtorrent.io/torrents/big-buck-bunny.torrent
|
||||
*/
|
||||
public String getTorrentUrl() {
|
||||
return torrentUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the format.
|
||||
*
|
||||
* @return the format
|
||||
*/
|
||||
public MediaFormat getFormat() {
|
||||
return mediaFormat;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets the format id.
|
||||
*
|
||||
* @return the format id
|
||||
*/
|
||||
public int getFormatId() {
|
||||
return mediaFormat.id;
|
||||
}
|
||||
|
|
|
@ -26,11 +26,13 @@ import org.schabi.newpipe.extractor.StreamingService;
|
|||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.linkhandler.LinkHandler;
|
||||
import org.schabi.newpipe.extractor.utils.Localization;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.utils.Parser;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import javax.annotation.Nullable;
|
||||
import java.io.IOException;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
|
@ -40,18 +42,36 @@ public abstract class StreamExtractor extends Extractor {
|
|||
|
||||
public static final int NO_AGE_LIMIT = 0;
|
||||
|
||||
public StreamExtractor(StreamingService service, LinkHandler linkHandler, Localization localization) {
|
||||
super(service, linkHandler, localization);
|
||||
public StreamExtractor(StreamingService service, LinkHandler linkHandler) {
|
||||
super(service, linkHandler);
|
||||
}
|
||||
|
||||
/**
|
||||
* The day on which the stream got uploaded/created. The return information should be in the format
|
||||
* dd.mm.yyyy, however it NewPipe will not crash if its not.
|
||||
* @return The day on which the stream got uploaded.
|
||||
* @throws ParsingException
|
||||
* The original textual date provided by the service. Should be used as a fallback if
|
||||
* {@link #getUploadDate()} isn't provided by the service, or it fails for some reason.
|
||||
*
|
||||
* <p>If the stream is a live stream, {@code null} should be returned.</p>
|
||||
*
|
||||
* @return The original textual date provided by the service, or {@code null}.
|
||||
* @throws ParsingException if there is an error in the extraction
|
||||
* @see #getUploadDate()
|
||||
*/
|
||||
@Nonnull
|
||||
public abstract String getUploadDate() throws ParsingException;
|
||||
@Nullable
|
||||
public abstract String getTextualUploadDate() throws ParsingException;
|
||||
|
||||
/**
|
||||
* A more general {@code Calendar} instance set to the date provided by the service.<br>
|
||||
* Implementations usually will just parse the date returned from the {@link #getTextualUploadDate()}.
|
||||
*
|
||||
* <p>If the stream is a live stream, {@code null} should be returned.</p>
|
||||
*
|
||||
* @return The date this item was uploaded, or {@code null}.
|
||||
* @throws ParsingException if there is an error in the extraction
|
||||
* or the extracted date couldn't be parsed.
|
||||
* @see #getTextualUploadDate()
|
||||
*/
|
||||
@Nullable
|
||||
public abstract DateWrapper getUploadDate() throws ParsingException;
|
||||
|
||||
/**
|
||||
* This will return the url to the thumbnail of the stream. Try to return the medium resolution here.
|
||||
|
@ -256,7 +276,18 @@ public abstract class StreamExtractor extends Extractor {
|
|||
public abstract StreamInfoItemsCollector getRelatedStreams() throws IOException, ExtractionException;
|
||||
|
||||
/**
|
||||
* Should analyse the webpage's document and extracts any error message there might be. (e.g. GEMA block)
|
||||
* Should return a list of Frameset object that contains preview of stream frames
|
||||
* @return list of preview frames or empty list if frames preview is not supported or not found for specified stream
|
||||
* @throws IOException
|
||||
* @throws ExtractionException
|
||||
*/
|
||||
@Nonnull
|
||||
public List<Frameset> getFrames() throws IOException, ExtractionException {
|
||||
return Collections.emptyList();
|
||||
}
|
||||
|
||||
/**
|
||||
* Should analyse the webpage's document and extracts any error message there might be.
|
||||
*
|
||||
* @return Error message; null if there is no error message.
|
||||
*/
|
||||
|
|
|
@ -1,18 +1,19 @@
|
|||
package org.schabi.newpipe.extractor.stream;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.schabi.newpipe.extractor.Info;
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.NewPipe;
|
||||
import org.schabi.newpipe.extractor.StreamingService;
|
||||
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
|
||||
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
import org.schabi.newpipe.extractor.utils.DashMpdParser;
|
||||
import org.schabi.newpipe.extractor.utils.ExtractorHelper;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 26.08.15.
|
||||
*
|
||||
|
@ -228,6 +229,11 @@ public class StreamInfo extends Info {
|
|||
} catch (Exception e) {
|
||||
streamInfo.addError(e);
|
||||
}
|
||||
try {
|
||||
streamInfo.setTextualUploadDate(extractor.getTextualUploadDate());
|
||||
} catch (Exception e) {
|
||||
streamInfo.addError(e);
|
||||
}
|
||||
try {
|
||||
streamInfo.setUploadDate(extractor.getUploadDate());
|
||||
} catch (Exception e) {
|
||||
|
@ -271,7 +277,8 @@ public class StreamInfo extends Info {
|
|||
|
||||
private StreamType streamType;
|
||||
private String thumbnailUrl = "";
|
||||
private String uploadDate = "";
|
||||
private String textualUploadDate;
|
||||
private DateWrapper uploadDate;
|
||||
private long duration = -1;
|
||||
private int ageLimit = -1;
|
||||
private String description;
|
||||
|
@ -327,11 +334,19 @@ public class StreamInfo extends Info {
|
|||
this.thumbnailUrl = thumbnailUrl;
|
||||
}
|
||||
|
||||
public String getUploadDate() {
|
||||
public String getTextualUploadDate() {
|
||||
return textualUploadDate;
|
||||
}
|
||||
|
||||
public void setTextualUploadDate(String textualUploadDate) {
|
||||
this.textualUploadDate = textualUploadDate;
|
||||
}
|
||||
|
||||
public DateWrapper getUploadDate() {
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
public void setUploadDate(String uploadDate) {
|
||||
public void setUploadDate(DateWrapper uploadDate) {
|
||||
this.uploadDate = uploadDate;
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,9 @@ package org.schabi.newpipe.extractor.stream;
|
|||
*/
|
||||
|
||||
import org.schabi.newpipe.extractor.InfoItem;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
/**
|
||||
* Info object for previews of unopened videos, eg search results, related videos
|
||||
|
@ -29,7 +32,8 @@ public class StreamInfoItem extends InfoItem {
|
|||
private final StreamType streamType;
|
||||
|
||||
private String uploaderName;
|
||||
private String uploadDate;
|
||||
private String textualUploadDate;
|
||||
@Nullable private DateWrapper uploadDate;
|
||||
private long viewCount = -1;
|
||||
private long duration = -1;
|
||||
|
||||
|
@ -52,14 +56,6 @@ public class StreamInfoItem extends InfoItem {
|
|||
this.uploaderName = uploader_name;
|
||||
}
|
||||
|
||||
public String getUploadDate() {
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
public void setUploadDate(String upload_date) {
|
||||
this.uploadDate = upload_date;
|
||||
}
|
||||
|
||||
public long getViewCount() {
|
||||
return viewCount;
|
||||
}
|
||||
|
@ -84,12 +80,30 @@ public class StreamInfoItem extends InfoItem {
|
|||
this.uploaderUrl = uploaderUrl;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public String getTextualUploadDate() {
|
||||
return textualUploadDate;
|
||||
}
|
||||
|
||||
public void setTextualUploadDate(String uploadDate) {
|
||||
this.textualUploadDate = uploadDate;
|
||||
}
|
||||
|
||||
@Nullable
|
||||
public DateWrapper getUploadDate() {
|
||||
return uploadDate;
|
||||
}
|
||||
|
||||
public void setUploadDate(@Nullable DateWrapper uploadDate) {
|
||||
this.uploadDate = uploadDate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "StreamInfoItem{" +
|
||||
"streamType=" + streamType +
|
||||
", uploaderName='" + uploaderName + '\'' +
|
||||
", uploadDate='" + uploadDate + '\'' +
|
||||
", textualUploadDate='" + textualUploadDate + '\'' +
|
||||
", viewCount=" + viewCount +
|
||||
", duration=" + duration +
|
||||
", uploaderUrl='" + uploaderUrl + '\'' +
|
||||
|
|
|
@ -2,6 +2,9 @@ package org.schabi.newpipe.extractor.stream;
|
|||
|
||||
import org.schabi.newpipe.extractor.InfoItemExtractor;
|
||||
import org.schabi.newpipe.extractor.exceptions.ParsingException;
|
||||
import org.schabi.newpipe.extractor.localization.DateWrapper;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
|
||||
/*
|
||||
* Created by Christian Schabesberger on 28.02.16.
|
||||
|
@ -64,10 +67,30 @@ public interface StreamInfoItemExtractor extends InfoItemExtractor {
|
|||
String getUploaderUrl() throws ParsingException;
|
||||
|
||||
/**
|
||||
* Extract the uploader name
|
||||
* @return the uploader name
|
||||
* @throws ParsingException thrown if there is an error in the extraction
|
||||
* The original textual date provided by the service. Should be used as a fallback if
|
||||
* {@link #getUploadDate()} isn't provided by the service, or it fails for some reason.
|
||||
*
|
||||
* @return The original textual date provided by the service or {@code null} if not provided.
|
||||
* @throws ParsingException if there is an error in the extraction
|
||||
* @see #getUploadDate()
|
||||
*/
|
||||
String getUploadDate() throws ParsingException;
|
||||
@Nullable
|
||||
String getTextualUploadDate() throws ParsingException;
|
||||
|
||||
/**
|
||||
* Extracts the upload date and time of this item and parses it.
|
||||
* <p>
|
||||
* If the service doesn't provide an exact time, an approximation can be returned.
|
||||
* <br>
|
||||
* If the service doesn't provide any date at all, then {@code null} should be returned.
|
||||
* </p>
|
||||
*
|
||||
* @return The date and time (can be approximated) this item was uploaded or {@code null}.
|
||||
* @throws ParsingException if there is an error in the extraction
|
||||
* or the extracted date couldn't be parsed.
|
||||
* @see #getTextualUploadDate()
|
||||
*/
|
||||
@Nullable
|
||||
DateWrapper getUploadDate() throws ParsingException;
|
||||
|
||||
}
|
||||
|
|
|
@ -61,10 +61,15 @@ public class StreamInfoItemsCollector extends InfoItemsCollector<StreamInfoItem,
|
|||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploadDate(extractor.getUploadDate());
|
||||
resultItem.setTextualUploadDate(extractor.getTextualUploadDate());
|
||||
} catch (Exception e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setUploadDate(extractor.getUploadDate());
|
||||
} catch (ParsingException e) {
|
||||
addError(e);
|
||||
}
|
||||
try {
|
||||
resultItem.setViewCount(extractor.getViewCount());
|
||||
} catch (Exception e) {
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue