Merge pull request #822 from Stypox/more-refactors

More refactors
Stypox 2022-05-02 19:03:54 +02:00 committed by GitHub
commit 2e1c5c119d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
20 changed files with 105 additions and 101 deletions

View File

@@ -2,6 +2,8 @@
 package org.schabi.newpipe.extractor.services.bandcamp.extractors;
 
+import static org.schabi.newpipe.extractor.utils.Utils.replaceHttpWithHttps;
+
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
@@ -19,6 +21,8 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItem;
 import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
 
 import java.io.IOException;
+import java.util.Objects;
+import java.util.stream.Stream;
 
 import javax.annotation.Nonnull;
@@ -48,21 +52,18 @@ public class BandcampChannelExtractor extends ChannelExtractor {
          */
         try {
             final String html = getDownloader()
-                    .get(channelInfo.getString("bandcamp_url")
-                            .replace("http://", "https://"))
+                    .get(replaceHttpWithHttps(channelInfo.getString("bandcamp_url")))
                     .responseBody();
 
-            return Jsoup.parse(html)
-                    .getElementById("customHeader")
-                    .getElementsByTag("img")
-                    .first()
-                    .attr("src");
+            return Stream.of(Jsoup.parse(html).getElementById("customHeader"))
+                    .filter(Objects::nonNull)
+                    .flatMap(element -> element.getElementsByTag("img").stream())
+                    .map(element -> element.attr("src"))
+                    .findFirst()
+                    .orElse(""); // no banner available
         } catch (final IOException | ReCaptchaException e) {
             throw new ParsingException("Could not download artist web site", e);
-        } catch (final NullPointerException e) {
-            // No banner available
-            return "";
         }
     }
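
For reference, the same null-safe pattern in isolation — a minimal, self-contained sketch (class name and sample HTML are illustrative, not part of the PR; assumes only Jsoup on the classpath):

import java.util.Objects;
import java.util.stream.Stream;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;

public final class BannerPatternSketch {
    // Mirrors the refactored getBannerUrl(): getElementById may return null, and
    // Stream.of(...).filter(Objects::nonNull) turns that into an empty stream
    // instead of a NullPointerException that has to be caught.
    static String firstImgSrcOrEmpty(final String html) {
        final Element header = Jsoup.parse(html).getElementById("customHeader");
        return Stream.of(header)
                .filter(Objects::nonNull)
                .flatMap(element -> element.getElementsByTag("img").stream())
                .map(element -> element.attr("src"))
                .findFirst()
                .orElse(""); // no banner available
    }

    public static void main(final String[] args) {
        System.out.println(firstImgSrcOrEmpty(
                "<div id=\"customHeader\"><img src=\"https://example.com/banner.png\"></div>"));
        System.out.println(firstImgSrcOrEmpty("<p>no header here</p>").isEmpty()); // true
    }
}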

View File

@@ -28,13 +28,7 @@ public class BandcampChannelInfoItemExtractor implements ChannelInfoItemExtracto
     @Override
     public String getThumbnailUrl() throws ParsingException {
-        final Element img = searchResult.getElementsByClass("art").first()
-                .getElementsByTag("img").first();
-        if (img != null) {
-            return img.attr("src");
-        } else {
-            return null;
-        }
+        return BandcampExtractorHelper.getThumbnailUrlFromSearchResult(searchResult);
     }
 
     @Override

View File

@@ -4,6 +4,8 @@ import org.jsoup.nodes.Element;
 import org.schabi.newpipe.extractor.comments.CommentsInfoItemExtractor;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
+import java.util.Objects;
+
 public class BandcampCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
 
     private final Element writing;
@@ -16,7 +18,7 @@ public class BandcampCommentsInfoItemExtractor implements CommentsInfoItemExtrac
     @Override
     public String getName() throws ParsingException {
-        return writing.getElementsByClass("text").first().ownText();
+        return getCommentText();
     }
 
     @Override
@@ -30,13 +32,21 @@ public class BandcampCommentsInfoItemExtractor implements CommentsInfoItemExtrac
     }
 
     @Override
-    public String getCommentText() {
-        return writing.getElementsByClass("text").first().ownText();
+    public String getCommentText() throws ParsingException {
+        return writing.getElementsByClass("text").stream()
+                .filter(Objects::nonNull)
+                .map(Element::ownText)
+                .findFirst()
+                .orElseThrow(() -> new ParsingException("Could not get comment text"));
     }
 
     @Override
     public String getUploaderName() throws ParsingException {
-        return writing.getElementsByClass("name").first().text();
+        return writing.getElementsByClass("name").stream()
+                .filter(Objects::nonNull)
+                .map(Element::text)
+                .findFirst()
+                .orElseThrow(() -> new ParsingException("Could not get uploader name"));
     }
 
     @Override

View File

@@ -8,6 +8,7 @@ import com.grack.nanojson.JsonParserException;
 import com.grack.nanojson.JsonWriter;
 import org.jsoup.Jsoup;
+import org.jsoup.nodes.Element;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 import org.schabi.newpipe.extractor.exceptions.ReCaptchaException;
@@ -19,6 +20,8 @@ import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.Locale;
 
+import javax.annotation.Nullable;
+
 public final class BandcampExtractorHelper {
 
     public static final String BASE_URL = "https://bandcamp.com";
@@ -134,4 +137,14 @@ public final class BandcampExtractorHelper {
             throw new ParsingException("Could not parse date '" + textDate + "'", e);
         }
     }
+
+    @Nullable
+    public static String getThumbnailUrlFromSearchResult(final Element searchResult) {
+        return searchResult.getElementsByClass("art").stream()
+                .flatMap(element -> element.getElementsByTag("img").stream())
+                .map(element -> element.attr("src"))
+                .filter(string -> !string.isEmpty())
+                .findFirst()
+                .orElse(null);
+    }
 }
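
The new shared helper can be exercised against a small Jsoup fragment. A hypothetical test sketch (test class and markup are invented for illustration; assumes JUnit 5, which this diff does not dictate):

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.junit.jupiter.api.Test;
import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper;

class ThumbnailHelperSketchTest {
    @Test
    void returnsFirstNonEmptySrc() {
        // Empty src attributes are filtered out, so the second image wins.
        final Element searchResult = Jsoup.parse(
                "<li class=\"searchresult\"><div class=\"art\">"
                + "<img src=\"\"><img src=\"https://example.com/a.jpg\"></div></li>")
                .selectFirst("li");
        assertEquals("https://example.com/a.jpg",
                BandcampExtractorHelper.getThumbnailUrlFromSearchResult(searchResult));
    }

    @Test
    void returnsNullWhenNoImageIsPresent() {
        final Element searchResult =
                Jsoup.parse("<li class=\"searchresult\"></li>").selectFirst("li");
        assertNull(BandcampExtractorHelper.getThumbnailUrlFromSearchResult(searchResult));
    }
}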

View File

@@ -1,8 +1,15 @@
 package org.schabi.newpipe.extractor.services.bandcamp.extractors;
 
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.getImageUrl;
+import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampStreamExtractor.getAlbumInfoJson;
+import static org.schabi.newpipe.extractor.utils.JsonUtils.getJsonData;
+import static org.schabi.newpipe.extractor.utils.Utils.EMPTY_STRING;
+import static org.schabi.newpipe.extractor.utils.Utils.HTTPS;
+
 import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParserException;
 import org.jsoup.Jsoup;
 import org.jsoup.nodes.Document;
 import org.schabi.newpipe.extractor.Page;
@@ -17,15 +24,9 @@ import org.schabi.newpipe.extractor.services.bandcamp.extractors.streaminfoitem.
 import org.schabi.newpipe.extractor.stream.StreamInfoItem;
 import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
 
-import javax.annotation.Nonnull;
 import java.io.IOException;
-import java.util.Objects;
 
-import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.getImageUrl;
-import static org.schabi.newpipe.extractor.utils.JsonUtils.getJsonData;
-import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampStreamExtractor.getAlbumInfoJson;
-import static org.schabi.newpipe.extractor.utils.Utils.EMPTY_STRING;
-import static org.schabi.newpipe.extractor.utils.Utils.HTTPS;
+import javax.annotation.Nonnull;
 
 public class BandcampPlaylistExtractor extends PlaylistExtractor {
@@ -92,12 +93,10 @@ public class BandcampPlaylistExtractor extends PlaylistExtractor {
     @Override
     public String getUploaderAvatarUrl() {
-        try {
-            return Objects.requireNonNull(document.getElementsByClass("band-photo").first())
-                    .attr("src");
-        } catch (final NullPointerException e) {
-            return EMPTY_STRING;
-        }
+        return document.getElementsByClass("band-photo").stream()
+                .map(element -> element.attr("src"))
+                .findFirst()
+                .orElse(EMPTY_STRING);
     }
 
     @Override

View File

@@ -38,12 +38,6 @@ public class BandcampPlaylistInfoItemExtractor implements PlaylistInfoItemExtrac
     @Override
     public String getThumbnailUrl() {
-        final Element img = searchResult.getElementsByClass("art").first()
-                .getElementsByTag("img").first();
-        if (img != null) {
-            return img.attr("src");
-        } else {
-            return null;
-        }
+        return BandcampExtractorHelper.getThumbnailUrlFromSearchResult(searchResult);
     }
 }

View File

@@ -5,6 +5,7 @@ import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
 import org.jsoup.Jsoup;
+import org.jsoup.nodes.Element;
 import org.schabi.newpipe.extractor.MediaFormat;
 import org.schabi.newpipe.extractor.NewPipe;
 import org.schabi.newpipe.extractor.StreamingService;
@@ -77,9 +78,11 @@ public class BandcampRadioStreamExtractor extends BandcampStreamExtractor {
     @Nonnull
     @Override
-    public String getUploaderName() {
-        return Jsoup.parse(showInfo.getString("image_caption"))
-                .getElementsByTag("a").first().text();
+    public String getUploaderName() throws ParsingException {
+        return Jsoup.parse(showInfo.getString("image_caption")).getElementsByTag("a").stream()
+                .map(Element::text)
+                .findFirst()
+                .orElseThrow(() -> new ParsingException("Could not get uploader name"));
     }
 
     @Nullable

View File

@@ -18,6 +18,7 @@ import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
 import org.schabi.newpipe.extractor.MultiInfoItemsCollector;
 import org.schabi.newpipe.extractor.search.SearchExtractor;
 import org.schabi.newpipe.extractor.services.bandcamp.extractors.streaminfoitem.BandcampSearchStreamInfoItemExtractor;
+import org.schabi.newpipe.extractor.utils.Utils;
 
 import javax.annotation.Nonnull;
 import java.io.IOException;
@@ -34,7 +35,7 @@ public class BandcampSearchExtractor extends SearchExtractor {
     @NonNull
     @Override
     public String getSearchSuggestion() {
-        return "";
+        return Utils.EMPTY_STRING;
     }
 
     @Override
@@ -50,40 +51,30 @@ public class BandcampSearchExtractor extends SearchExtractor {
     public InfoItemsPage<InfoItem> getPage(final Page page)
             throws IOException, ExtractionException {
-        final String html = getDownloader().get(page.getUrl()).responseBody();
         final MultiInfoItemsCollector collector = new MultiInfoItemsCollector(getServiceId());
+        final Document d = Jsoup.parse(getDownloader().get(page.getUrl()).responseBody());
 
-        final Document d = Jsoup.parse(html);
-        final Elements searchResultsElements = d.getElementsByClass("searchresult");
-        for (final Element searchResult : searchResultsElements) {
-            final String type = searchResult.getElementsByClass("result-info").first()
-                    .getElementsByClass("itemtype").first().text();
+        for (final Element searchResult : d.getElementsByClass("searchresult")) {
+            final String type = searchResult.getElementsByClass("result-info").stream()
+                    .flatMap(element -> element.getElementsByClass("itemtype").stream())
+                    .map(Element::text)
+                    .findFirst()
+                    .orElse(Utils.EMPTY_STRING);
 
             switch (type) {
-                default:
-                    continue;
-                case "FAN":
-                    // don't display fan results
-                    break;
                 case "ARTIST":
                     collector.commit(new BandcampChannelInfoItemExtractor(searchResult));
                     break;
                 case "ALBUM":
                     collector.commit(new BandcampPlaylistInfoItemExtractor(searchResult));
                     break;
                 case "TRACK":
                     collector.commit(new BandcampSearchStreamInfoItemExtractor(searchResult, null));
                     break;
+                default:
+                    // don't display fan results ("FAN") or other things
+                    break;
             }
         }
 
         // Count pages
@@ -92,7 +83,10 @@ public class BandcampSearchExtractor extends SearchExtractor {
             return new InfoItemsPage<>(collector, null);
         }
 
-        final Elements pages = pageLists.first().getElementsByTag("li");
+        final Elements pages = pageLists.stream()
+                .map(element -> element.getElementsByTag("li"))
+                .findFirst()
+                .orElseGet(Elements::new);
 
         // Find current page
         int currentPage = -1;
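
One behavioural nuance of the hunk above: when the "itemtype" markup is missing, the stream lookup yields an empty string instead of throwing, and the result falls into the switch's default branch. A minimal sketch of that lookup (standalone class and markup are illustrative, not from the PR):

import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;

public final class SearchTypeLookupSketch {
    public static void main(final String[] args) {
        // A search result without the expected "result-info"/"itemtype" children.
        final Element searchResult =
                Jsoup.parse("<li class=\"searchresult\"></li>").selectFirst("li");
        final String type = searchResult.getElementsByClass("result-info").stream()
                .flatMap(element -> element.getElementsByClass("itemtype").stream())
                .map(Element::text)
                .findFirst()
                .orElse(""); // Utils.EMPTY_STRING in the extractor
        // Prints an empty type, which the switch above skips via its default branch.
        System.out.println("type = '" + type + "'");
    }
}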

View File

@@ -119,7 +119,7 @@ public class BandcampStreamExtractor extends StreamExtractor {
     @Override
     public String getThumbnailUrl() throws ParsingException {
         if (albumJson.isNull("art_id")) {
-            return "";
+            return Utils.EMPTY_STRING;
         } else {
             return getImageUrl(albumJson.getLong("art_id"), true);
         }
@@ -128,11 +128,10 @@ public class BandcampStreamExtractor extends StreamExtractor {
     @Nonnull
     @Override
     public String getUploaderAvatarUrl() {
-        try {
-            return document.getElementsByClass("band-photo").first().attr("src");
-        } catch (final NullPointerException e) {
-            return "";
-        }
+        return document.getElementsByClass("band-photo").stream()
+                .map(element -> element.attr("src"))
+                .findFirst()
+                .orElse(Utils.EMPTY_STRING);
     }
 
     @Nonnull

View File

@@ -4,7 +4,6 @@ package org.schabi.newpipe.extractor.services.bandcamp.extractors;
 import static org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper.BASE_API_URL;
 
-import com.grack.nanojson.JsonArray;
 import com.grack.nanojson.JsonObject;
 import com.grack.nanojson.JsonParser;
 import com.grack.nanojson.JsonParserException;
@@ -17,9 +16,9 @@ import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
 import java.io.IOException;
 import java.net.URLEncoder;
-import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.stream.Collectors;
 
 public class BandcampSuggestionExtractor extends SuggestionExtractor {
@@ -36,20 +35,12 @@ public class BandcampSuggestionExtractor extends SuggestionExtractor {
             final JsonObject fuzzyResults = JsonParser.object().from(downloader
                     .get(AUTOCOMPLETE_URL + URLEncoder.encode(query, "UTF-8")).responseBody());
 
-            final JsonArray jsonArray = fuzzyResults.getObject("auto")
-                    .getArray("results");
-            final List<String> suggestions = new ArrayList<>();
-            for (final Object fuzzyResult : jsonArray) {
-                final String res = ((JsonObject) fuzzyResult).getString("name");
-                if (!suggestions.contains(res)) {
-                    suggestions.add(res);
-                }
-            }
-            return suggestions;
+            return fuzzyResults.getObject("auto").getArray("results").stream()
+                    .filter(JsonObject.class::isInstance)
+                    .map(JsonObject.class::cast)
+                    .map(jsonObject -> jsonObject.getString("name"))
+                    .distinct()
+                    .collect(Collectors.toList());
         } catch (final JsonParserException e) {
             return Collections.emptyList();
         }
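
The stream pipeline above keeps the semantics of the removed loop: distinct() preserves encounter order, just like the old contains() check. A standalone sketch using nanojson, which the extractor already depends on (the sample JSON and class name are made up):

import java.util.List;
import java.util.stream.Collectors;

import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;

public final class SuggestionPipelineSketch {
    public static void main(final String[] args) throws JsonParserException {
        final JsonArray results = JsonParser.array().from(
                "[{\"name\":\"Artist A\"},{\"name\":\"Artist B\"},{\"name\":\"Artist A\"}]");
        final List<String> suggestions = results.stream()
                .filter(JsonObject.class::isInstance)   // skip non-object entries
                .map(JsonObject.class::cast)
                .map(jsonObject -> jsonObject.getString("name"))
                .distinct()                             // de-duplicate, keeping order
                .collect(Collectors.toList());
        System.out.println(suggestions); // [Artist A, Artist B]
    }
}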

View File

@@ -2,6 +2,7 @@ package org.schabi.newpipe.extractor.services.bandcamp.extractors.streaminfoitem
 import org.jsoup.nodes.Element;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
+import org.schabi.newpipe.extractor.services.bandcamp.extractors.BandcampExtractorHelper;
 
 import javax.annotation.Nullable;
@@ -46,13 +47,7 @@ public class BandcampSearchStreamInfoItemExtractor extends BandcampStreamInfoIte
     @Override
     public String getThumbnailUrl() throws ParsingException {
-        final Element img = searchResult.getElementsByClass("art").first()
-                .getElementsByTag("img").first();
-        if (img != null) {
-            return img.attr("src");
-        } else {
-            return null;
-        }
+        return BandcampExtractorHelper.getThumbnailUrlFromSearchResult(searchResult);
     }
 
     @Override

View File

@@ -138,7 +138,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
     }
 
     @Override
-    public void onFetchPage(final Downloader downloader)
+    public void onFetchPage(@Nonnull final Downloader downloader)
             throws IOException, ExtractionException {
         final Response response = downloader.get(
                 baseUrl + PeertubeChannelLinkHandlerFactory.API_ENDPOINT + getId());

View File

@@ -23,12 +23,15 @@ import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelp
 import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
 import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
 
+import javax.annotation.Nonnull;
+
 public class PeertubeCommentsExtractor extends CommentsExtractor {
     public PeertubeCommentsExtractor(final StreamingService service,
                                      final ListLinkHandler uiHandler) {
         super(service, uiHandler);
     }
 
+    @Nonnull
     @Override
     public InfoItemsPage<CommentsInfoItem> getInitialPage()
             throws IOException, ExtractionException {
@@ -84,6 +87,6 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
     }
 
     @Override
-    public void onFetchPage(final Downloader downloader) {
+    public void onFetchPage(@Nonnull final Downloader downloader) {
     }
 }

View File

@@ -63,6 +63,7 @@ public class PeertubeSearchExtractor extends SearchExtractor {
         return Collections.emptyList();
     }
 
+    @Nonnull
     @Override
     public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
         return getPage(new Page(getUrl() + "&" + START_KEY + "=0&"

View File

@@ -388,7 +388,8 @@ public class PeertubeStreamExtractor extends StreamExtractor {
     }
 
     @Override
-    public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
+    public void onFetchPage(@Nonnull final Downloader downloader)
+            throws IOException, ExtractionException {
         final Response response = downloader.get(
                 baseUrl + PeertubeStreamLinkHandlerFactory.VIDEO_API_ENDPOINT + getId());
         if (response != null) {

View File

@@ -33,11 +33,13 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
         super(streamingService, linkHandler, kioskId);
     }
 
+    @Nonnull
     @Override
     public String getName() throws ParsingException {
         return getId();
     }
 
+    @Nonnull
     @Override
     public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
         return getPage(new Page(getUrl() + "&" + START_KEY + "=0&"

View File

@@ -13,6 +13,8 @@ import static org.schabi.newpipe.extractor.services.youtube.ItagItem.ItagType.VI
 import org.schabi.newpipe.extractor.MediaFormat;
 import org.schabi.newpipe.extractor.exceptions.ParsingException;
 
+import javax.annotation.Nonnull;
+
 public class ItagItem {
     /**
      * List can be found here
@@ -96,6 +98,7 @@ public class ItagItem {
         return false;
     }
 
+    @Nonnull
     public static ItagItem getItag(final int itagId) throws ParsingException {
         for (final ItagItem item : ITAG_LIST) {
             if (itagId == item.id) {

View File

@@ -975,7 +975,7 @@ public final class YoutubeParsingHelper {
         if (html) {
             text = text.replaceAll("\\n", "<br>");
-            text = text.replaceAll("  ", " &nbsp;");
+            text = text.replaceAll(" {2}", " &nbsp;");
         }
 
         return text;
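
The regex change above is behaviour-preserving: " {2}" matches exactly two consecutive spaces, the same as the previous two-space literal, while keeping a literal double space out of the source. A quick standalone check (illustrative only):

public final class DoubleSpaceRegexSketch {
    public static void main(final String[] args) {
        final String text = "a  b   c";
        // Each pair of spaces becomes " &nbsp;", exactly as with the old literal.
        System.out.println(text.replaceAll(" {2}", " &nbsp;")); // a &nbsp;b &nbsp; c
    }
}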

View File

@@ -134,6 +134,7 @@ public class YoutubeSearchExtractor extends SearchExtractor {
         return !showingResultsForRenderer.isEmpty();
     }
 
+    @Nonnull
     @Override
     public List<MetaInfo> getMetaInfo() throws ParsingException {
         return YoutubeParsingHelper.getMetaInfo(

View File

@@ -29,8 +29,8 @@ public final class Utils {
     @Deprecated
     public static final String UTF_8 = "UTF-8";
     public static final String EMPTY_STRING = "";
-    private static final Pattern M_PATTERN = Pattern.compile("(https?)?:\\/\\/m\\.");
-    private static final Pattern WWW_PATTERN = Pattern.compile("(https?)?:\\/\\/www\\.");
+    private static final Pattern M_PATTERN = Pattern.compile("(https?)?://m\\.");
+    private static final Pattern WWW_PATTERN = Pattern.compile("(https?)?://www\\.");
 
     private Utils() {
         // no instance
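
For completeness: '/' is not a regex metacharacter in Java, so dropping the \/ escapes does not change what M_PATTERN and WWW_PATTERN match. A small standalone check (class name and URL are illustrative):

import java.util.regex.Pattern;

public final class SlashEscapeSketch {
    public static void main(final String[] args) {
        final Pattern escaped = Pattern.compile("(https?)?:\\/\\/m\\.");
        final Pattern plain = Pattern.compile("(https?)?://m\\.");
        final String url = "https://m.example.org/watch";
        // Both patterns accept the same input; the escape on '/' was redundant.
        System.out.println(escaped.matcher(url).find()); // true
        System.out.println(plain.matcher(url).find());   // true
    }
}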