// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package subscription // import "miniflux.app/v2/internal/reader/subscription"

import (
	"bytes"
	"errors"
	"fmt"
	"io"
	"log/slog"
	"net/url"
	"regexp"
	"strings"

	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/integration/rssbridge"
	"miniflux.app/v2/internal/locale"
	"miniflux.app/v2/internal/model"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/parser"
	"miniflux.app/v2/internal/urllib"

	"github.com/PuerkitoBio/goquery"
	"golang.org/x/net/html/charset"
)
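
// youtubeKind is the kind of YouTube page (channel, video, or playlist)
// identified by youtubeURLIDExtractor.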
type youtubeKind string

const (
	youtubeIDKindChannel  youtubeKind = "channel"
	youtubeIDKindVideo    youtubeKind = "video"
	youtubeIDKindPlaylist youtubeKind = "playlist"
)

var (
	youtubeHostRegex    = regexp.MustCompile(`youtube\.com$`)
	youtubeChannelRegex = regexp.MustCompile(`channel/(.*)$`)

	errNotYoutubeUrl = errors.New("this website is not a YouTube page")
)
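
// SubscriptionFinder discovers feed subscriptions for a given website URL.
// It keeps the response of the initial download so that a URL which turns
// out to be a feed does not need to be fetched a second time.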
type SubscriptionFinder struct {
	requestBuilder   *fetcher.RequestBuilder
	feedDownloaded   bool
	feedResponseInfo *model.FeedCreationRequestFromSubscriptionDiscovery
}
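
// NewSubscriptionFinder creates a SubscriptionFinder that performs all of its
// HTTP requests with the given request builder.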
func NewSubscriptionFinder(requestBuilder *fetcher.RequestBuilder) *SubscriptionFinder {
	return &SubscriptionFinder{
		requestBuilder: requestBuilder,
	}
}
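
// IsFeedAlreadyDownloaded reports whether the website URL turned out to be
// the feed itself, in which case the response captured in FeedResponseInfo
// can be reused instead of downloading the feed again.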
func (f *SubscriptionFinder) IsFeedAlreadyDownloaded() bool {
	return f.feedDownloaded
}
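
// FeedResponseInfo returns the response metadata (body, ETag, Last-Modified)
// captured while discovering subscriptions.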
func (f *SubscriptionFinder) FeedResponseInfo() *model.FeedCreationRequestFromSubscriptionDiscovery {
	return f.feedResponseInfo
}
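
// FindSubscriptions discovers feed subscriptions for websiteURL. It tries, in
// order: the URL itself as a feed, YouTube-specific feeds, HTML meta tags,
// RSS-Bridge (when rssBridgeURL is not empty), and a list of well-known feed
// URLs, returning the first non-empty result.
//
// Minimal usage sketch; assumes a *fetcher.RequestBuilder already prepared by
// the caller (the requestBuilder variable below is hypothetical):
//
//	finder := NewSubscriptionFinder(requestBuilder)
//	subscriptions, lerr := finder.FindSubscriptions("https://example.org", "")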
func (f *SubscriptionFinder) FindSubscriptions(websiteURL, rssBridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		slog.Warn("Unable to find subscriptions", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return nil, localizedError
	}

	responseBody, localizedError := responseHandler.ReadBody(config.Opts.HTTPClientMaxBodySize())
	if localizedError != nil {
		slog.Warn("Unable to find subscriptions", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
		return nil, localizedError
	}

	f.feedResponseInfo = &model.FeedCreationRequestFromSubscriptionDiscovery{
		Content:      bytes.NewReader(responseBody),
		ETag:         responseHandler.ETag(),
		LastModified: responseHandler.LastModified(),
	}

	// Step 1) Check if the website URL is already a feed.
	if feedFormat, _ := parser.DetectFeedFormat(f.feedResponseInfo.Content); feedFormat != parser.FormatUnknown {
		f.feedDownloaded = true
		return Subscriptions{NewSubscription(responseHandler.EffectiveURL(), responseHandler.EffectiveURL(), feedFormat)}, nil
	}

	var subscriptions Subscriptions

	// Step 2) Parse the URL to find feeds from YouTube.
	kind, _, err := youtubeURLIDExtractor(websiteURL)

	// If a YouTube URL has been detected, return the corresponding YouTube feed.
	if err == nil || !errors.Is(err, errNotYoutubeUrl) {
		switch kind {
		case youtubeIDKindChannel:
			slog.Debug("Try to detect feeds from YouTube channel page", slog.String("website_url", websiteURL))
			subscriptions, localizedError = f.FindSubscriptionsFromYouTubeChannelPage(websiteURL)
			if localizedError != nil {
				return nil, localizedError
			}
		case youtubeIDKindVideo:
			slog.Debug("Try to detect feeds from YouTube video page", slog.String("website_url", websiteURL))
			subscriptions, localizedError = f.FindSubscriptionsFromYouTubeVideoPage(websiteURL)
			if localizedError != nil {
				return nil, localizedError
			}
		case youtubeIDKindPlaylist:
			slog.Debug("Try to detect feeds from YouTube playlist page", slog.String("website_url", websiteURL))
			subscriptions, localizedError = f.FindSubscriptionsFromYouTubePlaylistPage(websiteURL)
			if localizedError != nil {
				return nil, localizedError
			}
		}
		if len(subscriptions) > 0 {
			slog.Debug("Subscriptions found from YouTube page", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
			return subscriptions, nil
		}
	}

	// Step 3) Parse the web page to find feeds from HTML meta tags.
	slog.Debug("Try to detect feeds from HTML meta tags",
		slog.String("website_url", websiteURL),
		slog.String("content_type", responseHandler.ContentType()),
	)
	subscriptions, localizedError = f.FindSubscriptionsFromWebPage(websiteURL, responseHandler.ContentType(), bytes.NewReader(responseBody))
	if localizedError != nil {
		return nil, localizedError
	}

	if len(subscriptions) > 0 {
		slog.Debug("Subscriptions found from web page", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
		return subscriptions, nil
	}

	// Step 4) Check if the website URL can use RSS-Bridge.
	if rssBridgeURL != "" {
		slog.Debug("Try to detect feeds with RSS-Bridge", slog.String("website_url", websiteURL))
		subscriptions, localizedError := f.FindSubscriptionsFromRSSBridge(websiteURL, rssBridgeURL)
		if localizedError != nil {
			return nil, localizedError
		}

		if len(subscriptions) > 0 {
			slog.Debug("Subscriptions found from RSS-Bridge", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
			return subscriptions, nil
		}
	}

	// Step 5) Check if the website has a feed at a well-known URL.
	slog.Debug("Try to detect feeds from well-known URLs", slog.String("website_url", websiteURL))
	subscriptions, localizedError = f.FindSubscriptionsFromWellKnownURLs(websiteURL)
	if localizedError != nil {
		return nil, localizedError
	}

	if len(subscriptions) > 0 {
		slog.Debug("Subscriptions found with well-known URLs", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
		return subscriptions, nil
	}

	return nil, nil
}
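
// FindSubscriptionsFromWebPage extracts feed URLs from the RSS, Atom, and
// JSON Feed <link> tags of an HTML document, resolving relative URLs against
// websiteURL and removing duplicates.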
func (f *SubscriptionFinder) FindSubscriptionsFromWebPage(websiteURL, contentType string, body io.Reader) (Subscriptions, *locale.LocalizedErrorWrapper) {
	queries := map[string]string{
		"link[type='application/rss+xml']":   parser.FormatRSS,
		"link[type='application/atom+xml']":  parser.FormatAtom,
		"link[type='application/json']":      parser.FormatJSON,
		"link[type='application/feed+json']": parser.FormatJSON,
	}

	htmlDocumentReader, err := charset.NewReader(body, contentType)
	if err != nil {
		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_parse_html_document", err)
	}

	doc, err := goquery.NewDocumentFromReader(htmlDocumentReader)
	if err != nil {
		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_parse_html_document", err)
	}

	var subscriptions Subscriptions
	subscriptionURLs := make(map[string]bool)
	for query, kind := range queries {
		doc.Find(query).Each(func(i int, s *goquery.Selection) {
			subscription := new(Subscription)
			subscription.Type = kind

			if title, exists := s.Attr("title"); exists {
				subscription.Title = title
			}

			if feedURL, exists := s.Attr("href"); exists {
				if feedURL != "" {
					subscription.URL, err = urllib.AbsoluteURL(websiteURL, feedURL)
					if err != nil {
						return
					}
				}
			}

			if subscription.Title == "" {
				subscription.Title = subscription.URL
			}

			if subscription.URL != "" && !subscriptionURLs[subscription.URL] {
				subscriptionURLs[subscription.URL] = true
				subscriptions = append(subscriptions, subscription)
			}
		})
	}

	return subscriptions, nil
}
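
// FindSubscriptionsFromWellKnownURLs probes a list of common feed locations
// (atom.xml, feed.xml, index.rss, and so on) at the site root and in the
// current subdirectory, and returns the ones that respond successfully.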
func (f *SubscriptionFinder) FindSubscriptionsFromWellKnownURLs(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	knownURLs := map[string]string{
		"atom.xml":  parser.FormatAtom,
		"feed.xml":  parser.FormatAtom,
		"feed/":     parser.FormatAtom,
		"rss.xml":   parser.FormatRSS,
		"rss/":      parser.FormatRSS,
		"index.rss": parser.FormatRSS,
		"index.xml": parser.FormatRSS,
		"feed.atom": parser.FormatAtom,
	}

	websiteURLRoot := urllib.RootURL(websiteURL)
	baseURLs := []string{
		// Look for knownURLs in the root.
		websiteURLRoot,
	}

	// Look for knownURLs in the current subdirectory, such as 'example.com/blog/'.
	websiteURL, _ = urllib.AbsoluteURL(websiteURL, "./")
	if websiteURL != websiteURLRoot {
		baseURLs = append(baseURLs, websiteURL)
	}

	var subscriptions Subscriptions
	for _, baseURL := range baseURLs {
		for knownURL, kind := range knownURLs {
			fullURL, err := urllib.AbsoluteURL(baseURL, knownURL)
			if err != nil {
				continue
			}

			// Some websites redirect unknown URLs to the home page.
			// As a result, every known URL would end up in the subscription list.
			// We don't want the user to choose between invalid feed URLs.
			f.requestBuilder.WithoutRedirects()

			responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(fullURL))
			localizedError := responseHandler.LocalizedError()
			responseHandler.Close()

			if localizedError != nil {
				slog.Debug("Unable to subscribe", slog.String("fullURL", fullURL), slog.Any("error", localizedError.Error()))
				continue
			}

			subscriptions = append(subscriptions, &Subscription{
				Type:  kind,
				Title: fullURL,
				URL:   fullURL,
			})
		}
	}

	return subscriptions, nil
}
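
// FindSubscriptionsFromRSSBridge asks an RSS-Bridge instance which bridges
// can generate a feed for the given website URL and converts each result
// into an Atom subscription.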
func (f *SubscriptionFinder) FindSubscriptionsFromRSSBridge(websiteURL, rssBridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	slog.Debug("Trying to detect feeds using RSS-Bridge",
		slog.String("website_url", websiteURL),
		slog.String("rssbridge_url", rssBridgeURL),
	)

	bridges, err := rssbridge.DetectBridges(rssBridgeURL, websiteURL)
	if err != nil {
		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_detect_rssbridge", err)
	}

	slog.Debug("RSS-Bridge results",
		slog.String("website_url", websiteURL),
		slog.String("rssbridge_url", rssBridgeURL),
		slog.Int("nb_bridges", len(bridges)),
	)

	if len(bridges) == 0 {
		return nil, nil
	}

	subscriptions := make(Subscriptions, 0, len(bridges))
	for _, bridge := range bridges {
		subscriptions = append(subscriptions, &Subscription{
			Title: bridge.BridgeMeta.Name,
			URL:   bridge.URL,
			Type:  parser.FormatAtom,
		})
	}

	return subscriptions, nil
}
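
// FindSubscriptionsFromYouTubeChannelPage derives the Atom feed URL for a
// YouTube channel page from the channel ID in the URL, without fetching the
// page.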
func (f *SubscriptionFinder) FindSubscriptionsFromYouTubeChannelPage(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	kind, id, _ := youtubeURLIDExtractor(websiteURL)

	if kind == youtubeIDKindChannel {
		feedURL := fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, id)
		return Subscriptions{NewSubscription(websiteURL, feedURL, parser.FormatAtom)}, nil
	}

	slog.Debug("This website is not a YouTube channel page, the regex doesn't match", slog.String("website_url", websiteURL))

	return nil, nil
}
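
// FindSubscriptionsFromYouTubeVideoPage fetches a YouTube video page and
// builds the channel feed URL from its channelId meta tag.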
func (f *SubscriptionFinder) FindSubscriptionsFromYouTubeVideoPage(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	kind, _, err := youtubeURLIDExtractor(websiteURL)
	if err != nil {
		slog.Debug("Unable to parse URL", slog.String("website_url", websiteURL), slog.Any("error", err))
	}

	if kind != youtubeIDKindVideo {
		slog.Debug("This website is not a YouTube video page, the regex doesn't match", slog.String("website_url", websiteURL))
		return nil, nil
	}

	responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(websiteURL))
	defer responseHandler.Close()

	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
		return nil, localizedError
	}

	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
	if docErr != nil {
		return nil, locale.NewLocalizedErrorWrapper(docErr, "error.unable_to_parse_html_document", docErr)
	}

	if channelID, exists := doc.Find(`meta[itemprop="channelId"]`).First().Attr("content"); exists {
		feedURL := fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, channelID)
		return Subscriptions{NewSubscription(websiteURL, feedURL, parser.FormatAtom)}, nil
	}

	return nil, nil
}
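
// FindSubscriptionsFromYouTubePlaylistPage derives the Atom feed URL for a
// YouTube playlist page from the playlist ID in the URL, without fetching
// the page.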
func (f *SubscriptionFinder) FindSubscriptionsFromYouTubePlaylistPage(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
	kind, id, _ := youtubeURLIDExtractor(websiteURL)

	if kind == youtubeIDKindPlaylist {
		feedURL := fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?playlist_id=%s`, id)
		return Subscriptions{NewSubscription(websiteURL, feedURL, parser.FormatAtom)}, nil
	}

	slog.Debug("This website is not a YouTube playlist page, the regex doesn't match", slog.String("website_url", websiteURL))

	return nil, nil
}
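
// youtubeURLIDExtractor returns the kind of YouTube page (channel, video, or
// playlist) and the corresponding identifier extracted from websiteURL. It
// returns errNotYoutubeUrl when the host is not a YouTube domain.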
func youtubeURLIDExtractor(websiteURL string) (idKind youtubeKind, id string, err error) {
	decodedUrl, err := url.Parse(websiteURL)
	if err != nil {
		return
	}

	if !youtubeHostRegex.MatchString(decodedUrl.Host) {
		slog.Debug("This website is not a YouTube page, the regex doesn't match", slog.String("website_url", websiteURL))
		err = errNotYoutubeUrl
		return
	}

	switch {
	case strings.HasPrefix(decodedUrl.Path, "/channel"):
		// Guard against paths like "/channel" without an ID, which would
		// otherwise make the submatch lookup panic.
		if matches := youtubeChannelRegex.FindStringSubmatch(decodedUrl.Path); len(matches) == 2 {
			idKind = youtubeIDKindChannel
			id = matches[1]
			return
		}
	case strings.HasPrefix(decodedUrl.Path, "/watch") && decodedUrl.Query().Has("list"):
		idKind = youtubeIDKindPlaylist
		id = decodedUrl.Query().Get("list")
		return
	case strings.HasPrefix(decodedUrl.Path, "/watch"):
		idKind = youtubeIDKindVideo
		id = decodedUrl.Query().Get("v")
		return
	case strings.HasPrefix(decodedUrl.Path, "/playlist"):
		idKind = youtubeIDKindPlaylist
		id = decodedUrl.Query().Get("list")
		return
	}

	err = fmt.Errorf("unable to extract youtube id from URL: %s", websiteURL)
	return
}