Refactor feed discovery and avoid an extra HTTP request if the URL provided is the feed

This commit is contained in:
parent 14e25ab9fe
commit eeaab72a9f

31 changed files with 455 additions and 200 deletions
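The shape of the change, in one place: every caller now builds a fetcher.RequestBuilder and hands it to a subscription.SubscriptionFinder, and when the submitted URL turns out to be the feed itself, the finder keeps the downloaded body (plus ETag/Last-Modified) so feed creation can skip a second fetch. A minimal sketch of the new calling convention, assembled from the hunks below (the discover helper is illustrative, not literal handler code):

package example

import (
	"miniflux.app/v2/internal/config"
	"miniflux.app/v2/internal/reader/fetcher"
	"miniflux.app/v2/internal/reader/subscription"
)

// discover sketches the new flow: the caller owns the RequestBuilder
// and passes it to a SubscriptionFinder instead of a long argument list.
func discover(websiteURL, rssBridgeURL string) {
	requestBuilder := fetcher.NewRequestBuilder()
	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())

	finder := subscription.NewSubscriptionFinder(requestBuilder)
	subscriptions, localizedError := finder.FindSubscriptions(websiteURL, rssBridgeURL)
	if localizedError != nil {
		return
	}

	// If the URL pointed directly at a feed, the body was already downloaded
	// during discovery and can be reused instead of fetching it again.
	if len(subscriptions) == 1 && finder.IsFeedAlreadyDownloaded() {
		responseInfo := finder.FeedResponseInfo() // Content, ETag, LastModified
		_ = responseInfo
	}
}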
@@ -7,9 +7,11 @@ import (
 	json_parser "encoding/json"
 	"net/http"
 
+	"miniflux.app/v2/internal/config"
 	"miniflux.app/v2/internal/http/request"
 	"miniflux.app/v2/internal/http/response/json"
 	"miniflux.app/v2/internal/model"
+	"miniflux.app/v2/internal/reader/fetcher"
 	"miniflux.app/v2/internal/reader/subscription"
 	"miniflux.app/v2/internal/validator"
 )
@@ -32,14 +34,17 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request) {
 		rssbridgeURL = intg.RSSBridgeURL
 	}
 
-	subscriptions, localizedError := subscription.FindSubscriptions(
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
+	requestBuilder.WithUserAgent(subscriptionDiscoveryRequest.UserAgent)
+	requestBuilder.WithCookie(subscriptionDiscoveryRequest.Cookie)
+	requestBuilder.WithUsernameAndPassword(subscriptionDiscoveryRequest.Username, subscriptionDiscoveryRequest.Password)
+	requestBuilder.UseProxy(subscriptionDiscoveryRequest.FetchViaProxy)
+	requestBuilder.IgnoreTLSErrors(subscriptionDiscoveryRequest.AllowSelfSignedCertificates)
+
+	subscriptions, localizedError := subscription.NewSubscriptionFinder(requestBuilder).FindSubscriptions(
 		subscriptionDiscoveryRequest.URL,
-		subscriptionDiscoveryRequest.UserAgent,
-		subscriptionDiscoveryRequest.Cookie,
-		subscriptionDiscoveryRequest.Username,
-		subscriptionDiscoveryRequest.Password,
-		subscriptionDiscoveryRequest.FetchViaProxy,
-		subscriptionDiscoveryRequest.AllowSelfSignedCertificates,
 		rssbridgeURL,
 	)
 
@@ -20,6 +20,7 @@ import (
 	"miniflux.app/v2/internal/integration"
 	"miniflux.app/v2/internal/model"
 	"miniflux.app/v2/internal/proxy"
+	"miniflux.app/v2/internal/reader/fetcher"
 	mff "miniflux.app/v2/internal/reader/handler"
 	mfs "miniflux.app/v2/internal/reader/subscription"
 	"miniflux.app/v2/internal/storage"
@@ -667,13 +668,22 @@ func (h *handler) quickAddHandler(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	url := r.Form.Get(ParamQuickAdd)
-	if !validator.IsValidURL(url) {
-		json.BadRequest(w, r, fmt.Errorf("googlereader: invalid URL: %s", url))
+	feedURL := r.Form.Get(ParamQuickAdd)
+	if !validator.IsValidURL(feedURL) {
+		json.BadRequest(w, r, fmt.Errorf("googlereader: invalid URL: %s", feedURL))
 		return
 	}
 
-	subscriptions, localizedError := mfs.FindSubscriptions(url, "", "", "", "", false, false, "")
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
+
+	var rssBridgeURL string
+	if intg, err := h.store.Integration(userID); err == nil && intg != nil && intg.RSSBridgeEnabled {
+		rssBridgeURL = intg.RSSBridgeURL
+	}
+
+	subscriptions, localizedError := mfs.NewSubscriptionFinder(requestBuilder).FindSubscriptions(feedURL, rssBridgeURL)
 	if localizedError != nil {
 		json.ServerError(w, r, localizedError.Error())
 		return
@@ -460,5 +460,6 @@
     "error.duplicated_feed": "This feed already exists.",
     "error.unable_to_parse_feed": "Unable to parse this feed: %v.",
     "error.feed_not_found": "This feed does not exist or does not belong to this user.",
-    "error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v."
+    "error.unable_to_detect_rssbridge": "Unable to detect feed using RSS-Bridge: %v.",
+    "error.feed_format_not_detected": "Unable to detect feed format: %v."
 }
@@ -460,5 +460,6 @@
     "error.duplicated_feed": "Ce flux existe déjà.",
     "error.unable_to_parse_feed": "Impossible d'analyser ce flux : %v.",
     "error.feed_not_found": "Impossible de trouver ce flux.",
-    "error.unable_to_detect_rssbridge": "Impossible de détecter un flux RSS en utilisant RSS-Bridge: %v."
+    "error.unable_to_detect_rssbridge": "Impossible de détecter un flux RSS en utilisant RSS-Bridge: %v.",
+    "error.feed_format_not_detected": "Impossible de détecter le format du flux : %v."
 }
@@ -5,6 +5,7 @@ package model // import "miniflux.app/v2/internal/model"
 
 import (
 	"fmt"
+	"io"
 	"math"
 	"time"
 
@@ -144,6 +145,31 @@ type FeedCreationRequest struct {
 	UrlRewriteRules string `json:"urlrewrite_rules"`
 }
 
+type FeedCreationRequestFromSubscriptionDiscovery struct {
+	Content      io.ReadSeeker
+	ETag         string
+	LastModified string
+
+	FeedURL                     string `json:"feed_url"`
+	CategoryID                  int64  `json:"category_id"`
+	UserAgent                   string `json:"user_agent"`
+	Cookie                      string `json:"cookie"`
+	Username                    string `json:"username"`
+	Password                    string `json:"password"`
+	Crawler                     bool   `json:"crawler"`
+	Disabled                    bool   `json:"disabled"`
+	NoMediaPlayer               bool   `json:"no_media_player"`
+	IgnoreHTTPCache             bool   `json:"ignore_http_cache"`
+	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
+	FetchViaProxy               bool   `json:"fetch_via_proxy"`
+	ScraperRules                string `json:"scraper_rules"`
+	RewriteRules                string `json:"rewrite_rules"`
+	BlocklistRules              string `json:"blocklist_rules"`
+	KeeplistRules               string `json:"keeplist_rules"`
+	HideGlobally                bool   `json:"hide_globally"`
+	UrlRewriteRules             string `json:"urlrewrite_rules"`
+}
+
 // FeedModificationRequest represents the request to update a feed.
 type FeedModificationRequest struct {
 	FeedURL *string `json:"feed_url"`
@@ -95,14 +95,14 @@ func (r *ResponseHandler) ReadBody(maxBodySize int64) ([]byte, *locale.LocalizedErrorWrapper) {
 func (r *ResponseHandler) LocalizedError() *locale.LocalizedErrorWrapper {
 	if r.clientErr != nil {
 		switch r.clientErr.(type) {
-		case x509.CertificateInvalidError, x509.UnknownAuthorityError, x509.HostnameError:
-			return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.tls_error", r.clientErr.Error())
+		case x509.CertificateInvalidError, x509.HostnameError:
+			return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.tls_error", r.clientErr)
 		case *net.OpError:
-			return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_operation", r.clientErr.Error())
+			return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_operation", r.clientErr)
 		case net.Error:
 			networkErr := r.clientErr.(net.Error)
 			if networkErr.Timeout() {
-				return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_timeout", r.clientErr.Error())
+				return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.network_timeout", r.clientErr)
 			}
 		}
 
@@ -110,7 +110,7 @@ func (r *ResponseHandler) LocalizedError() *locale.LocalizedErrorWrapper {
 			return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_empty_response")
 		}
 
-		return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_client_error", r.clientErr.Error())
+		return locale.NewLocalizedErrorWrapper(fmt.Errorf("fetcher: %w", r.clientErr), "error.http_client_error", r.clientErr)
 	}
 
 	switch r.httpResponse.StatusCode {
@@ -4,6 +4,7 @@
 package handler // import "miniflux.app/v2/internal/reader/handler"
 
 import (
+	"bytes"
 	"errors"
 	"log/slog"
 	"time"
@@ -25,6 +26,83 @@ var (
 	ErrDuplicatedFeed = errors.New("fetcher: duplicated feed")
 )
 
+func CreateFeedFromSubscriptionDiscovery(store *storage.Storage, userID int64, feedCreationRequest *model.FeedCreationRequestFromSubscriptionDiscovery) (*model.Feed, *locale.LocalizedErrorWrapper) {
+	slog.Debug("Begin feed creation process from subscription discovery",
+		slog.Int64("user_id", userID),
+		slog.String("feed_url", feedCreationRequest.FeedURL),
+	)
+
+	user, storeErr := store.UserByID(userID)
+	if storeErr != nil {
+		return nil, locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
+	}
+
+	if !store.CategoryIDExists(userID, feedCreationRequest.CategoryID) {
+		return nil, locale.NewLocalizedErrorWrapper(ErrCategoryNotFound, "error.category_not_found")
+	}
+
+	if store.FeedURLExists(userID, feedCreationRequest.FeedURL) {
+		return nil, locale.NewLocalizedErrorWrapper(ErrDuplicatedFeed, "error.duplicated_feed")
+	}
+
+	subscription, parseErr := parser.ParseFeed(feedCreationRequest.FeedURL, feedCreationRequest.Content)
+	if parseErr != nil {
+		return nil, locale.NewLocalizedErrorWrapper(parseErr, "error.unable_to_parse_feed", parseErr)
+	}
+
+	subscription.UserID = userID
+	subscription.UserAgent = feedCreationRequest.UserAgent
+	subscription.Cookie = feedCreationRequest.Cookie
+	subscription.Username = feedCreationRequest.Username
+	subscription.Password = feedCreationRequest.Password
+	subscription.Crawler = feedCreationRequest.Crawler
+	subscription.Disabled = feedCreationRequest.Disabled
+	subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache
+	subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
+	subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy
+	subscription.ScraperRules = feedCreationRequest.ScraperRules
+	subscription.RewriteRules = feedCreationRequest.RewriteRules
+	subscription.BlocklistRules = feedCreationRequest.BlocklistRules
+	subscription.KeeplistRules = feedCreationRequest.KeeplistRules
+	subscription.UrlRewriteRules = feedCreationRequest.UrlRewriteRules
+	subscription.EtagHeader = feedCreationRequest.ETag
+	subscription.LastModifiedHeader = feedCreationRequest.LastModified
+	subscription.FeedURL = feedCreationRequest.FeedURL
+	subscription.WithCategoryID(feedCreationRequest.CategoryID)
+	subscription.CheckedNow()
+
+	processor.ProcessFeedEntries(store, subscription, user, true)
+
+	if storeErr := store.CreateFeed(subscription); storeErr != nil {
+		return nil, locale.NewLocalizedErrorWrapper(storeErr, "error.database_error", storeErr)
+	}
+
+	slog.Debug("Created feed",
+		slog.Int64("user_id", userID),
+		slog.Int64("feed_id", subscription.ID),
+		slog.String("feed_url", subscription.FeedURL),
+	)
+
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithUsernameAndPassword(feedCreationRequest.Username, feedCreationRequest.Password)
+	requestBuilder.WithUserAgent(feedCreationRequest.UserAgent)
+	requestBuilder.WithCookie(feedCreationRequest.Cookie)
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
+	requestBuilder.UseProxy(feedCreationRequest.FetchViaProxy)
+	requestBuilder.IgnoreTLSErrors(feedCreationRequest.AllowSelfSignedCertificates)
+
+	checkFeedIcon(
+		store,
+		requestBuilder,
+		subscription.ID,
+		subscription.SiteURL,
+		subscription.IconURL,
+	)
+
+	return subscription, nil
+}
+
 // CreateFeed fetch, parse and store a new feed.
 func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model.FeedCreationRequest) (*model.Feed, *locale.LocalizedErrorWrapper) {
 	slog.Debug("Begin feed creation process",
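This new function is the other half of the optimization: it parses the buffered body instead of fetching the URL again. A hedged sketch of the handoff from a finder to feed creation (the createWithoutRefetch wrapper is illustrative; the field and function names come from the hunks in this commit):

package example

import (
	"miniflux.app/v2/internal/locale"
	"miniflux.app/v2/internal/model"
	feedHandler "miniflux.app/v2/internal/reader/handler"
	"miniflux.app/v2/internal/reader/subscription"
	"miniflux.app/v2/internal/storage"
)

// createWithoutRefetch passes the response body, ETag and Last-Modified
// captured during discovery straight to feed creation: no second request.
func createWithoutRefetch(store *storage.Storage, userID, categoryID int64, finder *subscription.SubscriptionFinder, feedURL string) (*model.Feed, *locale.LocalizedErrorWrapper) {
	responseInfo := finder.FeedResponseInfo()
	return feedHandler.CreateFeedFromSubscriptionDiscovery(store, userID, &model.FeedCreationRequestFromSubscriptionDiscovery{
		Content:      responseInfo.Content, // io.ReadSeeker over the bytes already downloaded
		ETag:         responseInfo.ETag,
		LastModified: responseInfo.LastModified,
		FeedURL:      feedURL,
		CategoryID:   categoryID,
	})
}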
@@ -68,7 +146,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model.FeedCreationRequest) (*model.Feed, *locale.LocalizedErrorWrapper) {
 		return nil, locale.NewLocalizedErrorWrapper(ErrDuplicatedFeed, "error.duplicated_feed")
 	}
 
-	subscription, parseErr := parser.ParseFeed(responseHandler.EffectiveURL(), string(responseBody))
+	subscription, parseErr := parser.ParseFeed(responseHandler.EffectiveURL(), bytes.NewReader(responseBody))
 	if parseErr != nil {
 		return nil, locale.NewLocalizedErrorWrapper(parseErr, "error.unable_to_parse_feed", parseErr)
 	}
@@ -188,7 +266,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64, forceRefresh bool) *locale.LocalizedErrorWrapper {
 		return localizedError
 	}
 
-	updatedFeed, parseErr := parser.ParseFeed(responseHandler.EffectiveURL(), string(responseBody))
+	updatedFeed, parseErr := parser.ParseFeed(responseHandler.EffectiveURL(), bytes.NewReader(responseBody))
 	if parseErr != nil {
 		localizedError := locale.NewLocalizedErrorWrapper(parseErr, "error.unable_to_parse_feed")
 
@@ -4,8 +4,9 @@
 package parser // import "miniflux.app/v2/internal/reader/parser"
 
 import (
+	"bytes"
 	"encoding/xml"
-	"strings"
+	"io"
 
 	rxml "miniflux.app/v2/internal/reader/xml"
 )
@@ -20,12 +21,16 @@ const (
 )
 
 // DetectFeedFormat tries to guess the feed format from input data.
-func DetectFeedFormat(data string) string {
-	if strings.HasPrefix(strings.TrimSpace(data), "{") {
+func DetectFeedFormat(r io.ReadSeeker) string {
+	data := make([]byte, 512)
+	r.Read(data)
+
+	if bytes.HasPrefix(bytes.TrimSpace(data), []byte("{")) {
 		return FormatJSON
 	}
 
-	decoder := rxml.NewDecoder(strings.NewReader(data))
+	r.Seek(0, io.SeekStart)
+	decoder := rxml.NewDecoder(r)
 
 	for {
 		token, _ := decoder.Token()
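DetectFeedFormat now sniffs only the first 512 bytes of an io.ReadSeeker and rewinds before handing the reader to the XML decoder, so any seekable reader works as input. A quick sketch (assuming the format constants are the lowercase names used elsewhere in this diff, e.g. "rss"):

package main

import (
	"fmt"
	"strings"

	"miniflux.app/v2/internal/reader/parser"
)

func main() {
	// strings.Reader implements io.ReadSeeker, so no full string copy is needed.
	r := strings.NewReader(`<?xml version="1.0"?><rss version="2.0"><channel></channel></rss>`)
	fmt.Println(parser.DetectFeedFormat(r)) // expected: the RSS format constant
}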
@@ -4,12 +4,13 @@
 package parser // import "miniflux.app/v2/internal/reader/parser"
 
 import (
+	"strings"
 	"testing"
 )
 
 func TestDetectRDF(t *testing.T) {
 	data := `<?xml version="1.0"?><rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns="http://my.netscape.com/rdf/simple/0.9/"></rdf:RDF>`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatRDF {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatRDF)
@@ -18,7 +19,7 @@ func TestDetectRDF(t *testing.T) {
 
 func TestDetectRSS(t *testing.T) {
 	data := `<?xml version="1.0"?><rss version="2.0"><channel></channel></rss>`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatRSS {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatRSS)
@@ -27,7 +28,7 @@ func TestDetectRSS(t *testing.T) {
 
 func TestDetectAtom10(t *testing.T) {
 	data := `<?xml version="1.0" encoding="utf-8"?><feed xmlns="http://www.w3.org/2005/Atom"></feed>`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatAtom {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatAtom)
@@ -36,7 +37,7 @@ func TestDetectAtom10(t *testing.T) {
 
 func TestDetectAtom03(t *testing.T) {
 	data := `<?xml version="1.0" encoding="utf-8"?><feed version="0.3" xmlns="http://purl.org/atom/ns#" xmlns:dc="http://purl.org/dc/elements/1.1/" xml:lang="en"></feed>`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatAtom {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatAtom)
@@ -45,7 +46,7 @@ func TestDetectAtom03(t *testing.T) {
 
 func TestDetectAtomWithISOCharset(t *testing.T) {
 	data := `<?xml version="1.0" encoding="ISO-8859-15"?><feed xmlns="http://www.w3.org/2005/Atom"></feed>`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatAtom {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatAtom)
@@ -59,7 +60,7 @@ func TestDetectJSON(t *testing.T) {
 		"title" : "Example"
 	}
 	`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatJSON {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatJSON)
@@ -70,7 +71,7 @@ func TestDetectUnknown(t *testing.T) {
 	data := `
 	<!DOCTYPE html> <html> </html>
 	`
-	format := DetectFeedFormat(data)
+	format := DetectFeedFormat(strings.NewReader(data))
 
 	if format != FormatUnknown {
 		t.Errorf(`Wrong format detected: %q instead of %q`, format, FormatUnknown)
@@ -5,7 +5,7 @@ package parser // import "miniflux.app/v2/internal/reader/parser"
 
 import (
 	"errors"
-	"strings"
+	"io"
 
 	"miniflux.app/v2/internal/model"
 	"miniflux.app/v2/internal/reader/atom"
@@ -17,16 +17,21 @@ import (
 var ErrFeedFormatNotDetected = errors.New("parser: unable to detect feed format")
 
 // ParseFeed analyzes the input data and returns a normalized feed object.
-func ParseFeed(baseURL, data string) (*model.Feed, error) {
-	switch DetectFeedFormat(data) {
+func ParseFeed(baseURL string, r io.ReadSeeker) (*model.Feed, error) {
+	r.Seek(0, io.SeekStart)
+	switch DetectFeedFormat(r) {
 	case FormatAtom:
-		return atom.Parse(baseURL, strings.NewReader(data))
+		r.Seek(0, io.SeekStart)
+		return atom.Parse(baseURL, r)
 	case FormatRSS:
-		return rss.Parse(baseURL, strings.NewReader(data))
+		r.Seek(0, io.SeekStart)
+		return rss.Parse(baseURL, r)
 	case FormatJSON:
-		return json.Parse(baseURL, strings.NewReader(data))
+		r.Seek(0, io.SeekStart)
+		return json.Parse(baseURL, r)
 	case FormatRDF:
-		return rdf.Parse(baseURL, strings.NewReader(data))
+		r.Seek(0, io.SeekStart)
+		return rdf.Parse(baseURL, r)
 	default:
 		return nil, ErrFeedFormatNotDetected
 	}
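ParseFeed rewinds the reader before each parser because DetectFeedFormat has already consumed the sniff bytes. Callers that used to pass a string now wrap it, as the updated tests below do; a sketch:

package main

import (
	"fmt"
	"strings"

	"miniflux.app/v2/internal/reader/parser"
)

func main() {
	data := `<?xml version="1.0"?><rss version="2.0"><channel><title>Example</title></channel></rss>`
	feed, err := parser.ParseFeed("https://example.org/", strings.NewReader(data))
	if err != nil {
		panic(err)
	}
	fmt.Println(feed.Title) // "Example"
}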
@@ -4,6 +4,7 @@
 package parser // import "miniflux.app/v2/internal/reader/parser"
 
 import (
+	"strings"
 	"testing"
 )
 
@@ -29,7 +30,7 @@ func TestParseAtom(t *testing.T) {
 
 	</feed>`
 
-	feed, err := ParseFeed("https://example.org/", data)
+	feed, err := ParseFeed("https://example.org/", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -57,7 +58,7 @@ func TestParseAtomFeedWithRelativeURL(t *testing.T) {
 
 	</feed>`
 
-	feed, err := ParseFeed("https://example.org/blog/atom.xml", data)
+	feed, err := ParseFeed("https://example.org/blog/atom.xml", strings.NewReader(data))
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -91,7 +92,7 @@ func TestParseRSS(t *testing.T) {
 	</channel>
 	</rss>`
 
-	feed, err := ParseFeed("http://liftoff.msfc.nasa.gov/", data)
+	feed, err := ParseFeed("http://liftoff.msfc.nasa.gov/", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -117,7 +118,7 @@ func TestParseRSSFeedWithRelativeURL(t *testing.T) {
 	</channel>
 	</rss>`
 
-	feed, err := ParseFeed("http://example.org/rss.xml", data)
+	feed, err := ParseFeed("http://example.org/rss.xml", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -158,7 +159,7 @@ func TestParseRDF(t *testing.T) {
 	</item>
 	</rdf:RDF>`
 
-	feed, err := ParseFeed("http://example.org/", data)
+	feed, err := ParseFeed("http://example.org/", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -187,7 +188,7 @@ func TestParseRDFWithRelativeURL(t *testing.T) {
 	</item>
 	</rdf:RDF>`
 
-	feed, err := ParseFeed("http://example.org/rdf.xml", data)
+	feed, err := ParseFeed("http://example.org/rdf.xml", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -225,7 +226,7 @@ func TestParseJson(t *testing.T) {
 		]
 	}`
 
-	feed, err := ParseFeed("https://example.org/feed.json", data)
+	feed, err := ParseFeed("https://example.org/feed.json", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -250,7 +251,7 @@ func TestParseJsonFeedWithRelativeURL(t *testing.T) {
 		]
 	}`
 
-	feed, err := ParseFeed("https://example.org/blog/feed.json", data)
+	feed, err := ParseFeed("https://example.org/blog/feed.json", strings.NewReader(data))
 	if err != nil {
 		t.Error(err)
 	}
@@ -285,14 +286,14 @@ func TestParseUnknownFeed(t *testing.T) {
 	</html>
 	`
 
-	_, err := ParseFeed("https://example.org/", data)
+	_, err := ParseFeed("https://example.org/", strings.NewReader(data))
 	if err == nil {
 		t.Error("ParseFeed must returns an error")
 	}
 }
 
 func TestParseEmptyFeed(t *testing.T) {
-	_, err := ParseFeed("", "")
+	_, err := ParseFeed("", strings.NewReader(""))
 	if err == nil {
 		t.Error("ParseFeed must returns an error")
 	}
@@ -13,6 +13,7 @@ import (
 	"miniflux.app/v2/internal/config"
 	"miniflux.app/v2/internal/integration/rssbridge"
 	"miniflux.app/v2/internal/locale"
+	"miniflux.app/v2/internal/model"
 	"miniflux.app/v2/internal/reader/fetcher"
 	"miniflux.app/v2/internal/reader/parser"
 	"miniflux.app/v2/internal/urllib"
@@ -25,20 +26,28 @@ var (
 	youtubeVideoRegex = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
 )
 
-func FindSubscriptions(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool, rssbridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
-	websiteURL = findYoutubeChannelFeed(websiteURL)
-	websiteURL = parseYoutubeVideoPage(websiteURL)
+type SubscriptionFinder struct {
+	requestBuilder   *fetcher.RequestBuilder
+	feedDownloaded   bool
+	feedResponseInfo *model.FeedCreationRequestFromSubscriptionDiscovery
+}
 
-	requestBuilder := fetcher.NewRequestBuilder()
-	requestBuilder.WithUsernameAndPassword(username, password)
-	requestBuilder.WithUserAgent(userAgent)
-	requestBuilder.WithCookie(cookie)
-	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
-	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
-	requestBuilder.UseProxy(fetchViaProxy)
-	requestBuilder.IgnoreTLSErrors(allowSelfSignedCertificates)
+func NewSubscriptionFinder(requestBuilder *fetcher.RequestBuilder) *SubscriptionFinder {
+	return &SubscriptionFinder{
+		requestBuilder: requestBuilder,
+	}
+}
 
-	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
+func (f *SubscriptionFinder) IsFeedAlreadyDownloaded() bool {
+	return f.feedDownloaded
+}
+
+func (f *SubscriptionFinder) FeedResponseInfo() *model.FeedCreationRequestFromSubscriptionDiscovery {
+	return f.feedResponseInfo
+}
+
+func (f *SubscriptionFinder) FindSubscriptions(websiteURL, rssBridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
+	responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(websiteURL))
 	defer responseHandler.Close()
 
 	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
@@ -52,69 +61,97 @@ func FindSubscriptions(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool, rssbridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
 		return nil, localizedError
 	}
 
-	if format := parser.DetectFeedFormat(string(responseBody)); format != parser.FormatUnknown {
-		var subscriptions Subscriptions
-		subscriptions = append(subscriptions, &Subscription{
-			Title: responseHandler.EffectiveURL(),
-			URL:   responseHandler.EffectiveURL(),
-			Type:  format,
-		})
+	f.feedResponseInfo = &model.FeedCreationRequestFromSubscriptionDiscovery{
+		Content:      bytes.NewReader(responseBody),
+		ETag:         responseHandler.ETag(),
+		LastModified: responseHandler.LastModified(),
+	}
+
+	// Step 1) Check if the website URL is a feed.
+	if feedFormat := parser.DetectFeedFormat(f.feedResponseInfo.Content); feedFormat != parser.FormatUnknown {
+		f.feedDownloaded = true
+		return Subscriptions{NewSubscription(responseHandler.EffectiveURL(), responseHandler.EffectiveURL(), feedFormat)}, nil
+	}
+
+	// Step 2) Check if the website URL is a YouTube channel.
+	slog.Debug("Try to detect feeds from YouTube channel page", slog.String("website_url", websiteURL))
+	subscriptions, localizedError := f.FindSubscriptionsFromYouTubeChannelPage(websiteURL)
+	if localizedError != nil {
+		return nil, localizedError
+	}
+
+	if len(subscriptions) > 0 {
+		slog.Debug("Subscriptions found from YouTube channel page", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
 		return subscriptions, nil
 	}
 
-	subscriptions, localizedError := parseWebPage(responseHandler.EffectiveURL(), bytes.NewReader(responseBody))
-	if localizedError != nil || subscriptions != nil {
-		return subscriptions, localizedError
+	// Step 3) Check if the website URL is a YouTube video.
+	slog.Debug("Try to detect feeds from YouTube video page", slog.String("website_url", websiteURL))
+	subscriptions, localizedError = f.FindSubscriptionsFromYouTubeVideoPage(websiteURL)
+	if localizedError != nil {
+		return nil, localizedError
 	}
 
-	if rssbridgeURL != "" {
-		slog.Debug("Trying to detect feeds using RSS-Bridge",
-			slog.String("website_url", websiteURL),
-			slog.String("rssbridge_url", rssbridgeURL),
-		)
+	if len(subscriptions) > 0 {
+		slog.Debug("Subscriptions found from YouTube video page", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
+		return subscriptions, nil
+	}
 
-		bridges, err := rssbridge.DetectBridges(rssbridgeURL, websiteURL)
-		if err != nil {
-			return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_detect_rssbridge", err)
+	// Step 4) Parse web page to find feeds from HTML meta tags.
+	slog.Debug("Try to detect feeds from HTML meta tags", slog.String("website_url", websiteURL))
+	subscriptions, localizedError = f.FindSubscriptionsFromWebPage(websiteURL, bytes.NewReader(responseBody))
+	if localizedError != nil {
+		return nil, localizedError
+	}
+
+	if len(subscriptions) > 0 {
+		slog.Debug("Subscriptions found from web page", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
+		return subscriptions, nil
+	}
+
+	// Step 5) Check if the website URL can use RSS-Bridge.
+	if rssBridgeURL != "" {
+		slog.Debug("Try to detect feeds with RSS-Bridge", slog.String("website_url", websiteURL))
+		subscriptions, localizedError := f.FindSubscriptionsFromRSSBridge(websiteURL, rssBridgeURL)
+		if localizedError != nil {
+			return nil, localizedError
 		}
 
-		slog.Debug("RSS-Bridge results",
-			slog.String("website_url", websiteURL),
-			slog.String("rssbridge_url", rssbridgeURL),
-			slog.Int("nb_bridges", len(bridges)),
-		)
-
-		if len(bridges) > 0 {
-			var subscriptions Subscriptions
-			for _, bridge := range bridges {
-				subscriptions = append(subscriptions, &Subscription{
-					Title: bridge.BridgeMeta.Name,
-					URL:   bridge.URL,
-					Type:  "atom",
-				})
-			}
+		if len(subscriptions) > 0 {
+			slog.Debug("Subscriptions found from RSS-Bridge", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
 			return subscriptions, nil
 		}
 	}
 
-	return tryWellKnownUrls(websiteURL, userAgent, cookie, username, password, fetchViaProxy, allowSelfSignedCertificates)
-}
-
-func parseWebPage(websiteURL string, data io.Reader) (Subscriptions, *locale.LocalizedErrorWrapper) {
-	var subscriptions Subscriptions
-	queries := map[string]string{
-		"link[type='application/rss+xml']":   "rss",
-		"link[type='application/atom+xml']":  "atom",
-		"link[type='application/json']":      "json",
-		"link[type='application/feed+json']": "json",
+	// Step 6) Check if the website has a known feed URL.
+	slog.Debug("Try to detect feeds from well-known URLs", slog.String("website_url", websiteURL))
+	subscriptions, localizedError = f.FindSubscriptionsFromWellKnownURLs(websiteURL)
+	if localizedError != nil {
+		return nil, localizedError
 	}
 
-	doc, err := goquery.NewDocumentFromReader(data)
+	if len(subscriptions) > 0 {
+		slog.Debug("Subscriptions found with well-known URLs", slog.String("website_url", websiteURL), slog.Any("subscriptions", subscriptions))
+		return subscriptions, nil
+	}
+
+	return nil, nil
+}
+
+func (f *SubscriptionFinder) FindSubscriptionsFromWebPage(websiteURL string, body io.Reader) (Subscriptions, *locale.LocalizedErrorWrapper) {
+	queries := map[string]string{
+		"link[type='application/rss+xml']":   parser.FormatRSS,
+		"link[type='application/atom+xml']":  parser.FormatAtom,
+		"link[type='application/json']":      parser.FormatJSON,
+		"link[type='application/feed+json']": parser.FormatJSON,
+	}
+
+	doc, err := goquery.NewDocumentFromReader(body)
 	if err != nil {
 		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_parse_html_document", err)
 	}
 
+	var subscriptions Subscriptions
 	for query, kind := range queries {
 		doc.Find(query).Each(func(i int, s *goquery.Selection) {
 			subscription := new(Subscription)
@@ -143,52 +180,13 @@ func parseWebPage(websiteURL string, data io.Reader) (Subscriptions, *locale.LocalizedErrorWrapper) {
 	return subscriptions, nil
 }
 
-func findYoutubeChannelFeed(websiteURL string) string {
-	matches := youtubeChannelRegex.FindStringSubmatch(websiteURL)
-
-	if len(matches) == 2 {
-		return fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, matches[1])
-	}
-	return websiteURL
-}
-
-func parseYoutubeVideoPage(websiteURL string) string {
-	if !youtubeVideoRegex.MatchString(websiteURL) {
-		return websiteURL
-	}
-
-	requestBuilder := fetcher.NewRequestBuilder()
-	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
-	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
-
-	responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(websiteURL))
-	defer responseHandler.Close()
-
-	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
-		slog.Warn("Unable to find subscriptions", slog.String("website_url", websiteURL), slog.Any("error", localizedError.Error()))
-		return websiteURL
-	}
-
-	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
-	if docErr != nil {
-		return websiteURL
-	}
-
-	if channelID, exists := doc.Find(`meta[itemprop="channelId"]`).First().Attr("content"); exists {
-		return fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, channelID)
-	}
-
-	return websiteURL
-}
-
-func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *locale.LocalizedErrorWrapper) {
-	var subscriptions Subscriptions
+func (f *SubscriptionFinder) FindSubscriptionsFromWellKnownURLs(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
 	knownURLs := map[string]string{
-		"atom.xml": "atom",
-		"feed.xml": "atom",
-		"feed/":    "atom",
-		"rss.xml":  "rss",
-		"rss/":     "rss",
+		"atom.xml": parser.FormatAtom,
+		"feed.xml": parser.FormatAtom,
+		"feed/":    parser.FormatAtom,
+		"rss.xml":  parser.FormatRSS,
+		"rss/":     parser.FormatRSS,
 	}
 
 	websiteURLRoot := urllib.RootURL(websiteURL)
@@ -203,6 +201,7 @@ func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *locale.LocalizedErrorWrapper) {
 		baseURLs = append(baseURLs, websiteURL)
 	}
 
+	var subscriptions Subscriptions
 	for _, baseURL := range baseURLs {
 		for knownURL, kind := range knownURLs {
 			fullURL, err := urllib.AbsoluteURL(baseURL, knownURL)
@@ -210,21 +209,12 @@ func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *locale.LocalizedErrorWrapper) {
 				continue
 			}
 
-			requestBuilder := fetcher.NewRequestBuilder()
-			requestBuilder.WithUsernameAndPassword(username, password)
-			requestBuilder.WithUserAgent(userAgent)
-			requestBuilder.WithCookie(cookie)
-			requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
-			requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
-			requestBuilder.UseProxy(fetchViaProxy)
-			requestBuilder.IgnoreTLSErrors(allowSelfSignedCertificates)
-
 			// Some websites redirects unknown URLs to the home page.
 			// As result, the list of known URLs is returned to the subscription list.
 			// We don't want the user to choose between invalid feed URLs.
-			requestBuilder.WithoutRedirects()
+			f.requestBuilder.WithoutRedirects()
 
-			responseHandler := fetcher.NewResponseHandler(requestBuilder.ExecuteRequest(fullURL))
+			responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(fullURL))
 			defer responseHandler.Close()
 
 			if localizedError := responseHandler.LocalizedError(); localizedError != nil {
@@ -241,3 +231,75 @@ func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *locale.LocalizedErrorWrapper) {
 
 	return subscriptions, nil
 }
+
+func (f *SubscriptionFinder) FindSubscriptionsFromRSSBridge(websiteURL, rssBridgeURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
+	slog.Debug("Trying to detect feeds using RSS-Bridge",
+		slog.String("website_url", websiteURL),
+		slog.String("rssbridge_url", rssBridgeURL),
+	)
+
+	bridges, err := rssbridge.DetectBridges(rssBridgeURL, websiteURL)
+	if err != nil {
+		return nil, locale.NewLocalizedErrorWrapper(err, "error.unable_to_detect_rssbridge", err)
+	}
+
+	slog.Debug("RSS-Bridge results",
+		slog.String("website_url", websiteURL),
+		slog.String("rssbridge_url", rssBridgeURL),
+		slog.Int("nb_bridges", len(bridges)),
+	)
+
+	if len(bridges) == 0 {
+		return nil, nil
+	}
+
+	var subscriptions Subscriptions
+	for _, bridge := range bridges {
+		subscriptions = append(subscriptions, &Subscription{
+			Title: bridge.BridgeMeta.Name,
+			URL:   bridge.URL,
+			Type:  parser.FormatAtom,
+		})
+	}
+
+	return subscriptions, nil
+}
+
+func (f *SubscriptionFinder) FindSubscriptionsFromYouTubeChannelPage(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
+	matches := youtubeChannelRegex.FindStringSubmatch(websiteURL)
+
+	if len(matches) == 2 {
+		feedURL := fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, matches[1])
+		return Subscriptions{NewSubscription(websiteURL, feedURL, parser.FormatAtom)}, nil
+	}
+
+	slog.Debug("This website is not a YouTube channel page, the regex doesn't match", slog.String("website_url", websiteURL))
+
+	return nil, nil
+}
+
+func (f *SubscriptionFinder) FindSubscriptionsFromYouTubeVideoPage(websiteURL string) (Subscriptions, *locale.LocalizedErrorWrapper) {
+	if !youtubeVideoRegex.MatchString(websiteURL) {
+		slog.Debug("This website is not a YouTube video page, the regex doesn't match", slog.String("website_url", websiteURL))
+		return nil, nil
+	}
+
+	responseHandler := fetcher.NewResponseHandler(f.requestBuilder.ExecuteRequest(websiteURL))
+	defer responseHandler.Close()
+
+	if localizedError := responseHandler.LocalizedError(); localizedError != nil {
+		return nil, localizedError
+	}
+
+	doc, docErr := goquery.NewDocumentFromReader(responseHandler.Body(config.Opts.HTTPClientMaxBodySize()))
+	if docErr != nil {
+		return nil, locale.NewLocalizedErrorWrapper(docErr, "error.unable_to_parse_html_document", docErr)
+	}
+
+	if channelID, exists := doc.Find(`meta[itemprop="channelId"]`).First().Attr("content"); exists {
+		feedURL := fmt.Sprintf(`https://www.youtube.com/feeds/videos.xml?channel_id=%s`, channelID)
+		return Subscriptions{NewSubscription(websiteURL, feedURL, parser.FormatAtom)}, nil
+	}
+
+	return nil, nil
+}
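The HTML <link> scan is now an exported method that does not touch the request builder, so it can be exercised with a nil builder, exactly as the tests below do. A sketch of standalone use (the expected output follows Subscription.String as defined in this diff):

package main

import (
	"fmt"
	"strings"

	"miniflux.app/v2/internal/reader/subscription"
)

func main() {
	htmlPage := `<html><head>
		<link rel="alternate" type="application/rss+xml" title="Example" href="/feed.xml">
	</head><body></body></html>`

	finder := subscription.NewSubscriptionFinder(nil) // no HTTP needed for the HTML scan
	subscriptions, lerr := finder.FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
	if lerr != nil {
		panic(lerr.Error())
	}
	fmt.Println(subscriptions) // e.g. [Title="Example", URL="http://example.org/feed.xml", Type="rss"]
}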
@@ -11,13 +11,20 @@ import (
 func TestFindYoutubeChannelFeed(t *testing.T) {
 	scenarios := map[string]string{
 		"https://www.youtube.com/channel/UC-Qj80avWItNRjkZ41rzHyw": "https://www.youtube.com/feeds/videos.xml?channel_id=UC-Qj80avWItNRjkZ41rzHyw",
-		"http://example.org/feed": "http://example.org/feed",
 	}
 
 	for websiteURL, expectedFeedURL := range scenarios {
-		result := findYoutubeChannelFeed(websiteURL)
-		if result != expectedFeedURL {
-			t.Errorf(`Unexpected Feed, got %s, instead of %s`, result, expectedFeedURL)
+		subscriptions, localizedError := NewSubscriptionFinder(nil).FindSubscriptionsFromYouTubeChannelPage(websiteURL)
+		if localizedError != nil {
+			t.Fatalf(`Parsing a correctly formatted YouTube channel page should not return any error: %v`, localizedError)
+		}
+
+		if len(subscriptions) != 1 {
+			t.Fatal(`Incorrect number of subscriptions returned`)
+		}
+
+		if subscriptions[0].URL != expectedFeedURL {
+			t.Errorf(`Unexpected Feed, got %s, instead of %s`, subscriptions[0].URL, expectedFeedURL)
 		}
 	}
 }
@@ -33,7 +40,7 @@ func TestParseWebPageWithRssFeed(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -66,7 +73,7 @@ func TestParseWebPageWithAtomFeed(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -99,7 +106,7 @@ func TestParseWebPageWithJSONFeed(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -132,7 +139,7 @@ func TestParseWebPageWithOldJSONFeedMimeType(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -165,7 +172,7 @@ func TestParseWebPageWithRelativeFeedURL(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -198,7 +205,7 @@ func TestParseWebPageWithEmptyTitle(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -232,7 +239,7 @@ func TestParseWebPageWithMultipleFeeds(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -253,7 +260,7 @@ func TestParseWebPageWithEmptyFeedURL(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -274,7 +281,7 @@ func TestParseWebPageWithNoHref(t *testing.T) {
 	</body>
 	</html>`
 
-	subscriptions, err := parseWebPage("http://example.org/", strings.NewReader(htmlPage))
+	subscriptions, err := NewSubscriptionFinder(nil).FindSubscriptionsFromWebPage("http://example.org/", strings.NewReader(htmlPage))
 	if err != nil {
 		t.Fatalf(`Parsing a correctly formatted HTML page should not return any error: %v`, err)
 	}
@@ -12,6 +12,10 @@ type Subscription struct {
 	Type string `json:"type"`
 }
 
+func NewSubscription(title, url, kind string) *Subscription {
+	return &Subscription{Title: title, URL: url, Type: kind}
+}
+
 func (s Subscription) String() string {
 	return fmt.Sprintf(`Title="%s", URL="%s", Type="%s"`, s.Title, s.URL, s.Type)
 }
@@ -12,6 +12,7 @@ import (
 	"miniflux.app/v2/internal/http/route"
 	"miniflux.app/v2/internal/locale"
 	"miniflux.app/v2/internal/model"
+	"miniflux.app/v2/internal/reader/fetcher"
 	feedHandler "miniflux.app/v2/internal/reader/handler"
 	"miniflux.app/v2/internal/reader/subscription"
 	"miniflux.app/v2/internal/ui/form"
@@ -51,20 +52,24 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	var rssbridgeURL string
+	var rssBridgeURL string
 	if intg, err := h.store.Integration(user.ID); err == nil && intg != nil && intg.RSSBridgeEnabled {
-		rssbridgeURL = intg.RSSBridgeURL
+		rssBridgeURL = intg.RSSBridgeURL
 	}
 
-	subscriptions, localizedError := subscription.FindSubscriptions(
+	requestBuilder := fetcher.NewRequestBuilder()
+	requestBuilder.WithTimeout(config.Opts.HTTPClientTimeout())
+	requestBuilder.WithProxy(config.Opts.HTTPClientProxy())
+	requestBuilder.WithUserAgent(subscriptionForm.UserAgent)
+	requestBuilder.WithCookie(subscriptionForm.Cookie)
+	requestBuilder.WithUsernameAndPassword(subscriptionForm.Username, subscriptionForm.Password)
+	requestBuilder.UseProxy(subscriptionForm.FetchViaProxy)
+	requestBuilder.IgnoreTLSErrors(subscriptionForm.AllowSelfSignedCertificates)
+
+	subscriptionFinder := subscription.NewSubscriptionFinder(requestBuilder)
+	subscriptions, localizedError := subscriptionFinder.FindSubscriptions(
 		subscriptionForm.URL,
-		subscriptionForm.UserAgent,
-		subscriptionForm.Cookie,
-		subscriptionForm.Username,
-		subscriptionForm.Password,
-		subscriptionForm.FetchViaProxy,
-		subscriptionForm.AllowSelfSignedCertificates,
-		rssbridgeURL,
+		rssBridgeURL,
 	)
 	if localizedError != nil {
 		v.Set("form", subscriptionForm)
@@ -79,7 +84,35 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
 		v.Set("form", subscriptionForm)
 		v.Set("errorMessage", locale.NewLocalizedError("error.subscription_not_found").Translate(user.Language))
 		html.OK(w, r, v.Render("add_subscription"))
-	case n == 1:
+	case n == 1 && subscriptionFinder.IsFeedAlreadyDownloaded():
+		feed, localizedError := feedHandler.CreateFeedFromSubscriptionDiscovery(h.store, user.ID, &model.FeedCreationRequestFromSubscriptionDiscovery{
+			Content:                     subscriptionFinder.FeedResponseInfo().Content,
+			ETag:                        subscriptionFinder.FeedResponseInfo().ETag,
+			LastModified:                subscriptionFinder.FeedResponseInfo().LastModified,
+			CategoryID:                  subscriptionForm.CategoryID,
+			FeedURL:                     subscriptions[0].URL,
+			Crawler:                     subscriptionForm.Crawler,
+			AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
+			UserAgent:                   subscriptionForm.UserAgent,
+			Cookie:                      subscriptionForm.Cookie,
+			Username:                    subscriptionForm.Username,
+			Password:                    subscriptionForm.Password,
+			ScraperRules:                subscriptionForm.ScraperRules,
+			RewriteRules:                subscriptionForm.RewriteRules,
+			BlocklistRules:              subscriptionForm.BlocklistRules,
+			KeeplistRules:               subscriptionForm.KeeplistRules,
+			UrlRewriteRules:             subscriptionForm.UrlRewriteRules,
+			FetchViaProxy:               subscriptionForm.FetchViaProxy,
+		})
+		if localizedError != nil {
+			v.Set("form", subscriptionForm)
+			v.Set("errorMessage", localizedError.Translate(user.Language))
+			html.OK(w, r, v.Render("add_subscription"))
+			return
+		}
+
+		html.Redirect(w, r, route.Path(h.router, "feedEntries", "feedID", feed.ID))
+	case n == 1 && !subscriptionFinder.IsFeedAlreadyDownloaded():
 		feed, localizedError := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{
 			CategoryID: subscriptionForm.CategoryID,
 			FeedURL:    subscriptions[0].URL,