// SPDX-FileCopyrightText: Copyright The Miniflux Authors. All rights reserved.
// SPDX-License-Identifier: Apache-2.0

package processor

import (
	"errors"
	"fmt"
	"math"
	"regexp"
	"strconv"
	"strings"
	"time"
	"unicode/utf8"

	"miniflux.app/config"
	"miniflux.app/http/client"
	"miniflux.app/integration"
	"miniflux.app/logger"
	"miniflux.app/metric"
	"miniflux.app/model"
	"miniflux.app/reader/browser"
	"miniflux.app/reader/rewrite"
	"miniflux.app/reader/sanitizer"
	"miniflux.app/reader/scraper"
	"miniflux.app/storage"

	"github.com/PuerkitoBio/goquery"
	"github.com/rylans/getlang"
)

var (
	youtubeRegex           = regexp.MustCompile(`youtube\.com/watch\?v=(.*)`)
	odyseeRegex            = regexp.MustCompile(`^https://odysee\.com`)
	iso8601Regex           = regexp.MustCompile(`^P((?P<year>\d+)Y)?((?P<month>\d+)M)?((?P<week>\d+)W)?((?P<day>\d+)D)?(T((?P<hour>\d+)H)?((?P<minute>\d+)M)?((?P<second>\d+)S)?)?$`)
	customReplaceRuleRegex = regexp.MustCompile(`rewrite\("(.*)"\|"(.*)"\)`)
)

// ProcessFeedEntries downloads the original web page for each entry and applies filters.
func ProcessFeedEntries(store *storage.Storage, feed *model.Feed, user *model.User, forceRefresh bool) {
	var filteredEntries model.Entries

	// Entries collected here are pushed to integrations in bulk after the loop.
	entriesToPush := model.Entries{}

	// Process older entries first.
	for i := len(feed.Entries) - 1; i >= 0; i-- {
		entry := feed.Entries[i]

		logger.Debug("[Processor] Processing entry %q from feed %q", entry.URL, feed.FeedURL)

		if isBlockedEntry(feed, entry) || !isAllowedEntry(feed, entry) {
			continue
		}

		url := getUrlFromEntry(feed, entry)
		entryIsNew := !store.EntryURLExists(feed.ID, entry.URL)

		if feed.Crawler && (entryIsNew || forceRefresh) {
			logger.Debug("[Processor] Crawling entry %q from feed %q", url, feed.FeedURL)

			startTime := time.Now()
			content, scraperErr := scraper.Fetch(
				url,
				feed.ScraperRules,
				feed.UserAgent,
				feed.Cookie,
				feed.AllowSelfSignedCertificates,
				feed.FetchViaProxy,
			)

			if config.Opts.HasMetricsCollector() {
				status := "success"
				if scraperErr != nil {
					status = "error"
				}
				metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
			}

			if scraperErr != nil {
				logger.Error(`[Processor] Unable to crawl this entry: %q => %v`, entry.URL, scraperErr)
			} else if content != "" {
				// We replace the entry content only if the scraper doesn't return any error.
				entry.Content = content
			}
		}

		rewrite.Rewriter(url, entry, feed.RewriteRules)

		// The sanitizer should always run at the end of the process to make sure unsafe HTML is filtered.
		entry.Content = sanitizer.Sanitize(url, entry.Content)

		if entryIsNew {
			intg, err := store.Integration(feed.UserID)
			if err != nil {
				logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
			} else if intg != nil {
				localEntry := entry
				go func() {
					integration.PushEntry(localEntry, intg)
				}()
				entriesToPush = append(entriesToPush, localEntry)
			}
		}

		updateEntryReadingTime(store, feed, entry, entryIsNew, user)
		filteredEntries = append(filteredEntries, entry)
	}

	intg, err := store.Integration(feed.UserID)
	if err != nil {
		logger.Error("[Processor] Get integrations for user %d failed: %v; the refresh process will go on, but no integrations will run this time.", feed.UserID, err)
	} else if intg != nil && len(entriesToPush) > 0 {
		go func() {
			integration.PushEntries(entriesToPush, intg)
		}()
	}

	feed.Entries = filteredEntries
}
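
// isBlockedEntry matches the feed's BlocklistRules regular expression against
// the entry title. For illustration, a hypothetical rule like (?i)sponsored|webinar
// would block any entry whose title mentions "sponsored" or "webinar";
// isAllowedEntry applies the same matching for KeeplistRules.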
func isBlockedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.BlocklistRules != "" {
		match, _ := regexp.MatchString(feed.BlocklistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Blocking entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.BlocklistRules)
			return true
		}
	}
	return false
}

func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
	if feed.KeeplistRules != "" {
		match, _ := regexp.MatchString(feed.KeeplistRules, entry.Title)
		if match {
			logger.Debug("[Processor] Allow entry %q from feed %q based on rule %q", entry.Title, feed.FeedURL, feed.KeeplistRules)
			return true
		}
		return false
	}
	return true
}

// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry, user *model.User) error {
	startTime := time.Now()
	url := getUrlFromEntry(feed, entry)

	content, scraperErr := scraper.Fetch(
		url,
		entry.Feed.ScraperRules,
		entry.Feed.UserAgent,
		entry.Feed.Cookie,
		feed.AllowSelfSignedCertificates,
		feed.FetchViaProxy,
	)

	if config.Opts.HasMetricsCollector() {
		status := "success"
		if scraperErr != nil {
			status = "error"
		}
		metric.ScraperRequestDuration.WithLabelValues(status).Observe(time.Since(startTime).Seconds())
	}

	if scraperErr != nil {
		return scraperErr
	}

	if content != "" {
		entry.Content = content
		entry.ReadingTime = calculateReadingTime(content, user)
	}

	rewrite.Rewriter(url, entry, entry.Feed.RewriteRules)
	entry.Content = sanitizer.Sanitize(url, entry.Content)

	return nil
}
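
// getUrlFromEntry applies the feed's URL rewrite rule, if any, of the form
// rewrite("search"|"replace"). For illustration, a hypothetical rule
// rewrite("^https://example\.org/amp/(.*)"|"https://example.org/$1")
// would turn https://example.org/amp/some-article into
// https://example.org/some-article.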
func getUrlFromEntry(feed *model.Feed, entry *model.Entry) string {
	url := entry.URL
	if feed.UrlRewriteRules != "" {
		parts := customReplaceRuleRegex.FindStringSubmatch(feed.UrlRewriteRules)
		if len(parts) >= 3 {
			re := regexp.MustCompile(parts[1])
			url = re.ReplaceAllString(entry.URL, parts[2])
			logger.Debug(`[Processor] Rewriting entry URL %s to %s`, entry.URL, url)
		} else {
			logger.Debug("[Processor] Cannot find search and replace terms for replace rule %s", feed.UrlRewriteRules)
		}
	}
	return url
}

func updateEntryReadingTime(store *storage.Storage, feed *model.Feed, entry *model.Entry, entryIsNew bool, user *model.User) {
	if shouldFetchYouTubeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchYouTubeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch YouTube watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	if shouldFetchOdyseeWatchTime(entry) {
		if entryIsNew {
			watchTime, err := fetchOdyseeWatchTime(entry.URL)
			if err != nil {
				logger.Error("[Processor] Unable to fetch Odysee watch time: %q => %v", entry.URL, err)
			}
			entry.ReadingTime = watchTime
		} else {
			entry.ReadingTime = store.GetReadTime(entry, feed)
		}
	}

	// Fall back to a content-based estimate for non-video entries and for
	// entries whose watch time could not be fetched.
	if entry.ReadingTime == 0 {
		entry.ReadingTime = calculateReadingTime(entry.Content, user)
	}
}
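
// shouldFetchYouTubeWatchTime reports whether watch-time fetching is enabled
// and the entry URL looks like a YouTube watch page,
// e.g. https://www.youtube.com/watch?v=dQw4w9WgXcQ.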
func shouldFetchYouTubeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchYouTubeWatchTime() {
		return false
	}
	matches := youtubeRegex.FindStringSubmatch(entry.URL)
	urlMatchesYouTubePattern := len(matches) == 2
	return urlMatchesYouTubePattern
}

func shouldFetchOdyseeWatchTime(entry *model.Entry) bool {
	if !config.Opts.FetchOdyseeWatchTime() {
		return false
	}
	matches := odyseeRegex.FindStringSubmatch(entry.URL)
	return matches != nil
}
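
// fetchYouTubeWatchTime scrapes the watch page and reads the ISO 8601
// duration from its metadata; a watch page typically embeds a tag such as
// <meta itemprop="duration" content="PT4M13S">, which parseISO8601 converts
// to a duration (truncated to whole minutes below, so PT4M13S yields 4).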
func fetchYouTubeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	durs, exists := doc.Find(`meta[itemprop="duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := parseISO8601(durs)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur.Minutes()), nil
}
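
// fetchOdyseeWatchTime reads the duration from the page's Open Graph
// metadata; an Odysee video page typically embeds a tag such as
// <meta property="og:video:duration" content="253">, i.e. the length in
// seconds (253 yields 4 minutes after integer division by 60).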
func fetchOdyseeWatchTime(url string) (int, error) {
	clt := client.NewClientWithConfig(url, config.Opts)
	response, browserErr := browser.Exec(clt)
	if browserErr != nil {
		return 0, browserErr
	}

	doc, docErr := goquery.NewDocumentFromReader(response.Body)
	if docErr != nil {
		return 0, docErr
	}

	// durs contains the video watch time in seconds.
	durs, exists := doc.Find(`meta[property="og:video:duration"]`).First().Attr("content")
	if !exists {
		return 0, errors.New("duration not found")
	}

	dur, err := strconv.ParseInt(durs, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("unable to parse duration %s: %v", durs, err)
	}

	return int(dur / 60), nil
}

// parseISO8601 parses an ISO 8601 duration string.
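// Only the time components are converted; date components such as "P1D"
// match the regex but hit the default branch below and return an error.
// Examples:
//
//	parseISO8601("PT4M13S")  // 4m13s
//	parseISO8601("PT1H2M3S") // 1h2m3s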
func parseISO8601(from string) (time.Duration, error) {
	var match []string
	var d time.Duration

	if iso8601Regex.MatchString(from) {
		match = iso8601Regex.FindStringSubmatch(from)
	} else {
		return 0, errors.New("could not parse duration string")
	}

	for i, name := range iso8601Regex.SubexpNames() {
		part := match[i]
		if i == 0 || name == "" || part == "" {
			continue
		}

		val, err := strconv.ParseInt(part, 10, 64)
		if err != nil {
			return 0, err
		}

		switch name {
		case "hour":
			d += time.Duration(val) * time.Hour
		case "minute":
			d += time.Duration(val) * time.Minute
		case "second":
			d += time.Duration(val) * time.Second
		default:
			return 0, fmt.Errorf("unknown field %s", name)
		}
	}

	return d, nil
}
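
// calculateReadingTime estimates reading time in minutes from the entry
// content and the user's configured reading speed. For illustration, with
// user.DefaultReadingSpeed set to a hypothetical 250 words per minute, a
// 1,000-word article yields ceil(1000/250) = 4 minutes, and a 10-word blurb
// still rounds up to 1 minute.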
func calculateReadingTime(content string, user *model.User) int {
	sanitizedContent := sanitizer.StripTags(content)
	languageInfo := getlang.FromString(sanitizedContent)

	var timeToReadInt int
	// CJK content has no space-separated words, so count runes instead.
	if languageInfo.LanguageCode() == "ko" || languageInfo.LanguageCode() == "zh" || languageInfo.LanguageCode() == "jp" {
		timeToReadInt = int(math.Ceil(float64(utf8.RuneCountInString(sanitizedContent)) / float64(user.CJKReadingSpeed)))
	} else {
		nbOfWords := len(strings.Fields(sanitizedContent))
		timeToReadInt = int(math.Ceil(float64(nbOfWords) / float64(user.DefaultReadingSpeed)))
	}

	return timeToReadInt
}