// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package processor // import "miniflux.app/reader/processor"

import (
	"miniflux.app/logger"
	"miniflux.app/model"
	"miniflux.app/reader/rewrite"
	"miniflux.app/reader/sanitizer"
	"miniflux.app/reader/scraper"
	"miniflux.app/storage"
)

// FeedProcessor handles the processing of feed contents.
type FeedProcessor struct {
	userID       int64
	store        *storage.Storage
	feed         *model.Feed
	scraperRules string
	rewriteRules string
	crawler      bool
	userAgent    string
}

// WithCrawler enables the crawler.
func (f *FeedProcessor) WithCrawler(value bool) {
	f.crawler = value
}

// WithScraperRules adds scraper rules to the processing.
func (f *FeedProcessor) WithScraperRules(rules string) {
	f.scraperRules = rules
}

// WithUserAgent sets the User-Agent header for fetching article content.
func (f *FeedProcessor) WithUserAgent(userAgent string) {
	f.userAgent = userAgent
}

// WithRewriteRules adds rewrite rules to the processing.
func (f *FeedProcessor) WithRewriteRules(rules string) {
	f.rewriteRules = rules
}

// Process applies rewrite and scraper rules.
func (f *FeedProcessor) Process() {
	for _, entry := range f.feed.Entries {
		if f.crawler {
			// Skip scraping for entries already stored for this user.
			if f.store.EntryURLExists(f.userID, entry.URL) {
				logger.Debug(`[FeedProcessor] Do not crawl existing entry URL: "%s"`, entry.URL)
			} else {
				content, err := scraper.Fetch(entry.URL, f.scraperRules, f.userAgent)
				if err != nil {
					logger.Error("[FeedProcessor] %v", err)
				} else {
					entry.Content = content
				}
			}
		}

		// Rewrite rules and sanitization always apply, whether or not the crawler ran.
		entry.Content = rewrite.Rewriter(entry.URL, entry.Content, f.rewriteRules)
		entry.Content = sanitizer.Sanitize(entry.URL, entry.Content)
	}
}

// NewFeedProcessor returns a new FeedProcessor.
func NewFeedProcessor(userID int64, store *storage.Storage, feed *model.Feed) *FeedProcessor {
	return &FeedProcessor{userID: userID, store: store, feed: feed, crawler: false}
}
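// exampleProcessFeed is an illustrative sketch, not part of this package's
// API: it shows one plausible way a feed refresh routine could wire the
// processor together before persisting entries. The function name and the
// model.Feed fields referenced here (UserID, Crawler, ScraperRules,
// RewriteRules, UserAgent) are assumptions about the caller, not something
// defined in this file.
func exampleProcessFeed(store *storage.Storage, feed *model.Feed) {
	feedProcessor := NewFeedProcessor(feed.UserID, store, feed)
	feedProcessor.WithCrawler(feed.Crawler)
	feedProcessor.WithScraperRules(feed.ScraperRules)
	feedProcessor.WithRewriteRules(feed.RewriteRules)
	feedProcessor.WithUserAgent(feed.UserAgent)
	feedProcessor.Process()
}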