Refactor feed creation to allow setting most fields via API

Allow API clients to create disabled feeds or define fields such as "ignore_http_cache".
Frédéric Guillot 2021-01-02 16:33:41 -08:00 committed by fguillot
parent ab82c4b300
commit f0610bdd9c
26 changed files with 370 additions and 264 deletions
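
For API consumers, the change adds "disabled" and "ignore_http_cache" to the feed creation payload, and the bundled Go client now takes a single FeedCreationRequest struct instead of positional feed_url/category_id arguments. A minimal sketch of the new call (base URL, credentials, and category ID are placeholders; the client import path is assumed to be miniflux.app/client):

    package main

    import (
        "fmt"

        miniflux "miniflux.app/client"
    )

    func main() {
        client := miniflux.New("https://miniflux.example.org", "admin", "secret")

        // New in this commit: a feed can be created disabled and with the
        // HTTP cache ignored, without a follow-up update call.
        feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
            FeedURL:         "https://example.org/feed.xml",
            CategoryID:      1,
            Disabled:        true,
            IgnoreHTTPCache: true,
        })
        if err != nil {
            fmt.Println("create feed:", err)
            return
        }
        fmt.Println("created feed", feedID)
    }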


@@ -7,7 +7,6 @@ package api // import "miniflux.app/api"
 import (
     "net/http"
-    "miniflux.app/reader/feed"
     "miniflux.app/storage"
     "miniflux.app/worker"
@@ -15,8 +14,8 @@ import (
 )
 // Serve declares API routes for the application.
-func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
-    handler := &handler{store, pool, feedHandler}
+func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
+    handler := &handler{store, pool}
     sr := router.PathPrefix("/v1").Subrouter()
     middleware := newMiddleware(store)


@@ -11,6 +11,7 @@ import (
     "miniflux.app/http/request"
     "miniflux.app/http/response/json"
+    feedHandler "miniflux.app/reader/handler"
 )
 func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
@@ -42,20 +43,22 @@ func (h *handler) createFeed(w http.ResponseWriter, r *http.Request) {
         return
     }
-    feed, err := h.feedHandler.CreateFeed(
-        userID,
-        feedInfo.CategoryID,
-        feedInfo.FeedURL,
-        feedInfo.Crawler,
-        feedInfo.UserAgent,
-        feedInfo.Username,
-        feedInfo.Password,
-        feedInfo.ScraperRules,
-        feedInfo.RewriteRules,
-        feedInfo.BlocklistRules,
-        feedInfo.KeeplistRules,
-        feedInfo.FetchViaProxy,
-    )
+    feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
+        UserID:          userID,
+        CategoryID:      feedInfo.CategoryID,
+        FeedURL:         feedInfo.FeedURL,
+        UserAgent:       feedInfo.UserAgent,
+        Username:        feedInfo.Username,
+        Password:        feedInfo.Password,
+        Crawler:         feedInfo.Crawler,
+        Disabled:        feedInfo.Disabled,
+        IgnoreHTTPCache: feedInfo.IgnoreHTTPCache,
+        FetchViaProxy:   feedInfo.FetchViaProxy,
+        ScraperRules:    feedInfo.ScraperRules,
+        RewriteRules:    feedInfo.RewriteRules,
+        BlocklistRules:  feedInfo.BlocklistRules,
+        KeeplistRules:   feedInfo.KeeplistRules,
+    })
     if err != nil {
         json.ServerError(w, r, err)
         return
@@ -73,7 +76,7 @@ func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
         return
     }
-    err := h.feedHandler.RefreshFeed(userID, feedID)
+    err := feedHandler.RefreshFeed(h.store, userID, feedID)
     if err != nil {
         json.ServerError(w, r, err)
         return


@@ -5,7 +5,6 @@
 package api // import "miniflux.app/api"
 import (
-    "miniflux.app/reader/feed"
     "miniflux.app/storage"
     "miniflux.app/worker"
 )
@@ -13,5 +12,4 @@ import (
 type handler struct {
     store *storage.Storage
     pool *worker.Pool
-    feedHandler *feed.Handler
 }


@@ -54,6 +54,8 @@ type feedCreationRequest struct {
     Username string `json:"username"`
     Password string `json:"password"`
     Crawler bool `json:"crawler"`
+    Disabled bool `json:"disabled"`
+    IgnoreHTTPCache bool `json:"ignore_http_cache"`
     FetchViaProxy bool `json:"fetch_via_proxy"`
     ScraperRules string `json:"scraper_rules"`
     RewriteRules string `json:"rewrite_rules"`


@@ -139,7 +139,9 @@ func Parse() {
     // Run migrations and start the deamon.
     if config.Opts.RunMigrations() {
-        database.Migrate(db)
+        if err := database.Migrate(db); err != nil {
+            logger.Fatal(`%v`, err)
+        }
     }
     if err := database.IsSchemaUpToDate(db); err != nil {


@@ -15,7 +15,6 @@ import (
     "miniflux.app/config"
     "miniflux.app/logger"
     "miniflux.app/metric"
-    "miniflux.app/reader/feed"
     "miniflux.app/service/httpd"
     "miniflux.app/service/scheduler"
     "miniflux.app/storage"
@@ -29,8 +28,7 @@ func startDaemon(store *storage.Storage) {
     signal.Notify(stop, os.Interrupt)
     signal.Notify(stop, syscall.SIGTERM)
-    feedHandler := feed.NewFeedHandler(store)
-    pool := worker.NewPool(feedHandler, config.Opts.WorkerPoolSize())
+    pool := worker.NewPool(store, config.Opts.WorkerPoolSize())
     if config.Opts.HasSchedulerService() && !config.Opts.HasMaintenanceMode() {
         scheduler.Serve(store, pool)
@@ -38,7 +36,7 @@ func startDaemon(store *storage.Storage) {
     var httpServer *http.Server
     if config.Opts.HasHTTPService() {
-        httpServer = httpd.Serve(store, pool, feedHandler)
+        httpServer = httpd.Serve(store, pool)
     }
     if config.Opts.HasMetricsCollector() {


@@ -116,7 +116,7 @@ func (c *Client) CreateUser(username, password string, isAdmin bool) (*User, err
 }
 // UpdateUser updates a user in the system.
-func (c *Client) UpdateUser(userID int64, userChanges *UserModification) (*User, error) {
+func (c *Client) UpdateUser(userID int64, userChanges *UserModificationRequest) (*User, error) {
     body, err := c.request.Put(fmt.Sprintf("/v1/users/%d", userID), userChanges)
     if err != nil {
         return nil, err
@@ -285,11 +285,8 @@ func (c *Client) Feed(feedID int64) (*Feed, error) {
 }
 // CreateFeed creates a new feed.
-func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
-    body, err := c.request.Post("/v1/feeds", map[string]interface{}{
-        "feed_url":    url,
-        "category_id": categoryID,
-    })
+func (c *Client) CreateFeed(feedCreationRequest *FeedCreationRequest) (int64, error) {
+    body, err := c.request.Post("/v1/feeds", feedCreationRequest)
     if err != nil {
         return 0, err
     }
@@ -309,7 +306,7 @@ func (c *Client) CreateFeed(url string, categoryID int64) (int64, error) {
 }
 // UpdateFeed updates a feed.
-func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModification) (*Feed, error) {
+func (c *Client) UpdateFeed(feedID int64, feedChanges *FeedModificationRequest) (*Feed, error) {
     body, err := c.request.Put(fmt.Sprintf("/v1/feeds/%d", feedID), feedChanges)
     if err != nil {
         return nil, err


@@ -49,8 +49,8 @@ type UserCreationRequest struct {
     OpenIDConnectID string `json:"openid_connect_id"`
 }
-// UserModification represents the request to update a user.
-type UserModification struct {
+// UserModificationRequest represents the request to update a user.
+type UserModificationRequest struct {
     Username *string `json:"username"`
     Password *string `json:"password"`
     IsAdmin *bool `json:"is_admin"`
@@ -110,6 +110,9 @@ type Feed struct {
     LastModifiedHeader string `json:"last_modified_header,omitempty"`
     ParsingErrorMsg string `json:"parsing_error_message,omitempty"`
     ParsingErrorCount int `json:"parsing_error_count,omitempty"`
+    Disabled bool `json:"disabled"`
+    IgnoreHTTPCache bool `json:"ignore_http_cache"`
+    FetchViaProxy bool `json:"fetch_via_proxy"`
     ScraperRules string `json:"scraper_rules"`
     RewriteRules string `json:"rewrite_rules"`
     BlocklistRules string `json:"blocklist_rules"`
@@ -121,8 +124,25 @@ type Feed struct {
     Category *Category `json:"category,omitempty"`
 }
-// FeedModification represents changes for a feed.
-type FeedModification struct {
+// FeedCreationRequest represents the request to create a feed.
+type FeedCreationRequest struct {
+    FeedURL string `json:"feed_url"`
+    CategoryID int64 `json:"category_id"`
+    UserAgent string `json:"user_agent"`
+    Username string `json:"username"`
+    Password string `json:"password"`
+    Crawler bool `json:"crawler"`
+    Disabled bool `json:"disabled"`
+    IgnoreHTTPCache bool `json:"ignore_http_cache"`
+    FetchViaProxy bool `json:"fetch_via_proxy"`
+    ScraperRules string `json:"scraper_rules"`
+    RewriteRules string `json:"rewrite_rules"`
+    BlocklistRules string `json:"blocklist_rules"`
+    KeeplistRules string `json:"keeplist_rules"`
+}
+// FeedModificationRequest represents the request to update a feed.
+type FeedModificationRequest struct {
     FeedURL *string `json:"feed_url"`
     SiteURL *string `json:"site_url"`
     Title *string `json:"title"`
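
FeedModificationRequest keeps pointer fields so that "not sent" can be distinguished from a zero value, letting a client change one attribute at a time; the integration tests further down rely on exactly this. Continuing the client sketch above (the feed ID 42 is a placeholder):

    // Toggle only the crawler flag; all other feed settings are left untouched.
    crawler := true
    updatedFeed, err := client.UpdateFeed(42, &miniflux.FeedModificationRequest{
        Crawler: &crawler,
    })
    if err != nil {
        fmt.Println("update feed:", err)
    } else {
        fmt.Println("crawler enabled:", updatedFeed.Crawler)
    }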


@@ -10,8 +10,6 @@ import (
     // Postgresql driver import
     _ "github.com/lib/pq"
-
-    "miniflux.app/logger"
 )
@@ -28,7 +26,7 @@ func NewConnectionPool(dsn string, minConnections, maxConnections int) (*sql.DB,
 }
 // Migrate executes database migrations.
-func Migrate(db *sql.DB) {
+func Migrate(db *sql.DB) error {
     var currentVersion int
     db.QueryRow(`SELECT version FROM schema_version`).Scan(&currentVersion)
@@ -41,28 +39,30 @@ func Migrate(db *sql.DB) {
         tx, err := db.Begin()
         if err != nil {
-            logger.Fatal("[Migration v%d] %v", newVersion, err)
+            return fmt.Errorf("[Migration v%d] %v", newVersion, err)
         }
         if err := migrations[version](tx); err != nil {
             tx.Rollback()
-            logger.Fatal("[Migration v%d] %v", newVersion, err)
+            return fmt.Errorf("[Migration v%d] %v", newVersion, err)
         }
         if _, err := tx.Exec(`DELETE FROM schema_version`); err != nil {
             tx.Rollback()
-            logger.Fatal("[Migration v%d] %v", newVersion, err)
+            return fmt.Errorf("[Migration v%d] %v", newVersion, err)
         }
         if _, err := tx.Exec(`INSERT INTO schema_version (version) VALUES ($1)`, newVersion); err != nil {
             tx.Rollback()
-            logger.Fatal("[Migration v%d] %v", newVersion, err)
+            return fmt.Errorf("[Migration v%d] %v", newVersion, err)
         }
         if err := tx.Commit(); err != nil {
-            logger.Fatal("[Migration v%d] %v", newVersion, err)
+            return fmt.Errorf("[Migration v%d] %v", newVersion, err)
         }
     }
+    return nil
 }
 // IsSchemaUpToDate checks if the database schema is up to date.
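
Since Migrate now returns an error instead of terminating the process through logger.Fatal, the failure policy moves to the caller: the CLI above still exits, but an embedder can propagate the error instead. A hedged sketch of such a caller (setupDatabase is a hypothetical helper, not part of this commit):

    // Hypothetical wrapper: report migration failures to the caller
    // instead of exiting the process.
    func setupDatabase(db *sql.DB) error {
        if err := database.Migrate(db); err != nil {
            return fmt.Errorf("running schema migrations: %w", err)
        }
        return database.IsSchemaUpToDate(db)
    }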


@@ -73,19 +73,6 @@ func (f *Feed) WithCategoryID(categoryID int64) {
     f.Category = &Category{ID: categoryID}
 }
-// WithBrowsingParameters defines browsing parameters.
-func (f *Feed) WithBrowsingParameters(crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) {
-    f.Crawler = crawler
-    f.UserAgent = userAgent
-    f.Username = username
-    f.Password = password
-    f.ScraperRules = scraperRules
-    f.RewriteRules = rewriteRules
-    f.FetchViaProxy = fetchViaProxy
-    f.BlocklistRules = blocklistRules
-    f.KeeplistRules = keeplistRules
-}
 // WithError adds a new error message and increment the error counter.
 func (f *Feed) WithError(message string) {
     f.ParsingErrorCount++


@@ -46,57 +46,6 @@ func TestFeedCategorySetter(t *testing.T) {
     }
 }
-func TestFeedBrowsingParams(t *testing.T) {
-    feed := &Feed{}
-    feed.WithBrowsingParameters(
-        true,
-        "Custom User Agent",
-        "Username",
-        "Secret",
-        "Scraper Rule",
-        "Rewrite Rule",
-        "Block Rule",
-        "Allow Rule",
-        true,
-    )
-    if !feed.Crawler {
-        t.Error(`The crawler must be activated`)
-    }
-    if feed.UserAgent != "Custom User Agent" {
-        t.Error(`The user agent must be set`)
-    }
-    if feed.Username != "Username" {
-        t.Error(`The username must be set`)
-    }
-    if feed.Password != "Secret" {
-        t.Error(`The password must be set`)
-    }
-    if feed.ScraperRules != "Scraper Rule" {
-        t.Errorf(`The scraper rules must be set`)
-    }
-    if feed.RewriteRules != "Rewrite Rule" {
-        t.Errorf(`The rewrite rules must be set`)
-    }
-    if feed.BlocklistRules != "Block Rule" {
-        t.Errorf(`The block list rules must be set`)
-    }
-    if feed.KeeplistRules != "Allow Rule" {
-        t.Errorf(`The keep list rules must be set`)
-    }
-    if !feed.FetchViaProxy {
-        t.Errorf(`The fetch via proxy is no set`)
-    }
-}
 func TestFeedErrorCounter(t *testing.T) {
     feed := &Feed{}
     feed.WithError("Some Error")


@@ -1,10 +0,0 @@
-// Copyright 2018 Frédéric Guillot. All rights reserved.
-// Use of this source code is governed by the Apache 2.0
-// license that can be found in the LICENSE file.
-
-/*
-Package feed handles feed updates and creation.
-*/
-package feed // import "miniflux.app/reader/feed"


@@ -2,7 +2,7 @@
 // Use of this source code is governed by the Apache 2.0
 // license that can be found in the LICENSE file.
-package feed // import "miniflux.app/reader/feed"
+package handler // import "miniflux.app/reader/handler"
 import (
     "fmt"
@@ -28,24 +28,37 @@ var (
     errCategoryNotFound = "Category not found for this user"
 )
-// Handler contains all the logic to create and refresh feeds.
-type Handler struct {
-    store *storage.Storage
+// FeedCreationArgs represents the arguments required to create a new feed.
+type FeedCreationArgs struct {
+    UserID int64
+    CategoryID int64
+    FeedURL string
+    UserAgent string
+    Username string
+    Password string
+    Crawler bool
+    Disabled bool
+    IgnoreHTTPCache bool
+    FetchViaProxy bool
+    ScraperRules string
+    RewriteRules string
+    BlocklistRules string
+    KeeplistRules string
 }
 // CreateFeed fetch, parse and store a new feed.
-func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules string, fetchViaProxy bool) (*model.Feed, error) {
-    defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:CreateFeed] feedUrl=%s", url))
-    if !h.store.CategoryExists(userID, categoryID) {
+func CreateFeed(store *storage.Storage, args *FeedCreationArgs) (*model.Feed, error) {
+    defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[CreateFeed] FeedURL=%s", args.FeedURL))
+    if !store.CategoryExists(args.UserID, args.CategoryID) {
         return nil, errors.NewLocalizedError(errCategoryNotFound)
     }
-    request := client.NewClientWithConfig(url, config.Opts)
-    request.WithCredentials(username, password)
-    request.WithUserAgent(userAgent)
-    if fetchViaProxy {
+    request := client.NewClientWithConfig(args.FeedURL, config.Opts)
+    request.WithCredentials(args.Username, args.Password)
+    request.WithUserAgent(args.UserAgent)
+    if args.FetchViaProxy {
         request.WithProxy()
     }
@@ -54,7 +67,7 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
         return nil, requestErr
     }
-    if h.store.FeedURLExists(userID, response.EffectiveURL) {
+    if store.FeedURLExists(args.UserID, response.EffectiveURL) {
         return nil, errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
     }
@@ -63,31 +76,41 @@ func (h *Handler) CreateFeed(userID, categoryID int64, url string, crawler bool,
         return nil, parseErr
     }
-    subscription.UserID = userID
-    subscription.WithCategoryID(categoryID)
-    subscription.WithBrowsingParameters(crawler, userAgent, username, password, scraperRules, rewriteRules, blocklistRules, keeplistRules, fetchViaProxy)
+    subscription.UserID = args.UserID
+    subscription.UserAgent = args.UserAgent
+    subscription.Username = args.Username
+    subscription.Password = args.Password
+    subscription.Crawler = args.Crawler
+    subscription.Disabled = args.Disabled
+    subscription.IgnoreHTTPCache = args.IgnoreHTTPCache
+    subscription.FetchViaProxy = args.FetchViaProxy
+    subscription.ScraperRules = args.ScraperRules
+    subscription.RewriteRules = args.RewriteRules
+    subscription.BlocklistRules = args.BlocklistRules
+    subscription.KeeplistRules = args.KeeplistRules
+    subscription.WithCategoryID(args.CategoryID)
     subscription.WithClientResponse(response)
     subscription.CheckedNow()
-    processor.ProcessFeedEntries(h.store, subscription)
-    if storeErr := h.store.CreateFeed(subscription); storeErr != nil {
+    processor.ProcessFeedEntries(store, subscription)
+    if storeErr := store.CreateFeed(subscription); storeErr != nil {
         return nil, storeErr
     }
-    logger.Debug("[Handler:CreateFeed] Feed saved with ID: %d", subscription.ID)
-    checkFeedIcon(h.store, subscription.ID, subscription.SiteURL, fetchViaProxy)
+    logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
+    checkFeedIcon(store, subscription.ID, subscription.SiteURL, args.FetchViaProxy)
     return subscription, nil
 }
 // RefreshFeed refreshes a feed.
-func (h *Handler) RefreshFeed(userID, feedID int64) error {
-    defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[Handler:RefreshFeed] feedID=%d", feedID))
-    userLanguage := h.store.UserLanguage(userID)
+func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
+    defer timer.ExecutionTime(time.Now(), fmt.Sprintf("[RefreshFeed] feedID=%d", feedID))
+    userLanguage := store.UserLanguage(userID)
     printer := locale.NewPrinter(userLanguage)
-    originalFeed, storeErr := h.store.FeedByID(userID, feedID)
+    originalFeed, storeErr := store.FeedByID(userID, feedID)
     if storeErr != nil {
         return storeErr
     }
@@ -99,7 +122,7 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
     weeklyEntryCount := 0
     if config.Opts.PollingScheduler() == model.SchedulerEntryFrequency {
         var weeklyCountErr error
-        weeklyEntryCount, weeklyCountErr = h.store.WeeklyFeedEntryCount(userID, feedID)
+        weeklyEntryCount, weeklyCountErr = store.WeeklyFeedEntryCount(userID, feedID)
         if weeklyCountErr != nil {
             return weeklyCountErr
         }
@@ -123,71 +146,66 @@ func (h *Handler) RefreshFeed(userID, feedID int64) error {
     response, requestErr := browser.Exec(request)
     if requestErr != nil {
         originalFeed.WithError(requestErr.Localize(printer))
-        h.store.UpdateFeedError(originalFeed)
+        store.UpdateFeedError(originalFeed)
         return requestErr
     }
-    if h.store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
+    if store.AnotherFeedURLExists(userID, originalFeed.ID, response.EffectiveURL) {
         storeErr := errors.NewLocalizedError(errDuplicate, response.EffectiveURL)
         originalFeed.WithError(storeErr.Error())
-        h.store.UpdateFeedError(originalFeed)
+        store.UpdateFeedError(originalFeed)
         return storeErr
     }
     if originalFeed.IgnoreHTTPCache || response.IsModified(originalFeed.EtagHeader, originalFeed.LastModifiedHeader) {
-        logger.Debug("[Handler:RefreshFeed] Feed #%d has been modified", feedID)
+        logger.Debug("[RefreshFeed] Feed #%d has been modified", feedID)
         updatedFeed, parseErr := parser.ParseFeed(response.EffectiveURL, response.BodyAsString())
         if parseErr != nil {
             originalFeed.WithError(parseErr.Localize(printer))
-            h.store.UpdateFeedError(originalFeed)
+            store.UpdateFeedError(originalFeed)
             return parseErr
         }
         originalFeed.Entries = updatedFeed.Entries
-        processor.ProcessFeedEntries(h.store, originalFeed)
+        processor.ProcessFeedEntries(store, originalFeed)
         // We don't update existing entries when the crawler is enabled (we crawl only inexisting entries).
-        if storeErr := h.store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
+        if storeErr := store.RefreshFeedEntries(originalFeed.UserID, originalFeed.ID, originalFeed.Entries, !originalFeed.Crawler); storeErr != nil {
            originalFeed.WithError(storeErr.Error())
-           h.store.UpdateFeedError(originalFeed)
+           store.UpdateFeedError(originalFeed)
            return storeErr
        }
        // We update caching headers only if the feed has been modified,
        // because some websites don't return the same headers when replying with a 304.
        originalFeed.WithClientResponse(response)
-       checkFeedIcon(h.store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
+       checkFeedIcon(store, originalFeed.ID, originalFeed.SiteURL, originalFeed.FetchViaProxy)
     } else {
-        logger.Debug("[Handler:RefreshFeed] Feed #%d not modified", feedID)
+        logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
     }
     originalFeed.ResetErrorCounter()
-    if storeErr := h.store.UpdateFeed(originalFeed); storeErr != nil {
+    if storeErr := store.UpdateFeed(originalFeed); storeErr != nil {
         originalFeed.WithError(storeErr.Error())
-        h.store.UpdateFeedError(originalFeed)
+        store.UpdateFeedError(originalFeed)
         return storeErr
     }
     return nil
 }
-// NewFeedHandler returns a feed handler.
-func NewFeedHandler(store *storage.Storage) *Handler {
-    return &Handler{store}
-}
 func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy bool) {
     if !store.HasIcon(feedID) {
         icon, err := icon.FindIcon(websiteURL, fetchViaProxy)
         if err != nil {
-            logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
+            logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
         } else if icon == nil {
-            logger.Debug("CheckFeedIcon: No icon found (feedID=%d websiteURL=%s)", feedID, websiteURL)
+            logger.Debug(`[CheckFeedIcon] No icon found (feedID=%d websiteURL=%s)`, feedID, websiteURL)
         } else {
             if err := store.CreateFeedIcon(feedID, icon); err != nil {
-                logger.Debug("CheckFeedIcon: %v (feedID=%d websiteURL=%s)", err, feedID, websiteURL)
+                logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
             }
         }
     }
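
With the Handler struct and its NewFeedHandler constructor gone, any component that already holds a *storage.Storage can call the package-level functions directly, which is how the API, UI, and worker callers in this commit are rewired. A short sketch (store, userID, and feedID are placeholders):

    import feedHandler "miniflux.app/reader/handler"

    // Refresh a single feed on demand; RefreshFeed records the error on the
    // feed itself and also returns it to the caller.
    if err := feedHandler.RefreshFeed(store, userID, feedID); err != nil {
        logger.Error("[Example] unable to refresh feed #%d: %v", feedID, err)
    }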


@@ -18,7 +18,6 @@ import (
     "miniflux.app/fever"
     "miniflux.app/http/request"
     "miniflux.app/logger"
-    "miniflux.app/reader/feed"
     "miniflux.app/storage"
     "miniflux.app/ui"
     "miniflux.app/version"
@@ -30,7 +29,7 @@ import (
 )
 // Serve starts a new HTTP server.
-func Serve(store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) *http.Server {
+func Serve(store *storage.Storage, pool *worker.Pool) *http.Server {
     certFile := config.Opts.CertFile()
     keyFile := config.Opts.CertKeyFile()
     certDomain := config.Opts.CertDomain()
@@ -40,7 +39,7 @@ func Serve(store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler)
         ReadTimeout: 300 * time.Second,
         WriteTimeout: 300 * time.Second,
         IdleTimeout: 300 * time.Second,
-        Handler: setupHandler(store, feedHandler, pool),
+        Handler: setupHandler(store, pool),
     }
     switch {
@@ -164,7 +163,7 @@ func startHTTPServer(server *http.Server) {
     }()
 }
-func setupHandler(store *storage.Storage, feedHandler *feed.Handler, pool *worker.Pool) *mux.Router {
+func setupHandler(store *storage.Storage, pool *worker.Pool) *mux.Router {
     router := mux.NewRouter()
     if config.Opts.BasePath() != "" {
@@ -182,8 +181,8 @@ func setupHandler(store *storage.Storage, feedHandler *feed.Handler, pool *worke
     router.Use(middleware)
     fever.Serve(router, store)
-    api.Serve(router, store, pool, feedHandler)
-    ui.Serve(router, store, pool, feedHandler)
+    api.Serve(router, store, pool)
+    ui.Serve(router, store, pool)
     router.HandleFunc("/healthcheck", func(w http.ResponseWriter, r *http.Request) {
         w.Write([]byte("OK"))


@@ -465,10 +465,11 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
             rewrite_rules,
             blocklist_rules,
             keeplist_rules,
+            ignore_http_cache,
             fetch_via_proxy
         )
         VALUES
-            ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17)
+            ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18)
         RETURNING
             id
     `
@@ -490,6 +491,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
         feed.RewriteRules,
         feed.BlocklistRules,
         feed.KeeplistRules,
+        feed.IgnoreHTTPCache,
         feed.FetchViaProxy,
     ).Scan(&feed.ID)
     if err != nil {


@@ -100,7 +100,10 @@ func TestFilterEntriesByCategory(t *testing.T) {
         t.Fatal(err)
     }
-    feedID, err := client.CreateFeed(testFeedURL, category.ID)
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: category.ID,
+    })
     if err != nil {
         t.Fatal(err)
     }
@@ -134,7 +137,10 @@ func TestFilterEntriesByStatuses(t *testing.T) {
         t.Fatal(err)
     }
-    feedID, err := client.CreateFeed(testFeedURL, category.ID)
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: category.ID,
+    })
     if err != nil {
         t.Fatal(err)
     }
@@ -181,7 +187,10 @@ func TestSearchEntries(t *testing.T) {
         t.Fatal(err)
     }
-    feedID, err := client.CreateFeed(testFeedURL, categories[0].ID)
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: categories[0].ID,
+    })
     if err != nil {
         t.Fatal(err)
     }


@@ -26,7 +26,10 @@ func TestCannotCreateDuplicatedFeed(t *testing.T) {
     client := createClient(t)
     feed, category := createFeed(t, client)
-    _, err := client.CreateFeed(feed.FeedURL, category.ID)
+    _, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    feed.FeedURL,
+        CategoryID: category.ID,
+    })
     if err == nil {
         t.Fatal(`Duplicated feeds should not be allowed`)
     }
@@ -34,19 +37,145 @@ func TestCannotCreateDuplicatedFeed(t *testing.T) {
 func TestCreateFeedWithInexistingCategory(t *testing.T) {
     client := createClient(t)
-    _, err := client.CreateFeed(testFeedURL, -1)
+    _, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: -1,
+    })
     if err == nil {
         t.Fatal(`Feeds should not be created with inexisting category`)
     }
 }
+func TestCreateDisabledFeed(t *testing.T) {
+    client := createClient(t)
+    categories, err := client.Categories()
+    if err != nil {
+        t.Fatal(err)
+    }
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: categories[0].ID,
+        Disabled:   true,
+    })
+    if err != nil {
+        t.Fatal(err)
+    }
+    if feedID == 0 {
+        t.Fatalf(`Invalid feed ID, got %q`, feedID)
+    }
+    feed, err := client.Feed(feedID)
+    if err != nil {
+        t.Fatal(err)
+    }
+    if !feed.Disabled {
+        t.Error(`The feed should be disabled`)
+    }
+}
+func TestCreateFeedWithDisabledCache(t *testing.T) {
+    client := createClient(t)
+    categories, err := client.Categories()
+    if err != nil {
+        t.Fatal(err)
+    }
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:         testFeedURL,
+        CategoryID:      categories[0].ID,
+        IgnoreHTTPCache: true,
+    })
+    if err != nil {
+        t.Fatal(err)
+    }
+    if feedID == 0 {
+        t.Fatalf(`Invalid feed ID, got %q`, feedID)
+    }
+    feed, err := client.Feed(feedID)
+    if err != nil {
+        t.Fatal(err)
+    }
+    if !feed.IgnoreHTTPCache {
+        t.Error(`The feed should be ignoring HTTP cache`)
+    }
+}
+func TestCreateFeedWithCrawlerEnabled(t *testing.T) {
+    client := createClient(t)
+    categories, err := client.Categories()
+    if err != nil {
+        t.Fatal(err)
+    }
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: categories[0].ID,
+        Crawler:    true,
+    })
+    if err != nil {
+        t.Fatal(err)
+    }
+    if feedID == 0 {
+        t.Fatalf(`Invalid feed ID, got %q`, feedID)
+    }
+    feed, err := client.Feed(feedID)
+    if err != nil {
+        t.Fatal(err)
+    }
+    if !feed.Crawler {
+        t.Error(`The feed should have the scraper enabled`)
+    }
+}
+func TestCreateFeedWithScraperRule(t *testing.T) {
+    client := createClient(t)
+    categories, err := client.Categories()
+    if err != nil {
+        t.Fatal(err)
+    }
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:      testFeedURL,
+        CategoryID:   categories[0].ID,
+        ScraperRules: "article",
+    })
+    if err != nil {
+        t.Fatal(err)
+    }
+    if feedID == 0 {
+        t.Fatalf(`Invalid feed ID, got %q`, feedID)
+    }
+    feed, err := client.Feed(feedID)
+    if err != nil {
+        t.Fatal(err)
+    }
+    if feed.ScraperRules != "article" {
+        t.Error(`The feed should have the custom scraper rule saved`)
+    }
+}
 func TestUpdateFeedURL(t *testing.T) {
     client := createClient(t)
     feed, _ := createFeed(t, client)
     url := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{FeedURL: &url})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url})
     if err != nil {
         t.Fatal(err)
     }
@@ -56,7 +185,7 @@ func TestUpdateFeedURL(t *testing.T) {
     }
     url = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{FeedURL: &url})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{FeedURL: &url})
     if err != nil {
         t.Fatal(err)
     }
@@ -71,7 +200,7 @@ func TestUpdateFeedSiteURL(t *testing.T) {
     feed, _ := createFeed(t, client)
     url := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{SiteURL: &url})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url})
     if err != nil {
         t.Fatal(err)
     }
@@ -81,7 +210,7 @@ func TestUpdateFeedSiteURL(t *testing.T) {
     }
     url = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{SiteURL: &url})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{SiteURL: &url})
     if err != nil {
         t.Fatal(err)
     }
@@ -96,7 +225,7 @@ func TestUpdateFeedTitle(t *testing.T) {
     feed, _ := createFeed(t, client)
     newTitle := "My new feed"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Title: &newTitle})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &newTitle})
     if err != nil {
         t.Fatal(err)
     }
@@ -106,7 +235,7 @@ func TestUpdateFeedTitle(t *testing.T) {
     }
     newTitle = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Title: &newTitle})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Title: &newTitle})
     if err != nil {
         t.Fatal(err)
     }
@@ -121,7 +250,7 @@ func TestUpdateFeedCrawler(t *testing.T) {
     feed, _ := createFeed(t, client)
     crawler := true
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Crawler: &crawler})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
     if err != nil {
         t.Fatal(err)
     }
@@ -135,7 +264,7 @@ func TestUpdateFeedCrawler(t *testing.T) {
     }
     crawler = false
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Crawler: &crawler})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Crawler: &crawler})
     if err != nil {
         t.Fatal(err)
     }
@@ -150,7 +279,7 @@ func TestUpdateFeedScraperRules(t *testing.T) {
     feed, _ := createFeed(t, client)
     scraperRules := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{ScraperRules: &scraperRules})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -160,7 +289,7 @@ func TestUpdateFeedScraperRules(t *testing.T) {
     }
     scraperRules = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{ScraperRules: &scraperRules})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{ScraperRules: &scraperRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -175,7 +304,7 @@ func TestUpdateFeedRewriteRules(t *testing.T) {
     feed, _ := createFeed(t, client)
     rewriteRules := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{RewriteRules: &rewriteRules})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -185,7 +314,7 @@ func TestUpdateFeedRewriteRules(t *testing.T) {
     }
     rewriteRules = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{RewriteRules: &rewriteRules})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{RewriteRules: &rewriteRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -200,7 +329,7 @@ func TestUpdateFeedKeeplistRules(t *testing.T) {
     feed, _ := createFeed(t, client)
     keeplistRules := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{KeeplistRules: &keeplistRules})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -210,7 +339,7 @@ func TestUpdateFeedKeeplistRules(t *testing.T) {
     }
     keeplistRules = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{KeeplistRules: &keeplistRules})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{KeeplistRules: &keeplistRules})
     if err != nil {
         t.Fatal(err)
     }
@@ -225,7 +354,7 @@ func TestUpdateFeedUserAgent(t *testing.T) {
     feed, _ := createFeed(t, client)
     userAgent := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{UserAgent: &userAgent})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
     if err != nil {
         t.Fatal(err)
     }
@@ -235,7 +364,7 @@ func TestUpdateFeedUserAgent(t *testing.T) {
     }
     userAgent = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{UserAgent: &userAgent})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{UserAgent: &userAgent})
     if err != nil {
         t.Fatal(err)
     }
@@ -250,7 +379,7 @@ func TestUpdateFeedUsername(t *testing.T) {
     feed, _ := createFeed(t, client)
     username := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Username: &username})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
     if err != nil {
         t.Fatal(err)
     }
@@ -260,7 +389,7 @@ func TestUpdateFeedUsername(t *testing.T) {
     }
     username = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Username: &username})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Username: &username})
     if err != nil {
         t.Fatal(err)
     }
@@ -275,7 +404,7 @@ func TestUpdateFeedPassword(t *testing.T) {
     feed, _ := createFeed(t, client)
     password := "test"
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{Password: &password})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
     if err != nil {
         t.Fatal(err)
     }
@@ -285,7 +414,7 @@ func TestUpdateFeedPassword(t *testing.T) {
     }
     password = ""
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{Password: &password})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Password: &password})
     if err != nil {
         t.Fatal(err)
     }
@@ -304,7 +433,7 @@ func TestUpdateFeedCategory(t *testing.T) {
         t.Fatal(err)
     }
-    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModification{CategoryID: &newCategory.ID})
+    updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &newCategory.ID})
     if err != nil {
         t.Fatal(err)
     }
@@ -314,7 +443,7 @@ func TestUpdateFeedCategory(t *testing.T) {
     }
     categoryID := int64(0)
-    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModification{CategoryID: &categoryID})
+    updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{CategoryID: &categoryID})
     if err != nil {
         t.Fatal(err)
     }


@@ -51,7 +51,10 @@ func createFeed(t *testing.T, client *miniflux.Client) (*miniflux.Feed, *miniflu
         t.Fatal(err)
     }
-    feedID, err := client.CreateFeed(testFeedURL, categories[0].ID)
+    feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
+        FeedURL:    testFeedURL,
+        CategoryID: categories[0].ID,
+    })
     if err != nil {
         t.Fatal(err)
     }


@@ -261,7 +261,7 @@ func TestUpdateUserTheme(t *testing.T) {
     }
     theme := "dark_serif"
-    user, err = client.UpdateUser(user.ID, &miniflux.UserModification{Theme: &theme})
+    user, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{Theme: &theme})
     if err != nil {
         t.Fatal(err)
     }
@@ -282,7 +282,7 @@ func TestUpdateUserFields(t *testing.T) {
     stylesheet := "body { color: red }"
     swipe := false
     entriesPerPage := 5
-    user, err = client.UpdateUser(user.ID, &miniflux.UserModification{
+    user, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{
         Stylesheet: &stylesheet,
         EntrySwipe: &swipe,
         EntriesPerPage: &entriesPerPage,
@@ -313,7 +313,7 @@ func TestUpdateUserThemeWithInvalidValue(t *testing.T) {
     }
     theme := "something that doesn't exists"
-    _, err = client.UpdateUser(user.ID, &miniflux.UserModification{Theme: &theme})
+    _, err = client.UpdateUser(user.ID, &miniflux.UserModificationRequest{Theme: &theme})
     if err == nil {
         t.Fatal(`Updating a user Theme with an invalid value should raise an error`)
     }
@@ -388,7 +388,7 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
     entriesPerPage := 10
     userAClient := miniflux.New(testBaseURL, usernameA, testStandardPassword)
-    userAAfterUpdate, err := userAClient.UpdateUser(userA.ID, &miniflux.UserModification{EntriesPerPage: &entriesPerPage})
+    userAAfterUpdate, err := userAClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{EntriesPerPage: &entriesPerPage})
     if err != nil {
         t.Fatal(`Standard users should be able to update themselves`)
     }
@@ -398,13 +398,13 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
     }
     isAdmin := true
-    _, err = userAClient.UpdateUser(userA.ID, &miniflux.UserModification{IsAdmin: &isAdmin})
+    _, err = userAClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{IsAdmin: &isAdmin})
     if err == nil {
         t.Fatal(`Standard users should not be able to become admin`)
     }
     userBClient := miniflux.New(testBaseURL, usernameB, testStandardPassword)
-    _, err = userBClient.UpdateUser(userA.ID, &miniflux.UserModification{})
+    _, err = userBClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{})
     if err == nil {
         t.Fatal(`Standard users should not be able to update other users`)
     }
@@ -414,7 +414,7 @@ func TestCannotUpdateUserAsNonAdmin(t *testing.T) {
     }
     stylesheet := "test"
-    userC, err := adminClient.UpdateUser(userA.ID, &miniflux.UserModification{Stylesheet: &stylesheet})
+    userC, err := adminClient.UpdateUser(userA.ID, &miniflux.UserModificationRequest{Stylesheet: &stylesheet})
     if err != nil {
         t.Fatal(`Admin users should be able to update any users`)
     }


@@ -11,11 +11,12 @@ import (
     "miniflux.app/http/response/html"
     "miniflux.app/http/route"
     "miniflux.app/logger"
+    feedHandler "miniflux.app/reader/handler"
 )
 func (h *handler) refreshFeed(w http.ResponseWriter, r *http.Request) {
     feedID := request.RouteInt64Param(r, "feedID")
-    if err := h.feedHandler.RefreshFeed(request.UserID(r), feedID); err != nil {
+    if err := feedHandler.RefreshFeed(h.store, request.UserID(r), feedID); err != nil {
         logger.Error("[UI:RefreshFeed] %v", err)
     }


@@ -5,7 +5,6 @@
 package ui // import "miniflux.app/ui"
 import (
-    "miniflux.app/reader/feed"
     "miniflux.app/storage"
     "miniflux.app/template"
     "miniflux.app/worker"
@@ -18,5 +17,4 @@ type handler struct {
     store *storage.Storage
     tpl *template.Engine
     pool *worker.Pool
-    feedHandler *feed.Handler
 }


@@ -11,6 +11,7 @@ import (
     "miniflux.app/http/request"
     "miniflux.app/http/response/html"
     "miniflux.app/http/route"
+    feedHandler "miniflux.app/reader/handler"
     "miniflux.app/ui/form"
     "miniflux.app/ui/session"
     "miniflux.app/ui/view"
@@ -47,20 +48,20 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
         return
     }
-    feed, err := h.feedHandler.CreateFeed(
-        user.ID,
-        subscriptionForm.CategoryID,
-        subscriptionForm.URL,
-        subscriptionForm.Crawler,
-        subscriptionForm.UserAgent,
-        subscriptionForm.Username,
-        subscriptionForm.Password,
-        subscriptionForm.ScraperRules,
-        subscriptionForm.RewriteRules,
-        subscriptionForm.BlocklistRules,
-        subscriptionForm.KeeplistRules,
-        subscriptionForm.FetchViaProxy,
-    )
+    feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
+        UserID:         user.ID,
+        CategoryID:     subscriptionForm.CategoryID,
+        FeedURL:        subscriptionForm.URL,
+        Crawler:        subscriptionForm.Crawler,
+        UserAgent:      subscriptionForm.UserAgent,
+        Username:       subscriptionForm.Username,
+        Password:       subscriptionForm.Password,
+        ScraperRules:   subscriptionForm.ScraperRules,
+        RewriteRules:   subscriptionForm.RewriteRules,
+        BlocklistRules: subscriptionForm.BlocklistRules,
+        KeeplistRules:  subscriptionForm.KeeplistRules,
+        FetchViaProxy:  subscriptionForm.FetchViaProxy,
+    })
     if err != nil {
         view.Set("form", subscriptionForm)
         view.Set("errorMessage", err)


@@ -12,6 +12,7 @@ import (
     "miniflux.app/http/response/html"
     "miniflux.app/http/route"
    "miniflux.app/logger"
+    feedHandler "miniflux.app/reader/handler"
     "miniflux.app/reader/subscription"
     "miniflux.app/ui/form"
     "miniflux.app/ui/session"
@@ -74,20 +75,20 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
         v.Set("errorMessage", "error.subscription_not_found")
         html.OK(w, r, v.Render("add_subscription"))
     case n == 1:
-        feed, err := h.feedHandler.CreateFeed(
-            user.ID,
-            subscriptionForm.CategoryID,
-            subscriptions[0].URL,
-            subscriptionForm.Crawler,
-            subscriptionForm.UserAgent,
-            subscriptionForm.Username,
-            subscriptionForm.Password,
-            subscriptionForm.ScraperRules,
-            subscriptionForm.RewriteRules,
-            subscriptionForm.BlocklistRules,
-            subscriptionForm.KeeplistRules,
-            subscriptionForm.FetchViaProxy,
-        )
+        feed, err := feedHandler.CreateFeed(h.store, &feedHandler.FeedCreationArgs{
+            UserID:         user.ID,
+            CategoryID:     subscriptionForm.CategoryID,
+            FeedURL:        subscriptions[0].URL,
+            Crawler:        subscriptionForm.Crawler,
+            UserAgent:      subscriptionForm.UserAgent,
+            Username:       subscriptionForm.Username,
+            Password:       subscriptionForm.Password,
+            ScraperRules:   subscriptionForm.ScraperRules,
+            RewriteRules:   subscriptionForm.RewriteRules,
+            BlocklistRules: subscriptionForm.BlocklistRules,
+            KeeplistRules:  subscriptionForm.KeeplistRules,
+            FetchViaProxy:  subscriptionForm.FetchViaProxy,
+        })
         if err != nil {
             v.Set("form", subscriptionForm)
             v.Set("errorMessage", err)


@@ -8,19 +8,18 @@ import (
     "net/http"
     "miniflux.app/config"
-    "miniflux.app/reader/feed"
     "miniflux.app/storage"
     "miniflux.app/template"
     "miniflux.app/worker"
     "github.com/gorilla/mux"
-    "github.com/mitchellh/go-server-timing"
+    servertiming "github.com/mitchellh/go-server-timing"
 )
 // Serve declares all routes for the user interface.
-func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool, feedHandler *feed.Handler) {
+func Serve(router *mux.Router, store *storage.Storage, pool *worker.Pool) {
     middleware := newMiddleware(router, store)
-    handler := &handler{router, store, template.NewEngine(router), pool, feedHandler}
+    handler := &handler{router, store, template.NewEngine(router), pool}
     uiRouter := router.NewRoute().Subrouter()
     uiRouter.Use(middleware.handleUserSession)


@@ -6,7 +6,7 @@ package worker // import "miniflux.app/worker"
 import (
     "miniflux.app/model"
-    "miniflux.app/reader/feed"
+    "miniflux.app/storage"
 )
 // Pool handles a pool of workers.
@@ -22,13 +22,13 @@ func (p *Pool) Push(jobs model.JobList) {
 }
 // NewPool creates a pool of background workers.
-func NewPool(feedHandler *feed.Handler, nbWorkers int) *Pool {
+func NewPool(store *storage.Storage, nbWorkers int) *Pool {
     workerPool := &Pool{
         queue: make(chan model.Job),
     }
     for i := 0; i < nbWorkers; i++ {
-        worker := &Worker{id: i, feedHandler: feedHandler}
+        worker := &Worker{id: i, store: store}
         go worker.Run(workerPool.queue)
     }


@@ -11,13 +11,14 @@ import (
     "miniflux.app/logger"
     "miniflux.app/metric"
     "miniflux.app/model"
-    "miniflux.app/reader/feed"
+    feedHandler "miniflux.app/reader/handler"
+    "miniflux.app/storage"
 )
 // Worker refreshes a feed in the background.
 type Worker struct {
     id int
-    feedHandler *feed.Handler
+    store *storage.Storage
 }
 // Run wait for a job and refresh the given feed.
@@ -29,7 +30,7 @@ func (w *Worker) Run(c chan model.Job) {
     logger.Debug("[Worker #%d] Received feed #%d for user #%d", w.id, job.FeedID, job.UserID)
     startTime := time.Now()
-    refreshErr := w.feedHandler.RefreshFeed(job.UserID, job.FeedID)
+    refreshErr := feedHandler.RefreshFeed(w.store, job.UserID, job.FeedID)
     if config.Opts.HasMetricsCollector() {
         status := "success"