// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
|
2018-08-25 06:51:50 +02:00
|
|
|
package storage // import "miniflux.app/storage"
|
2017-11-20 06:10:04 +01:00
|
|
|
|
|
|
|
import (
|
|
|
|
"database/sql"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
2017-11-22 07:36:00 +01:00
|
|
|
|
2018-08-25 06:51:50 +02:00
|
|
|
"miniflux.app/model"
|
|
|
|
"miniflux.app/timezone"
|
2017-11-20 06:10:04 +01:00
|
|
|
)
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
// feedListQuery selects every column needed to build a model.Feed for all
// feeds of a given user ($1). The feed's category, icon, and owning user are
// LEFT JOINed so feeds without an icon still appear (fi.icon_id is NULL then),
// and checked_at is converted to the user's timezone. Feeds with parsing
// errors sort first, then alphabetically by title.
var feedListQuery = `
	SELECT
		f.id,
		f.feed_url,
		f.site_url,
		f.title,
		f.etag_header,
		f.last_modified_header,
		f.user_id,
		f.checked_at at time zone u.timezone,
		f.parsing_error_count,
		f.parsing_error_msg,
		f.scraper_rules,
		f.rewrite_rules,
		f.crawler,
		f.user_agent,
		f.username,
		f.password,
		f.ignore_http_cache,
		f.fetch_via_proxy,
		f.disabled,
		f.category_id,
		c.title as category_title,
		fi.icon_id,
		u.timezone
	FROM
		feeds f
	LEFT JOIN
		categories c ON c.id=f.category_id
	LEFT JOIN
		feed_icons fi ON fi.feed_id=f.id
	LEFT JOIN
		users u ON u.id=f.user_id
	WHERE
		f.user_id=$1
	ORDER BY
		f.parsing_error_count DESC, lower(f.title) ASC
`
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedExists checks if the given feed exists.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) FeedExists(userID, feedID int64) bool {
|
2019-10-30 06:48:07 +01:00
|
|
|
var result bool
|
|
|
|
query := `SELECT true FROM feeds WHERE user_id=$1 AND id=$2`
|
2017-11-20 06:10:04 +01:00
|
|
|
s.db.QueryRow(query, userID, feedID).Scan(&result)
|
2019-10-30 06:48:07 +01:00
|
|
|
return result
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedURLExists checks if feed URL already exists.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) FeedURLExists(userID int64, feedURL string) bool {
|
2019-10-30 06:48:07 +01:00
|
|
|
var result bool
|
|
|
|
query := `SELECT true FROM feeds WHERE user_id=$1 AND feed_url=$2`
|
2017-11-20 06:10:04 +01:00
|
|
|
s.db.QueryRow(query, userID, feedURL).Scan(&result)
|
2019-10-30 06:48:07 +01:00
|
|
|
return result
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2017-11-22 07:36:00 +01:00
|
|
|
// CountFeeds returns the number of feeds that belongs to the given user.
|
|
|
|
func (s *Storage) CountFeeds(userID int64) int {
|
|
|
|
var result int
|
|
|
|
err := s.db.QueryRow(`SELECT count(*) FROM feeds WHERE user_id=$1`, userID).Scan(&result)
|
|
|
|
if err != nil {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2018-08-27 01:18:07 +02:00
|
|
|
// CountErrorFeeds returns the number of feeds with parse errors that belong to the given user.
|
|
|
|
func (s *Storage) CountErrorFeeds(userID int64) int {
|
2019-10-30 06:48:07 +01:00
|
|
|
query := `SELECT count(*) FROM feeds WHERE user_id=$1 AND parsing_error_count>=$2`
|
2018-08-27 01:18:07 +02:00
|
|
|
var result int
|
2019-10-30 06:48:07 +01:00
|
|
|
err := s.db.QueryRow(query, userID, maxParsingError).Scan(&result)
|
2018-08-27 01:18:07 +02:00
|
|
|
if err != nil {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
// Feeds returns all feeds that belong to the given user.
// An empty counter query means no read/unread counters are computed.
func (s *Storage) Feeds(userID int64) (model.Feeds, error) {
	return s.fetchFeeds(feedListQuery, "", userID)
}
|
|
|
|
|
|
|
|
// FeedsWithCounters returns all feeds of the given user with counters of read and unread entries.
|
|
|
|
func (s *Storage) FeedsWithCounters(userID int64) (model.Feeds, error) {
|
|
|
|
counterQuery := `
|
|
|
|
SELECT
|
|
|
|
feed_id,
|
|
|
|
status,
|
|
|
|
count(*)
|
|
|
|
FROM
|
|
|
|
entries
|
|
|
|
WHERE
|
|
|
|
user_id=$1 AND status IN ('read', 'unread')
|
|
|
|
GROUP BY
|
|
|
|
feed_id, status
|
|
|
|
`
|
|
|
|
return s.fetchFeeds(feedListQuery, counterQuery, userID)
|
|
|
|
}
|
|
|
|
|
|
|
|
// FeedsByCategoryWithCounters returns all feeds of the given user/category with counters of read and unread entries.
|
|
|
|
func (s *Storage) FeedsByCategoryWithCounters(userID, categoryID int64) (model.Feeds, error) {
|
|
|
|
feedQuery := `
|
2019-10-30 06:48:07 +01:00
|
|
|
SELECT
|
|
|
|
f.id,
|
|
|
|
f.feed_url,
|
|
|
|
f.site_url,
|
|
|
|
f.title,
|
|
|
|
f.etag_header,
|
|
|
|
f.last_modified_header,
|
|
|
|
f.user_id,
|
|
|
|
f.checked_at at time zone u.timezone,
|
|
|
|
f.parsing_error_count,
|
|
|
|
f.parsing_error_msg,
|
|
|
|
f.scraper_rules,
|
|
|
|
f.rewrite_rules,
|
|
|
|
f.crawler,
|
|
|
|
f.user_agent,
|
|
|
|
f.username,
|
|
|
|
f.password,
|
2020-06-06 06:50:59 +02:00
|
|
|
f.ignore_http_cache,
|
2020-09-10 08:28:54 +02:00
|
|
|
f.fetch_via_proxy,
|
2019-10-30 06:48:07 +01:00
|
|
|
f.disabled,
|
|
|
|
f.category_id,
|
|
|
|
c.title as category_title,
|
|
|
|
fi.icon_id,
|
|
|
|
u.timezone
|
2020-05-23 02:48:53 +02:00
|
|
|
FROM
|
|
|
|
feeds f
|
|
|
|
LEFT JOIN
|
|
|
|
categories c ON c.id=f.category_id
|
|
|
|
LEFT JOIN
|
|
|
|
feed_icons fi ON fi.feed_id=f.id
|
|
|
|
LEFT JOIN
|
|
|
|
users u ON u.id=f.user_id
|
|
|
|
WHERE
|
|
|
|
f.user_id=$1 AND f.category_id=$2
|
|
|
|
ORDER BY
|
|
|
|
f.parsing_error_count DESC, lower(f.title) ASC
|
|
|
|
`
|
|
|
|
|
|
|
|
counterQuery := `
|
|
|
|
SELECT
|
|
|
|
e.feed_id,
|
|
|
|
e.status,
|
|
|
|
count(*)
|
|
|
|
FROM
|
|
|
|
entries e
|
|
|
|
LEFT JOIN
|
|
|
|
feeds f ON f.id=e.feed_id
|
2019-10-30 06:48:07 +01:00
|
|
|
WHERE
|
2020-05-23 02:48:53 +02:00
|
|
|
e.user_id=$1 AND f.category_id=$2 AND e.status IN ('read', 'unread')
|
|
|
|
GROUP BY
|
|
|
|
e.feed_id, e.status
|
2019-10-30 06:48:07 +01:00
|
|
|
`
|
2020-05-23 02:48:53 +02:00
|
|
|
|
|
|
|
return s.fetchFeeds(feedQuery, counterQuery, userID, categoryID)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s *Storage) fetchFeedCounter(query string, args ...interface{}) (unreadCounters map[int64]int, readCounters map[int64]int, err error) {
|
|
|
|
rows, err := s.db.Query(query, args...)
|
2017-11-20 06:10:04 +01:00
|
|
|
if err != nil {
|
2020-05-23 02:48:53 +02:00
|
|
|
return nil, nil, fmt.Errorf(`store: unable to fetch feed counts: %v`, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
readCounters = make(map[int64]int)
|
|
|
|
unreadCounters = make(map[int64]int)
|
2017-11-20 06:10:04 +01:00
|
|
|
for rows.Next() {
|
2020-05-23 02:48:53 +02:00
|
|
|
var feedID int64
|
|
|
|
var status string
|
|
|
|
var count int
|
|
|
|
if err := rows.Scan(&feedID, &status, &count); err != nil {
|
|
|
|
return nil, nil, fmt.Errorf(`store: unable to fetch feed counter row: %v`, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
if status == "read" {
|
|
|
|
readCounters[feedID] = count
|
|
|
|
} else if status == "unread" {
|
|
|
|
unreadCounters[feedID] = count
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
return readCounters, unreadCounters, nil
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
func (s *Storage) fetchFeeds(feedQuery, counterQuery string, args ...interface{}) (model.Feeds, error) {
|
|
|
|
var (
|
|
|
|
readCounters map[int64]int
|
|
|
|
unreadCounters map[int64]int
|
|
|
|
)
|
2019-11-18 04:44:12 +01:00
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
if counterQuery != "" {
|
|
|
|
var err error
|
|
|
|
readCounters, unreadCounters, err = s.fetchFeedCounter(counterQuery, args...)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
}
|
2019-11-18 04:44:12 +01:00
|
|
|
|
|
|
|
feeds := make(model.Feeds, 0)
|
2020-05-23 02:48:53 +02:00
|
|
|
rows, err := s.db.Query(feedQuery, args...)
|
2019-10-30 05:44:35 +01:00
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return nil, fmt.Errorf(`store: unable to fetch feeds: %v`, err)
|
2019-10-30 05:44:35 +01:00
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
for rows.Next() {
|
|
|
|
var feed model.Feed
|
|
|
|
var iconID interface{}
|
|
|
|
var tz string
|
2019-11-18 04:44:12 +01:00
|
|
|
feed.Category = &model.Category{}
|
2019-10-30 05:44:35 +01:00
|
|
|
|
|
|
|
err := rows.Scan(
|
|
|
|
&feed.ID,
|
|
|
|
&feed.FeedURL,
|
|
|
|
&feed.SiteURL,
|
|
|
|
&feed.Title,
|
|
|
|
&feed.EtagHeader,
|
|
|
|
&feed.LastModifiedHeader,
|
|
|
|
&feed.UserID,
|
|
|
|
&feed.CheckedAt,
|
|
|
|
&feed.ParsingErrorCount,
|
|
|
|
&feed.ParsingErrorMsg,
|
|
|
|
&feed.ScraperRules,
|
|
|
|
&feed.RewriteRules,
|
|
|
|
&feed.Crawler,
|
|
|
|
&feed.UserAgent,
|
|
|
|
&feed.Username,
|
|
|
|
&feed.Password,
|
2020-06-06 06:50:59 +02:00
|
|
|
&feed.IgnoreHTTPCache,
|
2020-09-10 08:28:54 +02:00
|
|
|
&feed.FetchViaProxy,
|
2019-10-30 05:44:35 +01:00
|
|
|
&feed.Disabled,
|
|
|
|
&feed.Category.ID,
|
|
|
|
&feed.Category.Title,
|
|
|
|
&iconID,
|
|
|
|
&tz,
|
|
|
|
)
|
|
|
|
|
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return nil, fmt.Errorf(`store: unable to fetch feeds row: %v`, err)
|
2019-10-30 05:44:35 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if iconID != nil {
|
|
|
|
feed.Icon = &model.FeedIcon{FeedID: feed.ID, IconID: iconID.(int64)}
|
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
if counterQuery != "" {
|
|
|
|
if count, found := readCounters[feed.ID]; found {
|
|
|
|
feed.ReadCount = count
|
|
|
|
}
|
|
|
|
|
|
|
|
if count, found := unreadCounters[feed.ID]; found {
|
|
|
|
feed.UnreadCount = count
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-30 05:44:35 +01:00
|
|
|
feed.CheckedAt = timezone.Convert(tz, feed.CheckedAt)
|
2019-11-18 04:44:12 +01:00
|
|
|
feed.Category.UserID = feed.UserID
|
2019-10-30 05:44:35 +01:00
|
|
|
feeds = append(feeds, &feed)
|
|
|
|
}
|
|
|
|
|
|
|
|
return feeds, nil
|
|
|
|
}
|
|
|
|
|
2020-05-25 23:59:15 +02:00
|
|
|
// WeeklyFeedEntryCount returns the weekly entry count for a feed.
|
|
|
|
func (s *Storage) WeeklyFeedEntryCount(userID, feedID int64) (int, error) {
|
2020-05-25 23:06:56 +02:00
|
|
|
query := `
|
|
|
|
SELECT
|
|
|
|
count(*)
|
|
|
|
FROM
|
|
|
|
entries
|
|
|
|
WHERE
|
|
|
|
entries.user_id=$1 AND
|
|
|
|
entries.feed_id=$2 AND
|
|
|
|
entries.published_at BETWEEN (now() - interval '1 week') AND now();
|
|
|
|
`
|
|
|
|
|
2020-05-25 23:59:15 +02:00
|
|
|
var weeklyCount int
|
|
|
|
err := s.db.QueryRow(query, userID, feedID).Scan(&weeklyCount)
|
2020-05-25 23:06:56 +02:00
|
|
|
|
|
|
|
switch {
|
|
|
|
case err == sql.ErrNoRows:
|
|
|
|
return 0, nil
|
|
|
|
case err != nil:
|
2020-05-25 23:59:15 +02:00
|
|
|
return 0, fmt.Errorf(`store: unable to fetch weekly count for feed #%d: %v`, feedID, err)
|
2020-05-25 23:06:56 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return weeklyCount, nil
|
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedByID returns a feed by the ID.
// It returns (nil, nil) when no feed matches the given user/feed IDs.
func (s *Storage) FeedByID(userID, feedID int64) (*model.Feed, error) {
	var feed model.Feed
	// iconID is scanned as interface{} because the LEFT JOIN on feed_icons
	// may yield NULL.
	var iconID interface{}
	var tz string
	feed.Category = &model.Category{UserID: userID}

	query := `
		SELECT
			f.id,
			f.feed_url,
			f.site_url,
			f.title,
			f.etag_header,
			f.last_modified_header,
			f.user_id, f.checked_at at time zone u.timezone,
			f.parsing_error_count,
			f.parsing_error_msg,
			f.scraper_rules,
			f.rewrite_rules,
			f.crawler,
			f.user_agent,
			f.username,
			f.password,
			f.ignore_http_cache,
			f.fetch_via_proxy,
			f.disabled,
			f.category_id,
			c.title as category_title,
			fi.icon_id,
			u.timezone
		FROM feeds f
		LEFT JOIN categories c ON c.id=f.category_id
		LEFT JOIN feed_icons fi ON fi.feed_id=f.id
		LEFT JOIN users u ON u.id=f.user_id
		WHERE
			f.user_id=$1 AND f.id=$2
	`

	// Scan destinations must stay in the exact order of the SELECT columns.
	err := s.db.QueryRow(query, userID, feedID).Scan(
		&feed.ID,
		&feed.FeedURL,
		&feed.SiteURL,
		&feed.Title,
		&feed.EtagHeader,
		&feed.LastModifiedHeader,
		&feed.UserID,
		&feed.CheckedAt,
		&feed.ParsingErrorCount,
		&feed.ParsingErrorMsg,
		&feed.ScraperRules,
		&feed.RewriteRules,
		&feed.Crawler,
		&feed.UserAgent,
		&feed.Username,
		&feed.Password,
		&feed.IgnoreHTTPCache,
		&feed.FetchViaProxy,
		&feed.Disabled,
		&feed.Category.ID,
		&feed.Category.Title,
		&iconID,
		&tz,
	)

	switch {
	case err == sql.ErrNoRows:
		// Feed not found: not an error for this API.
		return nil, nil
	case err != nil:
		return nil, fmt.Errorf(`store: unable to fetch feed #%d: %v`, feedID, err)
	}

	if iconID != nil {
		feed.Icon = &model.FeedIcon{FeedID: feed.ID, IconID: iconID.(int64)}
	}

	// Convert checked_at into the user's timezone.
	feed.CheckedAt = timezone.Convert(tz, feed.CheckedAt)
	return &feed, nil
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// CreateFeed creates a new feed.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) CreateFeed(feed *model.Feed) error {
|
|
|
|
sql := `
|
2019-10-30 06:48:07 +01:00
|
|
|
INSERT INTO feeds (
|
|
|
|
feed_url,
|
|
|
|
site_url,
|
|
|
|
title,
|
|
|
|
category_id,
|
|
|
|
user_id,
|
|
|
|
etag_header,
|
|
|
|
last_modified_header,
|
|
|
|
crawler,
|
|
|
|
user_agent,
|
|
|
|
username,
|
|
|
|
password,
|
2019-11-29 20:17:14 +01:00
|
|
|
disabled,
|
|
|
|
scraper_rules,
|
2020-09-10 08:28:54 +02:00
|
|
|
rewrite_rules,
|
|
|
|
fetch_via_proxy
|
2019-10-30 06:48:07 +01:00
|
|
|
)
|
|
|
|
VALUES
|
2020-09-10 08:28:54 +02:00
|
|
|
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
|
2019-10-30 06:48:07 +01:00
|
|
|
RETURNING
|
|
|
|
id
|
2017-11-20 06:10:04 +01:00
|
|
|
`
|
|
|
|
err := s.db.QueryRow(
|
|
|
|
sql,
|
|
|
|
feed.FeedURL,
|
|
|
|
feed.SiteURL,
|
|
|
|
feed.Title,
|
|
|
|
feed.Category.ID,
|
|
|
|
feed.UserID,
|
|
|
|
feed.EtagHeader,
|
|
|
|
feed.LastModifiedHeader,
|
2017-12-13 04:19:36 +01:00
|
|
|
feed.Crawler,
|
2018-09-20 03:19:24 +02:00
|
|
|
feed.UserAgent,
|
2018-06-20 07:58:29 +02:00
|
|
|
feed.Username,
|
|
|
|
feed.Password,
|
2019-07-27 06:13:06 +02:00
|
|
|
feed.Disabled,
|
2019-11-29 20:17:14 +01:00
|
|
|
feed.ScraperRules,
|
|
|
|
feed.RewriteRules,
|
2020-09-10 08:28:54 +02:00
|
|
|
feed.FetchViaProxy,
|
2017-11-20 06:10:04 +01:00
|
|
|
).Scan(&feed.ID)
|
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return fmt.Errorf(`store: unable to create feed %q: %v`, feed.FeedURL, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
for i := 0; i < len(feed.Entries); i++ {
|
|
|
|
feed.Entries[i].FeedID = feed.ID
|
|
|
|
feed.Entries[i].UserID = feed.UserID
|
2019-09-19 07:41:33 +02:00
|
|
|
|
|
|
|
if !s.entryExists(feed.Entries[i]) {
|
|
|
|
err := s.createEntry(feed.Entries[i])
|
|
|
|
if err != nil {
|
|
|
|
return err
|
|
|
|
}
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// UpdateFeed updates an existing feed.
// The WHERE clause scopes the update to the feed's owner, so a mismatched
// user_id silently updates nothing (no error is reported for zero rows).
func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
	query := `
		UPDATE
			feeds
		SET
			feed_url=$1,
			site_url=$2,
			title=$3,
			category_id=$4,
			etag_header=$5,
			last_modified_header=$6,
			checked_at=$7,
			parsing_error_msg=$8,
			parsing_error_count=$9,
			scraper_rules=$10,
			rewrite_rules=$11,
			crawler=$12,
			user_agent=$13,
			username=$14,
			password=$15,
			disabled=$16,
			next_check_at=$17,
			ignore_http_cache=$18,
			fetch_via_proxy=$19
		WHERE
			id=$20 AND user_id=$21
	`
	// Argument order must match the $1..$21 placeholders above exactly.
	_, err = s.db.Exec(query,
		feed.FeedURL,
		feed.SiteURL,
		feed.Title,
		feed.Category.ID,
		feed.EtagHeader,
		feed.LastModifiedHeader,
		feed.CheckedAt,
		feed.ParsingErrorMsg,
		feed.ParsingErrorCount,
		feed.ScraperRules,
		feed.RewriteRules,
		feed.Crawler,
		feed.UserAgent,
		feed.Username,
		feed.Password,
		feed.Disabled,
		feed.NextCheckAt,
		feed.IgnoreHTTPCache,
		feed.FetchViaProxy,
		feed.ID,
		feed.UserID,
	)

	if err != nil {
		return fmt.Errorf(`store: unable to update feed #%d (%s): %v`, feed.ID, feed.FeedURL, err)
	}

	return nil
}
|
|
|
|
|
2018-12-15 22:04:38 +01:00
|
|
|
// UpdateFeedError updates feed errors.
|
|
|
|
func (s *Storage) UpdateFeedError(feed *model.Feed) (err error) {
|
|
|
|
query := `
|
2019-10-30 06:48:07 +01:00
|
|
|
UPDATE
|
|
|
|
feeds
|
2018-12-15 22:04:38 +01:00
|
|
|
SET
|
|
|
|
parsing_error_msg=$1,
|
|
|
|
parsing_error_count=$2,
|
2020-05-25 23:06:56 +02:00
|
|
|
checked_at=$3,
|
|
|
|
next_check_at=$4
|
2019-10-30 06:48:07 +01:00
|
|
|
WHERE
|
2020-05-25 23:06:56 +02:00
|
|
|
id=$5 AND user_id=$6
|
2019-10-30 06:48:07 +01:00
|
|
|
`
|
2018-12-15 22:04:38 +01:00
|
|
|
_, err = s.db.Exec(query,
|
|
|
|
feed.ParsingErrorMsg,
|
|
|
|
feed.ParsingErrorCount,
|
|
|
|
feed.CheckedAt,
|
2020-05-25 23:06:56 +02:00
|
|
|
feed.NextCheckAt,
|
2018-12-15 22:04:38 +01:00
|
|
|
feed.ID,
|
|
|
|
feed.UserID,
|
|
|
|
)
|
|
|
|
|
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return fmt.Errorf(`store: unable to update feed error #%d (%s): %v`, feed.ID, feed.FeedURL, err)
|
2018-12-15 22:04:38 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// RemoveFeed removes a feed.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) RemoveFeed(userID, feedID int64) error {
|
2019-10-30 06:48:07 +01:00
|
|
|
query := `DELETE FROM feeds WHERE id = $1 AND user_id = $2`
|
|
|
|
result, err := s.db.Exec(query, feedID, userID)
|
2017-11-20 06:10:04 +01:00
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return fmt.Errorf(`store: unable to remove feed #%d: %v`, feedID, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
count, err := result.RowsAffected()
|
|
|
|
if err != nil {
|
2019-10-30 06:48:07 +01:00
|
|
|
return fmt.Errorf(`store: unable to remove feed #%d: %v`, feedID, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if count == 0 {
|
2019-10-30 06:48:07 +01:00
|
|
|
return errors.New(`store: no feed has been removed`)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return nil
|
|
|
|
}
|
2018-06-30 23:22:45 +02:00
|
|
|
|
|
|
|
// ResetFeedErrors removes all feed errors.
|
|
|
|
func (s *Storage) ResetFeedErrors() error {
|
|
|
|
_, err := s.db.Exec(`UPDATE feeds SET parsing_error_count=0, parsing_error_msg=''`)
|
|
|
|
return err
|
|
|
|
}
|