2017-11-20 06:10:04 +01:00
|
|
|
// Copyright 2017 Frédéric Guillot. All rights reserved.
|
|
|
|
// Use of this source code is governed by the Apache 2.0
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2018-08-25 06:51:50 +02:00
|
|
|
package storage // import "miniflux.app/storage"
|
2017-11-20 06:10:04 +01:00
|
|
|
|
|
|
|
import (
|
|
|
|
"database/sql"
|
|
|
|
"errors"
|
|
|
|
"fmt"
|
2021-03-17 06:15:40 +01:00
|
|
|
"runtime"
|
2021-06-02 23:01:21 +02:00
|
|
|
"sort"
|
2017-11-22 07:36:00 +01:00
|
|
|
|
2021-01-26 06:41:36 +01:00
|
|
|
"miniflux.app/config"
|
2021-03-17 06:15:40 +01:00
|
|
|
"miniflux.app/logger"
|
2018-08-25 06:51:50 +02:00
|
|
|
"miniflux.app/model"
|
2017-11-20 06:10:04 +01:00
|
|
|
)
|
|
|
|
|
2021-06-02 23:01:21 +02:00
|
|
|
type byStateAndName struct{ f model.Feeds }
|
|
|
|
|
|
|
|
func (l byStateAndName) Len() int { return len(l.f) }
|
|
|
|
func (l byStateAndName) Swap(i, j int) { l.f[i], l.f[j] = l.f[j], l.f[i] }
|
|
|
|
func (l byStateAndName) Less(i, j int) bool {
|
|
|
|
if l.f[i].UnreadCount > 0 && l.f[j].UnreadCount == 0 {
|
|
|
|
return true
|
|
|
|
} else if l.f[i].UnreadCount == 0 && l.f[j].UnreadCount > 0 {
|
|
|
|
return false
|
|
|
|
} else {
|
|
|
|
return l.f[i].Title < l.f[j].Title
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedExists checks if the given feed exists.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) FeedExists(userID, feedID int64) bool {
|
2019-10-30 06:48:07 +01:00
|
|
|
var result bool
|
|
|
|
query := `SELECT true FROM feeds WHERE user_id=$1 AND id=$2`
|
2017-11-20 06:10:04 +01:00
|
|
|
s.db.QueryRow(query, userID, feedID).Scan(&result)
|
2019-10-30 06:48:07 +01:00
|
|
|
return result
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedURLExists checks if feed URL already exists.
|
2017-11-20 06:10:04 +01:00
|
|
|
func (s *Storage) FeedURLExists(userID int64, feedURL string) bool {
|
2019-10-30 06:48:07 +01:00
|
|
|
var result bool
|
|
|
|
query := `SELECT true FROM feeds WHERE user_id=$1 AND feed_url=$2`
|
2017-11-20 06:10:04 +01:00
|
|
|
s.db.QueryRow(query, userID, feedURL).Scan(&result)
|
2019-10-30 06:48:07 +01:00
|
|
|
return result
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2020-09-21 08:29:51 +02:00
|
|
|
// AnotherFeedURLExists checks if the user a duplicated feed.
|
|
|
|
func (s *Storage) AnotherFeedURLExists(userID, feedID int64, feedURL string) bool {
|
|
|
|
var result bool
|
|
|
|
query := `SELECT true FROM feeds WHERE id <> $1 AND user_id=$2 AND feed_url=$3`
|
|
|
|
s.db.QueryRow(query, feedID, userID, feedURL).Scan(&result)
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2020-09-28 01:01:06 +02:00
|
|
|
// CountAllFeeds returns the number of feeds in the database.
|
|
|
|
func (s *Storage) CountAllFeeds() map[string]int64 {
|
|
|
|
rows, err := s.db.Query(`SELECT disabled, count(*) FROM feeds GROUP BY disabled`)
|
|
|
|
if err != nil {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
defer rows.Close()
|
|
|
|
|
|
|
|
results := make(map[string]int64)
|
|
|
|
results["enabled"] = 0
|
|
|
|
results["disabled"] = 0
|
|
|
|
|
|
|
|
for rows.Next() {
|
|
|
|
var disabled bool
|
|
|
|
var count int64
|
|
|
|
|
|
|
|
if err := rows.Scan(&disabled, &count); err != nil {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
if disabled {
|
|
|
|
results["disabled"] = count
|
|
|
|
} else {
|
|
|
|
results["enabled"] = count
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
results["total"] = results["disabled"] + results["enabled"]
|
|
|
|
return results
|
|
|
|
}
|
|
|
|
|
2017-11-22 07:36:00 +01:00
|
|
|
// CountFeeds returns the number of feeds that belongs to the given user.
|
|
|
|
func (s *Storage) CountFeeds(userID int64) int {
|
|
|
|
var result int
|
|
|
|
err := s.db.QueryRow(`SELECT count(*) FROM feeds WHERE user_id=$1`, userID).Scan(&result)
|
|
|
|
if err != nil {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2020-09-28 01:01:06 +02:00
|
|
|
// CountUserFeedsWithErrors returns the number of feeds with parsing errors that belong to the given user.
|
|
|
|
func (s *Storage) CountUserFeedsWithErrors(userID int64) int {
|
2021-01-26 06:41:36 +01:00
|
|
|
pollingParsingErrorLimit := config.Opts.PollingParsingErrorLimit()
|
|
|
|
if pollingParsingErrorLimit <= 0 {
|
|
|
|
pollingParsingErrorLimit = 1
|
|
|
|
}
|
2020-09-28 01:01:06 +02:00
|
|
|
query := `SELECT count(*) FROM feeds WHERE user_id=$1 AND parsing_error_count >= $2`
|
2018-08-27 01:18:07 +02:00
|
|
|
var result int
|
2021-01-26 06:41:36 +01:00
|
|
|
err := s.db.QueryRow(query, userID, pollingParsingErrorLimit).Scan(&result)
|
2018-08-27 01:18:07 +02:00
|
|
|
if err != nil {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2020-09-28 01:01:06 +02:00
|
|
|
// CountAllFeedsWithErrors returns the number of feeds with parsing errors.
|
|
|
|
func (s *Storage) CountAllFeedsWithErrors() int {
|
2021-01-26 06:41:36 +01:00
|
|
|
pollingParsingErrorLimit := config.Opts.PollingParsingErrorLimit()
|
|
|
|
if pollingParsingErrorLimit <= 0 {
|
|
|
|
pollingParsingErrorLimit = 1
|
|
|
|
}
|
2020-09-28 01:01:06 +02:00
|
|
|
query := `SELECT count(*) FROM feeds WHERE parsing_error_count >= $1`
|
|
|
|
var result int
|
2021-01-26 06:41:36 +01:00
|
|
|
err := s.db.QueryRow(query, pollingParsingErrorLimit).Scan(&result)
|
2020-09-28 01:01:06 +02:00
|
|
|
if err != nil {
|
|
|
|
return 0
|
|
|
|
}
|
|
|
|
|
|
|
|
return result
|
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
// Feeds returns all feeds that belongs to the given user.
|
2017-11-28 06:30:04 +01:00
|
|
|
func (s *Storage) Feeds(userID int64) (model.Feeds, error) {
|
2021-01-18 22:22:09 +01:00
|
|
|
builder := NewFeedQueryBuilder(s, userID)
|
|
|
|
builder.WithOrder(model.DefaultFeedSorting)
|
|
|
|
builder.WithDirection(model.DefaultFeedSortingDirection)
|
|
|
|
return builder.GetFeeds()
|
2020-05-23 02:48:53 +02:00
|
|
|
}
|
|
|
|
|
2021-06-02 23:01:21 +02:00
|
|
|
func getFeedsSorted(builder *FeedQueryBuilder) (model.Feeds, error) {
|
|
|
|
result, err := builder.GetFeeds()
|
|
|
|
if err == nil {
|
|
|
|
sort.Sort(byStateAndName{result})
|
|
|
|
return result, nil
|
|
|
|
}
|
|
|
|
return result, err
|
|
|
|
}
|
|
|
|
|
2020-05-23 02:48:53 +02:00
|
|
|
// FeedsWithCounters returns all feeds of the given user with counters of read and unread entries.
|
|
|
|
func (s *Storage) FeedsWithCounters(userID int64) (model.Feeds, error) {
|
2021-01-18 22:22:09 +01:00
|
|
|
builder := NewFeedQueryBuilder(s, userID)
|
|
|
|
builder.WithCounters()
|
|
|
|
builder.WithOrder(model.DefaultFeedSorting)
|
|
|
|
builder.WithDirection(model.DefaultFeedSortingDirection)
|
2021-06-02 23:01:21 +02:00
|
|
|
return getFeedsSorted(builder)
|
2020-05-23 02:48:53 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// FeedsByCategoryWithCounters returns all feeds of the given user/category with counters of read and unread entries.
|
|
|
|
func (s *Storage) FeedsByCategoryWithCounters(userID, categoryID int64) (model.Feeds, error) {
|
2021-01-18 22:22:09 +01:00
|
|
|
builder := NewFeedQueryBuilder(s, userID)
|
|
|
|
builder.WithCategoryID(categoryID)
|
|
|
|
builder.WithCounters()
|
|
|
|
builder.WithOrder(model.DefaultFeedSorting)
|
|
|
|
builder.WithDirection(model.DefaultFeedSortingDirection)
|
2021-06-02 23:01:21 +02:00
|
|
|
return getFeedsSorted(builder)
|
2019-10-30 05:44:35 +01:00
|
|
|
}
|
|
|
|
|
2020-05-25 23:59:15 +02:00
|
|
|
// WeeklyFeedEntryCount returns the weekly entry count for a feed.
|
|
|
|
func (s *Storage) WeeklyFeedEntryCount(userID, feedID int64) (int, error) {
|
2020-05-25 23:06:56 +02:00
|
|
|
query := `
|
|
|
|
SELECT
|
|
|
|
count(*)
|
|
|
|
FROM
|
|
|
|
entries
|
|
|
|
WHERE
|
|
|
|
entries.user_id=$1 AND
|
|
|
|
entries.feed_id=$2 AND
|
|
|
|
entries.published_at BETWEEN (now() - interval '1 week') AND now();
|
|
|
|
`
|
|
|
|
|
2020-05-25 23:59:15 +02:00
|
|
|
var weeklyCount int
|
|
|
|
err := s.db.QueryRow(query, userID, feedID).Scan(&weeklyCount)
|
2020-05-25 23:06:56 +02:00
|
|
|
|
|
|
|
switch {
|
2021-01-18 22:22:09 +01:00
|
|
|
case errors.Is(err, sql.ErrNoRows):
|
2020-05-25 23:06:56 +02:00
|
|
|
return 0, nil
|
|
|
|
case err != nil:
|
2020-05-25 23:59:15 +02:00
|
|
|
return 0, fmt.Errorf(`store: unable to fetch weekly count for feed #%d: %v`, feedID, err)
|
2020-05-25 23:06:56 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
return weeklyCount, nil
|
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// FeedByID returns a feed by the ID.
|
|
|
|
func (s *Storage) FeedByID(userID, feedID int64) (*model.Feed, error) {
|
2021-01-18 22:22:09 +01:00
|
|
|
builder := NewFeedQueryBuilder(s, userID)
|
|
|
|
builder.WithFeedID(feedID)
|
|
|
|
feed, err := builder.GetFeed()
|
2017-11-20 06:10:04 +01:00
|
|
|
|
|
|
|
switch {
|
2021-01-18 22:22:09 +01:00
|
|
|
case errors.Is(err, sql.ErrNoRows):
|
2017-11-20 06:10:04 +01:00
|
|
|
return nil, nil
|
|
|
|
case err != nil:
|
2019-10-30 06:48:07 +01:00
|
|
|
return nil, fmt.Errorf(`store: unable to fetch feed #%d: %v`, feedID, err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
2021-01-27 14:09:50 +01:00
|
|
|
|
2021-01-18 22:22:09 +01:00
|
|
|
return feed, nil
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// CreateFeed creates a new feed.
//
// The feed row is inserted first and feed.ID is populated from the
// RETURNING clause; then every entry attached to the feed is persisted.
// Each entry is handled in its own short transaction: entries that
// already exist are skipped, new ones are created. The first failure
// aborts the whole operation and returns the error.
func (s *Storage) CreateFeed(feed *model.Feed) error {
	sql := `
		INSERT INTO feeds (
			feed_url,
			site_url,
			title,
			category_id,
			user_id,
			etag_header,
			last_modified_header,
			crawler,
			user_agent,
			cookie,
			username,
			password,
			disabled,
			scraper_rules,
			rewrite_rules,
			blocklist_rules,
			keeplist_rules,
			ignore_http_cache,
			allow_self_signed_certificates,
			fetch_via_proxy
		)
		VALUES
			($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20)
		RETURNING
			id
	`
	// NOTE: the argument order below must stay in lockstep with the
	// column list of the INSERT statement above.
	err := s.db.QueryRow(
		sql,
		feed.FeedURL,
		feed.SiteURL,
		feed.Title,
		feed.Category.ID,
		feed.UserID,
		feed.EtagHeader,
		feed.LastModifiedHeader,
		feed.Crawler,
		feed.UserAgent,
		feed.Cookie,
		feed.Username,
		feed.Password,
		feed.Disabled,
		feed.ScraperRules,
		feed.RewriteRules,
		feed.BlocklistRules,
		feed.KeeplistRules,
		feed.IgnoreHTTPCache,
		feed.AllowSelfSignedCertificates,
		feed.FetchViaProxy,
	).Scan(&feed.ID)
	if err != nil {
		return fmt.Errorf(`store: unable to create feed %q: %v`, feed.FeedURL, err)
	}

	for i := 0; i < len(feed.Entries); i++ {
		// Attach each entry to the freshly created feed and its owner.
		feed.Entries[i].FeedID = feed.ID
		feed.Entries[i].UserID = feed.UserID

		// One transaction per entry — presumably to keep each transaction
		// small rather than holding one transaction across all entries;
		// confirm before consolidating into a single transaction.
		tx, err := s.db.Begin()
		if err != nil {
			return fmt.Errorf(`store: unable to start transaction: %v`, err)
		}

		// Only insert entries that are not already present.
		if !s.entryExists(tx, feed.Entries[i]) {
			if err := s.createEntry(tx, feed.Entries[i]); err != nil {
				tx.Rollback()
				return err
			}
		}

		if err := tx.Commit(); err != nil {
			return fmt.Errorf(`store: unable to commit transaction: %v`, err)
		}
	}

	return nil
}
|
|
|
|
|
2017-11-28 06:30:04 +01:00
|
|
|
// UpdateFeed updates an existing feed.
//
// The row is matched on both feed.ID and feed.UserID, so a user cannot
// update another user's feed. A non-matching ID/user pair is not an
// error: Exec simply affects zero rows.
func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
	query := `
		UPDATE
			feeds
		SET
			feed_url=$1,
			site_url=$2,
			title=$3,
			category_id=$4,
			etag_header=$5,
			last_modified_header=$6,
			checked_at=$7,
			parsing_error_msg=$8,
			parsing_error_count=$9,
			scraper_rules=$10,
			rewrite_rules=$11,
			blocklist_rules=$12,
			keeplist_rules=$13,
			crawler=$14,
			user_agent=$15,
			cookie=$16,
			username=$17,
			password=$18,
			disabled=$19,
			next_check_at=$20,
			ignore_http_cache=$21,
			allow_self_signed_certificates=$22,
			fetch_via_proxy=$23
		WHERE
			id=$24 AND user_id=$25
	`
	// NOTE: the argument order below must stay in lockstep with the
	// numbered placeholders of the UPDATE statement above.
	_, err = s.db.Exec(query,
		feed.FeedURL,
		feed.SiteURL,
		feed.Title,
		feed.Category.ID,
		feed.EtagHeader,
		feed.LastModifiedHeader,
		feed.CheckedAt,
		feed.ParsingErrorMsg,
		feed.ParsingErrorCount,
		feed.ScraperRules,
		feed.RewriteRules,
		feed.BlocklistRules,
		feed.KeeplistRules,
		feed.Crawler,
		feed.UserAgent,
		feed.Cookie,
		feed.Username,
		feed.Password,
		feed.Disabled,
		feed.NextCheckAt,
		feed.IgnoreHTTPCache,
		feed.AllowSelfSignedCertificates,
		feed.FetchViaProxy,
		feed.ID,
		feed.UserID,
	)

	if err != nil {
		return fmt.Errorf(`store: unable to update feed #%d (%s): %v`, feed.ID, feed.FeedURL, err)
	}

	return nil
}
|
|
|
|
|
2018-12-15 22:04:38 +01:00
|
|
|
// UpdateFeedError updates feed errors.
//
// Only the error bookkeeping columns are touched (parsing error message
// and count, last checked time, next scheduled check). The row is
// matched on both feed.ID and feed.UserID.
func (s *Storage) UpdateFeedError(feed *model.Feed) (err error) {
	query := `
		UPDATE
			feeds
		SET
			parsing_error_msg=$1,
			parsing_error_count=$2,
			checked_at=$3,
			next_check_at=$4
		WHERE
			id=$5 AND user_id=$6
	`
	// NOTE: the argument order below must match the numbered placeholders
	// in the UPDATE statement above.
	_, err = s.db.Exec(query,
		feed.ParsingErrorMsg,
		feed.ParsingErrorCount,
		feed.CheckedAt,
		feed.NextCheckAt,
		feed.ID,
		feed.UserID,
	)

	if err != nil {
		return fmt.Errorf(`store: unable to update feed error #%d (%s): %v`, feed.ID, feed.FeedURL, err)
	}

	return nil
}
|
|
|
|
|
2021-03-17 06:15:40 +01:00
|
|
|
// RemoveFeed removes a feed and all entries.
// This operation can take some time if the feed has a lot of entries.
func (s *Storage) RemoveFeed(userID, feedID int64) error {
	rows, err := s.db.Query(`SELECT id FROM entries WHERE user_id=$1 AND feed_id=$2`, userID, feedID)
	if err != nil {
		return fmt.Errorf(`store: unable to get user feed entries: %v`, err)
	}
	defer rows.Close()

	// Entries are deleted one by one rather than with a single bulk
	// DELETE — presumably to keep each statement short and avoid a
	// long-running lock; confirm before changing to a bulk delete.
	for rows.Next() {
		var entryID int64
		if err := rows.Scan(&entryID); err != nil {
			return fmt.Errorf(`store: unable to read user feed entry ID: %v`, err)
		}

		logger.Debug(`[FEED DELETION] Deleting entry #%d of feed #%d for user #%d (%d GoRoutines)`, entryID, feedID, userID, runtime.NumGoroutine())

		if _, err := s.db.Exec(`DELETE FROM entries WHERE id=$1 AND user_id=$2`, entryID, userID); err != nil {
			return fmt.Errorf(`store: unable to delete user feed entries #%d: %v`, entryID, err)
		}
	}

	// Finally remove the feed row itself, again scoped to the owner.
	if _, err := s.db.Exec(`DELETE FROM feeds WHERE id=$1 AND user_id=$2`, feedID, userID); err != nil {
		return fmt.Errorf(`store: unable to delete feed #%d: %v`, feedID, err)
	}

	return nil
}
|
2018-06-30 23:22:45 +02:00
|
|
|
|
|
|
|
// ResetFeedErrors removes all feed errors.
|
|
|
|
func (s *Storage) ResetFeedErrors() error {
|
|
|
|
_, err := s.db.Exec(`UPDATE feeds SET parsing_error_count=0, parsing_error_msg=''`)
|
|
|
|
return err
|
|
|
|
}
|