// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package model // import "miniflux.app/model"
|
2017-11-20 06:10:04 +01:00
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"time"
|
2018-10-15 06:43:48 +02:00
|
|
|
|
|
|
|
"miniflux.app/http/client"
|
2017-11-20 06:10:04 +01:00
|
|
|
)
|
|
|
|
|
2018-10-15 06:43:48 +02:00
|
|
|
// Feed represents a feed in the application.
type Feed struct {
	ID      int64  `json:"id"`
	UserID  int64  `json:"user_id"`
	FeedURL string `json:"feed_url"`
	SiteURL string `json:"site_url"`
	Title   string `json:"title"`

	// CheckedAt is the time of the last refresh attempt (see CheckedNow).
	CheckedAt time.Time `json:"checked_at"`

	// HTTP cache validators saved from the last fetch
	// (populated by WithClientResponse).
	EtagHeader         string `json:"etag_header"`
	LastModifiedHeader string `json:"last_modified_header"`

	// Last parsing error and the number of consecutive errors
	// (maintained by WithError / ResetErrorCounter).
	ParsingErrorMsg   string `json:"parsing_error_message"`
	ParsingErrorCount int    `json:"parsing_error_count"`

	// Per-feed processing options (set via WithBrowsingParameters).
	ScraperRules string `json:"scraper_rules"`
	RewriteRules string `json:"rewrite_rules"`
	Crawler      bool   `json:"crawler"`
	UserAgent    string `json:"user_agent"`

	// Credentials used when fetching the feed, if any.
	Username string `json:"username"`
	Password string `json:"password"`

	Disabled bool `json:"disabled"`

	Category *Category `json:"category,omitempty"`
	Entries  Entries   `json:"entries,omitempty"`
	Icon     *FeedIcon `json:"icon"`

	// Runtime counters, never serialized to JSON.
	UnreadCount int `json:"-"`
	ReadCount   int `json:"-"`
}
|
|
|
|
|
|
|
|
func (f *Feed) String() string {
|
|
|
|
return fmt.Sprintf("ID=%d, UserID=%d, FeedURL=%s, SiteURL=%s, Title=%s, Category={%s}",
|
|
|
|
f.ID,
|
|
|
|
f.UserID,
|
|
|
|
f.FeedURL,
|
|
|
|
f.SiteURL,
|
|
|
|
f.Title,
|
|
|
|
f.Category,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2018-10-15 06:43:48 +02:00
|
|
|
// WithClientResponse updates feed attributes from an HTTP request.
|
|
|
|
func (f *Feed) WithClientResponse(response *client.Response) {
|
|
|
|
f.EtagHeader = response.ETag
|
|
|
|
f.LastModifiedHeader = response.LastModified
|
|
|
|
f.FeedURL = response.EffectiveURL
|
|
|
|
}
|
|
|
|
|
|
|
|
// WithCategoryID initializes the category attribute of the feed.
|
|
|
|
func (f *Feed) WithCategoryID(categoryID int64) {
|
|
|
|
f.Category = &Category{ID: categoryID}
|
|
|
|
}
|
|
|
|
|
|
|
|
// WithBrowsingParameters defines browsing parameters.
|
2019-11-29 20:17:14 +01:00
|
|
|
func (f *Feed) WithBrowsingParameters(crawler bool, userAgent, username, password, scraperRules, rewriteRules string) {
|
2018-10-15 06:43:48 +02:00
|
|
|
f.Crawler = crawler
|
|
|
|
f.UserAgent = userAgent
|
|
|
|
f.Username = username
|
|
|
|
f.Password = password
|
2019-11-29 20:17:14 +01:00
|
|
|
f.ScraperRules = scraperRules
|
|
|
|
f.RewriteRules = rewriteRules
|
2018-10-15 06:43:48 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
// WithError adds a new error message and increment the error counter.
|
|
|
|
func (f *Feed) WithError(message string) {
|
|
|
|
f.ParsingErrorCount++
|
|
|
|
f.ParsingErrorMsg = message
|
|
|
|
}
|
|
|
|
|
|
|
|
// ResetErrorCounter removes all previous errors.
|
|
|
|
func (f *Feed) ResetErrorCounter() {
|
|
|
|
f.ParsingErrorCount = 0
|
|
|
|
f.ParsingErrorMsg = ""
|
|
|
|
}
|
|
|
|
|
|
|
|
// CheckedNow set attribute values when the feed is refreshed.
|
|
|
|
func (f *Feed) CheckedNow() {
|
|
|
|
f.CheckedAt = time.Now()
|
|
|
|
|
|
|
|
if f.SiteURL == "" {
|
|
|
|
f.SiteURL = f.FeedURL
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-11-20 06:10:04 +01:00
|
|
|
// Feeds is a list of feeds.
type Feeds []*Feed
|