2017-11-20 06:10:04 +01:00
|
|
|
// Copyright 2017 Frédéric Guillot. All rights reserved.
|
|
|
|
// Use of this source code is governed by the Apache 2.0
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2018-08-25 06:51:50 +02:00
|
|
|
package api // import "miniflux.app/api"
|
2017-11-20 06:10:04 +01:00
|
|
|
|
|
|
|
import (
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2017-11-25 07:29:20 +01:00
|
|
|
|
2018-08-25 06:51:50 +02:00
|
|
|
"miniflux.app/model"
|
2017-11-20 06:10:04 +01:00
|
|
|
)
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
// feedIconResponse is the JSON representation of a feed icon returned
// by the API: the icon row ID, its MIME type, and the icon payload as
// a string (encoding not visible here — presumably a base64 data-URI
// style value; confirm against the icon handler).
type feedIconResponse struct {
	ID       int64  `json:"id"`
	MimeType string `json:"mime_type"`
	Data     string `json:"data"`
}
|
|
|
|
|
2018-01-03 07:04:48 +01:00
|
|
|
// entriesResponse wraps a page of entries together with the total
// number of matching entries, so clients can paginate.
type entriesResponse struct {
	Total   int           `json:"total"`
	Entries model.Entries `json:"entries"`
}
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
type subscriptionDiscoveryRequest struct {
|
|
|
|
URL string `json:"url"`
|
|
|
|
UserAgent string `json:"user_agent"`
|
|
|
|
Username string `json:"username"`
|
|
|
|
Password string `json:"password"`
|
|
|
|
FetchViaProxy bool `json:"fetch_via_proxy"`
|
|
|
|
}
|
|
|
|
|
|
|
|
func decodeSubscriptionDiscoveryRequest(r io.ReadCloser) (*subscriptionDiscoveryRequest, error) {
|
|
|
|
defer r.Close()
|
|
|
|
|
|
|
|
var s subscriptionDiscoveryRequest
|
|
|
|
decoder := json.NewDecoder(r)
|
|
|
|
if err := decoder.Decode(&s); err != nil {
|
|
|
|
return nil, fmt.Errorf("invalid JSON payload: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return &s, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// feedCreationResponse is the JSON answer returned after a feed has
// been created successfully: the ID of the newly created feed.
type feedCreationResponse struct {
	FeedID int64 `json:"feed_id"`
}
|
|
|
|
|
|
|
|
type feedCreationRequest struct {
|
2021-01-03 01:33:41 +01:00
|
|
|
FeedURL string `json:"feed_url"`
|
|
|
|
CategoryID int64 `json:"category_id"`
|
|
|
|
UserAgent string `json:"user_agent"`
|
|
|
|
Username string `json:"username"`
|
|
|
|
Password string `json:"password"`
|
|
|
|
Crawler bool `json:"crawler"`
|
|
|
|
Disabled bool `json:"disabled"`
|
|
|
|
IgnoreHTTPCache bool `json:"ignore_http_cache"`
|
|
|
|
FetchViaProxy bool `json:"fetch_via_proxy"`
|
|
|
|
ScraperRules string `json:"scraper_rules"`
|
|
|
|
RewriteRules string `json:"rewrite_rules"`
|
|
|
|
BlocklistRules string `json:"blocklist_rules"`
|
|
|
|
KeeplistRules string `json:"keeplist_rules"`
|
2018-06-20 07:58:29 +02:00
|
|
|
}
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
func decodeFeedCreationRequest(r io.ReadCloser) (*feedCreationRequest, error) {
|
|
|
|
defer r.Close()
|
|
|
|
|
|
|
|
var fc feedCreationRequest
|
|
|
|
decoder := json.NewDecoder(r)
|
|
|
|
if err := decoder.Decode(&fc); err != nil {
|
|
|
|
return nil, fmt.Errorf("Invalid JSON payload: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return &fc, nil
|
2018-06-20 07:58:29 +02:00
|
|
|
}
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
// feedModificationRequest is the payload accepted when updating an
// existing feed. Every field is a pointer so that a JSON key absent
// from the request (nil) can be distinguished from an explicit zero
// value; Update applies only the non-nil fields to the feed.
type feedModificationRequest struct {
	FeedURL         *string `json:"feed_url"`
	SiteURL         *string `json:"site_url"`
	Title           *string `json:"title"`
	ScraperRules    *string `json:"scraper_rules"`
	RewriteRules    *string `json:"rewrite_rules"`
	BlocklistRules  *string `json:"blocklist_rules"`
	KeeplistRules   *string `json:"keeplist_rules"`
	Crawler         *bool   `json:"crawler"`
	UserAgent       *string `json:"user_agent"`
	Username        *string `json:"username"`
	Password        *string `json:"password"`
	CategoryID      *int64  `json:"category_id"`
	Disabled        *bool   `json:"disabled"`
	IgnoreHTTPCache *bool   `json:"ignore_http_cache"`
	FetchViaProxy   *bool   `json:"fetch_via_proxy"`
}
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
func (f *feedModificationRequest) Update(feed *model.Feed) {
|
2018-06-24 01:16:54 +02:00
|
|
|
if f.FeedURL != nil && *f.FeedURL != "" {
|
|
|
|
feed.FeedURL = *f.FeedURL
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.SiteURL != nil && *f.SiteURL != "" {
|
|
|
|
feed.SiteURL = *f.SiteURL
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.Title != nil && *f.Title != "" {
|
|
|
|
feed.Title = *f.Title
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.ScraperRules != nil {
|
|
|
|
feed.ScraperRules = *f.ScraperRules
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.RewriteRules != nil {
|
|
|
|
feed.RewriteRules = *f.RewriteRules
|
|
|
|
}
|
|
|
|
|
2020-10-16 23:40:56 +02:00
|
|
|
if f.KeeplistRules != nil {
|
|
|
|
feed.KeeplistRules = *f.KeeplistRules
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.BlocklistRules != nil {
|
|
|
|
feed.BlocklistRules = *f.BlocklistRules
|
|
|
|
}
|
|
|
|
|
2018-06-24 01:16:54 +02:00
|
|
|
if f.Crawler != nil {
|
|
|
|
feed.Crawler = *f.Crawler
|
|
|
|
}
|
|
|
|
|
2018-09-20 03:19:24 +02:00
|
|
|
if f.UserAgent != nil {
|
|
|
|
feed.UserAgent = *f.UserAgent
|
|
|
|
}
|
|
|
|
|
2018-06-24 01:16:54 +02:00
|
|
|
if f.Username != nil {
|
|
|
|
feed.Username = *f.Username
|
|
|
|
}
|
2017-11-20 06:10:04 +01:00
|
|
|
|
2018-06-24 01:16:54 +02:00
|
|
|
if f.Password != nil {
|
|
|
|
feed.Password = *f.Password
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.CategoryID != nil && *f.CategoryID > 0 {
|
|
|
|
feed.Category.ID = *f.CategoryID
|
|
|
|
}
|
2019-07-27 06:13:06 +02:00
|
|
|
|
|
|
|
if f.Disabled != nil {
|
|
|
|
feed.Disabled = *f.Disabled
|
|
|
|
}
|
2020-12-14 05:31:19 +01:00
|
|
|
|
|
|
|
if f.IgnoreHTTPCache != nil {
|
|
|
|
feed.IgnoreHTTPCache = *f.IgnoreHTTPCache
|
|
|
|
}
|
|
|
|
|
|
|
|
if f.FetchViaProxy != nil {
|
|
|
|
feed.FetchViaProxy = *f.FetchViaProxy
|
|
|
|
}
|
2018-06-24 01:16:54 +02:00
|
|
|
}
|
|
|
|
|
2020-12-14 05:31:19 +01:00
|
|
|
func decodeFeedModificationRequest(r io.ReadCloser) (*feedModificationRequest, error) {
|
|
|
|
defer r.Close()
|
|
|
|
|
|
|
|
var feed feedModificationRequest
|
|
|
|
decoder := json.NewDecoder(r)
|
|
|
|
if err := decoder.Decode(&feed); err != nil {
|
|
|
|
return nil, fmt.Errorf("Unable to decode feed modification JSON object: %v", err)
|
|
|
|
}
|
|
|
|
|
|
|
|
return &feed, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
func decodeEntryStatusRequest(r io.ReadCloser) ([]int64, string, error) {
|
2017-11-20 06:10:04 +01:00
|
|
|
type payload struct {
|
2017-11-25 07:29:20 +01:00
|
|
|
EntryIDs []int64 `json:"entry_ids"`
|
|
|
|
Status string `json:"status"`
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
var p payload
|
2018-04-30 01:35:04 +02:00
|
|
|
decoder := json.NewDecoder(r)
|
|
|
|
defer r.Close()
|
2017-11-20 06:10:04 +01:00
|
|
|
if err := decoder.Decode(&p); err != nil {
|
2017-11-25 07:29:20 +01:00
|
|
|
return nil, "", fmt.Errorf("invalid JSON payload: %v", err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2017-11-25 07:29:20 +01:00
|
|
|
return p.EntryIDs, p.Status, nil
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2020-12-14 03:50:28 +01:00
|
|
|
type categoryRequest struct {
|
|
|
|
Title string `json:"title"`
|
|
|
|
}
|
|
|
|
|
|
|
|
func decodeCategoryRequest(r io.ReadCloser) (*categoryRequest, error) {
|
|
|
|
var payload categoryRequest
|
2017-11-20 06:10:04 +01:00
|
|
|
|
2018-04-30 01:35:04 +02:00
|
|
|
decoder := json.NewDecoder(r)
|
|
|
|
defer r.Close()
|
2020-12-14 03:50:28 +01:00
|
|
|
if err := decoder.Decode(&payload); err != nil {
|
|
|
|
return nil, fmt.Errorf("Unable to decode JSON object: %v", err)
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|
|
|
|
|
2020-12-14 03:50:28 +01:00
|
|
|
return &payload, nil
|
2017-11-20 06:10:04 +01:00
|
|
|
}
|