Add option to allow self-signed or invalid certificates

Frédéric Guillot 2021-02-21 13:42:49 -08:00 committed by fguillot
parent c3f871b49b
commit ec3c604a83
35 changed files with 388 additions and 227 deletions

View file

@@ -32,6 +32,7 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request)
subscriptionDiscoveryRequest.Username,
subscriptionDiscoveryRequest.Password,
subscriptionDiscoveryRequest.FetchViaProxy,
subscriptionDiscoveryRequest.AllowSelfSignedCertificates,
)
if finderErr != nil {
json.ServerError(w, r, finderErr)

View file

@@ -145,7 +145,9 @@ func Parse() {
}
if flagMigrate {
if err := database.Migrate(db); err != nil {
logger.Fatal(`%v`, err)
}
return
}

View file

@@ -100,64 +100,67 @@ type Subscriptions []*Subscription
// Feed represents a Miniflux feed.
type Feed struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
CheckedAt time.Time `json:"checked_at,omitempty"`
EtagHeader string `json:"etag_header,omitempty"`
LastModifiedHeader string `json:"last_modified_header,omitempty"`
ParsingErrorMsg string `json:"parsing_error_message,omitempty"`
ParsingErrorCount int `json:"parsing_error_count,omitempty"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
Crawler bool `json:"crawler"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Category *Category `json:"category,omitempty"`
}
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}
// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
ScraperRules *string `json:"scraper_rules"`
RewriteRules *string `json:"rewrite_rules"`
BlocklistRules *string `json:"blocklist_rules"`
KeeplistRules *string `json:"keeplist_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"`
}
// FeedIcon represents the feed icon.

View file

@@ -10,6 +10,7 @@ import (
var schemaVersion = len(migrations)
// Order is important. Add new migrations at the end of the list.
var migrations = []func(tx *sql.Tx) error{
func(tx *sql.Tx) (err error) {
sql := `
@@ -514,4 +515,10 @@ var migrations = []func(tx *sql.Tx) error{
`)
return err
},
func(tx *sql.Tx) (err error) {
_, err = tx.Exec(`
ALTER TABLE feeds ADD COLUMN allow_self_signed_certificates boolean not null default false
`)
return err
},
}
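For context, each entry in the `migrations` slice runs in order inside its own transaction, which is why appending the new `ALTER TABLE` at the end is safe, and why the CLI fix earlier in this commit now surfaces `database.Migrate` errors instead of discarding them. A hypothetical sketch of such a driver loop (Miniflux's real `Migrate` may differ in details such as the version table):

```go
package database

import (
	"database/sql"
	"fmt"
)

// migrations stands in for the package-level slice shown above.
var migrations []func(tx *sql.Tx) error

// runMigrations applies every pending migration in order. Each one gets its
// own transaction, and the stored schema version only advances after a
// successful commit, so a failed ALTER TABLE leaves the database untouched.
func runMigrations(db *sql.DB, currentVersion int) error {
	for v := currentVersion; v < len(migrations); v++ {
		tx, err := db.Begin()
		if err != nil {
			return err
		}
		if err := migrations[v](tx); err != nil {
			tx.Rollback()
			return fmt.Errorf("migration #%d failed: %w", v+1, err)
		}
		if _, err := tx.Exec(`UPDATE schema_version SET version=$1`, v+1); err != nil {
			tx.Rollback()
			return err
		}
		if err := tx.Commit(); err != nil {
			return err
		}
	}
	return nil
}
```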

View file

@@ -6,6 +6,7 @@ package client // import "miniflux.app/http/client"
import (
"bytes"
"crypto/tls"
"crypto/x509"
"encoding/json"
"fmt"
@@ -50,9 +51,10 @@ type Client struct {
useProxy bool
doNotFollowRedirects bool
ClientTimeout int
ClientMaxBodySize int64
ClientProxyURL string
AllowSelfSignedCertificates bool
}
// New initializes a new HTTP client.
@@ -87,13 +89,14 @@ func (c *Client) String() string {
}
return fmt.Sprintf(
`InputURL=%q RequestURL=%q ETag=%s LastMod=%s Auth=%v UserAgent=%q Verify=%v`,
c.inputURL,
c.requestURL,
etagHeader,
lastModifiedHeader,
c.requestAuthorizationHeader != "" || (c.requestUsername != "" && c.requestPassword != ""),
c.requestUserAgent,
!c.AllowSelfSignedCertificates,
)
}
@@ -288,6 +291,10 @@ func (c *Client) buildClient() http.Client {
IdleConnTimeout: 10 * time.Second,
}
if c.AllowSelfSignedCertificates {
transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
}
if c.doNotFollowRedirects {
client.CheckRedirect = func(req *http.Request, via []*http.Request) error {
return http.ErrUseLastResponse
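The transport tweak above is the heart of this commit: verification stays on by default, and `InsecureSkipVerify` is only set when the feed explicitly opts in. A self-contained sketch of the same pattern (the helper name and URL are illustrative, not from the diff); note that skipping verification disables both chain validation and hostname checks, which is what also lets expired or otherwise invalid certificates through:

```go
package main

import (
	"crypto/tls"
	"fmt"
	"net/http"
	"time"
)

// newHTTPClient mirrors the buildClient pattern: certificate verification is
// enabled unless the caller explicitly allows self-signed certificates.
func newHTTPClient(allowSelfSigned bool) *http.Client {
	transport := &http.Transport{IdleConnTimeout: 10 * time.Second}
	if allowSelfSigned {
		// Disables both chain and hostname verification.
		transport.TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
	}
	return &http.Client{Transport: transport, Timeout: 30 * time.Second}
}

func main() {
	client := newHTTPClient(true)
	resp, err := client.Get("https://self-signed.example.org/feed.xml")
	if err != nil {
		fmt.Println("fetch failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```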

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Blockierregeln",
"form.feed.label.keeplist_rules": "Erlaubnisregeln",
"form.feed.label.ignore_http_cache": "Ignoriere HTTP-cache",
"form.feed.label.allow_self_signed_certificates": "Erlaube selbstsignierte oder ungültige Zertifikate",
"form.feed.label.fetch_via_proxy": "Über Proxy abrufen",
"form.feed.label.disabled": "Dieses Abonnement nicht aktualisieren",
"form.category.label.title": "Titel",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Block Rules",
"form.feed.label.keeplist_rules": "Keep Rules",
"form.feed.label.ignore_http_cache": "Ignore HTTP cache",
"form.feed.label.allow_self_signed_certificates": "Allow self-signed or invalid certificates",
"form.feed.label.fetch_via_proxy": "Fetch via proxy",
"form.feed.label.disabled": "Do not refresh this feed",
"form.category.label.title": "Title",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)",
"form.feed.label.keeplist_rules": "Reglas de Filtrado(Permitir)",
"form.feed.label.ignore_http_cache": "Ignorar caché HTTP",
"form.feed.label.allow_self_signed_certificates": "Permitir certificados autofirmados o no válidos",
"form.feed.label.fetch_via_proxy": "Buscar a través de proxy",
"form.feed.label.disabled": "No actualice este feed",
"form.category.label.title": "Título",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Règles de blocage",
"form.feed.label.keeplist_rules": "Règles d'autorisation",
"form.feed.label.ignore_http_cache": "Ignorer le cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Autoriser les certificats auto-signés ou non valides",
"form.feed.label.fetch_via_proxy": "Récupérer via proxy",
"form.feed.label.disabled": "Ne pas actualiser ce flux",
"form.category.label.title": "Titre",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Regole di blocco",
"form.feed.label.keeplist_rules": "Regole di autorizzazione",
"form.feed.label.ignore_http_cache": "Ignora cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Consenti certificati autofirmati o non validi",
"form.feed.label.fetch_via_proxy": "Recuperare tramite proxy",
"form.feed.label.disabled": "Non aggiornare questo feed",
"form.category.label.title": "Titolo",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "ブロックルール",
"form.feed.label.keeplist_rules": "許可規則",
"form.feed.label.ignore_http_cache": "HTTPキャッシュを無視",
"form.feed.label.allow_self_signed_certificates": "自己署名証明書または無効な証明書を許可する",
"form.feed.label.fetch_via_proxy": "プロキシ経由でフェッチ",
"form.feed.label.disabled": "このフィードを更新しない",
"form.category.label.title": "タイトル",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Blokkeer regels",
"form.feed.label.keeplist_rules": "toestemmingsregels",
"form.feed.label.ignore_http_cache": "Negeer HTTP-cache",
"form.feed.label.allow_self_signed_certificates": "Sta zelfondertekende of ongeldige certificaten toe",
"form.feed.label.fetch_via_proxy": "Ophalen via proxy",
"form.feed.label.disabled": "Vernieuw deze feed niet",
"form.category.label.title": "Naam",

View file

@@ -269,6 +269,7 @@
"form.feed.label.blocklist_rules": "Zasady blokowania",
"form.feed.label.keeplist_rules": "Zasady zezwoleń",
"form.feed.label.ignore_http_cache": "Zignoruj pamięć podręczną HTTP",
"form.feed.label.allow_self_signed_certificates": "Zezwalaj na certyfikaty z podpisem własnym lub nieprawidłowe certyfikaty",
"form.feed.label.fetch_via_proxy": "Pobierz przez proxy",
"form.feed.label.disabled": "Не обновлять этот канал",
"form.category.label.title": "Tytuł",

View file

@@ -267,6 +267,7 @@
"form.feed.label.blocklist_rules": "Regras de bloqueio",
"form.feed.label.keeplist_rules": "Regras de permissão",
"form.feed.label.ignore_http_cache": "Ignorar cache HTTP",
"form.feed.label.allow_self_signed_certificates": "Permitir certificados autoassinados ou inválidos",
"form.feed.label.disabled": "Não atualizar esta fonte",
"form.feed.label.fetch_via_proxy": "Buscar via proxy",
"form.category.label.title": "Título",

View file

@@ -269,6 +269,7 @@
"form.feed.label.blocklist_rules": "Правила блокировки",
"form.feed.label.keeplist_rules": "правила разрешений",
"form.feed.label.ignore_http_cache": "Игнорировать HTTP-кеш",
"form.feed.label.allow_self_signed_certificates": "Разрешить самоподписанные или недействительные сертификаты",
"form.feed.label.fetch_via_proxy": "Получить через прокси",
"form.feed.label.disabled": "Не обновлять этот канал",
"form.category.label.title": "Название",

View file

@@ -265,6 +265,7 @@
"form.feed.label.blocklist_rules": "封锁规则",
"form.feed.label.keeplist_rules": "许可规则",
"form.feed.label.ignore_http_cache": "忽略HTTP缓存",
"form.feed.label.allow_self_signed_certificates": "允许自签名或无效的证书",
"form.feed.label.fetch_via_proxy": "通过代理获取",
"form.feed.label.disabled": "请勿刷新此Feed",
"form.category.label.title": "标题",

View file

@@ -24,33 +24,34 @@ const (
// Feed represents a feed in the application.
type Feed struct {
ID int64 `json:"id"`
UserID int64 `json:"user_id"`
FeedURL string `json:"feed_url"`
SiteURL string `json:"site_url"`
Title string `json:"title"`
CheckedAt time.Time `json:"checked_at"`
NextCheckAt time.Time `json:"next_check_at"`
EtagHeader string `json:"etag_header"`
LastModifiedHeader string `json:"last_modified_header"`
ParsingErrorMsg string `json:"parsing_error_message"`
ParsingErrorCount int `json:"parsing_error_count"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
Crawler bool `json:"crawler"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
Category *Category `json:"category,omitempty"`
Entries Entries `json:"entries,omitempty"`
Icon *FeedIcon `json:"icon"`
UnreadCount int `json:"-"`
ReadCount int `json:"-"`
}
func (f *Feed) String() string {
@@ -117,38 +118,40 @@ func (f *Feed) ScheduleNextCheck(weeklyCount int) {
// FeedCreationRequest represents the request to create a feed.
type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
Disabled bool `json:"disabled"`
IgnoreHTTPCache bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
FetchViaProxy bool `json:"fetch_via_proxy"`
ScraperRules string `json:"scraper_rules"`
RewriteRules string `json:"rewrite_rules"`
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
}
// FeedModificationRequest represents the request to update a feed.
type FeedModificationRequest struct {
FeedURL *string `json:"feed_url"`
SiteURL *string `json:"site_url"`
Title *string `json:"title"`
ScraperRules *string `json:"scraper_rules"`
RewriteRules *string `json:"rewrite_rules"`
BlocklistRules *string `json:"blocklist_rules"`
KeeplistRules *string `json:"keeplist_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`
Disabled *bool `json:"disabled"`
IgnoreHTTPCache *bool `json:"ignore_http_cache"`
AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
FetchViaProxy *bool `json:"fetch_via_proxy"`
}
// Patch updates a feed with modified values.
@@ -209,6 +212,10 @@ func (f *FeedModificationRequest) Patch(feed *Feed) {
feed.IgnoreHTTPCache = *f.IgnoreHTTPCache
}
if f.AllowSelfSignedCertificates != nil {
feed.AllowSelfSignedCertificates = *f.AllowSelfSignedCertificates
}
if f.FetchViaProxy != nil {
feed.FetchViaProxy = *f.FetchViaProxy
}
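`FeedModificationRequest` uses pointer fields (`*bool` rather than `bool`) so Patch can tell a JSON key that was omitted apart from one explicitly set to `false`; only non-nil fields are applied. A minimal standalone illustration of the pattern, with hypothetical stand-in types:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type feed struct {
	AllowSelfSignedCertificates bool
}

// An absent JSON key decodes to nil and is skipped by patch, while an
// explicit false is still applied.
type modificationRequest struct {
	AllowSelfSignedCertificates *bool `json:"allow_self_signed_certificates"`
}

func (r *modificationRequest) patch(f *feed) {
	if r.AllowSelfSignedCertificates != nil {
		f.AllowSelfSignedCertificates = *r.AllowSelfSignedCertificates
	}
}

func main() {
	f := feed{AllowSelfSignedCertificates: true}

	var omitted modificationRequest
	json.Unmarshal([]byte(`{}`), &omitted)
	omitted.patch(&f)
	fmt.Println(f.AllowSelfSignedCertificates) // true: key omitted, old value kept

	var explicit modificationRequest
	json.Unmarshal([]byte(`{"allow_self_signed_certificates": false}`), &explicit)
	explicit.patch(&f)
	fmt.Println(f.AllowSelfSignedCertificates) // false: key present, value applied
}
```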

View file

@@ -6,9 +6,10 @@ package model // import "miniflux.app/model"
// SubscriptionDiscoveryRequest represents a request to discover subscriptions.
type SubscriptionDiscoveryRequest struct {
URL string `json:"url"`
UserAgent string `json:"user_agent"`
Username string `json:"username"`
Password string `json:"password"`
FetchViaProxy bool `json:"fetch_via_proxy"`
AllowSelfSignedCertificates bool `json:"allow_self_signed_certificates"`
}
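For API consumers, the discovery payload simply gains one more boolean key. A quick sketch of the resulting wire format (the struct mirrors the model above; endpoint routing and authentication are out of scope here):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors SubscriptionDiscoveryRequest above.
type subscriptionDiscoveryRequest struct {
	URL                         string `json:"url"`
	UserAgent                   string `json:"user_agent"`
	Username                    string `json:"username"`
	Password                    string `json:"password"`
	FetchViaProxy               bool   `json:"fetch_via_proxy"`
	AllowSelfSignedCertificates bool   `json:"allow_self_signed_certificates"`
}

func main() {
	payload, _ := json.Marshal(subscriptionDiscoveryRequest{
		URL:                         "https://self-signed.example.org/",
		AllowSelfSignedCertificates: true,
	})
	fmt.Println(string(payload))
}
```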

View file

@@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
request.WithUserAgent(feedCreationRequest.UserAgent)
request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
if feedCreationRequest.FetchViaProxy {
request.WithProxy()
@@ -65,6 +66,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
subscription.Crawler = feedCreationRequest.Crawler
subscription.Disabled = feedCreationRequest.Disabled
subscription.IgnoreHTTPCache = feedCreationRequest.IgnoreHTTPCache
subscription.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
subscription.FetchViaProxy = feedCreationRequest.FetchViaProxy
subscription.ScraperRules = feedCreationRequest.ScraperRules
subscription.RewriteRules = feedCreationRequest.RewriteRules
@@ -82,7 +84,13 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
logger.Debug("[CreateFeed] Feed saved with ID: %d", subscription.ID)
checkFeedIcon(
store,
subscription.ID,
subscription.SiteURL,
feedCreationRequest.FetchViaProxy,
feedCreationRequest.AllowSelfSignedCertificates,
)
return subscription, nil
}
@@ -116,6 +124,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
request.WithCredentials(originalFeed.Username, originalFeed.Password)
request.WithUserAgent(originalFeed.UserAgent)
request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
if !originalFeed.IgnoreHTTPCache {
request.WithCacheHeaders(originalFeed.EtagHeader, originalFeed.LastModifiedHeader)
@@ -162,7 +171,13 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
// We update caching headers only if the feed has been modified,
// because some websites don't return the same headers when replying with a 304.
originalFeed.WithClientResponse(response)
checkFeedIcon(
store,
originalFeed.ID,
originalFeed.SiteURL,
originalFeed.FetchViaProxy,
originalFeed.AllowSelfSignedCertificates,
)
} else {
logger.Debug("[RefreshFeed] Feed #%d not modified", feedID)
}
@@ -178,9 +193,9 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
return nil
}
func checkFeedIcon(store *storage.Storage, feedID int64, websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) {
if !store.HasIcon(feedID) {
icon, err := icon.FindIcon(websiteURL, fetchViaProxy, allowSelfSignedCertificates)
if err != nil {
logger.Debug(`[CheckFeedIcon] %v (feedID=%d websiteURL=%s)`, err, feedID, websiteURL)
} else if icon == nil {

View file

@@ -21,12 +21,14 @@ import (
)
// FindIcon tries to find the website's icon.
func FindIcon(websiteURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
rootURL := url.RootURL(websiteURL)
clt := client.NewClientWithConfig(rootURL, config.Opts)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy {
clt.WithProxy()
}
response, err := clt.Get()
if err != nil {
return nil, fmt.Errorf("unable to download website index page: %v", err)
@@ -46,7 +48,7 @@ func FindIcon(websiteURL string, fetchViaProxy bool) (*model.Icon, error) {
}
logger.Debug("[FindIcon] Fetching icon => %s", iconURL)
icon, err := downloadIcon(iconURL, fetchViaProxy, allowSelfSignedCertificates)
if err != nil {
return nil, err
}
@@ -89,8 +91,9 @@ func parseDocument(websiteURL string, data io.Reader) (string, error) {
return iconURL, nil
}
func downloadIcon(iconURL string, fetchViaProxy, allowSelfSignedCertificates bool) (*model.Icon, error) {
clt := client.NewClientWithConfig(iconURL, config.Opts)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy {
clt.WithProxy()
}

View file

@@ -50,7 +50,12 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
logger.Debug("[Processor] Crawling entry %q from feed %q", entry.URL, feed.FeedURL)
startTime := time.Now()
content, scraperErr := scraper.Fetch(
entry.URL,
feed.ScraperRules,
feed.UserAgent,
feed.AllowSelfSignedCertificates,
)
if config.Opts.HasMetricsCollector() {
status := "success"
@@ -118,9 +123,15 @@ func isAllowedEntry(feed *model.Feed, entry *model.Entry) bool {
}
// ProcessEntryWebPage downloads the entry web page and applies rewrite rules.
func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
startTime := time.Now()
content, scraperErr := scraper.Fetch(
entry.URL,
entry.Feed.ScraperRules,
entry.Feed.UserAgent,
feed.AllowSelfSignedCertificates,
)
if config.Opts.HasMetricsCollector() {
status := "success"
if scraperErr != nil {

View file

@@ -20,11 +20,10 @@ import (
)
// Fetch downloads a web page and returns relevant contents.
func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) {
clt := client.NewClientWithConfig(websiteURL, config.Opts)
if userAgent != "" {
clt.WithUserAgent(userAgent)
}
clt.WithUserAgent(userAgent)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
response, err := clt.Get()
if err != nil {

View file

@@ -27,13 +27,14 @@ var (
)
// FindSubscriptions downloads and tries to find one or more subscriptions from a URL.
func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
websiteURL = findYoutubeChannelFeed(websiteURL)
websiteURL = parseYoutubeVideoPage(websiteURL)
clt := client.NewClientWithConfig(websiteURL, config.Opts)
clt.WithCredentials(username, password)
clt.WithUserAgent(userAgent)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy {
clt.WithProxy()

View file

@@ -201,10 +201,11 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
blocklist_rules,
keeplist_rules,
ignore_http_cache,
allow_self_signed_certificates,
fetch_via_proxy
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
RETURNING
id
`
@@ -227,6 +228,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
feed.BlocklistRules,
feed.KeeplistRules,
feed.IgnoreHTTPCache,
feed.AllowSelfSignedCertificates,
feed.FetchViaProxy,
).Scan(&feed.ID)
if err != nil {
@@ -283,9 +285,10 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
disabled=$18,
next_check_at=$19,
ignore_http_cache=$20,
allow_self_signed_certificates=$21,
fetch_via_proxy=$22
WHERE
id=$23 AND user_id=$24
`
_, err = s.db.Exec(query,
feed.FeedURL,
@@ -308,6 +311,7 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
feed.Disabled,
feed.NextCheckAt,
feed.IgnoreHTTPCache,
feed.AllowSelfSignedCertificates,
feed.FetchViaProxy,
feed.ID,
feed.UserID,
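Note the renumbering above: inserting `allow_self_signed_certificates=$21` shifts `fetch_via_proxy` to `$22` and the `WHERE` clause to `$23`/`$24`, so the `Exec` argument list must stay in exactly the same order. A hypothetical guard against the classic drift bug in hand-numbered queries:

```go
package main

import (
	"fmt"
	"regexp"
)

// placeholderCount counts the distinct $N markers in a hand-numbered
// PostgreSQL query so a test can assert it matches len(args).
func placeholderCount(query string) int {
	seen := map[string]struct{}{}
	for _, m := range regexp.MustCompile(`\$\d+`).FindAllString(query, -1) {
		seen[m] = struct{}{}
	}
	return len(seen)
}

func main() {
	query := `UPDATE feeds SET allow_self_signed_certificates=$1, fetch_via_proxy=$2 WHERE id=$3 AND user_id=$4`
	args := []interface{}{true, false, int64(42), int64(7)}
	if placeholderCount(query) != len(args) {
		panic("placeholder count and argument count have drifted apart")
	}
	fmt.Println("placeholders and arguments are in sync:", len(args))
}
```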

View file

@@ -162,6 +162,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
f.username,
f.password,
f.ignore_http_cache,
f.allow_self_signed_certificates,
f.fetch_via_proxy,
f.disabled,
f.category_id,
@@ -220,6 +221,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
&feed.Username,
&feed.Password,
&feed.IgnoreHTTPCache,
&feed.AllowSelfSignedCertificates,
&feed.FetchViaProxy,
&feed.Disabled,
&feed.Category.ID,

View file

@@ -30,6 +30,8 @@
<summary>{{ t "page.add_feed.legend.advanced_options" }}</summary>
<div class="details-content">
<label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label>
<label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
{{ if .hasProxyConfigured }}
<label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
{{ end }}

View file

@@ -20,6 +20,9 @@
{{ if .form.Crawler }}
<input type="hidden" name="crawler" value="1">
{{ end }}
{{ if .form.AllowSelfSignedCertificates }}
<input type="hidden" name="allow_self_signed_certificates" value="1">
{{ end }}
<h3>{{ t "page.add_feed.choose_feed" }}</h3>

View file

@@ -79,6 +79,7 @@
<label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label>
<label><input type="checkbox" name="ignore_http_cache" value="1" {{ if .form.IgnoreHTTPCache }}checked{{ end }}> {{ t "form.feed.label.ignore_http_cache" }}</label>
<label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
{{ if .hasProxyConfigured }}
<label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
{{ end }}

View file

@@ -171,6 +171,37 @@ func TestCreateFeedWithCrawlerEnabled(t *testing.T) {
}
}
func TestCreateFeedWithSelfSignedCertificatesAllowed(t *testing.T) {
client := createClient(t)
categories, err := client.Categories()
if err != nil {
t.Fatal(err)
}
feedID, err := client.CreateFeed(&miniflux.FeedCreationRequest{
FeedURL: testFeedURL,
CategoryID: categories[0].ID,
AllowSelfSignedCertificates: true,
})
if err != nil {
t.Fatal(err)
}
if feedID == 0 {
t.Fatalf(`Invalid feed ID, got %d`, feedID)
}
feed, err := client.Feed(feedID)
if err != nil {
t.Fatal(err)
}
if !feed.AllowSelfSignedCertificates {
t.Error(`The feed should have self-signed certificates enabled`)
}
}
func TestCreateFeedWithScraperRule(t *testing.T) {
client := createClient(t)
@@ -375,6 +406,31 @@ func TestUpdateFeedCrawler(t *testing.T) {
}
}
func TestUpdateFeedAllowSelfSignedCertificates(t *testing.T) {
client := createClient(t)
feed, _ := createFeed(t, client)
selfSigned := true
updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
if err != nil {
t.Fatal(err)
}
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
}
selfSigned = false
updatedFeed, err = client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{AllowSelfSignedCertificates: &selfSigned})
if err != nil {
t.Fatal(err)
}
if updatedFeed.AllowSelfSignedCertificates != selfSigned {
t.Fatalf(`Wrong AllowSelfSignedCertificates value, got "%v" instead of "%v"`, updatedFeed.AllowSelfSignedCertificates, selfSigned)
}
}
func TestUpdateFeedScraperRules(t *testing.T) {
client := createClient(t)
feed, _ := createFeed(t, client)

View file

@@ -12,15 +12,18 @@ import (
"miniflux.app/model"
"miniflux.app/proxy"
"miniflux.app/reader/processor"
"miniflux.app/storage"
)
func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
loggedUserID := request.UserID(r)
entryID := request.RouteInt64Param(r, "entryID")
entryBuilder := h.store.NewEntryQueryBuilder(loggedUserID)
entryBuilder.WithEntryID(entryID)
entryBuilder.WithoutStatus(model.EntryStatusRemoved)
entry, err := entryBuilder.GetEntry()
if err != nil {
json.ServerError(w, r, err)
return
@@ -31,7 +34,20 @@ func (h *handler) fetchContent(w http.ResponseWriter, r *http.Request) {
return
}
feedBuilder := storage.NewFeedQueryBuilder(h.store, loggedUserID)
feedBuilder.WithFeedID(entry.FeedID)
feed, err := feedBuilder.GetFeed()
if err != nil {
json.ServerError(w, r, err)
return
}
if feed == nil {
json.NotFound(w, r)
return
}
if err := processor.ProcessEntryWebPage(feed, entry); err != nil {
json.ServerError(w, r, err)
return
}

View file

@@ -41,21 +41,22 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) {
}
feedForm := form.FeedForm{
SiteURL: feed.SiteURL,
FeedURL: feed.FeedURL,
Title: feed.Title,
ScraperRules: feed.ScraperRules,
RewriteRules: feed.RewriteRules,
BlocklistRules: feed.BlocklistRules,
KeeplistRules: feed.KeeplistRules,
Crawler: feed.Crawler,
UserAgent: feed.UserAgent,
CategoryID: feed.Category.ID,
Username: feed.Username,
Password: feed.Password,
IgnoreHTTPCache: feed.IgnoreHTTPCache,
AllowSelfSignedCertificates: feed.AllowSelfSignedCertificates,
FetchViaProxy: feed.FetchViaProxy,
Disabled: feed.Disabled,
}
sess := session.New(h.store, request.SessionID(r))

View file

@@ -13,21 +13,22 @@ import (
// FeedForm represents a feed form in the UI
type FeedForm struct {
FeedURL string
SiteURL string
Title string
ScraperRules string
RewriteRules string
BlocklistRules string
KeeplistRules string
Crawler bool
UserAgent string
CategoryID int64
Username string
Password string
IgnoreHTTPCache bool
AllowSelfSignedCertificates bool
FetchViaProxy bool
Disabled bool
}
// Merge updates the fields of the given feed.
@@ -47,6 +48,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
feed.Username = f.Username
feed.Password = f.Password
feed.IgnoreHTTPCache = f.IgnoreHTTPCache
feed.AllowSelfSignedCertificates = f.AllowSelfSignedCertificates
feed.FetchViaProxy = f.FetchViaProxy
feed.Disabled = f.Disabled
return feed
@@ -59,20 +61,21 @@ func NewFeedForm(r *http.Request) *FeedForm {
categoryID = 0
}
return &FeedForm{
FeedURL: r.FormValue("feed_url"),
SiteURL: r.FormValue("site_url"),
Title: r.FormValue("title"),
ScraperRules: r.FormValue("scraper_rules"),
UserAgent: r.FormValue("user_agent"),
RewriteRules: r.FormValue("rewrite_rules"),
BlocklistRules: r.FormValue("blocklist_rules"),
KeeplistRules: r.FormValue("keeplist_rules"),
Crawler: r.FormValue("crawler") == "1",
CategoryID: int64(categoryID),
Username: r.FormValue("feed_username"),
Password: r.FormValue("feed_password"),
IgnoreHTTPCache: r.FormValue("ignore_http_cache") == "1",
AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
FetchViaProxy: r.FormValue("fetch_via_proxy") == "1",
Disabled: r.FormValue("disabled") == "1",
}
}

View file

@@ -14,17 +14,18 @@ import (
// SubscriptionForm represents the subscription form.
type SubscriptionForm struct {
URL string
CategoryID int64
Crawler bool
FetchViaProxy bool
AllowSelfSignedCertificates bool
UserAgent string
Username string
Password string
ScraperRules string
RewriteRules string
BlocklistRules string
KeeplistRules string
}
// Validate makes sure the form values are valid.
@@ -56,16 +57,17 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
}
return &SubscriptionForm{
URL: r.FormValue("url"),
CategoryID: int64(categoryID),
Crawler: r.FormValue("crawler") == "1",
AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
FetchViaProxy: r.FormValue("fetch_via_proxy") == "1",
UserAgent: r.FormValue("user_agent"),
Username: r.FormValue("feed_username"),
Password: r.FormValue("feed_password"),
ScraperRules: r.FormValue("scraper_rules"),
RewriteRules: r.FormValue("rewrite_rules"),
BlocklistRules: r.FormValue("blocklist_rules"),
KeeplistRules: r.FormValue("keeplist_rules"),
}
}

View file

@@ -50,17 +50,18 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
}
feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{
CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptionForm.URL,
Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,
RewriteRules: subscriptionForm.RewriteRules,
BlocklistRules: subscriptionForm.BlocklistRules,
KeeplistRules: subscriptionForm.KeeplistRules,
FetchViaProxy: subscriptionForm.FetchViaProxy,
})
if err != nil {
view.Set("form", subscriptionForm)

View file

@@ -58,6 +58,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
subscriptionForm.Username,
subscriptionForm.Password,
subscriptionForm.FetchViaProxy,
subscriptionForm.AllowSelfSignedCertificates,
)
if findErr != nil {
logger.Error("[UI:SubmitSubscription] %s", findErr)
@@ -77,17 +78,18 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
html.OK(w, r, v.Render("add_subscription"))
case n == 1:
feed, err := feedHandler.CreateFeed(h.store, user.ID, &model.FeedCreationRequest{
CategoryID: subscriptionForm.CategoryID,
FeedURL: subscriptions[0].URL,
Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,
RewriteRules: subscriptionForm.RewriteRules,
BlocklistRules: subscriptionForm.BlocklistRules,
KeeplistRules: subscriptionForm.KeeplistRules,
FetchViaProxy: subscriptionForm.FetchViaProxy,
})
if err != nil {
v.Set("form", subscriptionForm)