Add per feed cookies option

This commit is contained in:
Darius 2021-03-23 04:27:58 +01:00 committed by GitHub
parent b0c14aa8f9
commit 9242350f0e
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
34 changed files with 114 additions and 15 deletions

View file

@ -29,6 +29,7 @@ func (h *handler) discoverSubscriptions(w http.ResponseWriter, r *http.Request)
subscriptions, finderErr := subscription.FindSubscriptions(
subscriptionDiscoveryRequest.URL,
subscriptionDiscoveryRequest.UserAgent,
subscriptionDiscoveryRequest.Cookie,
subscriptionDiscoveryRequest.Username,
subscriptionDiscoveryRequest.Password,
subscriptionDiscoveryRequest.FetchViaProxy,

View file

@ -122,6 +122,7 @@ type Feed struct {
KeeplistRules string `json:"keeplist_rules"`
Crawler bool `json:"crawler"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Category *Category `json:"category,omitempty"`
@ -132,6 +133,7 @@ type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
@ -156,6 +158,7 @@ type FeedModificationRequest struct {
KeeplistRules *string `json:"keeplist_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Cookie *string `json:"cookie"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`

View file

@ -529,4 +529,9 @@ var migrations = []func(tx *sql.Tx) error{
_, err = tx.Exec(sql)
return err
},
func(tx *sql.Tx) (err error) {
	// Add the per-feed "cookie" column used to send a custom Cookie
	// header when fetching the feed. Defaults to an empty string so
	// existing rows keep their current behavior.
	// Named "query" instead of "sql" to avoid shadowing the
	// database/sql package inside this migration.
	query := `ALTER TABLE feeds ADD COLUMN cookie text default ''`
	_, err = tx.Exec(query)
	return err
},
}

View file

@ -47,6 +47,7 @@ type Client struct {
requestUsername string
requestPassword string
requestUserAgent string
requestCookie string
useProxy bool
doNotFollowRedirects bool
@ -142,6 +143,14 @@ func (c *Client) WithUserAgent(userAgent string) *Client {
return c
}
// WithCookie sets the raw Cookie header value sent with each HTTP request.
// An empty value is ignored, leaving any previously configured cookie intact.
func (c *Client) WithCookie(cookie string) *Client {
	if cookie == "" {
		return c
	}
	c.requestCookie = cookie
	return c
}
// Get performs a GET HTTP request.
func (c *Client) Get() (*Response, error) {
request, err := c.buildRequest(http.MethodGet, nil)
@ -336,6 +345,10 @@ func (c *Client) buildHeaders() http.Header {
headers.Add("Authorization", c.requestAuthorizationHeader)
}
if c.requestCookie != "" {
headers.Add("Cookie", c.requestCookie)
}
headers.Add("Connection", "close")
return headers
}

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Benutzername des Abonnements",
"form.feed.label.feed_password": "Passwort des Abonnements",
"form.feed.label.user_agent": "Standardbenutzeragenten überschreiben",
"form.feed.label.cookie": "Cookies setzen",
"form.feed.label.scraper_rules": "Extraktionsregeln",
"form.feed.label.rewrite_rules": "Umschreiberegeln",
"form.feed.label.blocklist_rules": "Blockierregeln",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Feed Username",
"form.feed.label.feed_password": "Feed Password",
"form.feed.label.user_agent": "Override Default User Agent",
"form.feed.label.cookie": "Set Cookies",
"form.feed.label.scraper_rules": "Scraper Rules",
"form.feed.label.rewrite_rules": "Rewrite Rules",
"form.feed.label.blocklist_rules": "Block Rules",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Nombre de usuario de fuente",
"form.feed.label.feed_password": "Contraseña de fuente",
"form.feed.label.user_agent": "Invalidar el agente de usuario predeterminado",
"form.feed.label.cookie": "Configurar las cookies",
"form.feed.label.scraper_rules": "Reglas de raspador",
"form.feed.label.rewrite_rules": "Reglas de reescribir",
"form.feed.label.blocklist_rules": "Reglas de Filtrado(Bloquear)",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Nom d'utilisateur du flux",
"form.feed.label.feed_password": "Mot de passe du flux",
"form.feed.label.user_agent": "Remplacer l'agent utilisateur par défaut",
"form.feed.label.cookie": "Définir les cookies",
"form.feed.label.scraper_rules": "Règles pour récupérer le contenu original",
"form.feed.label.rewrite_rules": "Règles de réécriture",
"form.feed.label.blocklist_rules": "Règles de blocage",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Nome utente del feed",
"form.feed.label.feed_password": "Password del feed",
"form.feed.label.user_agent": "Usa user agent personalizzato",
"form.feed.label.cookie": "Impostare i cookie",
"form.feed.label.scraper_rules": "Regole di estrazione del contenuto",
"form.feed.label.rewrite_rules": "Regole di impaginazione del contenuto",
"form.feed.label.blocklist_rules": "Regole di blocco",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "フィードのユーザー名",
"form.feed.label.feed_password": "フィードのパスワード",
"form.feed.label.user_agent": "ディフォルトの User Agent を上書きする",
"form.feed.label.cookie": "クッキーの設定",
"form.feed.label.scraper_rules": "スクラップルール",
"form.feed.label.rewrite_rules": "Rewrite ルール",
"form.feed.label.blocklist_rules": "ブロックルール",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Feed-gebruikersnaam",
"form.feed.label.feed_password": "Feed wachtwoord",
"form.feed.label.user_agent": "Standaard User Agent overschrijven",
"form.feed.label.cookie": "Cookies instellen",
"form.feed.label.scraper_rules": "Scraper regels",
"form.feed.label.rewrite_rules": "Rewrite regels",
"form.feed.label.blocklist_rules": "Blokkeer regels",

View file

@ -268,6 +268,7 @@
"form.feed.label.feed_username": "Subskrypcję nazwa użytkownika",
"form.feed.label.feed_password": "Subskrypcję Hasło",
"form.feed.label.user_agent": "Zastąp domyślny agent użytkownika",
"form.feed.label.cookie": "Ustawianie ciasteczek",
"form.feed.label.scraper_rules": "Zasady ekstrakcji",
"form.feed.label.rewrite_rules": "Reguły zapisu",
"form.feed.label.blocklist_rules": "Zasady blokowania",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Nome de usuário da fonte",
"form.feed.label.feed_password": "Senha da fonte",
"form.feed.label.user_agent": "Sobrescrever o agente de usuário (user-agent) padrão",
"form.feed.label.cookie": "Definir Cookies",
"form.feed.label.scraper_rules": "Regras do scraper",
"form.feed.label.rewrite_rules": "Regras para o Rewrite",
"form.feed.label.blocklist_rules": "Regras de bloqueio",

View file

@ -268,6 +268,7 @@
"form.feed.label.feed_username": "Имя пользователя подписки",
"form.feed.label.feed_password": "Пароль подписки",
"form.feed.label.user_agent": "Переопределить User Agent по умолчанию",
"form.feed.label.cookie": "Установить куки",
"form.feed.label.scraper_rules": "Правила Scraper",
"form.feed.label.rewrite_rules": "Правила Rewrite",
"form.feed.label.blocklist_rules": "Правила блокировки",

View file

@ -266,6 +266,7 @@
"form.feed.label.feed_username": "Besleme Kullanıcı Adı",
"form.feed.label.feed_password": "Besleme Parolası",
"form.feed.label.user_agent": "Varsayılan User Agent'i Geçersiz Kıl",
"form.feed.label.cookie": "Çerezleri Ayarla",
"form.feed.label.scraper_rules": "Scrapper Kuralları",
"form.feed.label.rewrite_rules": "Yeniden Yazma Kuralları",
"form.feed.label.blocklist_rules": "Engelleme Kuralları",

View file

@ -264,6 +264,7 @@
"form.feed.label.feed_username": "源用户名",
"form.feed.label.feed_password": "源密码",
"form.feed.label.user_agent": "覆盖默认 User-Agent",
"form.feed.label.cookie": "设置 Cookies",
"form.feed.label.scraper_rules": "Scraper 规则",
"form.feed.label.rewrite_rules": "重写规则",
"form.feed.label.blocklist_rules": "封锁规则",

View file

@ -41,6 +41,7 @@ type Feed struct {
BlocklistRules string `json:"blocklist_rules"`
KeeplistRules string `json:"keeplist_rules"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Disabled bool `json:"disabled"`
@ -121,6 +122,7 @@ type FeedCreationRequest struct {
FeedURL string `json:"feed_url"`
CategoryID int64 `json:"category_id"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
Crawler bool `json:"crawler"`
@ -145,6 +147,7 @@ type FeedModificationRequest struct {
KeeplistRules *string `json:"keeplist_rules"`
Crawler *bool `json:"crawler"`
UserAgent *string `json:"user_agent"`
Cookie *string `json:"cookie"`
Username *string `json:"username"`
Password *string `json:"password"`
CategoryID *int64 `json:"category_id"`
@ -192,6 +195,10 @@ func (f *FeedModificationRequest) Patch(feed *Feed) {
feed.UserAgent = *f.UserAgent
}
if f.Cookie != nil {
feed.Cookie = *f.Cookie
}
if f.Username != nil {
feed.Username = *f.Username
}

View file

@ -8,6 +8,7 @@ package model // import "miniflux.app/model"
type SubscriptionDiscoveryRequest struct {
URL string `json:"url"`
UserAgent string `json:"user_agent"`
Cookie string `json:"cookie"`
Username string `json:"username"`
Password string `json:"password"`
FetchViaProxy bool `json:"fetch_via_proxy"`

View file

@ -39,6 +39,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
request := client.NewClientWithConfig(feedCreationRequest.FeedURL, config.Opts)
request.WithCredentials(feedCreationRequest.Username, feedCreationRequest.Password)
request.WithUserAgent(feedCreationRequest.UserAgent)
request.WithCookie(feedCreationRequest.Cookie)
request.AllowSelfSignedCertificates = feedCreationRequest.AllowSelfSignedCertificates
if feedCreationRequest.FetchViaProxy {
@ -61,6 +62,7 @@ func CreateFeed(store *storage.Storage, userID int64, feedCreationRequest *model
subscription.UserID = userID
subscription.UserAgent = feedCreationRequest.UserAgent
subscription.Cookie = feedCreationRequest.Cookie
subscription.Username = feedCreationRequest.Username
subscription.Password = feedCreationRequest.Password
subscription.Crawler = feedCreationRequest.Crawler
@ -124,6 +126,7 @@ func RefreshFeed(store *storage.Storage, userID, feedID int64) error {
request := client.NewClientWithConfig(originalFeed.FeedURL, config.Opts)
request.WithCredentials(originalFeed.Username, originalFeed.Password)
request.WithUserAgent(originalFeed.UserAgent)
request.WithCookie(originalFeed.Cookie)
request.AllowSelfSignedCertificates = originalFeed.AllowSelfSignedCertificates
if !originalFeed.IgnoreHTTPCache {

View file

@ -54,6 +54,7 @@ func ProcessFeedEntries(store *storage.Storage, feed *model.Feed) {
entry.URL,
feed.ScraperRules,
feed.UserAgent,
feed.Cookie,
feed.AllowSelfSignedCertificates,
)
@ -115,6 +116,7 @@ func ProcessEntryWebPage(feed *model.Feed, entry *model.Entry) error {
entry.URL,
entry.Feed.ScraperRules,
entry.Feed.UserAgent,
entry.Feed.Cookie,
feed.AllowSelfSignedCertificates,
)

View file

@ -20,9 +20,10 @@ import (
)
// Fetch downloads a web page and returns relevant contents.
func Fetch(websiteURL, rules, userAgent string, allowSelfSignedCertificates bool) (string, error) {
func Fetch(websiteURL, rules, userAgent string, cookie string, allowSelfSignedCertificates bool) (string, error) {
clt := client.NewClientWithConfig(websiteURL, config.Opts)
clt.WithUserAgent(userAgent)
clt.WithCookie(cookie)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
response, err := clt.Get()

View file

@ -27,13 +27,14 @@ var (
)
// FindSubscriptions downloads and try to find one or more subscriptions from an URL.
func FindSubscriptions(websiteURL, userAgent, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
func FindSubscriptions(websiteURL, userAgent, cookie, username, password string, fetchViaProxy, allowSelfSignedCertificates bool) (Subscriptions, *errors.LocalizedError) {
websiteURL = findYoutubeChannelFeed(websiteURL)
websiteURL = parseYoutubeVideoPage(websiteURL)
clt := client.NewClientWithConfig(websiteURL, config.Opts)
clt.WithCredentials(username, password)
clt.WithUserAgent(userAgent)
clt.WithCookie(cookie)
clt.AllowSelfSignedCertificates = allowSelfSignedCertificates
if fetchViaProxy {
@ -62,7 +63,7 @@ func FindSubscriptions(websiteURL, userAgent, username, password string, fetchVi
return subscriptions, err
}
return tryWellKnownUrls(websiteURL, userAgent, username, password)
return tryWellKnownUrls(websiteURL, userAgent, cookie, username, password)
}
func parseWebPage(websiteURL string, data io.Reader) (Subscriptions, *errors.LocalizedError) {
@ -138,7 +139,7 @@ func parseYoutubeVideoPage(websiteURL string) string {
return websiteURL
}
func tryWellKnownUrls(websiteURL, userAgent, username, password string) (Subscriptions, *errors.LocalizedError) {
func tryWellKnownUrls(websiteURL, userAgent, cookie, username, password string) (Subscriptions, *errors.LocalizedError) {
var subscriptions Subscriptions
knownURLs := map[string]string{
"/atom.xml": "atom",
@ -161,6 +162,7 @@ func tryWellKnownUrls(websiteURL, userAgent, username, password string) (Subscri
clt := client.NewClientWithConfig(fullURL, config.Opts)
clt.WithCredentials(username, password)
clt.WithUserAgent(userAgent)
clt.WithCookie(cookie)
// Some websites redirects unknown URLs to the home page.
// As result, the list of known URLs is returned to the subscription list.

View file

@ -242,6 +242,7 @@ func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
f.rewrite_rules,
f.crawler,
f.user_agent,
f.cookie,
fi.icon_id,
u.timezone
FROM
@ -303,6 +304,7 @@ func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
&entry.Feed.RewriteRules,
&entry.Feed.Crawler,
&entry.Feed.UserAgent,
&entry.Feed.Cookie,
&iconID,
&tz,
)

View file

@ -195,6 +195,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
last_modified_header,
crawler,
user_agent,
cookie,
username,
password,
disabled,
@ -207,7 +208,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
fetch_via_proxy
)
VALUES
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19)
($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16, $17, $18, $19, $20)
RETURNING
id
`
@ -222,6 +223,7 @@ func (s *Storage) CreateFeed(feed *model.Feed) error {
feed.LastModifiedHeader,
feed.Crawler,
feed.UserAgent,
feed.Cookie,
feed.Username,
feed.Password,
feed.Disabled,
@ -282,15 +284,16 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
keeplist_rules=$13,
crawler=$14,
user_agent=$15,
username=$16,
password=$17,
disabled=$18,
next_check_at=$19,
ignore_http_cache=$20,
allow_self_signed_certificates=$21,
fetch_via_proxy=$22
cookie=$16,
username=$17,
password=$18,
disabled=$19,
next_check_at=$20,
ignore_http_cache=$21,
allow_self_signed_certificates=$22,
fetch_via_proxy=$23
WHERE
id=$23 AND user_id=$24
id=$24 AND user_id=$25
`
_, err = s.db.Exec(query,
feed.FeedURL,
@ -308,6 +311,7 @@ func (s *Storage) UpdateFeed(feed *model.Feed) (err error) {
feed.KeeplistRules,
feed.Crawler,
feed.UserAgent,
feed.Cookie,
feed.Username,
feed.Password,
feed.Disabled,

View file

@ -159,6 +159,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
f.keeplist_rules,
f.crawler,
f.user_agent,
f.cookie,
f.username,
f.password,
f.ignore_http_cache,
@ -218,6 +219,7 @@ func (f *FeedQueryBuilder) GetFeeds() (model.Feeds, error) {
&feed.KeeplistRules,
&feed.Crawler,
&feed.UserAgent,
&feed.Cookie,
&feed.Username,
&feed.Password,
&feed.IgnoreHTTPCache,

View file

@ -31,7 +31,7 @@
<div class="details-content">
<label><input type="checkbox" name="crawler" value="1" {{ if .form.Crawler }}checked{{ end }}> {{ t "form.feed.label.crawler" }}</label>
<label><input type="checkbox" name="allow_self_signed_certificates" value="1" {{ if .form.AllowSelfSignedCertificates }}checked{{ end }}> {{ t "form.feed.label.allow_self_signed_certificates" }}</label>
{{ if .hasProxyConfigured }}
<label><input type="checkbox" name="fetch_via_proxy" value="1" {{ if .form.FetchViaProxy }}checked{{ end }}> {{ t "form.feed.label.fetch_via_proxy" }}</label>
{{ end }}
@ -39,6 +39,9 @@
<label for="form-user-agent">{{ t "form.feed.label.user_agent" }}</label>
<input type="text" name="user_agent" id="form-user-agent" placeholder="{{ .defaultUserAgent }}" value="{{ .form.UserAgent }}" spellcheck="false" autocomplete="off">
<label for="form-cookie">{{ t "form.feed.label.cookie" }}</label>
<input type="text" name="cookie" id="form-cookie" value="{{ .form.Cookie }}" spellcheck="false" autocomplete="off">
<label for="form-feed-username">{{ t "form.feed.label.feed_username" }}</label>
<input type="text" name="feed_username" id="form-feed-username" value="{{ .form.Username }}" spellcheck="false">

View file

@ -10,6 +10,7 @@
<input type="hidden" name="csrf" value="{{ .csrf }}">
<input type="hidden" name="category_id" value="{{ .form.CategoryID }}">
<input type="hidden" name="user_agent" value="{{ .form.UserAgent }}">
<input type="hidden" name="cookie" value="{{ .form.Cookie }}">
<input type="hidden" name="feed_username" value="{{ .form.Username }}">
<input type="hidden" name="feed_password" value="{{ .form.Password }}">
<input type="hidden" name="scraper_rules" value="{{ .form.ScraperRules }}">

View file

@ -58,6 +58,9 @@
<label for="form-user-agent">{{ t "form.feed.label.user_agent" }}</label>
<input type="text" name="user_agent" id="form-user-agent" placeholder="{{ .defaultUserAgent }}" value="{{ .form.UserAgent }}" spellcheck="false">
<label for="form-cookie">{{ t "form.feed.label.cookie" }}</label>
<input type="text" name="cookie" id="form-cookie" value="{{ .form.Cookie }}" spellcheck="false">
<label for="form-scraper-rules">{{ t "form.feed.label.scraper_rules" }}</label>
<input type="text" name="scraper_rules" id="form-scraper-rules" value="{{ .form.ScraperRules }}" spellcheck="false">
@ -66,7 +69,7 @@
<label for="form-blocklist-rules">{{ t "form.feed.label.blocklist_rules" }}</label>
<input type="text" name="blocklist_rules" id="form-blocklist-rules" value="{{ .form.BlocklistRules }}" spellcheck="false">
<label for="form-keeplist-rules">{{ t "form.feed.label.keeplist_rules" }}</label>
<input type="text" name="keeplist_rules" id="form-keeplist-rules" value="{{ .form.KeeplistRules }}" spellcheck="false">

View file

@ -531,6 +531,31 @@ func TestUpdateFeedUserAgent(t *testing.T) {
}
}
// TestUpdateFeedCookie verifies that a feed's cookie value can be set to a
// non-empty string and then cleared again via the API client, and that the
// stored value round-trips unchanged in both cases.
func TestUpdateFeedCookie(t *testing.T) {
	client := createClient(t)
	feed, _ := createFeed(t, client)

	// First set a cookie, then clear it; each update must be echoed back.
	for _, cookie := range []string{"test", ""} {
		cookie := cookie // take a stable address for the request pointer field
		updatedFeed, err := client.UpdateFeed(feed.ID, &miniflux.FeedModificationRequest{Cookie: &cookie})
		if err != nil {
			t.Fatal(err)
		}
		if updatedFeed.Cookie != cookie {
			t.Fatalf(`Wrong Cookie value, got "%v" instead of "%v"`, updatedFeed.Cookie, cookie)
		}
	}
}
func TestUpdateFeedUsername(t *testing.T) {
client := createClient(t)
feed, _ := createFeed(t, client)

View file

@ -50,6 +50,7 @@ func (h *handler) showEditFeedPage(w http.ResponseWriter, r *http.Request) {
KeeplistRules: feed.KeeplistRules,
Crawler: feed.Crawler,
UserAgent: feed.UserAgent,
Cookie: feed.Cookie,
CategoryID: feed.Category.ID,
Username: feed.Username,
Password: feed.Password,

View file

@ -22,6 +22,7 @@ type FeedForm struct {
KeeplistRules string
Crawler bool
UserAgent string
Cookie string
CategoryID int64
Username string
Password string
@ -43,6 +44,7 @@ func (f FeedForm) Merge(feed *model.Feed) *model.Feed {
feed.KeeplistRules = f.KeeplistRules
feed.Crawler = f.Crawler
feed.UserAgent = f.UserAgent
feed.Cookie = f.Cookie
feed.ParsingErrorCount = 0
feed.ParsingErrorMsg = ""
feed.Username = f.Username
@ -66,6 +68,7 @@ func NewFeedForm(r *http.Request) *FeedForm {
Title: r.FormValue("title"),
ScraperRules: r.FormValue("scraper_rules"),
UserAgent: r.FormValue("user_agent"),
Cookie: r.FormValue("cookie"),
RewriteRules: r.FormValue("rewrite_rules"),
BlocklistRules: r.FormValue("blocklist_rules"),
KeeplistRules: r.FormValue("keeplist_rules"),

View file

@ -20,6 +20,7 @@ type SubscriptionForm struct {
FetchViaProxy bool
AllowSelfSignedCertificates bool
UserAgent string
Cookie string
Username string
Password string
ScraperRules string
@ -63,6 +64,7 @@ func NewSubscriptionForm(r *http.Request) *SubscriptionForm {
AllowSelfSignedCertificates: r.FormValue("allow_self_signed_certificates") == "1",
FetchViaProxy: r.FormValue("fetch_via_proxy") == "1",
UserAgent: r.FormValue("user_agent"),
Cookie: r.FormValue("cookie"),
Username: r.FormValue("feed_username"),
Password: r.FormValue("feed_password"),
ScraperRules: r.FormValue("scraper_rules"),

View file

@ -55,6 +55,7 @@ func (h *handler) showChooseSubscriptionPage(w http.ResponseWriter, r *http.Requ
Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent,
Cookie: subscriptionForm.Cookie,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,

View file

@ -55,6 +55,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
subscriptions, findErr := subscription.FindSubscriptions(
subscriptionForm.URL,
subscriptionForm.UserAgent,
subscriptionForm.Cookie,
subscriptionForm.Username,
subscriptionForm.Password,
subscriptionForm.FetchViaProxy,
@ -83,6 +84,7 @@ func (h *handler) submitSubscription(w http.ResponseWriter, r *http.Request) {
Crawler: subscriptionForm.Crawler,
AllowSelfSignedCertificates: subscriptionForm.AllowSelfSignedCertificates,
UserAgent: subscriptionForm.UserAgent,
Cookie: subscriptionForm.Cookie,
Username: subscriptionForm.Username,
Password: subscriptionForm.Password,
ScraperRules: subscriptionForm.ScraperRules,