// Copyright 2017 Frédéric Guillot. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.

package storage // import "miniflux.app/storage"

import (
	"database/sql"
	"fmt"
	"strings"
	"time"

	"github.com/lib/pq"

	"miniflux.app/model"
	"miniflux.app/timezone"
)

// EntryQueryBuilder builds a SQL query to fetch entries.
type EntryQueryBuilder struct {
	store      *Storage
	args       []interface{}
	conditions []string
	order      string
	direction  string
	limit      int
	offset     int
}

// WithSearchQuery adds a full-text search query to the condition.
func (e *EntryQueryBuilder) WithSearchQuery(query string) *EntryQueryBuilder {
	if query != "" {
		nArgs := len(e.args) + 1
		e.conditions = append(e.conditions, fmt.Sprintf("e.document_vectors @@ plainto_tsquery($%d)", nArgs))
		e.args = append(e.args, query)

		// Rank primarily by text relevance, then penalize older entries:
		// 0.0000001 is subtracted from the ts_rank score per second of age.
		e.WithOrder(fmt.Sprintf("ts_rank(document_vectors, plainto_tsquery($%d)) - extract (epoch from now() - published_at)::float * 0.0000001", nArgs))
		e.WithDirection("DESC")
	}
	return e
}

// WithStarred adds a starred filter.
func (e *EntryQueryBuilder) WithStarred() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "e.starred is true")
	return e
}

// BeforeDate adds the condition published_at < date.
func (e *EntryQueryBuilder) BeforeDate(date time.Time) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.published_at < $%d", len(e.args)+1))
	e.args = append(e.args, date)
	return e
}

// AfterDate adds the condition published_at > date.
func (e *EntryQueryBuilder) AfterDate(date time.Time) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.published_at > $%d", len(e.args)+1))
	e.args = append(e.args, date)
	return e
}

// BeforeEntryID adds the condition id < entryID.
func (e *EntryQueryBuilder) BeforeEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id < $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// AfterEntryID adds the condition id > entryID.
func (e *EntryQueryBuilder) AfterEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id > $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// WithEntryIDs filters by entry IDs.
func (e *EntryQueryBuilder) WithEntryIDs(entryIDs []int64) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.id = ANY($%d)", len(e.args)+1))
	e.args = append(e.args, pq.Int64Array(entryIDs))
	return e
}

// WithEntryID filters by entry ID.
func (e *EntryQueryBuilder) WithEntryID(entryID int64) *EntryQueryBuilder {
	if entryID != 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.id = $%d", len(e.args)+1))
		e.args = append(e.args, entryID)
	}
	return e
}

// WithFeedID filters by feed ID.
func (e *EntryQueryBuilder) WithFeedID(feedID int64) *EntryQueryBuilder {
	if feedID > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.feed_id = $%d", len(e.args)+1))
		e.args = append(e.args, feedID)
	}
	return e
}

// WithCategoryID filters by category ID.
func (e *EntryQueryBuilder) WithCategoryID(categoryID int64) *EntryQueryBuilder {
	if categoryID > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("f.category_id = $%d", len(e.args)+1))
		e.args = append(e.args, categoryID)
	}
	return e
}

// WithStatus filters by entry status.
func (e *EntryQueryBuilder) WithStatus(status string) *EntryQueryBuilder {
	if status != "" {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status = $%d", len(e.args)+1))
		e.args = append(e.args, status)
	}
	return e
}

// WithStatuses filters by a list of entry statuses.
func (e *EntryQueryBuilder) WithStatuses(statuses []string) *EntryQueryBuilder {
	if len(statuses) > 0 {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status = ANY($%d)", len(e.args)+1))
		e.args = append(e.args, pq.StringArray(statuses))
	}
	return e
}

// WithoutStatus sets the entry status that should not be returned.
func (e *EntryQueryBuilder) WithoutStatus(status string) *EntryQueryBuilder {
	if status != "" {
		e.conditions = append(e.conditions, fmt.Sprintf("e.status <> $%d", len(e.args)+1))
		e.args = append(e.args, status)
	}
	return e
}

// WithShareCode sets the entry share code.
func (e *EntryQueryBuilder) WithShareCode(shareCode string) *EntryQueryBuilder {
	e.conditions = append(e.conditions, fmt.Sprintf("e.share_code = $%d", len(e.args)+1))
	e.args = append(e.args, shareCode)
	return e
}

// WithShareCodeNotEmpty adds a filter for a non-empty share code.
func (e *EntryQueryBuilder) WithShareCodeNotEmpty() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "e.share_code <> ''")
	return e
}

// WithOrder sets the sorting order.
func (e *EntryQueryBuilder) WithOrder(order string) *EntryQueryBuilder {
	e.order = order
	return e
}

// WithDirection sets the sorting direction.
func (e *EntryQueryBuilder) WithDirection(direction string) *EntryQueryBuilder {
	e.direction = direction
	return e
}

// WithLimit sets the limit.
func (e *EntryQueryBuilder) WithLimit(limit int) *EntryQueryBuilder {
	if limit > 0 {
		e.limit = limit
	}
	return e
}

// WithOffset sets the offset.
func (e *EntryQueryBuilder) WithOffset(offset int) *EntryQueryBuilder {
	if offset > 0 {
		e.offset = offset
	}
	return e
}
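
// WithGloballyVisible filters out entries from feeds or categories that are hidden globally.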
func (e *EntryQueryBuilder) WithGloballyVisible() *EntryQueryBuilder {
	e.conditions = append(e.conditions, "not c.hide_globally")
	e.conditions = append(e.conditions, "not f.hide_globally")
	return e
}

// CountEntries counts the number of entries that match the condition.
func (e *EntryQueryBuilder) CountEntries() (count int, err error) {
	query := `
		SELECT count(*)
		FROM entries e
		JOIN feeds f ON f.id = e.feed_id
		JOIN categories c ON c.id = f.category_id
		WHERE %s
	`
	condition := e.buildCondition()

	err = e.store.db.QueryRow(fmt.Sprintf(query, condition), e.args...).Scan(&count)
	if err != nil {
		return 0, fmt.Errorf("unable to count entries: %v", err)
	}

	return count, nil
}

// GetEntry returns a single entry that matches the condition.
func (e *EntryQueryBuilder) GetEntry() (*model.Entry, error) {
	e.limit = 1
	entries, err := e.GetEntries()
	if err != nil {
		return nil, err
	}

	if len(entries) != 1 {
		return nil, nil
	}

	entries[0].Enclosures, err = e.store.GetEnclosures(entries[0].ID)
	if err != nil {
		return nil, err
	}

	return entries[0], nil
}

// GetEntries returns a list of entries that match the condition.
func (e *EntryQueryBuilder) GetEntries() (model.Entries, error) {
	query := `
		SELECT
			e.id,
			e.user_id,
			e.feed_id,
			e.hash,
			e.published_at at time zone u.timezone,
			e.title,
			e.url,
			e.comments_url,
			e.author,
			e.share_code,
			e.content,
			e.status,
			e.starred,
			e.reading_time,
			e.created_at,
			e.changed_at,
			f.title as feed_title,
			f.feed_url,
			f.site_url,
			f.checked_at,
			f.category_id, c.title as category_title,
			f.scraper_rules,
			f.rewrite_rules,
			f.crawler,
			f.user_agent,
			f.cookie,
			fi.icon_id,
			u.timezone
		FROM
			entries e
		LEFT JOIN
			feeds f ON f.id=e.feed_id
		LEFT JOIN
			categories c ON c.id=f.category_id
		LEFT JOIN
			feed_icons fi ON fi.feed_id=f.id
		LEFT JOIN
			users u ON u.id=e.user_id
		WHERE %s %s
	`

	condition := e.buildCondition()
	sorting := e.buildSorting()
	query = fmt.Sprintf(query, condition, sorting)

	rows, err := e.store.db.Query(query, e.args...)
	if err != nil {
		return nil, fmt.Errorf("unable to get entries: %v", err)
	}
	defer rows.Close()

	entries := make(model.Entries, 0)
	for rows.Next() {
		var entry model.Entry
		var iconID sql.NullInt64
		var tz string

		entry.Feed = &model.Feed{}
		entry.Feed.Category = &model.Category{}
		entry.Feed.Icon = &model.FeedIcon{}

		err := rows.Scan(
			&entry.ID,
			&entry.UserID,
			&entry.FeedID,
			&entry.Hash,
			&entry.Date,
			&entry.Title,
			&entry.URL,
			&entry.CommentsURL,
			&entry.Author,
			&entry.ShareCode,
			&entry.Content,
			&entry.Status,
			&entry.Starred,
			&entry.ReadingTime,
			&entry.CreatedAt,
			&entry.ChangedAt,
			&entry.Feed.Title,
			&entry.Feed.FeedURL,
			&entry.Feed.SiteURL,
			&entry.Feed.CheckedAt,
			&entry.Feed.Category.ID,
			&entry.Feed.Category.Title,
			&entry.Feed.ScraperRules,
			&entry.Feed.RewriteRules,
			&entry.Feed.Crawler,
			&entry.Feed.UserAgent,
			&entry.Feed.Cookie,
			&iconID,
			&tz,
		)

		if err != nil {
			return nil, fmt.Errorf("unable to fetch entry row: %v", err)
		}

		if iconID.Valid {
			entry.Feed.Icon.IconID = iconID.Int64
		} else {
			entry.Feed.Icon.IconID = 0
		}

		// Make sure that timestamp fields contain timezone information (API).
		entry.Date = timezone.Convert(tz, entry.Date)
		entry.CreatedAt = timezone.Convert(tz, entry.CreatedAt)
		entry.ChangedAt = timezone.Convert(tz, entry.ChangedAt)
		entry.Feed.CheckedAt = timezone.Convert(tz, entry.Feed.CheckedAt)

		entry.Feed.ID = entry.FeedID
		entry.Feed.UserID = entry.UserID
		entry.Feed.Icon.FeedID = entry.FeedID
		entry.Feed.Category.UserID = entry.UserID
		entries = append(entries, &entry)
	}

	return entries, nil
}

// GetEntryIDs returns a list of entry IDs that match the condition.
func (e *EntryQueryBuilder) GetEntryIDs() ([]int64, error) {
	query := `SELECT e.id FROM entries e LEFT JOIN feeds f ON f.id=e.feed_id WHERE %s %s`

	condition := e.buildCondition()
	query = fmt.Sprintf(query, condition, e.buildSorting())

	rows, err := e.store.db.Query(query, e.args...)
	if err != nil {
		return nil, fmt.Errorf("unable to get entries: %v", err)
	}
	defer rows.Close()

	var entryIDs []int64
	for rows.Next() {
		var entryID int64

		err := rows.Scan(&entryID)
		if err != nil {
			return nil, fmt.Errorf("unable to fetch entry row: %v", err)
		}

		entryIDs = append(entryIDs, entryID)
	}

	return entryIDs, nil
}
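
// buildCondition joins all accumulated conditions with AND.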
func (e *EntryQueryBuilder) buildCondition() string {
	return strings.Join(e.conditions, " AND ")
}
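
// buildSorting assembles the ORDER BY, LIMIT, and OFFSET clauses from the builder settings.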
func (e *EntryQueryBuilder) buildSorting() string {
	var parts []string

	if e.order != "" {
		parts = append(parts, fmt.Sprintf(`ORDER BY %s`, e.order))
	}

	if e.direction != "" {
		parts = append(parts, e.direction)
	}

	if e.limit > 0 {
		parts = append(parts, fmt.Sprintf(`LIMIT %d`, e.limit))
	}

	if e.offset > 0 {
		parts = append(parts, fmt.Sprintf(`OFFSET %d`, e.offset))
	}

	return strings.Join(parts, " ")
}

// NewEntryQueryBuilder returns a new EntryQueryBuilder.
func NewEntryQueryBuilder(store *Storage, userID int64) *EntryQueryBuilder {
	return &EntryQueryBuilder{
		store:      store,
		args:       []interface{}{userID},
		conditions: []string{"e.user_id = $1"},
	}
}
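
// Example: a minimal usage sketch, assuming a *Storage value named store, a
// user ID, and the "unread" status value; callers chain filters and then
// fetch the matching entries.
//
//	entries, err := NewEntryQueryBuilder(store, userID).
//		WithStatus("unread").
//		WithOrder("published_at").
//		WithDirection("DESC").
//		WithLimit(10).
//		GetEntries()
//	if err != nil {
//		// handle the error
//	}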

// NewAnonymousQueryBuilder returns a new EntryQueryBuilder suitable for anonymous users.
func NewAnonymousQueryBuilder(store *Storage) *EntryQueryBuilder {
	return &EntryQueryBuilder{
		store: store,
	}
}