aboutsummaryrefslogtreecommitdiffhomepage
path: root/backend/graphql/resolver/pagination.go
diff options
context:
space:
mode:
authornsfisis <nsfisis@gmail.com>2026-02-13 22:01:12 +0900
committernsfisis <nsfisis@gmail.com>2026-02-13 22:01:12 +0900
commite216c3bc97994b4172d15d52b46d5f6b75f35ea4 (patch)
tree3ffbd74f4cb2d90846931c8dcbb97ec07f2b91f1 /backend/graphql/resolver/pagination.go
parentc863e64c0521926e785f4aa7ecf4cf15bb9defa7 (diff)
downloadfeedaka-e216c3bc97994b4172d15d52b46d5f6b75f35ea4.tar.gz
feedaka-e216c3bc97994b4172d15d52b46d5f6b75f35ea4.tar.zst
feedaka-e216c3bc97994b4172d15d52b46d5f6b75f35ea4.zip
feat: add feed sidebar and cursor-based pagination
Add a feed sidebar to /unread and /read pages for filtering articles by feed, and replace the fixed 100-article limit with cursor-based pagination using a "Load more" button. Backend: - Add PageInfo, ArticleConnection types and pagination args to GraphQL - Replace GetUnreadArticles/GetReadArticles with parameterized queries - Add GetFeedUnreadCounts query and composite index - Add shared pagination helper in resolver Frontend: - Add FeedSidebar component with unread count badges - Add usePaginatedArticles hook for cursor-based fetching - Update ArticleList with Load more button and single-feed mode - Use ?feed=<id> query parameter for feed filtering Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
Diffstat (limited to 'backend/graphql/resolver/pagination.go')
-rw-r--r--backend/graphql/resolver/pagination.go177
1 files changed, 177 insertions, 0 deletions
diff --git a/backend/graphql/resolver/pagination.go b/backend/graphql/resolver/pagination.go
new file mode 100644
index 0000000..1a14650
--- /dev/null
+++ b/backend/graphql/resolver/pagination.go
@@ -0,0 +1,177 @@
+package resolver
+
+import (
+ "context"
+ "fmt"
+ "strconv"
+
+ "undef.ninja/x/feedaka/db"
+ "undef.ninja/x/feedaka/graphql/model"
+)
+
+const defaultPageSize = 30
+const maxPageSize = 100
+
// articleRow is the common, flattened row shape shared by all paginated
// article query results, with or without a feed filter or cursor. Each
// generated db.*Row type is converted into this struct (see toArticleRow)
// so the rest of the pagination code can work with a single type.
type articleRow struct {
	ID     int64
	FeedID int64
	Guid   string
	Title  string
	Url    string
	IsRead int64 // integer boolean: 1 = read (see rowToArticle)
	// Joined feed columns. FeedID2 is the feed's own id column — used as
	// Feed.ID in rowToArticle; presumably renamed by the query generator
	// to avoid clashing with the article's FeedID.
	FeedID2          int64
	FeedUrl          string
	FeedTitle        string
	FeedIsSubscribed int64 // integer boolean: 1 = subscribed (see rowToArticle)
}
+
+func toArticleRow(r any) articleRow {
+ switch v := r.(type) {
+ case db.GetArticlesPaginatedRow:
+ return articleRow{v.ID, v.FeedID, v.Guid, v.Title, v.Url, v.IsRead, v.FeedID2, v.FeedUrl, v.FeedTitle, v.FeedIsSubscribed}
+ case db.GetArticlesPaginatedAfterRow:
+ return articleRow{v.ID, v.FeedID, v.Guid, v.Title, v.Url, v.IsRead, v.FeedID2, v.FeedUrl, v.FeedTitle, v.FeedIsSubscribed}
+ case db.GetArticlesByFeedPaginatedRow:
+ return articleRow{v.ID, v.FeedID, v.Guid, v.Title, v.Url, v.IsRead, v.FeedID2, v.FeedUrl, v.FeedTitle, v.FeedIsSubscribed}
+ case db.GetArticlesByFeedPaginatedAfterRow:
+ return articleRow{v.ID, v.FeedID, v.Guid, v.Title, v.Url, v.IsRead, v.FeedID2, v.FeedUrl, v.FeedTitle, v.FeedIsSubscribed}
+ default:
+ panic("unexpected row type")
+ }
+}
+
+func rowToArticle(row articleRow) *model.Article {
+ return &model.Article{
+ ID: strconv.FormatInt(row.ID, 10),
+ FeedID: strconv.FormatInt(row.FeedID, 10),
+ GUID: row.Guid,
+ Title: row.Title,
+ URL: row.Url,
+ IsRead: row.IsRead == 1,
+ Feed: &model.Feed{
+ ID: strconv.FormatInt(row.FeedID2, 10),
+ URL: row.FeedUrl,
+ Title: row.FeedTitle,
+ IsSubscribed: row.FeedIsSubscribed == 1,
+ },
+ }
+}
+
+func (r *queryResolver) paginatedArticles(ctx context.Context, isRead int64, feedID *string, after *string, first *int32) (*model.ArticleConnection, error) {
+ userID, err := getUserIDFromContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ limit := int64(defaultPageSize)
+ if first != nil {
+ limit = int64(*first)
+ if limit <= 0 {
+ limit = int64(defaultPageSize)
+ }
+ if limit > maxPageSize {
+ limit = maxPageSize
+ }
+ }
+
+ // Fetch limit+1 to determine hasNextPage
+ fetchLimit := limit + 1
+
+ var rawRows []any
+
+ if feedID != nil {
+ parsedFeedID, err := strconv.ParseInt(*feedID, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("invalid feed ID: %w", err)
+ }
+
+ if after != nil {
+ cursor, err := strconv.ParseInt(*after, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("invalid cursor: %w", err)
+ }
+ rows, err := r.Queries.GetArticlesByFeedPaginatedAfter(ctx, db.GetArticlesByFeedPaginatedAfterParams{
+ IsRead: isRead,
+ UserID: userID,
+ FeedID: parsedFeedID,
+ ID: cursor,
+ Limit: fetchLimit,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to query articles: %w", err)
+ }
+ for _, row := range rows {
+ rawRows = append(rawRows, row)
+ }
+ } else {
+ rows, err := r.Queries.GetArticlesByFeedPaginated(ctx, db.GetArticlesByFeedPaginatedParams{
+ IsRead: isRead,
+ UserID: userID,
+ FeedID: parsedFeedID,
+ Limit: fetchLimit,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to query articles: %w", err)
+ }
+ for _, row := range rows {
+ rawRows = append(rawRows, row)
+ }
+ }
+ } else {
+ if after != nil {
+ cursor, err := strconv.ParseInt(*after, 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("invalid cursor: %w", err)
+ }
+ rows, err := r.Queries.GetArticlesPaginatedAfter(ctx, db.GetArticlesPaginatedAfterParams{
+ IsRead: isRead,
+ UserID: userID,
+ ID: cursor,
+ Limit: fetchLimit,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to query articles: %w", err)
+ }
+ for _, row := range rows {
+ rawRows = append(rawRows, row)
+ }
+ } else {
+ rows, err := r.Queries.GetArticlesPaginated(ctx, db.GetArticlesPaginatedParams{
+ IsRead: isRead,
+ UserID: userID,
+ Limit: fetchLimit,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to query articles: %w", err)
+ }
+ for _, row := range rows {
+ rawRows = append(rawRows, row)
+ }
+ }
+ }
+
+ hasNextPage := int64(len(rawRows)) > limit
+ if hasNextPage {
+ rawRows = rawRows[:limit]
+ }
+
+ articles := make([]*model.Article, 0, len(rawRows))
+ for _, raw := range rawRows {
+ articles = append(articles, rowToArticle(toArticleRow(raw)))
+ }
+
+ var endCursor *string
+ if len(articles) > 0 {
+ lastID := articles[len(articles)-1].ID
+ endCursor = &lastID
+ }
+
+ return &model.ArticleConnection{
+ Articles: articles,
+ PageInfo: &model.PageInfo{
+ HasNextPage: hasNextPage,
+ EndCursor: endCursor,
+ },
+ }, nil
+}