glance/internal/feed/lobsters.go

66 lines
1.4 KiB
Go
Raw Normal View History

2024-05-12 14:20:34 +03:00
package feed
import (
"net/http"
"time"
)
// lobstersPostResponseJson mirrors a single post object as returned by the
// Lobsters JSON feed (e.g. https://lobste.rs/hottest.json).
type lobstersPostResponseJson struct {
	CreatedAt    string   `json:"created_at"`    // RFC3339 timestamp, parsed later
	Title        string   `json:"title"`
	URL          string   `json:"url"`           // target link; may differ from the discussion page
	Score        int      `json:"score"`
	CommentCount int      `json:"comment_count"`
	CommentsURL  string   `json:"comments_url"`  // link to the Lobsters discussion
	Tags         []string `json:"tags"`
}

// lobstersFeedResponseJson is the top-level feed payload: a bare JSON array
// of posts.
type lobstersFeedResponseJson []lobstersPostResponseJson
func getLobstersPostsFromFeed(feedUrl string) (ForumPosts, error) {
request, err := http.NewRequest("GET", feedUrl, nil)
2024-05-12 14:20:34 +03:00
if err != nil {
2024-05-12 18:14:04 +03:00
return nil, err
2024-05-12 14:20:34 +03:00
}
2024-05-12 18:14:04 +03:00
feed, err := decodeJsonFromRequest[lobstersFeedResponseJson](defaultClient, request)
2024-05-12 14:20:34 +03:00
if err != nil {
return nil, err
}
2024-05-12 18:14:04 +03:00
posts := make(ForumPosts, 0, len(feed))
2024-05-12 14:20:34 +03:00
2024-05-12 18:14:04 +03:00
for i := range feed {
createdAt, _ := time.Parse(time.RFC3339, feed[i].CreatedAt)
2024-05-12 14:20:34 +03:00
posts = append(posts, ForumPost{
Title: feed[i].Title,
2024-05-12 18:14:04 +03:00
DiscussionUrl: feed[i].CommentsURL,
TargetUrl: feed[i].URL,
TargetUrlDomain: extractDomainFromUrl(feed[i].URL),
CommentCount: feed[i].CommentCount,
Score: feed[i].Score,
2024-05-12 14:20:34 +03:00
TimePosted: createdAt,
Tags: feed[i].Tags,
2024-05-12 14:20:34 +03:00
})
}
if len(posts) == 0 {
return nil, ErrNoContent
}
return posts, nil
}
2024-05-12 18:14:04 +03:00
func FetchLobstersTopPosts(feedUrl string) (ForumPosts, error) {
posts, err := getLobstersPostsFromFeed(feedUrl)
2024-05-12 14:20:34 +03:00
if err != nil {
return nil, err
}
2024-05-12 18:14:04 +03:00
return posts, nil
2024-05-12 14:20:34 +03:00
}