enormous refactor
j3s <j3s@c3f.net>
Tue, 23 Feb 2021 17:55:32 -0600
1 file changed, 78 insertions(+), 28 deletions(-)
M	main.go

main.go → main.go
@@ -14,55 +14,101 @@
"github.com/mmcdole/gofeed" ) +// There are two layers that we're dealing with here - +// The "feed" layer and the "post" layer. +// The biggest difference between the two is that +// the feed layer contains much more information about the +// "broader site" like the site title - while the post layer +// contains a bunch of post-related info. func rootHandler(w http.ResponseWriter, r *http.Request) { - feeds, err := getFeedList(os.Args[1]) + scrapedFeeds, err := makeFeedList(os.Args[1]) if err != nil { + fmt.Fprintf(w, "makeList error: %s", err) log.Fatal(err) } - posts := make([]gofeed.Feed, 0) - var gottenFeed *gofeed.Feed - for _, f := range feeds { - gottenFeed, err = getFeed(f) + + feeds := make([]gofeed.Feed, 0) + var feed *gofeed.Feed + fetchFailures := make([]error, 0) + + for _, f := range scrapedFeeds { + feed, err = getFeed(f) if err != nil { // break loop in case of // feed fetching failure - log.Println(err) + err = fmt.Errorf("%s<br>%s", f, err) + fetchFailures = append(fetchFailures, err) continue } - posts = append(posts, *gottenFeed) + feeds = append(feeds, *feed) } - beegFeed := gofeed.Feed{} - for _, p := range posts { - for _, i := range p.Items { - // Overriding description to the blog title - i.Description = p.Title - beegFeed.Items = append(beegFeed.Items, i) + consolidatedFeed := consolidateFeeds(feeds) + + // sort posts by date, most recent first + sort.Sort(sort.Reverse(consolidatedFeed)) + + // here we goooOooOoOoOooOooOo + // ooOooOOoooOOooooOOOOOooooOO + fmt.Fprintf(w, "<!doctype html>"+ + "<html lang=\"en\">"+ + "<meta charset=\"utf-8\" />"+ + "<title>jrss</title>"+ + "<table>"+ + "<tbody>") + if len(fetchFailures) > 0 { + for _, f := range fetchFailures { + fmt.Fprintf(w, "<p style=\"color:red\">failed to fetch %s</p>", f) } } - sort.Sort(sort.Reverse(beegFeed)) - for _, i := range beegFeed.Items { - fmt.Fprintf(w, "<p>%s: <a href=\"%s\">%s</a></p>", i.Description, i.Link, i.Title) + for _, post := range consolidatedFeed.Items { + // TODO: Add author name (nil pointer dereference) + fmt.Fprintf(w, "<tr>") + fmt.Fprintf(w, "<td>%s: (%s)<a href=\"%s\">%s</a> <sub>%s</sub></td>", + post.Custom["feed-title"], post.Author.Name, post.Link, post.Title, post.Published) + fmt.Fprintf(w, "</tr>") } + fmt.Fprintf(w, "</tbody>"+"</table>"+"</html>") +} + +// consolidateFeeds takes in a bunch of disparate feeds & mushes them into one +// big single omega-feed so that we may loop over it and sort it how we wish +func consolidateFeeds(inputFeeds []gofeed.Feed) gofeed.Feed { + omegaFeed := gofeed.Feed{} + // We stick all of the posts into one .Items + // So that they may be easily sorted (implemented in + // gofeeds) + for _, feed := range inputFeeds { + for _, post := range feed.Items { + // Pass a few feed-level vars down + // to the post level so that we may + // display them when we loop over the + // posts. 
+ post.Custom = make(map[string]string) + post.Custom["feed-title"] = feed.Title + omegaFeed.Items = append(omegaFeed.Items, post) + } + } + return omegaFeed } func getFeed(url string) (*gofeed.Feed, error) { - fp := gofeed.NewParser() - feed, err := fp.ParseURL(url) - return feed, err + p := gofeed.NewParser() + return p.ParseURL(url) } -func getFeedList(url string) ([]string, error) { +func makeFeedList(url string) ([]string, error) { res, err := http.Get(url) if err != nil { - err = fmt.Errorf("getFeedList http.Get: %w", err) - log.Fatal(err) + err = fmt.Errorf("makeFeedList http.Get: %w", err) + return nil, err } - robots, err := ioutil.ReadAll(res.Body) + body, err := ioutil.ReadAll(res.Body) if err != nil { - log.Fatal(err) + err = fmt.Errorf("makeFeedList failed to read http body: %w", err) + return nil, err } res.Body.Close() - r := csv.NewReader(strings.NewReader(string(robots))) + r := csv.NewReader(strings.NewReader(string(body))) feeds := make([]string, 0) for { record, err := r.Read()@@ -70,16 +116,20 @@ if err == io.EOF {
break } if err != nil { - log.Fatal(err) + err = fmt.Errorf("makeFeedList record parsing error: %w\nvalue: %s", err, record) + return nil, err } - // [0] == url to a feed + // [0] == url to a feed (item 0 of the csv) feeds = append(feeds, record[0]) } return feeds, err } func main() { - // TODO: check whether ARG1 exists + if len(os.Args) == 1 { + log.Fatal("Supply your feed list as ARG1.\nExample: go run main.go https://j3s.sh/feeds.txt") + } + http.HandleFunc("/", rootHandler) log.Fatal(http.ListenAndServe(":8080", nil)) }
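
The consolidation pattern the new comments describe can be exercised on its own. Below is a minimal, standalone sketch, not part of this commit: the two hand-built feeds and their dates are invented for illustration, and it leans on the same gofeed behaviour the handler uses, namely that gofeed.Feed sorts its Items by published date and that Item.Custom can carry feed-level values (such as the site title) down to the post level.

package main

import (
	"fmt"
	"sort"
	"time"

	"github.com/mmcdole/gofeed"
)

// consolidateFeeds mirrors the function added in this commit: copy a few
// feed-level fields down to each post via Item.Custom, then merge every
// post into one big sortable feed.
func consolidateFeeds(inputFeeds []gofeed.Feed) gofeed.Feed {
	omegaFeed := gofeed.Feed{}
	for _, feed := range inputFeeds {
		for _, post := range feed.Items {
			post.Custom = map[string]string{"feed-title": feed.Title}
			omegaFeed.Items = append(omegaFeed.Items, post)
		}
	}
	return omegaFeed
}

func main() {
	// Hypothetical feeds standing in for the ones getFeed would fetch.
	older := time.Date(2021, 2, 20, 0, 0, 0, 0, time.UTC)
	newer := time.Date(2021, 2, 22, 0, 0, 0, 0, time.UTC)
	feeds := []gofeed.Feed{
		{Title: "blog one", Items: []*gofeed.Item{{Title: "older post", PublishedParsed: &older}}},
		{Title: "blog two", Items: []*gofeed.Item{{Title: "newer post", PublishedParsed: &newer}}},
	}

	consolidated := consolidateFeeds(feeds)

	// gofeed.Feed implements sort.Interface over its Items (by published
	// date), so the merged feed sorts most-recent-first directly.
	sort.Sort(sort.Reverse(consolidated))

	for _, post := range consolidated.Items {
		fmt.Printf("%s: %s\n", post.Custom["feed-title"], post.Title)
	}
}

Run as-is it prints the two posts newest-first, each prefixed with its feed title, which is the same ordering rootHandler writes into its HTML table.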