jrss — a small RSS feed aggregator (site logo: a small pixel drawing of a pufferfish)

main.go

package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"io/ioutil"
	"log"
	"net/http"
	"os"
	"sort"
	"strings"

	"github.com/mmcdole/gofeed"
)

// rootHandler renders the aggregated feed list as a single HTML page.
//
// There are two layers that we're dealing with here -
// The "feed" layer and the "post" layer.
// The biggest difference between the two is that
// the feed layer contains much more information about the
// "broader site" like the site title - while the post layer
// contains a bunch of post-related info.
func rootHandler(w http.ResponseWriter, r *http.Request) {
	scrapedFeeds, err := makeFeedList(os.Args[1])
	if err != nil {
		// Report the failure to the client and the log, but keep the
		// process alive: log.Fatal here would take down the whole
		// server on a single failed request.
		fmt.Fprintf(w, "makeList error: %s", err)
		log.Print(err)
		return
	}

	feeds := make([]gofeed.Feed, 0, len(scrapedFeeds))
	fetchFailures := make([]error, 0)

	for _, f := range scrapedFeeds {
		feed, err := getFeed(f)
		if err != nil {
			// Skip this feed but keep fetching the rest; failures
			// are surfaced at the top of the rendered page.
			err = fmt.Errorf("%s<br>%s", f, err)
			fetchFailures = append(fetchFailures, err)
			continue
		}
		feeds = append(feeds, *feed)
	}
	consolidatedFeed := consolidateFeeds(feeds)

	// sort posts by date, most recent first
	sort.Sort(sort.Reverse(consolidatedFeed))

	// here we goooOooOoOoOooOooOo
	// ooOooOOoooOOooooOOOOOooooOO
	fmt.Fprintf(w, "<!doctype html>"+
		"<html lang=\"en\">"+
		"<meta charset=\"utf-8\" />"+
		"<title>jrss</title>"+
		"<table>"+
		"<tbody>")
	if len(fetchFailures) > 0 {
		for _, f := range fetchFailures {
			fmt.Fprintf(w, "<p style=\"color:red\">failed to fetch %s</p>", f)
		}
	}
	for _, post := range consolidatedFeed.Items {
		// post.Author may be nil (many feeds omit it); guard to
		// avoid a nil pointer dereference and render an empty name.
		author := ""
		if post.Author != nil {
			author = post.Author.Name
		}
		fmt.Fprintf(w, "<tr>")
		fmt.Fprintf(w, "<td>%s: (%s)<a href=\"%s\">%s</a> <sub>%s</sub></td>",
			post.Custom["feed-title"], author, post.Link, post.Title, post.Published)
		fmt.Fprintf(w, "</tr>")
	}
	fmt.Fprintf(w, "</tbody>"+"</table>"+"</html>")
}

// consolidateFeeds takes in a bunch of disparate feeds & mushes them into one
// big single omega-feed so that we may loop over it and sort it how we wish.
func consolidateFeeds(inputFeeds []gofeed.Feed) gofeed.Feed {
	omegaFeed := gofeed.Feed{}
	// We stick all of the posts into one .Items
	// So that they may be easily sorted (implemented in
	// gofeeds)
	for _, feed := range inputFeeds {
		for _, post := range feed.Items {
			// Pass a few feed-level vars down
			// to the post level so that we may
			// display them when we loop over the
			// posts. Only allocate the Custom map when the
			// parser didn't already attach one, so any existing
			// custom data isn't clobbered.
			if post.Custom == nil {
				post.Custom = make(map[string]string)
			}
			post.Custom["feed-title"] = feed.Title
			omegaFeed.Items = append(omegaFeed.Items, post)
		}
	}
	return omegaFeed
}

// getFeed downloads and parses the feed found at url.
func getFeed(url string) (*gofeed.Feed, error) {
	return gofeed.NewParser().ParseURL(url)
}

func makeFeedList(url string) ([]string, error) {
	res, err := http.Get(url)
	if err != nil {
		err = fmt.Errorf("makeFeedList http.Get: %w", err)
		return nil, err
	}
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		err = fmt.Errorf("makeFeedList failed to read http body: %w", err)
		return nil, err
	}
	res.Body.Close()
	r := csv.NewReader(strings.NewReader(string(body)))
	feeds := make([]string, 0)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			err = fmt.Errorf("makeFeedList record parsing error: %w\nvalue: %s", err, record)
			return nil, err
		}
		// [0] == url to a feed (item 0 of the csv)
		feeds = append(feeds, record[0])
	}
	return feeds, err
}

// main wires the root handler to "/" and serves on :8080.
// The only argument is the URL of a CSV/plain list of feed URLs.
func main() {
	// A feed-list URL is mandatory; bail out with usage help otherwise.
	if len(os.Args) < 2 {
		log.Fatal("Supply your feed list as ARG1.\nExample: go run main.go https://j3s.sh/feeds.txt")
	}

	http.HandleFunc("/", rootHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}