main.go
package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"sort"
	"strings"

	"github.com/mmcdole/gofeed"
)

// There are two layers that we're dealing with here -
// the "feed" layer and the "post" layer.
// The biggest difference between the two is that
// the feed layer contains much more information about the
// "broader site", like the site title, while the post layer
// contains a bunch of post-related info.

func rootHandler(w http.ResponseWriter, r *http.Request) {
	scrapedFeeds, err := makeFeedList(os.Args[1])
	if err != nil {
		// Report the error to the client and log it, but don't
		// kill the whole server over one bad request.
		fmt.Fprintf(w, "makeFeedList error: %s", err)
		log.Println(err)
		return
	}

	feeds := make([]gofeed.Feed, 0)
	var feed *gofeed.Feed
	fetchFailures := make([]error, 0)
	for _, f := range scrapedFeeds {
		feed, err = getFeed(f)
		if err != nil {
			// Record the failure and skip this feed
			// so one dead feed doesn't take down the page.
			err = fmt.Errorf("%s<br>%s", f, err)
			fetchFailures = append(fetchFailures, err)
			continue
		}
		feeds = append(feeds, *feed)
	}

	consolidatedFeed := consolidateFeeds(feeds)

	// sort posts by date, most recent first
	sort.Sort(sort.Reverse(consolidatedFeed))

	// here we goooOooOoOoOooOooOo
	// ooOooOOoooOOooooOOOOOooooOO
	fmt.Fprint(w, "<!doctype html>"+
		"<html lang=\"en\">"+
		"<meta charset=\"utf-8\" />"+
		"<title>jrss</title>"+
		"<table>"+
		"<tbody>")
	for _, f := range fetchFailures {
		fmt.Fprintf(w, "<p style=\"color:red\">failed to fetch %s</p>", f)
	}
	for _, post := range consolidatedFeed.Items {
		// post.Author is guaranteed non-nil by consolidateFeeds,
		// so dereferencing it here is safe.
		fmt.Fprint(w, "<tr>")
		fmt.Fprintf(w, "<td>%s: (%s)<a href=\"%s\">%s</a> <sub>%s</sub></td>",
			post.Custom["feed-title"],
			post.Author.Name,
			post.Link,
			post.Title,
			post.Published)
		fmt.Fprint(w, "</tr>")
	}
	fmt.Fprint(w, "</tbody>"+"</table>"+"</html>")
}

// consolidateFeeds takes in a bunch of disparate feeds & mushes them into one
// big single omega-feed so that we may loop over it and sort it how we wish.
func consolidateFeeds(inputFeeds []gofeed.Feed) gofeed.Feed {
	omegaFeed := gofeed.Feed{}
	// We stick all of the posts into one .Items
	// so that they may be easily sorted (gofeed.Feed
	// implements sort.Interface).
	for _, feed := range inputFeeds {
		for _, post := range feed.Items {
			// Pass a few feed-level vars down
			// to the post level so that we may
			// display them when we loop over the
			// posts.
			post.Custom = make(map[string]string)
			post.Custom["feed-title"] = feed.Title
			// Some feeds omit the author; substitute an empty
			// Person so the handler never hits a nil pointer.
			if post.Author == nil {
				post.Author = &gofeed.Person{}
			}
			omegaFeed.Items = append(omegaFeed.Items, post)
		}
	}
	return omegaFeed
}

// getFeed fetches and parses a single feed URL.
func getFeed(url string) (*gofeed.Feed, error) {
	p := gofeed.NewParser()
	return p.ParseURL(url)
}

// makeFeedList fetches the feed-list CSV at url and returns the feed
// URL found in the first column of every row.
func makeFeedList(url string) ([]string, error) {
	res, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("makeFeedList http.Get: %w", err)
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, fmt.Errorf("makeFeedList failed to read http body: %w", err)
	}

	r := csv.NewReader(strings.NewReader(string(body)))
	feeds := make([]string, 0)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, fmt.Errorf("makeFeedList record parsing error: %w\nvalue: %s", err, record)
		}
		// record[0] == url to a feed (item 0 of the csv row)
		feeds = append(feeds, record[0])
	}
	return feeds, nil
}

func main() {
	if len(os.Args) == 1 {
		log.Fatal("Supply your feed list as ARG1.\nExample: go run main.go https://j3s.sh/feeds.txt")
	}
	http.HandleFunc("/", rootHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
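
// Example usage (a sketch; the feed URLs below are placeholders, not
// part of this repo). makeFeedList parses the list as CSV and keeps
// only the first column of each row, so a plain file with one feed
// URL per line is a valid list:
//
//	https://example.com/feed.atom
//	https://example.org/rss.xml
//
// Host that file somewhere reachable, then run:
//
//	go run main.go https://j3s.sh/feeds.txt
//
// and browse to http://localhost:8080/ for the merged, date-sorted view.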