main.go
package main

import (
	"encoding/csv"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"sort"
	"strings"

	"github.com/mmcdole/gofeed"
)

func rootHandler(w http.ResponseWriter, r *http.Request) {
	feeds, err := getFeedList(os.Args[1])
	if err != nil {
		log.Println(err)
		http.Error(w, "could not fetch feed list", http.StatusInternalServerError)
		return
	}

	posts := make([]gofeed.Feed, 0)
	var gottenFeed *gofeed.Feed
	for _, f := range feeds {
		gottenFeed, err = getFeed(f)
		if err != nil {
			// Skip this feed if fetching or
			// parsing fails; keep the rest.
			log.Println(err)
			continue
		}
		posts = append(posts, *gottenFeed)
	}

	beegFeed := gofeed.Feed{}
	for _, p := range posts {
		for _, i := range p.Items {
			// Override the item description with the blog title
			// so each entry shows which feed it came from.
			i.Description = p.Title
			beegFeed.Items = append(beegFeed.Items, i)
		}
	}

	// gofeed.Feed implements sort.Interface (ordered by published date),
	// so reversing it puts the newest items first.
	sort.Sort(sort.Reverse(beegFeed))

	for _, i := range beegFeed.Items {
		fmt.Fprintf(w, "<p>%s: <a href=\"%s\">%s</a></p>", i.Description, i.Link, i.Title)
	}
}

func getFeed(url string) (*gofeed.Feed, error) {
	fp := gofeed.NewParser()
	feed, err := fp.ParseURL(url)
	return feed, err
}

func getFeedList(url string) ([]string, error) {
	res, err := http.Get(url)
	if err != nil {
		return nil, fmt.Errorf("getFeedList http.Get: %w", err)
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, fmt.Errorf("getFeedList read body: %w", err)
	}

	r := csv.NewReader(strings.NewReader(string(body)))
	feeds := make([]string, 0)
	for {
		record, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return nil, fmt.Errorf("getFeedList csv read: %w", err)
		}
		// record[0] holds the URL of a feed.
		feeds = append(feeds, record[0])
	}
	return feeds, nil
}

func main() {
	// TODO: check whether ARG1 exists
	http.HandleFunc("/", rootHandler)
	log.Fatal(http.ListenAndServe(":8080", nil))
}
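One way the TODO in main could be handled, as a minimal sketch rather than part of the program above, is to refuse to start when no feed-list URL is passed on the command line:

	func main() {
		// Sketch: exit with a usage hint if os.Args[1] is missing,
		// instead of panicking later inside rootHandler.
		if len(os.Args) < 2 {
			log.Fatal("usage: go run main.go <feed-list-url>")
		}
		http.HandleFunc("/", rootHandler)
		log.Fatal(http.ListenAndServe(":8080", nil))
	}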
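The feed list that getFeedList downloads is a CSV file in which only the first column is read, and that column must contain a feed URL. A hypothetical feeds.csv (the URLs here are placeholders, not from the original) could look like:

	https://example.com/blog/index.xml,Example Blog
	https://example.org/atom.xml,Another Blog

With that file hosted somewhere reachable, the aggregator would be started with its URL as the single argument, e.g. go run main.go https://example.com/feeds.csv, and then serves the merged feed on http://localhost:8080/.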