- commit: 54cc017
- parent: 815c299
- author: Eric Bower
- date: 2022-12-22 03:40:43 +0000 UTC
feat(feeds): support html

Also changed how we fetch feeds.
3 files changed, +113, -35

+90, -26
@@ -3,6 +3,8 @@ package feeds
 import (
     "errors"
     "fmt"
+    html "html/template"
+    "io"
     "net/http"
     "strings"
     "text/template"
@@ -27,9 +29,9 @@ func (c *UserAgentTransport) RoundTrip(r *http.Request) (*http.Response, error)
 }

 type FeedItem struct {
-    Title       string
-    Link        string
-    Description string
+    Title   string
+    Link    string
+    Content string
 }

 type Feed struct {
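The FeedItem change above swaps the item summary for its full body: gofeed exposes both Description (a short summary) and Content (the full entry), and the digest now carries the latter. A minimal sketch of that mapping; the toFeedItem helper is hypothetical and not part of the commit:

// Sketch only: how a gofeed item maps onto the new FeedItem shape.
package main

import (
    "fmt"

    "github.com/mmcdole/gofeed"
)

type FeedItem struct {
    Title   string
    Link    string
    Content string
}

// toFeedItem is a hypothetical helper, not part of this commit.
func toFeedItem(item *gofeed.Item) *FeedItem {
    return &FeedItem{
        Title:   item.Title,
        Link:    item.Link,
        Content: item.Content, // was item.Description before this change
    }
}

func main() {
    item := &gofeed.Item{Title: "Hello", Link: "https://example.com", Content: "<p>full body</p>"}
    fmt.Printf("%+v\n", toFeedItem(item))
}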
@@ -114,12 +116,12 @@ func (f *Fetcher) RunPost(user *db.User, post *db.Post) error {
         urls = append(urls, url)
     }

-    txt, err := f.FetchAll(urls, post.Data.LastDigest)
+    msgBody, err := f.FetchAll(urls, post.Data.LastDigest)
     if err != nil {
         return err
     }

-    err = f.SendEmail(user.Name, parsed.Email, txt)
+    err = f.SendEmail(user.Name, parsed.Email, msgBody)
     if err != nil {
         return err
     }
@@ -150,10 +152,47 @@ func (f *Fetcher) RunUser(user *db.User) error {
     return nil
 }

+func (f *Fetcher) ParseURL(fp *gofeed.Parser, url string) (*gofeed.Feed, error) {
+    client := &http.Client{
+        Transport: &UserAgentTransport{http.DefaultTransport},
+    }
+    req, err := http.NewRequest("GET", url, nil)
+    resp, err := client.Do(req)
+    if err != nil {
+        return nil, err
+    }
+
+    defer resp.Body.Close()
+    body, err := io.ReadAll(resp.Body)
+    if err != nil {
+        return nil, err
+    }
+
+    if strings.HasPrefix(url, "https://old") {
+        f.cfg.Logger.Infof("BODY: (%s)", string(body))
+    }
+
+    if resp.StatusCode < 200 || resp.StatusCode > 300 {
+        return nil, fmt.Errorf("fetching feed resulted in an error: %s %s", resp.Status, body)
+    }
+
+    if err != nil {
+        return nil, err
+    }
+
+    feed, err := fp.ParseString(string(body))
+
+    if err != nil {
+        return nil, err
+    }
+
+    return feed, nil
+}
+
 func (f *Fetcher) Fetch(fp *gofeed.Parser, url string, lastDigest *time.Time) (*Feed, error) {
     f.cfg.Logger.Infof("(%s) fetching feed", url)

-    feed, err := fp.ParseURL(url)
+    feed, err := f.ParseURL(fp, url)
     if err != nil {
         return nil, err
     }
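ParseURL replaces gofeed's built-in fetching so the request goes through the same UserAgentTransport used elsewhere in the package, and so a non-2xx response surfaces its body in the error. The transport's definition is not part of this diff; a sketch of what it presumably looks like, with an assumed User-Agent value:

// Sketch, assuming UserAgentTransport simply stamps a User-Agent header
// before delegating to the wrapped RoundTripper.
package main

import (
    "fmt"
    "net/http"
)

type UserAgentTransport struct {
    rt http.RoundTripper
}

func (c *UserAgentTransport) RoundTrip(r *http.Request) (*http.Response, error) {
    r.Header.Set("User-Agent", "feeds.sh") // assumed value, not from the commit
    return c.rt.RoundTrip(r)
}

func main() {
    client := &http.Client{Transport: &UserAgentTransport{http.DefaultTransport}}
    resp, err := client.Get("https://example.com/rss.xml")
    if err != nil {
        fmt.Println(err)
        return
    }
    defer resp.Body.Close()
    fmt.Println(resp.Status)
}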
@@ -166,14 +205,14 @@ func (f *Fetcher) Fetch(fp *gofeed.Parser, url string, lastDigest *time.Time) (*
     items := []*FeedItem{}
     // we only want to return feed items published since the last digest time we fetched
     for _, item := range feed.Items {
-        if lastDigest == nil || item.PublishedParsed.Before(*lastDigest) {
+        if lastDigest != nil && item.PublishedParsed.Before(*lastDigest) {
             continue
         }

         items = append(items, &FeedItem{
-            Title:       item.Title,
-            Link:        item.Link,
-            Description: item.Description,
+            Title:   item.Title,
+            Link:    item.Link,
+            Content: item.Content,
         })
     }
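The flipped condition fixes the digest filter: an item is now skipped only when a previous digest time exists and the item predates it, so a nil lastDigest includes everything instead of nothing. A tiny sketch of the corrected predicate:

// Minimal sketch of the corrected filter logic.
package main

import (
    "fmt"
    "time"
)

func includeItem(published time.Time, lastDigest *time.Time) bool {
    if lastDigest != nil && published.Before(*lastDigest) {
        return false // already covered by an earlier digest
    }
    return true // first digest (nil lastDigest) or newly published item
}

func main() {
    last := time.Date(2022, 12, 21, 0, 0, 0, 0, time.UTC)
    old := last.Add(-24 * time.Hour)
    fresh := last.Add(24 * time.Hour)

    fmt.Println(includeItem(old, &last))   // false: predates the last digest
    fmt.Println(includeItem(fresh, &last)) // true: published since then
    fmt.Println(includeItem(old, nil))     // true: no digest sent yet
}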
@@ -185,8 +224,26 @@ func (f *Fetcher) Fetch(fp *gofeed.Parser, url string, lastDigest *time.Time) (*
     return feedTmpl, nil
 }

-func (f *Fetcher) Print(feedTmpl *DigestFeed) (string, error) {
+func (f *Fetcher) PrintText(feedTmpl *DigestFeed) (string, error) {
     ts, err := template.ParseFiles(
+        f.cfg.StaticPath("html/digest_text.page.tmpl"),
+    )
+
+    if err != nil {
+        return "", err
+    }
+
+    w := new(strings.Builder)
+    err = ts.Execute(w, feedTmpl)
+    if err != nil {
+        return "", err
+    }
+
+    return w.String(), nil
+}
+
+func (f *Fetcher) PrintHtml(feedTmpl *DigestFeed) (string, error) {
+    ts, err := html.ParseFiles(
         f.cfg.StaticPath("html/digest.page.tmpl"),
     )

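Splitting Print into PrintText and PrintHtml also splits the template engines: the plain-text digest keeps text/template, while the HTML digest goes through html/template (imported as html above), which escapes values for HTML output. A small demo of the difference, using inline templates rather than the on-disk .page.tmpl files:

// Demo: html/template escapes values for safe HTML output, text/template does not.
package main

import (
    htmltmpl "html/template"
    "os"
    texttmpl "text/template"
)

func main() {
    data := struct{ Title string }{Title: `Ports & "Adapters" <rss>`}

    t := texttmpl.Must(texttmpl.New("text").Parse("{{.Title}}\n"))
    _ = t.Execute(os.Stdout, data) // printed verbatim

    h := htmltmpl.Must(htmltmpl.New("html").Parse("<h2>{{.Title}}</h2>\n"))
    _ = h.Execute(os.Stdout, data) // &, quotes and angle brackets are escaped
}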
@@ -203,11 +260,13 @@ func (f *Fetcher) Print(feedTmpl *DigestFeed) (string, error) {
     return w.String(), nil
 }

-func (f *Fetcher) FetchAll(urls []string, lastDigest *time.Time) (string, error) {
+type MsgBody struct {
+    Html string
+    Text string
+}
+
+func (f *Fetcher) FetchAll(urls []string, lastDigest *time.Time) (*MsgBody, error) {
     fp := gofeed.NewParser()
-    fp.Client = &http.Client{
-        Transport: &UserAgentTransport{http.DefaultTransport},
-    }
     feeds := &DigestFeed{}

     for _, url := range urls {
@@ -224,18 +283,26 @@ func (f *Fetcher) FetchAll(urls []string, lastDigest *time.Time) (string, error)
     }

     if len(feeds.Feeds) == 0 {
-        return "", fmt.Errorf("%w, skipping", ErrNoRecentArticles)
+        return nil, fmt.Errorf("%w, skipping", ErrNoRecentArticles)
     }

-    str, err := f.Print(feeds)
+    text, err := f.PrintText(feeds)
     if err != nil {
-        return "", nil
+        return nil, err
+    }
+
+    html, err := f.PrintHtml(feeds)
+    if err != nil {
+        return nil, err
     }

-    return str, nil
+    return &MsgBody{
+        Text: text,
+        Html: html,
+    }, nil
 }

-func (f *Fetcher) SendEmail(username, email, msg string) error {
+func (f *Fetcher) SendEmail(username, email string, msg *MsgBody) error {
     if email == "" {
         return fmt.Errorf("(%s) does not have an email associated with their feed post", username)
     }
@@ -244,12 +311,9 @@ func (f *Fetcher) SendEmail(username, email, msg string) error {
     subject := "feeds.sh daily digest"
     to := mail.NewEmail(username, email)

-    plainTextContent := msg
-    htmlContent := msg
-
-    f.cfg.Logger.Infof("message body (%s)", plainTextContent)
+    // f.cfg.Logger.Infof("message body (%s)", plainTextContent)

-    message := mail.NewSingleEmail(from, subject, to, plainTextContent, htmlContent)
+    message := mail.NewSingleEmail(from, subject, to, msg.Text, msg.Html)
     client := sendgrid.NewSendClient(f.cfg.SendgridKey)

     f.cfg.Logger.Infof("(%s) sending email digest", username)
@@ -258,7 +322,7 @@ func (f *Fetcher) SendEmail(username, email, msg string) error {
         return err
     }

-    f.cfg.Logger.Infof("(%s) email digest response: %v", username, response)
+    // f.cfg.Logger.Infof("(%s) email digest response: %v", username, response)

     if len(response.Headers["X-Message-Id"]) > 0 {
         f.cfg.Logger.Infof(
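With both renderings in hand, SendEmail hands SendGrid a single multipart message carrying a text/plain and a text/html part. A minimal, self-contained sketch of that send; the sender, recipient, and API-key handling here are assumptions, not taken from the commit:

// Sketch of sending a text + HTML digest with the SendGrid helpers used above.
package main

import (
    "log"
    "os"

    "github.com/sendgrid/sendgrid-go"
    "github.com/sendgrid/sendgrid-go/helpers/mail"
)

type MsgBody struct {
    Html string
    Text string
}

func main() {
    msg := &MsgBody{
        Text: "feeds.sh daily digest\n\nMy Feed\nhttps://example.com/post\n",
        Html: `<h2><a href="https://example.com/post">My Feed</a></h2>`,
    }

    from := mail.NewEmail("feeds.sh", "digest@example.com") // assumed sender
    to := mail.NewEmail("erock", "erock@example.com")       // assumed recipient
    subject := "feeds.sh daily digest"

    // NewSingleEmail builds a multipart message: text/plain plus text/html.
    message := mail.NewSingleEmail(from, subject, to, msg.Text, msg.Html)

    client := sendgrid.NewSendClient(os.Getenv("SENDGRID_API_KEY"))
    response, err := client.Send(message)
    if err != nil {
        log.Fatal(err)
    }
    log.Println(response.StatusCode)
}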
+10, -9
@@ -1,15 +1,16 @@
-feeds.sh daily digest
-
 {{range .Feeds}}
-    {{.Title}}
-    {{.Link}}
-    {{.Description}}
+<div style="margin-bottom: 10px;">
+    <h2 style="margin-bottom: 3px;"><a href="{{.Link}}">{{.Title}}</a></h2>
+    <div>{{.Description}}</div>
+</div>

+<div>
 {{range .Items}}
-    {{.Title}}
-    {{.Link}}
+    <div>
+        <a href="{{.Link}}">{{.Title}}</a>
+    </div>
 {{end}}
+</div>

-    ---
-
+<hr style="margin: 10px 0;" />
 {{end}}
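Both digest templates range over .Feeds and, within each feed, over .Items. A sketch of the data shape they expect, rendered with html/template; field names not visible in this diff (for example Feed.Items) are assumptions based on the template, and the template here is an inlined, trimmed copy for the demo:

// Sketch of the data the digest templates iterate over.
package main

import (
    html "html/template"
    "os"
)

type FeedItem struct {
    Title   string
    Link    string
    Content string
}

type Feed struct {
    Title       string
    Link        string
    Description string
    Items       []*FeedItem
}

type DigestFeed struct {
    Feeds []*Feed
}

const digestTmpl = `{{range .Feeds}}
<h2><a href="{{.Link}}">{{.Title}}</a></h2>
<div>{{.Description}}</div>
{{range .Items}}<div><a href="{{.Link}}">{{.Title}}</a></div>
{{end}}{{end}}`

func main() {
    feeds := &DigestFeed{Feeds: []*Feed{{
        Title:       "Example Blog",
        Link:        "https://example.com",
        Description: "posts about examples",
        Items: []*FeedItem{
            {Title: "First post", Link: "https://example.com/first"},
            {Title: "Second post", Link: "https://example.com/second"},
        },
    }}}

    ts := html.Must(html.New("digest").Parse(digestTmpl))
    if err := ts.Execute(os.Stdout, feeds); err != nil {
        panic(err)
    }
}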
+13, -0
@@ -0,0 +1,13 @@
+{{range .Feeds}}
+    {{.Title}}
+    {{.Link}}
+    {{.Description}}
+
+    {{range .Items}}
+        {{.Title}}
+        {{.Link}}
+    {{end}}
+
+    ---
+
+{{end}}