[mirror] yet another tui rss reader github.com/olexsmir/smutok

fix: change the way stream content is parsed

olexsmir.xyz 47b0d336 54180a9d

Changed files
+7 -17
+5 -15
internal/freshrss/client.go
···
 	Published     int64
 	Title         string
 	Author        string
-	Canonical     []string
+	Href          string
 	Content       string
 	Categories    []string
 	TimestampUsec string
-	Origin        struct {
-		HTMLURL  string
-		StreamID string
-		Title    string
-	}
+	FeedID        string
 }

 type StreamContents struct {
···
 	ci.ID = item.Get("id").String()
 	ci.Title = item.Get("title").String()
 	ci.Published = item.Get("published").Int()
-	ci.Author = item.Get("author").String()
+	ci.Author = item.Get("origin.title").String()
 	ci.Content = item.Get("summary.content").String()
-	ci.Origin.StreamID = item.Get("origin.streamId").String()
-	ci.Origin.HTMLURL = item.Get("origin.htmlUrl").String()
-	ci.Origin.Title = item.Get("origin.title").String()
+	ci.Href = item.Get("alternate|0.href").String()
+	ci.FeedID = item.Get("origin.streamId").String()
 	ci.TimestampUsec = item.Get("timestampUsec").String()

-	for _, href := range item.Get("canonical.#.href").Array() {
-		if h := href.String(); h != "" {
-			ci.Canonical = append(ci.Canonical, h)
-		}
-	}
 	for _, cat := range item.Get("categories").Array() {
 		ci.Categories = append(ci.Categories, cat.String())
 	}
···
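
The item.Get calls above look like tidwall/gjson path lookups; assuming that, the short standalone sketch below (not part of the repo) shows how the new paths resolve against a Google Reader-style item of the kind FreshRSS serves. The JSON is an illustrative stand-in, not captured output.

// Illustrative sketch: how the new gjson lookups behave on a Google
// Reader-style item. The sample JSON and values are made up for demonstration.
package main

import (
	"fmt"

	"github.com/tidwall/gjson"
)

func main() {
	item := gjson.Parse(`{
		"id": "tag:google.com,2005:reader/item/000000000000002a",
		"title": "Example post",
		"published": 1718000000,
		"timestampUsec": "1718000000000000",
		"summary": {"content": "<p>body</p>"},
		"alternate": [{"href": "https://example.com/post", "type": "text/html"}],
		"origin": {
			"streamId": "feed/https://example.com/rss",
			"title": "Example Feed",
			"htmlUrl": "https://example.com"
		},
		"categories": ["user/-/state/com.google/reading-list"]
	}`)

	fmt.Println(item.Get("origin.title").String())     // "Example Feed": the feed title now fills Author
	fmt.Println(item.Get("alternate|0.href").String()) // first alternate link now fills Href
	fmt.Println(item.Get("origin.streamId").String())  // flat FeedID replaces the nested Origin struct
}
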
+2 -2
internal/freshrss/sync.go
···
 	var errs []error
 	for _, item := range items {
-		if err := f.store.UpsertArticle(ctx, item.TimestampUsec, item.Origin.StreamID, item.Title, item.Content, item.Author, item.Origin.HTMLURL, int(item.Published)); err != nil {
+		if err := f.store.UpsertArticle(ctx, item.TimestampUsec, item.FeedID, item.Title, item.Content, item.Author, item.Href, int(item.Published)); err != nil {
 			errs = append(errs, err)
 		}
 	}
···
 	var errs []error
 	for _, item := range items {
-		if err := f.store.UpsertArticle(ctx, item.TimestampUsec, item.Origin.StreamID, item.Title, item.Content, item.Author, item.Origin.HTMLURL, int(item.Published)); err != nil {
+		if err := f.store.UpsertArticle(ctx, item.TimestampUsec, item.FeedID, item.Title, item.Content, item.Author, item.Href, int(item.Published)); err != nil {
 			errs = append(errs, err)
 		}
 	}
···
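
The UpsertArticle signature itself is not part of this diff; inferring from the call sites above, it presumably looks roughly like the sketch below. Parameter names and the interface name are assumptions for illustration, not the repo's actual store package.

// Hypothetical reconstruction of the store method implied by the call sites
// in sync.go; ordering and types are inferred from the arguments passed there
// and may not match the actual internal/store code.
package store

import "context"

type ArticleUpserter interface {
	UpsertArticle(
		ctx context.Context,
		id string,      // item.TimestampUsec
		feedID string,  // item.FeedID (was item.Origin.StreamID)
		title string,   // item.Title
		content string, // item.Content
		author string,  // item.Author (now the feed/origin title)
		url string,     // item.Href (was item.Origin.HTMLURL)
		published int,  // int(item.Published)
	) error
}
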