package main

import (
	"os"
	"strconv"
	"strings"
	"sync"
	"unicode/utf8"

	"github.com/mmcdole/gofeed"
	log "github.com/sirupsen/logrus"
	"golang.org/x/exp/slices"
)
// Feed is a single feed entry from the configuration: a human-readable name
// and the URL the feed is fetched from.
type Feed struct {
	Name string
	URL  string
}

// wg tracks in-flight feed updates when all feeds are updated at once;
// isAllUpdate tells UpdateFeed whether it needs to signal the WaitGroup.
var wg sync.WaitGroup
var isAllUpdate bool

// newArticleDirectory is the subdirectory under the feed directory that holds
// symlinks to newly downloaded articles.
const newArticleDirectory = "new"

// maxFileNameLength caps article file names at the usual filesystem limit.
const maxFileNameLength = 255
// truncateString shortens s to at most n bytes without splitting a multi-byte
// UTF-8 sequence.
func truncateString(s string, n int) string {
	if len(s) <= n {
		return s
	}
	for !utf8.ValidString(s[:n]) {
		n--
	}
	return s[:n]
}
// InitialiseNewArticleDirectory clears and recreates the directory that holds
// symlinks to newly downloaded articles.
func InitialiseNewArticleDirectory() {
	DeleteFeedFiles(newArticleDirectory)
	os.MkdirAll(Config.FeedDirectory+"/"+newArticleDirectory, 0755)
}

// DeleteFeedFiles removes the named feed's directory and everything in it.
func DeleteFeedFiles(name string) {
	os.RemoveAll(Config.FeedDirectory + "/" + name)
}
// UpdateFeed fetches the named feed, writes each previously unseen article to
// a file under the feed's directory, and symlinks new articles into the "new"
// directory. If deleteFiles is true, the feed's directory is wiped first. When
// called from UpdateAllFeeds, it signals the WaitGroup on completion.
func UpdateFeed(name string, deleteFiles bool) {
	log.Info("Updating feed '" + name + "'")
	fp := gofeed.NewParser()
	downloadCount := 0
	skipCount := 0

	// Look up the configured URL for this feed by name and fetch it.
	feedIndex := slices.IndexFunc(Config.Feeds, func(f Feed) bool { return f.Name == name })
	feed, err := fp.ParseURL(Config.Feeds[feedIndex].URL)
	if err != nil {
		log.Error("Failed to fetch the feed '" + name + "'")
		if isAllUpdate {
			wg.Done()
		}
		return
	}

	if deleteFiles {
		DeleteFeedFiles(name)
	}
	os.MkdirAll(Config.FeedDirectory+"/"+name, 0777)

	for _, item := range feed.Items {
		// Article file names are the item title with path separators stripped,
		// truncated to the file name length limit.
		articlePath := Config.FeedDirectory + "/" + name + "/" + truncateString(strings.ReplaceAll(item.Title, "/", ""), maxFileNameLength)
		if _, err := os.Stat(articlePath); err == nil {
			log.Debug("Article " + articlePath + " already exists - skipping download")
			skipCount++
			continue
		}

		file, err := os.Create(articlePath)
		if err != nil {
			log.Error("Failed to create a file for article titled '" + item.Title + "'")
			continue
		}
		_, err = file.WriteString(item.Description + "\n" + item.Link + "\n" + item.Published + "\n" + item.Content)
		// Close explicitly rather than deferring, so file handles are not held
		// open until the whole feed has been processed.
		file.Close()
		if err != nil {
			log.Error("Failed to write content to a file for article titled '" + item.Title + "'")
			continue
		}
		downloadCount++

		// Link the freshly downloaded article into the "new" directory.
		newLinkPath := Config.FeedDirectory + "/" + newArticleDirectory + "/" + truncateString(strings.ReplaceAll(item.Title, "/", ""), maxFileNameLength)
		err = os.Symlink(articlePath, newLinkPath)
		if err != nil {
			log.Error("Could not create symlink for newly downloaded article " + articlePath)
		}
	}

	log.Info(strconv.Itoa(downloadCount) + " articles fetched from feed '" + name + "' (" + strconv.Itoa(skipCount) + " already seen, " + strconv.Itoa(len(feed.Items)) + " total in feed)")
	if isAllUpdate {
		wg.Done()
	}
}
// UpdateAllFeeds updates every configured feed concurrently and blocks until
// all updates have finished.
func UpdateAllFeeds(deleteFiles bool) {
	isAllUpdate = true
	for _, feed := range Config.Feeds {
		wg.Add(1)
		go UpdateFeed(feed.Name, deleteFiles)
	}
	wg.Wait()
}