/
hathor.go
78 lines (68 loc) · 1.65 KB
/
hathor.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
package main
import (
"fmt"
rss "github.com/jteeuwen/go-pkg-rss"
"io"
"net/http"
"os"
"time"
)
// download fetches the audio file for episode into episode.FilePath,
// skipping the fetch when the file is already present on disk, then
// registers the episode with the served feed via UpdateFeed.
//
// The fetch happens before the destination file is created: the cache
// check is a bare os.Stat, so an empty or partial file left behind by a
// failed attempt would otherwise be mistaken for a finished download on
// every later run.
func download(episode Episode) {
	if err := os.MkdirAll(episode.DirPath, 0755); err != nil {
		fmt.Printf("[e] Couldn't make dir: %s\n -> %s\n", episode.DirPath, err)
		return
	}
	// Only download if it's not already cached
	// TODO: Is there a way to continue a download if it gets interrupted?
	if _, err := os.Stat(episode.FilePath); os.IsNotExist(err) {
		fmt.Printf("%s - Downloading '%s'\n", episode.Key, episode.Title)
		// Get the data first so a failed request leaves no file behind
		resp, err := http.Get(episode.Url)
		if err != nil {
			fmt.Println(err)
			return
		}
		defer resp.Body.Close()
		// Don't save an error page (404 etc.) as an episode
		if resp.StatusCode != http.StatusOK {
			fmt.Printf("[e] %s - Unexpected status '%s' for %s\n", episode.Key, resp.Status, episode.Url)
			return
		}
		// Create the file
		out, err := os.Create(episode.FilePath)
		if err != nil {
			fmt.Println(err)
			return
		}
		defer out.Close()
		// Write the body to the file; drop the partial file on failure
		// so the next run retries the download instead of skipping it
		if _, err := io.Copy(out, resp.Body); err != nil {
			fmt.Println(err)
			os.Remove(episode.FilePath)
			return
		}
		fmt.Printf("%s - Finished '%s'\n", episode.Key, episode.Title)
	}
	UpdateFeed(episode)
}
// main loads the configuration, starts one polling goroutine per
// configured RSS feed, starts the episode downloader, and then blocks
// serving the generated feeds over HTTP.
func main() {
	config, err := GetConfig()
	if err != nil {
		fmt.Printf("[e] %s\n", err)
		return
	}
	for key := range config {
		// Pass key into the goroutine explicitly: before Go 1.22 the
		// loop variable is shared across iterations, so capturing it
		// in the closure would make every goroutine poll whichever
		// feed the loop reached last.
		go func(key, uri string, timeout int) {
			rssfeed := NewRssFeed(key, config[key])
			feed := rss.New(timeout, true, rssfeed.Channels, rssfeed.Items)
			// Poll forever, sleeping for the interval the feed suggests;
			// bail out of this feed on the first fetch error.
			for {
				if err := feed.Fetch(uri, nil); err != nil {
					fmt.Printf("[e] %s: %s\n", err, uri)
					return
				}
				update := feed.SecondsTillUpdate()
				fmt.Printf("%s - Updating again in %d seconds\n", key, update)
				<-time.After(time.Duration(update) * time.Second)
			}
		}(key, config[key].Source, 5)
	}
	// Wait for episodes to arrive and download 'em
	go ProcessEpisodes(download)
	ServeFeeds()
}