1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
|
package nvdmirror
import (
"github.com/rs/zerolog/log"
"path/filepath"
)
// Sync fetches the configured NVD meta files, compares them against the
// local cache, downloads any feeds whose size/hash changed, and writes the
// results to dstDir.  It returns the base names of the files that were
// actually updated (empty slice when everything was up to date).
//
// Fetch/check work is fanned out to goroutines via the sync context; each
// stage reads exactly as many results from ctx.ch as it started, so no
// goroutines are leaked.
func Sync(config SyncConfig, cache Cache, dstDir string) []string {
	// build sync context (shared result channel + destination directory)
	ctx := newSyncContext(config, cache, dstDir)

	// map of meta URL -> full feed URL
	metaUrls := config.getMetaUrls()

	// stage 1: fetch all meta URLs concurrently
	for metaUrl := range metaUrls {
		log.Debug().Str("url", metaUrl).Msg("init")
		go ctx.fetch(metaUrl)
	}

	// meta URLs whose contents changed and need a size/hash check
	checkUrls := make([]string, 0, len(metaUrls))

	// collect one result per meta fetch
	for range metaUrls {
		r := <-ctx.ch
		sl := log.With().Str("url", r.fetch.src).Logger()

		if r.fetch.err != nil {
			// URL error
			sl.Error().Err(r.fetch.err).Send()
		} else if !r.fetch.modified {
			// URL not modified since last sync
			sl.Debug().Msg("not modified")
		} else {
			// URL updated
			sl.Debug().Msg("update")

			// conditional-request headers for the next sync
			headers := map[string]string{
				"if-none-match":     r.fetch.headers.Get("etag"),
				"if-modified-since": r.fetch.headers.Get("last-modified"),
			}

			// save headers to cache; log the cache error (not the
			// fetch error, which is nil in this branch)
			if err := cache.Set(r.fetch.src, headers); err != nil {
				sl.Error().Err(err).Msg("cache.Set")
			} else {
				// queue for size/hash verification
				checkUrls = append(checkUrls, r.fetch.src)
			}
		}
	}

	// stage 2: verify size and hash for each updated meta
	logArray("checkUrls", checkUrls)
	for _, metaUrl := range checkUrls {
		go ctx.check(metaUrl, metaUrls[metaUrl])
	}

	// full feed URLs that need to be downloaded; the CPE dictionary has
	// no meta file, so it is always synced
	syncUrls := make([]string, 0, len(metaUrls))
	syncUrls = append(syncUrls, config.GetCpeDictUrl())

	// collect one result per check
	for range checkUrls {
		r := <-ctx.ch

		// create sublogger
		sl := log.With().
			Str("metaUrl", r.check.metaUrl).
			Str("metaPath", r.check.metaPath).
			Str("fullPath", r.check.fullPath).
			Logger()

		if r.check.err != nil {
			sl.Error().Err(r.check.err).Send()
		} else if r.check.match {
			// local file already matches the meta's size/hash
			sl.Debug().Msg("match")
		} else {
			syncUrls = append(syncUrls, metaUrls[r.check.metaUrl])
		}
	}

	// stage 3: download the out-of-date feeds concurrently
	logArray("syncUrls", syncUrls)
	for _, fullUrl := range syncUrls {
		go ctx.fetch(fullUrl)
	}

	// base names of files that changed on disk
	changed := make([]string, 0, len(syncUrls))

	// collect one result per feed fetch
	for range syncUrls {
		r := <-ctx.ch

		// build sublogger
		sl := log.With().Str("url", r.fetch.src).Logger()

		if r.fetch.err != nil {
			sl.Error().Err(r.fetch.err).Send()
		} else if !r.fetch.modified {
			sl.Debug().Msg("not modified")
		} else {
			sl.Debug().Msg("update")

			// conditional-request headers for the next sync
			headers := map[string]string{
				"if-none-match":     r.fetch.headers.Get("etag"),
				"if-modified-since": r.fetch.headers.Get("last-modified"),
			}

			// save headers to cache; log the cache error (not the
			// fetch error, which is nil in this branch)
			if err := cache.Set(r.fetch.src, headers); err != nil {
				sl.Error().Err(err).Msg("cache.Set")
			} else {
				// record the changed file's base name
				changed = append(changed, filepath.Base(r.fetch.src))
			}
		}
	}

	// log changed files
	logArray("changed", changed)

	// return list of changed files
	return changed
}
|