author    Paul Duncan <pabs@pablotron.org>  2022-03-19 00:17:22 -0400
committer Paul Duncan <pabs@pablotron.org>  2022-03-19 00:17:22 -0400
commit    6d113c1ac4001ecb97ff430a264b7232324b0e26 (patch)
tree      bba4cf3276ee21e05f750480c9586fb0bc9a54ea
parent    bc29bd5fa5e3c640c8a66d5428fd5e1ca60c4a26 (diff)
nvdmirror: use Update internally and return Update from Sync()
-rw-r--r--  nvdmirror/sync.go        |  8
-rw-r--r--  nvdmirror/syncconfig.go  | 22
-rw-r--r--  nvdmirror/synccontext.go | 65
3 files changed, 55 insertions(+), 40 deletions(-)
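
The Update type itself is not part of this diff. A minimal sketch consistent with the fields and constants referenced below (the constant names and field names appear in the diff; the exact definition is an assumption):

  // UpdateType identifies which mirrored artifact changed.
  type UpdateType int

  const (
    UpdateCveYear  UpdateType = iota // yearly CVE feed (or extra feeds via Meta)
    UpdateCpeMatch                   // CPE match feed
    UpdateCpeDict                    // CPE dictionary
    UpdateCisaKevc                   // CISA known exploited vulnerabilities catalog
    UpdateCweList                    // CWE list
  )

  // Update describes a single mirrored file which may have changed.
  type Update struct {
    Type UpdateType // kind of artifact
    Year int        // feed year (yearly CVE feeds only)
    Meta string     // extra CVE feed name (assumption: e.g. "modified")
    Url  string     // full content URL
    Path string     // base name of fetched file in dstDir (set by syncUrls)
  }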
diff --git a/nvdmirror/sync.go b/nvdmirror/sync.go
index c7a82c1..65a23cf 100644
--- a/nvdmirror/sync.go
+++ b/nvdmirror/sync.go
@@ -37,7 +37,7 @@ package nvdmirror
// * user-agent: User agent from config, or the default user agent if
// the config value is unspecified.
//
-func Sync(config SyncConfig, cache Cache, dstDir string) []string {
+func Sync(config SyncConfig, cache Cache, dstDir string) []Update {
// build sync context
ctx := newSyncContext(config, cache, dstDir)
@@ -45,8 +45,8 @@ func Sync(config SyncConfig, cache Cache, dstDir string) []string {
// fetch updated files, and then return a list of changed files
return ctx.syncUrls(append(
ctx.checkMetas(ctx.fetchMetas()),
- config.GetCpeDictUrl(),
- config.GetCisaKevcUrl(),
- config.GetCweListUrl(),
+ Update { Type: UpdateCpeDict, Url: config.GetCpeDictUrl() },
+ Update { Type: UpdateCisaKevc, Url: config.GetCisaKevcUrl() },
+ Update { Type: UpdateCweList, Url: config.GetCweListUrl() },
))
}
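
With Sync() returning []Update, a caller can act on structured results instead of parsing file names. A hypothetical usage sketch (the import path is an assumption, and a nil Cache is declared only to keep the snippet compiling; substitute a real Cache implementation):

  package main

  import (
    "fmt"

    "cvez/nvdmirror" // import path assumed
  )

  func main() {
    var config nvdmirror.SyncConfig // zero value: built-in default URLs
    var cache nvdmirror.Cache       // substitute a real Cache implementation

    // fetch changed files into /tmp/nvdmirror and report each update
    for _, u := range nvdmirror.Sync(config, cache, "/tmp/nvdmirror") {
      fmt.Printf("type=%v year=%d url=%s path=%s\n", u.Type, u.Year, u.Url, u.Path)
    }
  }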
diff --git a/nvdmirror/syncconfig.go b/nvdmirror/syncconfig.go
index e80e12e..fb8136b 100644
--- a/nvdmirror/syncconfig.go
+++ b/nvdmirror/syncconfig.go
@@ -104,32 +104,38 @@ func (me SyncConfig) GetCweListUrl() string {
}
}
-// get meta URL map.
-func (me SyncConfig) getMetaUrls() map[string]string {
+// get map of meta URLs to possible meta updates.
+func (me SyncConfig) getMetas() map[string]Update {
// calculate total number of years
numYears := time.Now().Year() - baseYear + 1
- r := make(map[string]string)
+ r := make(map[string]Update)
// fetch cve feed metas
for i := 0; i < numYears; i++ {
- metaUrl := me.GetCveYearUrl(baseYear + i, "meta")
- feedUrl := me.GetCveYearUrl(baseYear + i, "json.gz")
- r[metaUrl] = feedUrl
+ year := baseYear + i
+ metaUrl := me.GetCveYearUrl(year, "meta")
+ feedUrl := me.GetCveYearUrl(year, "json.gz")
+
+ r[metaUrl] = Update {
+ Type: UpdateCveYear,
+ Year: baseYear + i,
+ Url: feedUrl,
+ }
}
// fetch cve extra file metas
for _, s := range(cveExtraFiles) {
metaUrl := me.GetCveUrl(s, "meta")
feedUrl := me.GetCveUrl(s, "json.gz")
- r[metaUrl] = feedUrl
+ r[metaUrl] = Update { Type: UpdateCveYear, Meta: s, Url: feedUrl }
}
{
// add cpe match
metaUrl := me.GetCpeMatchUrl("meta")
feedUrl := me.GetCpeMatchUrl("json.gz")
- r[metaUrl] = feedUrl
+ r[metaUrl] = Update { Type: UpdateCpeMatch, Url: feedUrl }
}
// return map
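
For a given feed year, getMetas() now maps each meta URL to an Update carrying the matching feed URL. An illustrative entry (URLs are assumptions based on the NVD 1.1 feed layout; the actual values come from the config getters above):

  r["https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-2022.meta"] = Update{
    Type: UpdateCveYear,
    Year: 2022,
    Url:  "https://nvd.nist.gov/feeds/json/cve/1.1/nvdcve-1.1-2022.json.gz",
  }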
diff --git a/nvdmirror/synccontext.go b/nvdmirror/synccontext.go
index a53804e..9578d0d 100644
--- a/nvdmirror/synccontext.go
+++ b/nvdmirror/synccontext.go
@@ -249,24 +249,24 @@ func (me syncContext) check(metaUrl, fullUrl string) {
}
// Fetch updated meta files and get a map of updated meta files to their
-// corresponding full content URL.
+// corresponding update.
//
// Note: This function uses the syncContext member channel and
// goroutines to fetch all meta URLS concurrently.
-func (me syncContext) fetchMetas() map[string]string {
- ret := make(map[string]string)
+func (me syncContext) fetchMetas() map[string]Update {
+ ret := make(map[string]Update)
- // get map of meta URLs to full URLs.
- metaUrls := me.config.getMetaUrls()
+ // get map of meta URLs to meta updates
+ metas := me.config.getMetas()
// fetch meta URLs
- for metaUrl, _ := range(metaUrls) {
+ for metaUrl, _ := range(metas) {
log.Debug().Str("url", metaUrl).Msg("init")
go me.fetch(metaUrl)
}
// read meta results
- for range(metaUrls) {
+ for range(metas) {
r := <-me.ch
sl := log.With().Str("url", r.fetch.src).Logger()
@@ -280,7 +280,7 @@ func (me syncContext) fetchMetas() map[string]string {
sl.Error().Err(err).Msg("saveHeaders")
} else {
// add to result
- ret[r.fetch.src] = metaUrls[r.fetch.src]
+ ret[r.fetch.src] = metas[r.fetch.src]
}
}
@@ -289,18 +289,18 @@ func (me syncContext) fetchMetas() map[string]string {
}
// Check compare file size and hash in updated metadata files. Returns
-// an array of URLs that should be updated.
+// an array of potential updates.
//
// Note: This function uses the syncContext member channel and
// goroutines to check the size and hash of all files concurrently.
-func (me syncContext) checkMetas(checks map[string]string) []string {
+func (me syncContext) checkMetas(checks map[string]Update) []Update {
// build list of URLs to sync
// (include one extra slot for cpedict)
- syncUrls := make([]string, 0, len(checks) + 1)
+ updates := make([]Update, 0, len(checks))
// check size and hash in updated metas concurrently
- for metaUrl, fullUrl := range(checks) {
- go me.check(metaUrl, fullUrl)
+ for metaUrl, update := range(checks) {
+ go me.check(metaUrl, update.Url)
}
for range(checks) {
@@ -318,32 +318,37 @@ func (me syncContext) checkMetas(checks map[string]string) []string {
} else if r.check.match {
sl.Debug().Msg("match")
} else {
- // append list of full URLs to sync
- syncUrls = append(syncUrls, checks[r.check.metaUrl])
+ // append list of updates
+ updates = append(updates, checks[r.check.metaUrl])
}
}
- // return results
- return syncUrls
+ // return updates
+ return updates
}
-// Fetch full URLs. Returns an array of files in destination directory
-// that have changed.
+// Conditionally fetch full URLs. Returns an array of updates.
//
// Note: This function uses the syncContext member channel and
// goroutines to fetch URLs concurrently.
-func (me syncContext) syncUrls(urls []string) []string {
- // build list of changed files
- changed := make([]string, 0, len(urls))
+func (me syncContext) syncUrls(checks []Update) []Update {
+ // build list of changes
+ changed := make([]Update, 0, len(checks))
+
+ // build map of full urls to updates
+ urls := make(map[string]Update)
+ for _, update := range(checks) {
+ urls[update.Url] = update
+ }
// fetch URLs concurrently
- logArray("syncUrls", urls)
- for _, url := range(urls) {
- go me.fetch(url)
+ // logArray("syncUpdates", checks)
+ for _, update := range(checks) {
+ go me.fetch(update.Url)
}
// read sync results
- for range(urls) {
+ for range(checks) {
r := <-me.ch
// build sublogger
sl := log.With().Str("url", r.fetch.src).Logger()
@@ -355,8 +360,12 @@ func (me syncContext) syncUrls(urls []string) []string {
} else if err := saveHeaders(me.cache, r.fetch); err != nil {
sl.Error().Err(err).Msg("cache.Set")
} else {
- // append to list of changed files
- changed = append(changed, filepath.Base(r.fetch.src))
+ // get update, set path
+ update := urls[r.fetch.src]
+ update.Path = filepath.Base(r.fetch.src)
+
+ // append to result
+ changed = append(changed, update)
}
}
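
Since each returned Update now carries its Path, a consumer of Sync() can dispatch on artifact type rather than matching file names. A hedged fragment using the names from this diff (updates is the slice returned by Sync()):

  for _, u := range updates {
    switch u.Type {
    case UpdateCveYear:
      fmt.Printf("CVE feed %d updated: %s\n", u.Year, u.Path)
    case UpdateCpeDict:
      fmt.Printf("CPE dictionary updated: %s\n", u.Path)
    default:
      fmt.Printf("updated: %s\n", u.Path)
    }
  }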