diff options
author | Paul Duncan <pabs@pablotron.org> | 2022-02-24 00:52:19 -0500 |
---|---|---|
committer | Paul Duncan <pabs@pablotron.org> | 2022-02-24 00:52:19 -0500 |
commit | 5fa2cbc1cd4c2ccbdf8897fcf9b5263ab050fbf6 (patch) | |
tree | 69405b68c3c183bba4df0a08f9cc0e701441e8b0 /nvdmirror/synccontext.go | |
parent | f09a9126c43d00c31b2e5bc9a4fc812b33e4475c (diff) | |
download | cvez-5fa2cbc1cd4c2ccbdf8897fcf9b5263ab050fbf6.tar.bz2 cvez-5fa2cbc1cd4c2ccbdf8897fcf9b5263ab050fbf6.zip |
nvdmirror: break Sync() into logical chunks, move into syncContext
Diffstat (limited to 'nvdmirror/synccontext.go')
-rw-r--r-- | nvdmirror/synccontext.go | 116 |
1 file changed, 116 insertions, 0 deletions
diff --git a/nvdmirror/synccontext.go b/nvdmirror/synccontext.go
index efc2f28..a53804e 100644
--- a/nvdmirror/synccontext.go
+++ b/nvdmirror/synccontext.go
@@ -247,3 +247,119 @@ func (me syncContext) check(metaUrl, fullUrl string) {
  r.check.match = (bytes.Compare(m.Sha256[:], fh[:]) == 0)
  me.ch <- r
}

// fetchMetas fetches the configured meta URLs and returns a map of
// each modified meta URL to its corresponding full content URL.
//
// Meta URLs that error, that are unmodified, or whose response headers
// cannot be cached are logged and omitted from the result.
//
// Note: this method uses the syncContext member channel and one
// goroutine per meta URL to fetch all meta URLs concurrently.
func (me syncContext) fetchMetas() map[string]string {
  ret := make(map[string]string)

  // get map of meta URLs to full URLs
  metaUrls := me.config.getMetaUrls()

  // fetch meta URLs concurrently
  // (fix: drop redundant blank identifier and range parens; gofmt-clean)
  for metaUrl := range metaUrls {
    log.Debug().Str("url", metaUrl).Msg("init")
    go me.fetch(metaUrl)
  }

  // read exactly one result per requested meta URL
  for range metaUrls {
    r := <-me.ch
    sl := log.With().Str("url", r.fetch.src).Logger()

    if r.fetch.err != nil {
      // URL error
      sl.Error().Err(r.fetch.err).Send()
    } else if !r.fetch.modified {
      // URL not modified since last sync
      sl.Debug().Msg("not modified")
    } else if err := saveHeaders(me.cache, r.fetch); err != nil {
      // could not persist response headers
      sl.Error().Err(err).Msg("saveHeaders")
    } else {
      // modified and cached: add full URL to result
      ret[r.fetch.src] = metaUrls[r.fetch.src]
    }
  }

  // return map of modified meta URL -> full URL
  return ret
}

// checkMetas compares the file size and hash recorded in each updated
// meta file against the cached full file. Returns an array of full
// content URLs that should be re-synced.
//
// Note: this method uses the syncContext member channel and one
// goroutine per meta file to check all sizes and hashes concurrently.
+func (me syncContext) checkMetas(checks map[string]string) []string { + // build list of URLs to sync + // (include one extra slot for cpedict) + syncUrls := make([]string, 0, len(checks) + 1) + + // check size and hash in updated metas concurrently + for metaUrl, fullUrl := range(checks) { + go me.check(metaUrl, fullUrl) + } + + for range(checks) { + r := <-me.ch + + // create sublogger + sl := log.With(). + Str("metaUrl", r.check.metaUrl). + Str("metaPath", r.check.metaPath). + Str("fullPath", r.check.fullPath). + Logger() + + if r.check.err != nil { + sl.Error().Err(r.check.err).Send() + } else if r.check.match { + sl.Debug().Msg("match") + } else { + // append list of full URLs to sync + syncUrls = append(syncUrls, checks[r.check.metaUrl]) + } + } + + // return results + return syncUrls +} + +// Fetch full URLs. Returns an array of files in destination directory +// that have changed. +// +// Note: This function uses the syncContext member channel and +// goroutines to fetch URLs concurrently. +func (me syncContext) syncUrls(urls []string) []string { + // build list of changed files + changed := make([]string, 0, len(urls)) + + // fetch URLs concurrently + logArray("syncUrls", urls) + for _, url := range(urls) { + go me.fetch(url) + } + + // read sync results + for range(urls) { + r := <-me.ch + // build sublogger + sl := log.With().Str("url", r.fetch.src).Logger() + + if r.fetch.err != nil { + sl.Error().Err(r.fetch.err).Send() + } else if !r.fetch.modified { + sl.Debug().Msg("not modified") + } else if err := saveHeaders(me.cache, r.fetch); err != nil { + sl.Error().Err(err).Msg("cache.Set") + } else { + // append to list of changed files + changed = append(changed, filepath.Base(r.fetch.src)) + } + } + + // return results + return changed +} |