diff --git a/src/vunnel/cli/config.py b/src/vunnel/cli/config.py
index d499cb5f..d36b919f 100644
--- a/src/vunnel/cli/config.py
+++ b/src/vunnel/cli/config.py
@@ -28,6 +28,7 @@ class ImportResults:
     host: str = ""
     path: str = __default_path__
     enabled: bool = False
+    skip_newer_archive_check: bool = False
 
     def __post_init__(self) -> None:
         if not self.path:
diff --git a/src/vunnel/provider.py b/src/vunnel/provider.py
index 7af1c500..93d537b7 100644
--- a/src/vunnel/provider.py
+++ b/src/vunnel/provider.py
@@ -67,6 +67,8 @@ class RuntimeConfig:
     existing_results: ResultStatePolicy = ResultStatePolicy.KEEP
     # the format the results should be written in
     result_store: result.StoreStrategy = result.StoreStrategy.FLAT_FILE
+    # skip checks for newer archive if true (always download latest)
+    skip_newer_archive_check: bool = False
 
     import_results_host: Optional[str] = None  # noqa: UP007 - breaks mashumaro
     import_results_path: Optional[str] = None  # noqa: UP007 - breaks mashumaro
@@ -192,8 +194,14 @@ def _fetch_or_use_results_archive(self) -> tuple[list[str], int, datetime.datetime]:
         if not latest_entry:
             raise RuntimeError("no listing entry found")
 
-        if self._has_newer_archive(latest_entry=latest_entry):
+        if self._has_newer_archive(latest_entry=latest_entry) or self.runtime_cfg.skip_newer_archive_check:
+            self.logger.info("fetching latest listing")
             self._prep_workspace_from_listing_entry(entry=latest_entry)
+        else:
+            # Update the timestamp of the state to the latest entry's built time
+            self.logger.info("using existing listing and updating timestamp")
+            self.workspace.state().timestamp = datetime.datetime.fromisoformat(latest_entry.built)
+
         state = self.workspace.state()
         return state.urls, state.result_count(self.workspace.path), state.timestamp
 
@@ -321,7 +329,6 @@ def results_writer(self, **kwargs: Any) -> result.Writer:
 
 
 def _fetch_listing_entry_archive(dest: str, entry: distribution.ListingEntry, logger: logging.Logger) -> str:
-
     archive_path = os.path.join(dest, os.path.basename(urlparse(entry.url, allow_fragments=False).path))
 
     # download the URL for the archive
diff --git a/tests/unit/cli/test_cli.py b/tests/unit/cli/test_cli.py
index bf9a53f5..0f1627e7 100644
--- a/tests/unit/cli/test_cli.py
+++ b/tests/unit/cli/test_cli.py
@@ -138,6 +138,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   amazon:
     max_allowed_alas_http_403: 25
     request_timeout: 125
@@ -154,6 +155,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
     security_advisories:
       '2': https://alas.aws.amazon.com/AL2/alas.rss
       '2022': https://alas.aws.amazon.com/AL2022/alas.rss
@@ -173,11 +175,13 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   common:
     import_results:
       enabled: false
       host: ''
       path: providers/{provider_name}/listing.json
+      skip_newer_archive_check: false
   debian:
     releases:
       bookworm: '12'
@@ -202,6 +206,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   github:
     api_url: https://api.github.com/graphql
    request_timeout: 125
@@ -218,6 +223,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
     token: secret
   mariner:
     allow_versions:
@@ -237,6 +243,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   nvd:
     api_key: secret
     overrides_enabled: false
@@ -255,6 +262,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   oracle:
     request_timeout: 125
     runtime:
@@ -270,6 +278,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   rhel:
     full_sync_interval: 2
     parallelism: 4
@@ -287,6 +296,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
     skip_namespaces:
     - rhel:3
    - rhel:4
@@ -309,6 +319,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   ubuntu:
     additional_versions: {}
     enable_rev_history: true
@@ -329,6 +340,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
   wolfi:
     request_timeout: 125
     runtime:
@@ -344,6 +356,7 @@ def test_config(monkeypatch) -> None:
         retry_count: 3
         retry_delay: 5
       result_store: sqlite
+      skip_newer_archive_check: false
 root: ./data
 """
     assert expected_output.strip() in res.output