From d192eea54c56de32b9818aeaad7de9f9438038bc Mon Sep 17 00:00:00 2001 From: tdruez <489057+tdruez@users.noreply.github.com> Date: Wed, 17 Jan 2024 18:46:54 -0700 Subject: [PATCH] Rename multiple pipelines #1044 (#1053) * Rename multiple pipelines #1044 Signed-off-by: tdruez * Update the documentation with pipelines renaming #1044 Signed-off-by: tdruez * Add backward compatibility support in API and CLI #1044 Signed-off-by: tdruez --------- Signed-off-by: tdruez --- CHANGELOG.rst | 16 +++ docs/automation.rst | 2 +- docs/built-in-pipelines.rst | 56 +++++----- docs/command-line-interface.rst | 2 +- docs/distros-os-images.rst | 19 ++-- docs/faq.rst | 26 +++-- docs/output-files.rst | 4 +- docs/rest-api.rst | 12 +- docs/tutorial_api_analyze_package_archive.rst | 8 +- docs/tutorial_cli_analyze_docker_image.rst | 27 ++--- docs/tutorial_web_ui_analyze_docker_image.rst | 7 +- scanpipe/api/serializers.py | 5 + scanpipe/api/views.py | 1 + scanpipe/apps.py | 21 ++++ scanpipe/management/commands/__init__.py | 8 ++ scanpipe/management/commands/add-pipeline.py | 2 +- .../management/commands/create-project.py | 2 +- .../migrations/0051_rename_pipelines_data.py | 38 +++++++ scanpipe/pipelines/docker.py | 2 +- ...nspect_manifest.py => inspect_packages.py} | 2 +- ...root_filesystems.py => root_filesystem.py} | 0 ...scan_package.py => scan_single_package.py} | 2 +- .../data/alpine_3_15_4_scan_codebase.json | 2 +- .../data/basic-rootfs_root_filesystems.json | 2 +- scanpipe/tests/data/centos_scan_codebase.json | 2 +- .../tests/data/d2d/about_files/expected.json | 2 +- scanpipe/tests/data/debian_scan_codebase.json | 2 +- ...ith-license-alpine.tar.xz-docker-scan.json | 2 +- ...ith-license-debian.tar.xz-docker-scan.json | 2 +- .../tests/data/flume-ng-node-d2d-input.json | 2 +- scanpipe/tests/data/flume-ng-node-d2d.json | 2 +- .../gcr_io_distroless_base_scan_codebase.json | 2 +- .../minitag.tar-expected-scan.json | 2 +- scanpipe/tests/pipes/test_scancode.py | 4 +- scanpipe/tests/test_api.py 
| 103 +++++++++++++----- scanpipe/tests/test_apps.py | 27 +++-- scanpipe/tests/test_commands.py | 53 ++++++--- scanpipe/tests/test_models.py | 23 ++-- scanpipe/tests/test_pipelines.py | 38 +++---- scanpipe/tests/test_views.py | 26 ++--- setup.cfg | 12 +- 41 files changed, 371 insertions(+), 199 deletions(-) create mode 100644 scanpipe/migrations/0051_rename_pipelines_data.py rename scanpipe/pipelines/{inspect_manifest.py => inspect_packages.py} (99%) rename scanpipe/pipelines/{root_filesystems.py => root_filesystem.py} (100%) rename scanpipe/pipelines/{scan_package.py => scan_single_package.py} (99%) diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 01b2bac7d..46b120d83 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,6 +1,22 @@ Changelog ========= +Unreleased +---------- + +- Rename multiple pipelines for consistency and precision: + * docker: analyze_docker_image + * root_filesystems: analyze_root_filesystem_or_vm_image + * docker_windows: analyze_windows_docker_image + * inspect_manifest: inspect_packages + * deploy_to_develop: map_deploy_to_develop + * scan_package: scan_single_package + + A data migration is included to facilitate the migration of existing data. + Only the new names are available in the web UI but the REST API and CLI are backward + compatible with the old names. 
+ https://github.com/nexB/scancode.io/issues/1044 + v33.0.0 (2024-01-16) -------------------- diff --git a/docs/automation.rst b/docs/automation.rst index 612509840..86b8d5125 100644 --- a/docs/automation.rst +++ b/docs/automation.rst @@ -93,7 +93,7 @@ For instance, you can create a project and trigger it using the following comman crontab:: docker compose exec -it web scanpipe create-project scan-$(date +"%Y-%m-%dT%H:%M:%S") \ - --pipeline scan_package \ + --pipeline scan_single_package \ --input-url https://github.com/package-url/packageurl-python/archive/refs/heads/main.zip \ --execute diff --git a/docs/built-in-pipelines.rst b/docs/built-in-pipelines.rst index 3813d6759..a738368e5 100644 --- a/docs/built-in-pipelines.rst +++ b/docs/built-in-pipelines.rst @@ -18,26 +18,26 @@ Pipeline Base Class :members: :member-order: bysource -.. _pipeline_deploy_to_develop: +.. _pipeline_analyze_docker_image: -Deploy To Develop ------------------ -.. autoclass:: scanpipe.pipelines.deploy_to_develop.DeployToDevelop() +Analyze Docker Image +-------------------- +.. autoclass:: scanpipe.pipelines.docker.Docker() :members: :member-order: bysource -.. _pipeline_docker: +.. _pipeline_analyze_root_filesystem: -Docker Image Analysis ---------------------- -.. autoclass:: scanpipe.pipelines.docker.Docker() +Analyze Root Filesystem or VM Image +----------------------------------- +.. autoclass:: scanpipe.pipelines.root_filesystem.RootFS() :members: :member-order: bysource -.. _pipeline_docker_windows: +.. _analyze_windows_docker_image: -Docker Windows Image Analysis ------------------------------ +Analyze Windows Docker Image +---------------------------- .. autoclass:: scanpipe.pipelines.docker_windows.DockerWindows() :members: :member-order: bysource @@ -50,35 +50,35 @@ Find Vulnerabilities :members: :member-order: bysource -.. _pipeline_inspect_manifest: +.. _pipeline_inspect_packages: -Inspect Manifest +Inspect Packages ---------------- -.. 
autoclass:: scanpipe.pipelines.inspect_manifest.InspectManifest() +.. autoclass:: scanpipe.pipelines.inspect_packages.InspectPackages() :members: :member-order: bysource .. _pipeline_load_inventory: -Load Inventory From Scan ------------------------- +Load Inventory +-------------- .. autoclass:: scanpipe.pipelines.load_inventory.LoadInventory() :members: :member-order: bysource -.. _pipeline_populate_purldb: +.. _pipeline_map_deploy_to_develop: -Populate PurlDB ---------------- -.. autoclass:: scanpipe.pipelines.populate_purldb.PopulatePurlDB() +Map Deploy To Develop +--------------------- +.. autoclass:: scanpipe.pipelines.deploy_to_develop.DeployToDevelop() :members: :member-order: bysource -.. _pipeline_root_filesystems: +.. _pipeline_populate_purldb: -Root Filesystem Analysis ------------------------- -.. autoclass:: scanpipe.pipelines.root_filesystems.RootFS() +Populate PurlDB +--------------- +.. autoclass:: scanpipe.pipelines.populate_purldb.PopulatePurlDB() :members: :member-order: bysource @@ -98,10 +98,10 @@ Scan Codebase Package :members: :member-order: bysource -.. _pipeline_scan_package: +.. _pipeline_scan_single_package: -Scan Package ------------- -.. autoclass:: scanpipe.pipelines.scan_package.ScanPackage() +Scan Single Package +------------------- +.. autoclass:: scanpipe.pipelines.scan_single_package.ScanSinglePackage() :members: :member-order: bysource diff --git a/docs/command-line-interface.rst b/docs/command-line-interface.rst index cf2f88957..2a64f897a 100644 --- a/docs/command-line-interface.rst +++ b/docs/command-line-interface.rst @@ -172,7 +172,7 @@ You can use more than one ``PIPELINE_NAME`` to add multiple pipelines at once. 
For example, assuming you have created beforehand a project named "foo", this will add the docker pipeline to your project:: - $ scanpipe add-pipeline --project foo docker + $ scanpipe add-pipeline --project foo analyze_docker_image `$ scanpipe execute --project PROJECT` diff --git a/docs/distros-os-images.rst b/docs/distros-os-images.rst index 3e7ec63a0..44ff4ac3e 100644 --- a/docs/distros-os-images.rst +++ b/docs/distros-os-images.rst @@ -39,18 +39,19 @@ may be only used for certain pipelines: - **RPM-based** Linux distros: RHEL, Fedora, openSUSE/SUSE - **Alpine** Linux distros -For the above three flavors, the :ref:`docker ` and -:ref:`root_filesystems ` pipelines support comprehensive -detection of installed system packages, their provenance, their license metadata, -and their installed files. +For the above three flavors, the +:ref:`analyze_docker_image ` and +:ref:`analyze_root_filesystem_or_vm_image ` pipelines +support comprehensive detection of installed system packages, their provenance, +their license metadata, and their installed files. -- For **Windows**, the :ref:`docker_windows ` pipeline supports - Windows Docker images with extensive detection of installed Windows packages, - programs, and the majority of installed files. +- For **Windows**, the :ref:`analyze_windows_docker_image ` + pipeline supports Windows Docker images with extensive detection of installed Windows + packages, programs, and the majority of installed files. - **Distroless** Docker images system packages are detected with the - :ref:`docker ` pipeline; package and license metadata are also - detected. + :ref:`analyze_docker_image ` pipeline; package and + license metadata are also detected. However, some work needs to be done to achieve comprehensive support and fix the issue of system packages ot tracking their installed files. 
Check `this open GitHub issue `_ diff --git a/docs/faq.rst b/docs/faq.rst index 0768d09e7..26b6e4974 100644 --- a/docs/faq.rst +++ b/docs/faq.rst @@ -23,22 +23,22 @@ Selecting the right pipeline for your needs depends primarily on the type of inp data you have available. Here are some general guidelines based on different input scenarios: -- If you have a **Docker image** as input, use the :ref:`docker ` - pipeline. +- If you have a **Docker image** as input, use the + :ref:`analyze_docker_image ` pipeline. - For a full **codebase compressed as an archive**, choose the :ref:`scan_codebase ` pipeline. - If you have a **single package archive**, opt for the - :ref:`scan_package ` pipeline. + :ref:`scan_single_package ` pipeline. - When dealing with a **Linux root filesystem** (rootfs), the - :ref:`root_filesystems ` pipeline is the appropriate - choice. + :ref:`analyze_root_filesystem_or_vm_image ` pipeline + is the appropriate choice. - For processing the results of a **ScanCode-toolkit scan** or **ScanCode.io scan**, use the :ref:`load_inventory ` pipeline. - When you have **manifest files**, such as a **CycloneDX BOM, SPDX document, lockfile**, etc., - use the :ref:`inspect_manifest ` pipeline. + use the :ref:`inspect_packages ` pipeline. - For scenarios involving both a **development and deployment codebase**, consider using - the :ref:`deploy_to_develop ` pipeline. + the :ref:`map_deploy_to_develop ` pipeline. These pipelines will automatically execute the necessary steps to scan and create the packages, dependencies, and resources for your project based on the input data provided. @@ -56,10 +56,11 @@ by running some of the following additional pipelines: Please ensure that you have set up :ref:`PurlDB ` before running this pipeline. -What is the difference between scan_codebase and scan_package pipelines? ------------------------------------------------------------------------- +What is the difference between scan_codebase and scan_single_package pipelines? 
+------------------------------------------------------------------------------- -The key differences are that the :ref:`scan_package ` pipeline +The key differences are that the +:ref:`scan_single_package ` pipeline treats the input as if it were a single package, such as a package archive, and computes a **License clarity** and a **Scan summary** to aggregate the package scan data: @@ -116,8 +117,9 @@ The following tools and libraries are used during the docker images analysis pip - Secondary libraries and plugins from `scancode-plugins `_. -The pipeline documentation is available at :ref:`pipeline_docker` and its source code -at `docker.py `_. +The pipeline documentation is available at :ref:`pipeline_analyze_docker_image` and +its source code at +`docker.py `_. It is hopefully designed to be simple and readable code. Am I able to run ScanCode.io on Windows? diff --git a/docs/output-files.rst b/docs/output-files.rst index ee3e7d1ea..2ed3ec051 100644 --- a/docs/output-files.rst +++ b/docs/output-files.rst @@ -69,7 +69,7 @@ as shown below ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "description": "A pipeline to analyze a Docker image.", "uuid": "5f1ec0c5-91ed-45c8-ab3d-beae44018716", "created_date": "2021-06-13T00:50:18.367560Z", @@ -78,7 +78,7 @@ as shown below "task_end_date": "2021-06-13T01:20:56.486136Z", "task_exitcode": 0, "task_output": "", - "log": "2021-06-13 01:20:47.66 Pipeline [docker] starting\n2021-06-13 01:20:47.66 Step [extract_images] starting\n2021-06-13 01:20:47.72 Step [extract_images] completed in 0.05 seconds\n2021-06-13 01:20:47.72 Step [extract_layers] starting\n2021-06-13 01:20:47.84 Step [extract_layers] completed in 0.12 seconds\n2021-06-13 01:20:47.84 Step [find_images_linux_distro] starting\n2021-06-13 01:20:47.84 Step [find_images_linux_distro] completed in 0.00 seconds\n2021-06-13 01:20:47.85 Step [collect_images_information] starting\n2021-06-13 01:20:47.85 Step [collect_images_information] 
completed in 0.00 seconds\n2021-06-13 01:20:47.85 Step [collect_and_create_codebase_resources] starting\n2021-06-13 01:20:48.65 Step [collect_and_create_codebase_resources] completed in 0.79 seconds\n2021-06-13 01:20:48.65 Step [collect_and_create_system_packages] starting\n2021-06-13 01:20:50.89 Step [collect_and_create_system_packages] completed in 2.24 seconds\n2021-06-13 01:20:50.89 Step [flag_uninteresting_codebase_resources] starting\n2021-06-13 01:20:50.90 Step [tag_uninteresting_codebase_resources] completed in 0.00 seconds\n2021-06-13 01:20:50.90 Step [tag_empty_files] starting\n2021-06-13 01:20:50.91 Step [tag_empty_files] completed in 0.00 seconds\n2021-06-13 01:20:50.91 Step [scan_for_application_packages] starting\n2021-06-13 01:20:50.98 Step [scan_for_application_packages] completed in 0.07 seconds\n2021-06-13 01:20:50.98 Step [scan_for_files] starting\n2021-06-13 01:20:56.46 Step [scan_for_files] completed in 5.48 seconds\n2021-06-13 01:20:56.46 Step [analyze_scanned_files] starting\n2021-06-13 01:20:56.47 Step [analyze_scanned_files] completed in 0.00 seconds\n2021-06-13 01:20:56.47 Step [tag_not_analyzed_codebase_resources] starting\n2021-06-13 01:20:56.48 Step [tag_not_analyzed_codebase_resources] completed in 0.00 seconds\n2021-06-13 01:20:56.48 Pipeline completed\n", + "log": "2021-06-13 01:20:47.66 Pipeline [analyze_docker_image] starting\n2021-06-13 01:20:47.66 Step [extract_images] starting\n2021-06-13 01:20:47.72 Step [extract_images] completed in 0.05 seconds\n2021-06-13 01:20:47.72 Step [extract_layers] starting\n2021-06-13 01:20:47.84 Step [extract_layers] completed in 0.12 seconds\n2021-06-13 01:20:47.84 Step [find_images_linux_distro] starting\n2021-06-13 01:20:47.84 Step [find_images_linux_distro] completed in 0.00 seconds\n2021-06-13 01:20:47.85 Step [collect_images_information] starting\n2021-06-13 01:20:47.85 Step [collect_images_information] completed in 0.00 seconds\n2021-06-13 01:20:47.85 Step 
[collect_and_create_codebase_resources] starting\n2021-06-13 01:20:48.65 Step [collect_and_create_codebase_resources] completed in 0.79 seconds\n2021-06-13 01:20:48.65 Step [collect_and_create_system_packages] starting\n2021-06-13 01:20:50.89 Step [collect_and_create_system_packages] completed in 2.24 seconds\n2021-06-13 01:20:50.89 Step [flag_uninteresting_codebase_resources] starting\n2021-06-13 01:20:50.90 Step [tag_uninteresting_codebase_resources] completed in 0.00 seconds\n2021-06-13 01:20:50.90 Step [tag_empty_files] starting\n2021-06-13 01:20:50.91 Step [tag_empty_files] completed in 0.00 seconds\n2021-06-13 01:20:50.91 Step [scan_for_application_packages] starting\n2021-06-13 01:20:50.98 Step [scan_for_application_packages] completed in 0.07 seconds\n2021-06-13 01:20:50.98 Step [scan_for_files] starting\n2021-06-13 01:20:56.46 Step [scan_for_files] completed in 5.48 seconds\n2021-06-13 01:20:56.46 Step [analyze_scanned_files] starting\n2021-06-13 01:20:56.47 Step [analyze_scanned_files] completed in 0.00 seconds\n2021-06-13 01:20:56.47 Step [tag_not_analyzed_codebase_resources] starting\n2021-06-13 01:20:56.48 Step [tag_not_analyzed_codebase_resources] completed in 0.00 seconds\n2021-06-13 01:20:56.48 Pipeline completed\n", "execution_time": 8 } ], diff --git a/docs/rest-api.rst b/docs/rest-api.rst index 776133e4a..a5f27c373 100644 --- a/docs/rest-api.rst +++ b/docs/rest-api.rst @@ -94,7 +94,7 @@ Using cURL: data='{ "name": "project_name", "input_urls": "https://download.url/package.archive", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": true }' @@ -111,7 +111,7 @@ Using cURL: upload_file="/path/to/the/archive.zip" curl -F "name=project_name" \ - -F "pipeline=scan_package" \ + -F "pipeline=scan_single_package" \ -F "execute_now=True" \ -F "upload_file=@$upload_file" \ "$api_url" @@ -131,7 +131,7 @@ Using Python and the **"requests"** library: data = { "name": "project_name", "input_urls": 
"https://download.url/package.archive", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": True, } response = requests.post(api_url, data=data) @@ -149,7 +149,7 @@ Using Python and the **"requests"** library: api_url = "http://localhost/api/projects/" data = { "name": "project_name", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": True, } files = {"upload_file": open("/path/to/the/archive.zip", "rb")} @@ -279,7 +279,7 @@ Using cURL: api_url="http://localhost/api/projects/6461408c-726c-4b70-aa7a-c9cc9d1c9685/add_pipeline/" content_type="Content-Type: application/json" data='{ - "pipeline": "docker", + "pipeline": "analyze_docker_image", "execute_now": true }' @@ -434,7 +434,7 @@ The run details view returns all information available about a pipeline run. { "url": "http://localhost/api/runs/8d5c3962-5fca-47d7-b8c8-47a19247714e/", - "pipeline_name": "scan_package", + "pipeline_name": "scan_single_package", "status": "success", "description": "A pipeline to scan a single package archive with ScanCode-toolkit.", "project": "http://localhost/api/projects/cd5b0459-303f-4e92-99c4-ea6d0a70193e/", diff --git a/docs/tutorial_api_analyze_package_archive.rst b/docs/tutorial_api_analyze_package_archive.rst index dd1f9406d..010c89cda 100644 --- a/docs/tutorial_api_analyze_package_archive.rst +++ b/docs/tutorial_api_analyze_package_archive.rst @@ -15,7 +15,7 @@ Instructions: - First, let's create a new project called ``boolean.py-3.8``. - We'll be using this `package `_ as the project input. -- We can add and execute the scan_package pipeline on our new project. +- We can add and execute the scan_single_package pipeline on our new project. .. 
note:: Whether you follow this tutorial and previous instructions using cURL or @@ -33,7 +33,7 @@ Using cURL data='{ "name": "boolean.py-3.8", "input_urls": "https://github.com/bastikr/boolean.py/archive/refs/tags/v3.8.zip", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": true }' @@ -52,7 +52,7 @@ Using cURL { "name": "boolean.py-3.8", "input_urls": "https://github.com/bastikr/boolean.py/archive/refs/tags/v3.8.zip", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": true } @@ -100,7 +100,7 @@ Using Python script data = { "name": "boolean.py-3.8", "input_urls": "https://github.com/bastikr/boolean.py/archive/refs/tags/v3.8.zip", - "pipeline": "scan_package", + "pipeline": "scan_single_package", "execute_now": True, } response = requests.post(api_url, data=data) diff --git a/docs/tutorial_cli_analyze_docker_image.rst b/docs/tutorial_cli_analyze_docker_image.rst index 81fbc2e73..0d3f0ef3a 100644 --- a/docs/tutorial_cli_analyze_docker_image.rst +++ b/docs/tutorial_cli_analyze_docker_image.rst @@ -79,15 +79,15 @@ Instructions Alternatively, you can copy files manually to the :guilabel:`input/` directory to include entire directories. -- Add the docker pipeline to your project: +- Add the ``analyze_docker_image`` pipeline to your project: .. code-block:: console - $ scanpipe add-pipeline --project staticbox docker + $ scanpipe add-pipeline --project staticbox analyze_docker_image .. code-block:: console - >> Pipeline docker added to the project + >> Pipeline analyze_docker_image added to the project - Check the status of the pipeline added to your project: @@ -97,7 +97,7 @@ Instructions .. code-block:: console - >> [NOT_STARTED] docker + >> [NOT_STARTED] analyze_docker_image .. 
note:: The ``scanpipe show-pipeline`` command lists all the pipelines added to the @@ -106,8 +106,8 @@ Instructions already running, pipelines with **"SUCCESS"** or **"FAILURE"** status, and those will be running next, pipelines with **"NOT_STARTED"** status as shown below. -- Run the docker pipeline on this project. In the output, you will be shown - the pipeline's execution progress: +- Run the ``analyze_docker_image`` pipeline on this project. In the output, you will be + shown the pipeline's execution progress: .. code-block:: console @@ -115,17 +115,17 @@ Instructions .. code-block:: console - >> Pipeline docker run in progress... - Pipeline [docker] starting + >> Pipeline analyze_docker_image run in progress... + Pipeline [analyze_docker_image] starting Step [extract_images] starting Step [extract_images] completed in 0.18 seconds Step [extract_layers] starting [...] Pipeline completed - docker successfully executed on project staticbox + analyze_docker_image successfully executed on project staticbox - Executing the ``show-pipeline`` command again will also confirm the success - of the pipeline execution - **"[SUCCESS] docker"** status: + of the pipeline execution - **"[SUCCESS] analyze_docker_image"** status: .. code-block:: console @@ -133,7 +133,7 @@ Instructions .. code-block:: console - >> [SUCCESS] docker + >> [SUCCESS] analyze_docker_image - Get the results of the pipeline execution as a JSON file using the ``output`` command: @@ -155,11 +155,12 @@ Instructions after the project creation. For example, the following command will create a project named ``staticbox2``, download the test Docker image to the project's :guilabel:`input/` - directory, add the docker pipeline, and execute the pipeline in one operation: + directory, add the ``analyze_docker_image`` pipeline, and execute the pipeline in + one operation: .. 
code-block:: bash $ scanpipe create-project staticbox2 \ --input-url https://github.com/nexB/scancode.io-tutorial/releases/download/sample-images/30-alpine-nickolashkraus-staticbox-latest.tar \ - --pipeline docker \ + --pipeline analyze_docker_image \ --execute diff --git a/docs/tutorial_web_ui_analyze_docker_image.rst b/docs/tutorial_web_ui_analyze_docker_image.rst index 5374e3226..0f08fe37f 100644 --- a/docs/tutorial_web_ui_analyze_docker_image.rst +++ b/docs/tutorial_web_ui_analyze_docker_image.rst @@ -38,9 +38,10 @@ Instructions - Paste the input Docker image's URL, `docker://alpine/httpie `_, in the **"Download URL"** field, which fetches the image from the provided URL. -- Use the **"Pipeline"** dropdown list, add the **"docker"** pipeline to your project -- You can add and execute the docker pipeline in one operation by checking the - **"Execute pipeline now"** checkbox. +- Use the **"Pipeline"** dropdown list, add the ``analyze_docker_image`` pipeline to + your project. +- You can add and execute the ``analyze_docker_image`` pipeline in one operation by + checking the **"Execute pipeline now"** checkbox. .. image:: images/tutorial-web-ui-project-form.png diff --git a/scanpipe/api/serializers.py b/scanpipe/api/serializers.py index cb8efe404..dfcf38a35 100644 --- a/scanpipe/api/serializers.py +++ b/scanpipe/api/serializers.py @@ -78,6 +78,11 @@ def to_internal_value(self, data): if not self.allow_empty and len(data) == 0: self.fail("empty") + # Backward compatibility with old pipeline names. + # This will need to be refactored in case this OrderedMultipleChoiceField + # class is used for another field that is not ``pipeline`` related. 
+ data = [scanpipe_app.get_new_pipeline_name(pipeline) for pipeline in data] + return [ super(serializers.MultipleChoiceField, self).to_internal_value(item) for item in data diff --git a/scanpipe/api/views.py b/scanpipe/api/views.py index a0a8b7d85..0634522da 100644 --- a/scanpipe/api/views.py +++ b/scanpipe/api/views.py @@ -247,6 +247,7 @@ def add_pipeline(self, request, *args, **kwargs): pipeline = request.data.get("pipeline") if pipeline: + pipeline = scanpipe_app.get_new_pipeline_name(pipeline) if pipeline in scanpipe_app.pipelines: execute_now = request.data.get("execute_now") project.add_pipeline(pipeline, execute_now) diff --git a/scanpipe/apps.py b/scanpipe/apps.py index a08374fa3..a74314a30 100644 --- a/scanpipe/apps.py +++ b/scanpipe/apps.py @@ -168,6 +168,27 @@ def get_pipeline_choices(self, include_blank=True, include_addon=True): choices.extend([(name, name) for name in pipeline_names]) return choices + @staticmethod + def get_new_pipeline_name(pipeline_name): + """Backward compatibility with old pipeline names.""" + pipeline_old_names_mapping = { + "docker": "analyze_docker_image", + "root_filesystems": "analyze_root_filesystem_or_vm_image", + "docker_windows": "analyze_windows_docker_image", + "inspect_manifest": "inspect_packages", + "deploy_to_develop": "map_deploy_to_develop", + "scan_package": "scan_single_package", + } + if new_name := pipeline_old_names_mapping.get(pipeline_name): + warnings.warn( + f"Pipeline name {pipeline_name} is deprecated and will be " + f"removed in a future release. 
Use {new_name} instead.", + DeprecationWarning, + stacklevel=2, + ) + return new_name + return pipeline_name + def get_scancode_licenses(self): """ Load licenses-related information from the ScanCode-toolkit ``licensedcode`` diff --git a/scanpipe/management/commands/__init__.py b/scanpipe/management/commands/__init__.py index a76cdbdef..254f5b1f1 100644 --- a/scanpipe/management/commands/__init__.py +++ b/scanpipe/management/commands/__init__.py @@ -247,9 +247,17 @@ def validate_copy_from(copy_from): def validate_pipelines(pipeline_names): """Raise an error if one of the `pipeline_names` is not available.""" + # Backward compatibility with old pipeline names. + pipeline_names = [ + scanpipe_app.get_new_pipeline_name(pipeline_name) + for pipeline_name in pipeline_names + ] + for pipeline_name in pipeline_names: if pipeline_name not in scanpipe_app.pipelines: raise CommandError( f"{pipeline_name} is not a valid pipeline. \n" f"Available: {', '.join(scanpipe_app.pipelines.keys())}" ) + + return pipeline_names diff --git a/scanpipe/management/commands/add-pipeline.py b/scanpipe/management/commands/add-pipeline.py index 6d0ec3c98..417d84922 100644 --- a/scanpipe/management/commands/add-pipeline.py +++ b/scanpipe/management/commands/add-pipeline.py @@ -41,7 +41,7 @@ def add_arguments(self, parser): def handle(self, *pipeline_names, **options): super().handle(*pipeline_names, **options) - validate_pipelines(pipeline_names) + pipeline_names = validate_pipelines(pipeline_names) for pipeline_name in pipeline_names: self.project.add_pipeline(pipeline_name) diff --git a/scanpipe/management/commands/create-project.py b/scanpipe/management/commands/create-project.py index a66091a1f..7fdf5044f 100644 --- a/scanpipe/management/commands/create-project.py +++ b/scanpipe/management/commands/create-project.py @@ -85,7 +85,7 @@ def handle(self, *args, **options): raise CommandError("\n".join(e.messages)) # Run validation before creating the project in the database - 
validate_pipelines(pipeline_names) + pipeline_names = validate_pipelines(pipeline_names) validate_input_files(inputs_files) validate_copy_from(copy_from) diff --git a/scanpipe/migrations/0051_rename_pipelines_data.py b/scanpipe/migrations/0051_rename_pipelines_data.py new file mode 100644 index 000000000..1236a3d9f --- /dev/null +++ b/scanpipe/migrations/0051_rename_pipelines_data.py @@ -0,0 +1,38 @@ +# Generated by Django 5.0.1 on 2024-01-17 16:32 + +from django.db import migrations + + +pipeline_old_names_mapping = { + "docker": "analyze_docker_image", + "root_filesystems": "analyze_root_filesystem_or_vm_image", + "docker_windows": "analyze_windows_docker_image", + "inspect_manifest": "inspect_packages", + "deploy_to_develop": "map_deploy_to_develop", + "scan_package": "scan_single_package", +} + + +def rename_pipelines_data(apps, schema_editor): + Run = apps.get_model("scanpipe", "Run") + for old_name, new_name in pipeline_old_names_mapping.items(): + Run.objects.filter(pipeline_name=old_name).update(pipeline_name=new_name) + + +def reverse_rename_pipelines_data(apps, schema_editor): + Run = apps.get_model("scanpipe", "Run") + for old_name, new_name in pipeline_old_names_mapping.items(): + Run.objects.filter(pipeline_name=new_name).update(pipeline_name=old_name) + + +class Migration(migrations.Migration): + dependencies = [ + ("scanpipe", "0050_remove_project_input_sources"), + ] + + operations = [ + migrations.RunPython( + rename_pipelines_data, + reverse_code=reverse_rename_pipelines_data, + ), + ] diff --git a/scanpipe/pipelines/docker.py b/scanpipe/pipelines/docker.py index 4cc3821dd..3e0897226 100644 --- a/scanpipe/pipelines/docker.py +++ b/scanpipe/pipelines/docker.py @@ -20,7 +20,7 @@ # ScanCode.io is a free software code scanning tool from nexB Inc. and others. # Visit https://github.com/nexB/scancode.io for support and download. 
-from scanpipe.pipelines.root_filesystems import RootFS +from scanpipe.pipelines.root_filesystem import RootFS from scanpipe.pipes import docker from scanpipe.pipes import rootfs diff --git a/scanpipe/pipelines/inspect_manifest.py b/scanpipe/pipelines/inspect_packages.py similarity index 99% rename from scanpipe/pipelines/inspect_manifest.py rename to scanpipe/pipelines/inspect_packages.py index 88dcd9853..28c19a060 100644 --- a/scanpipe/pipelines/inspect_manifest.py +++ b/scanpipe/pipelines/inspect_packages.py @@ -25,7 +25,7 @@ from scanpipe.pipes import update_or_create_package -class InspectManifest(Pipeline): +class InspectPackages(Pipeline): """ Inspect one or more manifest files and resolve their associated packages. diff --git a/scanpipe/pipelines/root_filesystems.py b/scanpipe/pipelines/root_filesystem.py similarity index 100% rename from scanpipe/pipelines/root_filesystems.py rename to scanpipe/pipelines/root_filesystem.py diff --git a/scanpipe/pipelines/scan_package.py b/scanpipe/pipelines/scan_single_package.py similarity index 99% rename from scanpipe/pipelines/scan_package.py rename to scanpipe/pipelines/scan_single_package.py index 1ef07d76d..b28550101 100644 --- a/scanpipe/pipelines/scan_package.py +++ b/scanpipe/pipelines/scan_single_package.py @@ -34,7 +34,7 @@ from scanpipe.pipes.scancode import extract_archive -class ScanPackage(Pipeline): +class ScanSinglePackage(Pipeline): """ Scan a single package file or package archive with ScanCode-toolkit. 
diff --git a/scanpipe/tests/data/alpine_3_15_4_scan_codebase.json b/scanpipe/tests/data/alpine_3_15_4_scan_codebase.json index 739966d09..ae43fbcbc 100644 --- a/scanpipe/tests/data/alpine_3_15_4_scan_codebase.json +++ b/scanpipe/tests/data/alpine_3_15_4_scan_codebase.json @@ -15,7 +15,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/basic-rootfs_root_filesystems.json b/scanpipe/tests/data/basic-rootfs_root_filesystems.json index d06491ae0..37faeeb72 100644 --- a/scanpipe/tests/data/basic-rootfs_root_filesystems.json +++ b/scanpipe/tests/data/basic-rootfs_root_filesystems.json @@ -13,7 +13,7 @@ ], "runs": [ { - "pipeline_name": "root_filesystems", + "pipeline_name": "analyze_root_filesystem_or_vm_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/centos_scan_codebase.json b/scanpipe/tests/data/centos_scan_codebase.json index aa5ea8ac9..4357711a3 100644 --- a/scanpipe/tests/data/centos_scan_codebase.json +++ b/scanpipe/tests/data/centos_scan_codebase.json @@ -15,7 +15,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/d2d/about_files/expected.json b/scanpipe/tests/data/d2d/about_files/expected.json index 6df0ad150..8fcccf577 100644 --- a/scanpipe/tests/data/d2d/about_files/expected.json +++ b/scanpipe/tests/data/d2d/about_files/expected.json @@ -19,7 +19,7 @@ ], "runs": [ { - "pipeline_name": "deploy_to_develop", + "pipeline_name": "map_deploy_to_develop", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/debian_scan_codebase.json b/scanpipe/tests/data/debian_scan_codebase.json index 11c806dd0..d7cc3af1d 100644 --- a/scanpipe/tests/data/debian_scan_codebase.json +++ 
b/scanpipe/tests/data/debian_scan_codebase.json @@ -15,7 +15,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/docker-mini-with-license-alpine.tar.xz-docker-scan.json b/scanpipe/tests/data/docker-mini-with-license-alpine.tar.xz-docker-scan.json index 5f51dceea..863a36366 100644 --- a/scanpipe/tests/data/docker-mini-with-license-alpine.tar.xz-docker-scan.json +++ b/scanpipe/tests/data/docker-mini-with-license-alpine.tar.xz-docker-scan.json @@ -11,7 +11,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "description": "Analyze Docker images.", "scancodeio_version": "", diff --git a/scanpipe/tests/data/docker-mini-with-license-debian.tar.xz-docker-scan.json b/scanpipe/tests/data/docker-mini-with-license-debian.tar.xz-docker-scan.json index 58dcdbeff..f84ed2b69 100644 --- a/scanpipe/tests/data/docker-mini-with-license-debian.tar.xz-docker-scan.json +++ b/scanpipe/tests/data/docker-mini-with-license-debian.tar.xz-docker-scan.json @@ -11,7 +11,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "description": "Analyze Docker images.", "scancodeio_version": "", diff --git a/scanpipe/tests/data/flume-ng-node-d2d-input.json b/scanpipe/tests/data/flume-ng-node-d2d-input.json index ed9821dc3..4a206e9d3 100644 --- a/scanpipe/tests/data/flume-ng-node-d2d-input.json +++ b/scanpipe/tests/data/flume-ng-node-d2d-input.json @@ -6,7 +6,7 @@ "input_sources": [], "runs": [ { - "pipeline_name": "deploy_to_develop", + "pipeline_name": "map_deploy_to_develop", "status": "not_started", "description": "Establish relationships between two code trees: deployment and development.", "scancodeio_version": "", diff --git a/scanpipe/tests/data/flume-ng-node-d2d.json b/scanpipe/tests/data/flume-ng-node-d2d.json index 
c815b8bb1..2b93d65f0 100644 --- a/scanpipe/tests/data/flume-ng-node-d2d.json +++ b/scanpipe/tests/data/flume-ng-node-d2d.json @@ -19,7 +19,7 @@ ], "runs": [ { - "pipeline_name": "deploy_to_develop", + "pipeline_name": "map_deploy_to_develop", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/gcr_io_distroless_base_scan_codebase.json b/scanpipe/tests/data/gcr_io_distroless_base_scan_codebase.json index eb69efb27..a514dc48a 100644 --- a/scanpipe/tests/data/gcr_io_distroless_base_scan_codebase.json +++ b/scanpipe/tests/data/gcr_io_distroless_base_scan_codebase.json @@ -15,7 +15,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/data/image-with-symlinks/minitag.tar-expected-scan.json b/scanpipe/tests/data/image-with-symlinks/minitag.tar-expected-scan.json index 30161f786..2cb74bb82 100644 --- a/scanpipe/tests/data/image-with-symlinks/minitag.tar-expected-scan.json +++ b/scanpipe/tests/data/image-with-symlinks/minitag.tar-expected-scan.json @@ -15,7 +15,7 @@ ], "runs": [ { - "pipeline_name": "docker", + "pipeline_name": "analyze_docker_image", "status": "not_started", "scancodeio_version": "", "task_id": null, diff --git a/scanpipe/tests/pipes/test_scancode.py b/scanpipe/tests/pipes/test_scancode.py index 861b19894..4f0b68c0b 100644 --- a/scanpipe/tests/pipes/test_scancode.py +++ b/scanpipe/tests/pipes/test_scancode.py @@ -441,8 +441,8 @@ def test_scanpipe_pipes_scancode_run_scan_args(self, mock_run_scan): def test_scanpipe_pipes_scancode_make_results_summary(self, regen=FIXTURES_REGEN): # Ensure the policies index is empty to avoid any side effect on results scanpipe_app.license_policies_index = None - # Run the scan_package pipeline to have a proper DB and local files setup - pipeline_name = "scan_package" + # Run the scan_single_package pipeline to have a proper DB and local files 
setup + pipeline_name = "scan_single_package" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "is-npm-1.0.0.tgz" diff --git a/scanpipe/tests/test_api.py b/scanpipe/tests/test_api.py index 0bfdeb345..267e1c4f9 100644 --- a/scanpipe/tests/test_api.py +++ b/scanpipe/tests/test_api.py @@ -268,7 +268,7 @@ def test_scanpipe_api_project_create_base(self, mock_execute_pipeline_task): data = { "name": "Name", - "pipeline": "docker", + "pipeline": "analyze_docker_image", } response = self.csrf_client.post(self.project_list_url, data) self.assertEqual(status.HTTP_201_CREATED, response.status_code) @@ -278,7 +278,7 @@ def test_scanpipe_api_project_create_base(self, mock_execute_pipeline_task): data = { "name": "OtherName", - "pipeline": "docker", + "pipeline": "analyze_docker_image", "upload_file": io.BytesIO(b"Content"), } response = self.csrf_client.post(self.project_list_url, data) @@ -292,7 +292,7 @@ def test_scanpipe_api_project_create_base(self, mock_execute_pipeline_task): data = { "name": "BetterName", - "pipeline": "docker", + "pipeline": "analyze_docker_image", "upload_file": io.BytesIO(b"Content"), "execute_now": True, } @@ -358,48 +358,85 @@ def test_scanpipe_api_project_create_base(self, mock_execute_pipeline_task): def test_scanpipe_api_project_create_multiple_pipelines(self): data = { "name": "Single string", - "pipeline": "docker", + "pipeline": "analyze_docker_image", } response = self.csrf_client.post(self.project_list_url, data) self.assertEqual(status.HTTP_201_CREATED, response.status_code) self.assertEqual(1, len(response.data["runs"])) - self.assertEqual("docker", response.data["runs"][0]["pipeline_name"]) + self.assertEqual( + "analyze_docker_image", response.data["runs"][0]["pipeline_name"] + ) data = { "name": "Single list", - "pipeline": ["docker"], + "pipeline": ["analyze_docker_image"], } response = self.csrf_client.post(self.project_list_url, data) self.assertEqual(status.HTTP_201_CREATED, 
response.status_code) self.assertEqual(1, len(response.data["runs"])) - self.assertEqual("docker", response.data["runs"][0]["pipeline_name"]) + self.assertEqual( + "analyze_docker_image", response.data["runs"][0]["pipeline_name"] + ) data = { "name": "Multi list", - "pipeline": ["docker", "scan_package"], + "pipeline": ["analyze_docker_image", "scan_single_package"], } response = self.csrf_client.post(self.project_list_url, data) self.assertEqual(status.HTTP_201_CREATED, response.status_code) self.assertEqual(2, len(response.data["runs"])) - self.assertEqual("docker", response.data["runs"][0]["pipeline_name"]) - self.assertEqual("scan_package", response.data["runs"][1]["pipeline_name"]) + self.assertEqual( + "analyze_docker_image", response.data["runs"][0]["pipeline_name"] + ) + self.assertEqual( + "scan_single_package", response.data["runs"][1]["pipeline_name"] + ) data = { "name": "Multi string", - "pipeline": "docker,scan_package", + "pipeline": "analyze_docker_image,scan_single_package", } response = self.csrf_client.post(self.project_list_url, data) self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) expected = { "pipeline": [ ErrorDetail( - string='"docker,scan_package" is not a valid choice.', + string=( + '"analyze_docker_image,scan_single_package" ' + "is not a valid choice." 
+ ), code="invalid_choice", ) ] } self.assertEqual(expected, response.data) + def test_scanpipe_api_project_create_pipeline_old_name_compatibility(self): + data = { + "name": "Single string", + "pipeline": "docker", + } + response = self.csrf_client.post(self.project_list_url, data) + self.assertEqual(status.HTTP_201_CREATED, response.status_code) + self.assertEqual(1, len(response.data["runs"])) + self.assertEqual( + "analyze_docker_image", response.data["runs"][0]["pipeline_name"] + ) + + data = { + "name": "Multi list", + "pipeline": ["docker_windows", "scan_package"], + } + response = self.csrf_client.post(self.project_list_url, data) + self.assertEqual(status.HTTP_201_CREATED, response.status_code) + self.assertEqual(2, len(response.data["runs"])) + self.assertEqual( + "analyze_windows_docker_image", response.data["runs"][0]["pipeline_name"] + ) + self.assertEqual( + "scan_single_package", response.data["runs"][1]["pipeline_name"] + ) + def test_scanpipe_api_project_create_labels(self): data = { "name": "Project1", @@ -582,7 +619,7 @@ def test_scanpipe_api_project_action_summary(self): self.assertEqual(10, len(response.data.keys())) def test_scanpipe_api_project_action_delete(self): - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") run.set_task_started(task_id=uuid.uuid4()) self.assertEqual(run.Status.RUNNING, run.status) @@ -624,7 +661,7 @@ def test_scanpipe_api_project_action_archive(self): self.assertEqual(1, len(Project.get_root_content(self.project1.codebase_path))) def test_scanpipe_api_project_action_reset(self): - self.project1.add_pipeline("docker") + self.project1.add_pipeline("analyze_docker_image") self.assertEqual(1, self.project1.runs.count()) self.assertEqual(1, self.project1.codebaseresources.count()) self.assertEqual(1, self.project1.discoveredpackages.count()) @@ -657,14 +694,14 @@ def test_scanpipe_api_project_action_add_pipeline(self, mock_execute_pipeline_ta url = 
reverse("project-add-pipeline", args=[self.project1.uuid]) response = self.csrf_client.get(url) self.assertEqual("Pipeline required.", response.data.get("status")) - self.assertIn("docker", response.data.get("pipelines")) + self.assertIn("analyze_docker_image", response.data.get("pipelines")) data = {"pipeline": "not_available"} response = self.csrf_client.post(url, data=data) expected = {"status": "not_available is not a valid pipeline."} self.assertEqual(expected, response.data) - data = {"pipeline": "docker"} + data = {"pipeline": "analyze_docker_image"} response = self.csrf_client.post(url, data=data) self.assertEqual({"status": "Pipeline added."}, response.data) mock_execute_pipeline_task.assert_not_called() @@ -680,6 +717,16 @@ def test_scanpipe_api_project_action_add_pipeline(self, mock_execute_pipeline_ta self.assertEqual({"status": "Pipeline added."}, response.data) mock_execute_pipeline_task.assert_called_once() + def test_scanpipe_api_project_action_add_pipeline_old_name_compatibility(self): + url = reverse("project-add-pipeline", args=[self.project1.uuid]) + data = { + "pipeline": "docker", # old name + "execute_now": False, + } + response = self.csrf_client.post(url, data=data) + self.assertEqual({"status": "Pipeline added."}, response.data) + self.assertEqual("analyze_docker_image", self.project1.runs.get().pipeline_name) + def test_scanpipe_api_project_action_add_input(self): url = reverse("project-add-input", args=[self.project1.uuid]) response = self.csrf_client.get(url) @@ -701,7 +748,7 @@ def test_scanpipe_api_project_action_add_input(self): expected = sorted(["upload_file"]) self.assertEqual(expected, sorted(self.project1.input_root)) - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") run.set_task_started(task_id=uuid.uuid4()) response = self.csrf_client.get(url) expected = "Cannot add inputs once a pipeline has started to execute." 
@@ -709,13 +756,13 @@ def test_scanpipe_api_project_action_add_input(self): self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) def test_scanpipe_api_run_detail(self): - run1 = self.project1.add_pipeline("docker") + run1 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-detail", args=[run1.uuid]) response = self.csrf_client.get(url) self.assertEqual(str(run1.uuid), response.data["uuid"]) self.assertIn(self.project1_detail_url, response.data["project"]) - self.assertEqual("docker", response.data["pipeline_name"]) + self.assertEqual("analyze_docker_image", response.data["pipeline_name"]) self.assertEqual("Analyze Docker images.", response.data["description"]) self.assertEqual("", response.data["scancodeio_version"]) self.assertIsNone(response.data["task_id"]) @@ -727,10 +774,10 @@ def test_scanpipe_api_run_detail(self): @mock.patch("scanpipe.models.Run.execute_task_async") def test_scanpipe_api_run_action_start_pipeline(self, mock_execute_task): - run1 = self.project1.add_pipeline("docker") + run1 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-start-pipeline", args=[run1.uuid]) response = self.csrf_client.post(url) - expected = {"status": "Pipeline docker started."} + expected = {"status": "Pipeline analyze_docker_image started."} self.assertEqual(expected, response.data) mock_execute_task.assert_called_once() @@ -757,7 +804,7 @@ def test_scanpipe_api_run_action_start_pipeline(self, mock_execute_task): @override_settings(SCANCODEIO_ASYNC=False) def test_scanpipe_api_run_action_stop_pipeline(self): - run1 = self.project1.add_pipeline("docker") + run1 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-stop-pipeline", args=[run1.uuid]) response = self.csrf_client.post(url) self.assertEqual(status.HTTP_400_BAD_REQUEST, response.status_code) @@ -767,7 +814,7 @@ def test_scanpipe_api_run_action_stop_pipeline(self): run1.set_task_started(run1.pk) response = self.csrf_client.post(url) 
self.assertEqual(status.HTTP_200_OK, response.status_code) - expected = {"status": "Pipeline docker stopped."} + expected = {"status": "Pipeline analyze_docker_image stopped."} self.assertEqual(expected, response.data) run1.refresh_from_db() @@ -775,26 +822,26 @@ def test_scanpipe_api_run_action_stop_pipeline(self): @override_settings(SCANCODEIO_ASYNC=False) def test_scanpipe_api_run_action_delete_pipeline(self): - run1 = self.project1.add_pipeline("docker") + run1 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-delete-pipeline", args=[run1.uuid]) response = self.csrf_client.post(url) self.assertEqual(status.HTTP_200_OK, response.status_code) - expected = {"status": "Pipeline docker deleted."} + expected = {"status": "Pipeline analyze_docker_image deleted."} self.assertEqual(expected, response.data) self.assertFalse(Run.objects.filter(pk=run1.pk).exists()) - run2 = self.project1.add_pipeline("docker") + run2 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-delete-pipeline", args=[run2.uuid]) run2.set_task_queued() response = self.csrf_client.post(url) self.assertEqual(status.HTTP_200_OK, response.status_code) - expected = {"status": "Pipeline docker deleted."} + expected = {"status": "Pipeline analyze_docker_image deleted."} self.assertEqual(expected, response.data) self.assertFalse(Run.objects.filter(pk=run2.pk).exists()) - run3 = self.project1.add_pipeline("docker") + run3 = self.project1.add_pipeline("analyze_docker_image") url = reverse("run-delete-pipeline", args=[run3.uuid]) run3.set_task_started(run3.pk) diff --git a/scanpipe/tests/test_apps.py b/scanpipe/tests/test_apps.py index f508a4bd1..6b650eb9f 100644 --- a/scanpipe/tests/test_apps.py +++ b/scanpipe/tests/test_apps.py @@ -122,20 +122,31 @@ def test_scanpipe_apps_sync_runs_and_jobs(self, mock_sync_with_job): def test_scanpipe_apps_get_pipeline_choices(self): blank_entry = ("", "---------") - main_pipline = ("scan_codebase", "scan_codebase") - addon_pipline 
= ("find_vulnerabilities", "find_vulnerabilities") + main_pipeline = ("scan_codebase", "scan_codebase") + addon_pipeline = ("find_vulnerabilities", "find_vulnerabilities") choices = scanpipe_app.get_pipeline_choices() self.assertIn(blank_entry, choices) - self.assertIn(main_pipline, choices) - self.assertIn(addon_pipline, choices) + self.assertIn(main_pipeline, choices) + self.assertIn(addon_pipeline, choices) choices = scanpipe_app.get_pipeline_choices(include_blank=False) self.assertNotIn(blank_entry, choices) - self.assertIn(main_pipline, choices) - self.assertIn(addon_pipline, choices) + self.assertIn(main_pipeline, choices) + self.assertIn(addon_pipeline, choices) choices = scanpipe_app.get_pipeline_choices(include_addon=False) self.assertIn(blank_entry, choices) - self.assertIn(main_pipline, choices) - self.assertNotIn(addon_pipline, choices) + self.assertIn(main_pipeline, choices) + self.assertNotIn(addon_pipeline, choices) + + def test_scanpipe_apps_get_new_pipeline_name(self): + self.assertEqual( + "scan_codebase", scanpipe_app.get_new_pipeline_name("scan_codebase") + ) + self.assertEqual( + "not_existing", scanpipe_app.get_new_pipeline_name("not_existing") + ) + self.assertEqual( + "analyze_docker_image", scanpipe_app.get_new_pipeline_name("docker") + ) diff --git a/scanpipe/tests/test_commands.py b/scanpipe/tests/test_commands.py index 2a0fb9a1d..d793e6d79 100644 --- a/scanpipe/tests/test_commands.py +++ b/scanpipe/tests/test_commands.py @@ -60,7 +60,7 @@ def raise_interrupt(run_pk): class ScanPipeManagementCommandTest(TestCase): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" pipeline_class = scanpipe_app.pipelines.get(pipeline_name) def test_scanpipe_management_command_create_project_base(self): @@ -100,14 +100,17 @@ def test_scanpipe_management_command_create_project_pipelines(self): "--pipeline", self.pipeline_name, "--pipeline", - "root_filesystems", + "analyze_root_filesystem_or_vm_image", + "--pipeline", + "scan_package", # old 
name backward compatibility ] call_command("create-project", "my_project", *options, stdout=out) self.assertIn("Project my_project created", out.getvalue()) project = Project.objects.get(name="my_project") expected = [ self.pipeline_name, - "root_filesystems", + "analyze_root_filesystem_or_vm_image", + "scan_single_package", ] self.assertEqual(expected, [run.pipeline_name for run in project.runs.all()]) @@ -247,7 +250,8 @@ def test_scanpipe_management_command_add_pipeline(self): pipelines = [ self.pipeline_name, - "root_filesystems", + "analyze_root_filesystem_or_vm_image", + "scan_package", # old name backward compatibility ] options = pipelines[:] @@ -257,10 +261,17 @@ def test_scanpipe_management_command_add_pipeline(self): options.extend(["--project", project.name]) call_command("add-pipeline", *options, stdout=out) - self.assertIn( - "Pipelines docker, root_filesystems added to the project", out.getvalue() + expected = ( + "Pipelines analyze_docker_image, analyze_root_filesystem_or_vm_image, " + "scan_single_package added to the project" ) - self.assertEqual(pipelines, [run.pipeline_name for run in project.runs.all()]) + self.assertIn(expected, out.getvalue()) + expected = [ + "analyze_docker_image", + "analyze_root_filesystem_or_vm_image", + "scan_single_package", + ] + self.assertEqual(expected, [run.pipeline_name for run in project.runs.all()]) options = ["--project", project.name, "non-existing"] expected = "non-existing is not a valid pipeline" @@ -270,7 +281,7 @@ def test_scanpipe_management_command_add_pipeline(self): def test_scanpipe_management_command_show_pipeline(self): pipeline_names = [ self.pipeline_name, - "root_filesystems", + "analyze_root_filesystem_or_vm_image", ] project = Project.objects.create(name="my_project") @@ -280,7 +291,10 @@ def test_scanpipe_management_command_show_pipeline(self): options = ["--project", project.name, "--no-color"] out = StringIO() call_command("show-pipeline", *options, stdout=out) - expected = " [NOT_STARTED] 
docker\n" " [NOT_STARTED] root_filesystems\n" + expected = ( + " [NOT_STARTED] analyze_docker_image\n" + " [NOT_STARTED] analyze_root_filesystem_or_vm_image\n" + ) self.assertEqual(expected, out.getvalue()) project.runs.filter(pipeline_name=pipeline_names[0]).update(task_exitcode=0) @@ -288,7 +302,10 @@ def test_scanpipe_management_command_show_pipeline(self): out = StringIO() call_command("show-pipeline", *options, stdout=out) - expected = " [SUCCESS] docker\n" " [FAILURE] root_filesystems\n" + expected = ( + " [SUCCESS] analyze_docker_image\n" + " [FAILURE] analyze_root_filesystem_or_vm_image\n" + ) self.assertEqual(expected, out.getvalue()) def test_scanpipe_management_command_execute(self): @@ -304,7 +321,7 @@ def test_scanpipe_management_command_execute(self): run1 = project.add_pipeline(self.pipeline_name) with mock.patch("scanpipe.tasks.execute_pipeline_task", task_success): call_command("execute", *options, stdout=out) - expected = "Start the docker pipeline execution..." + expected = "Start the analyze_docker_image pipeline execution..." 
self.assertIn(expected, out.getvalue()) expected = "successfully executed on project my_project" self.assertIn(expected, out.getvalue()) @@ -316,7 +333,7 @@ def test_scanpipe_management_command_execute(self): err = StringIO() run2 = project.add_pipeline(self.pipeline_name) - expected = "Error during docker execution:\nError log" + expected = "Error during analyze_docker_image execution:\nError log" with mock.patch("scanpipe.tasks.execute_pipeline_task", task_failure): with self.assertRaisesMessage(CommandError, expected): call_command("execute", *options, stdout=out, stderr=err) @@ -348,14 +365,14 @@ def test_scanpipe_management_command_status(self): self.assertIn("- CodebaseResource: 0", output) self.assertIn("- DiscoveredPackage: 0", output) self.assertIn("- ProjectMessage: 0", output) - self.assertIn("[NOT_STARTED] docker", output) + self.assertIn("[NOT_STARTED] analyze_docker_image", output) run.task_id = uuid.uuid4() run.save() out = StringIO() call_command("status", *options, stdout=out) output = out.getvalue() - self.assertIn("[QUEUED] docker", output) + self.assertIn("[QUEUED] analyze_docker_image", output) run.task_start_date = timezone.now() run.log = ( @@ -367,7 +384,7 @@ def test_scanpipe_management_command_status(self): call_command("status", *options, stdout=out) output = out.getvalue() - self.assertIn("[RUNNING] docker", output) + self.assertIn("[RUNNING] analyze_docker_image", output) for line in run.log.splitlines(): self.assertIn(line, output) @@ -377,7 +394,9 @@ def test_scanpipe_management_command_status(self): out = StringIO() call_command("status", *options, stdout=out) output = out.getvalue() - expected = f"[SUCCESS] docker (executed in {run.execution_time} seconds)" + expected = ( + f"[SUCCESS] analyze_docker_image (executed in {run.execution_time} seconds)" + ) self.assertIn(expected, output) def test_scanpipe_management_command_list_project(self): @@ -508,7 +527,7 @@ def test_scanpipe_management_command_archive_project(self): def 
test_scanpipe_management_command_reset_project(self): project = Project.objects.create(name="my_project") - project.add_pipeline("docker") + project.add_pipeline("analyze_docker_image") CodebaseResource.objects.create(project=project, path="filename.ext") DiscoveredPackage.objects.create(project=project) diff --git a/scanpipe/tests/test_models.py b/scanpipe/tests/test_models.py index 29e579cd1..03ca443e6 100644 --- a/scanpipe/tests/test_models.py +++ b/scanpipe/tests/test_models.py @@ -153,7 +153,7 @@ def test_scanpipe_project_model_delete_related_objects(self): work_path = self.project1.work_path self.assertTrue(work_path.exists()) - self.project1.add_pipeline("docker") + self.project1.add_pipeline("analyze_docker_image") self.project1.labels.add("label1", "label2") self.assertEqual(2, UUIDTaggedItem.objects.count()) resource = CodebaseResource.objects.create(project=self.project1, path="path") @@ -180,7 +180,7 @@ def test_scanpipe_project_model_delete(self): uploaded_file = SimpleUploadedFile("file.ext", content=b"content") self.project1.write_input_file(uploaded_file) - self.project1.add_pipeline("docker") + self.project1.add_pipeline("analyze_docker_image") resource = CodebaseResource.objects.create(project=self.project1, path="path") package = DiscoveredPackage.objects.create(project=self.project1) resource.discovered_packages.add(package) @@ -198,7 +198,7 @@ def test_scanpipe_project_model_reset(self): uploaded_file = SimpleUploadedFile("file.ext", content=b"content") self.project1.write_input_file(uploaded_file) - self.project1.add_pipeline("docker") + self.project1.add_pipeline("analyze_docker_image") resource = CodebaseResource.objects.create(project=self.project1, path="path") package = DiscoveredPackage.objects.create(project=self.project1) resource.discovered_packages.add(package) @@ -226,7 +226,7 @@ def test_scanpipe_project_model_clone(self): self.project1.update(settings={"extract_recursively": True}) new_file_path1 = self.project1.input_path / 
"file.zip" new_file_path1.touch() - run1 = self.project1.add_pipeline("docker") + run1 = self.project1.add_pipeline("analyze_docker_image") run2 = self.project1.add_pipeline("find_vulnerabilities") subscription1 = self.project1.add_webhook_subscription("http://domain.url") @@ -258,7 +258,8 @@ def test_scanpipe_project_model_clone(self): self.assertEqual(1, len(list(cloned_project2.inputs()))) runs = cloned_project2.runs.all() self.assertEqual( - ["docker", "find_vulnerabilities"], [run.pipeline_name for run in runs] + ["analyze_docker_image", "find_vulnerabilities"], + [run.pipeline_name for run in runs], ) self.assertNotEqual(run1.pk, runs[0].pk) self.assertNotEqual(run2.pk, runs[1].pk) @@ -413,7 +414,7 @@ def test_scanpipe_project_model_can_start_pipelines(self): self.assertFalse(self.project1.can_start_pipelines) # Not started - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") self.project1 = Project.objects.get(uuid=self.project1.uuid) self.assertTrue(self.project1.can_start_pipelines) @@ -431,14 +432,14 @@ def test_scanpipe_project_model_can_start_pipelines(self): self.assertFalse(self.project1.can_start_pipelines) # Another "Not started" - self.project1.add_pipeline("docker") + self.project1.add_pipeline("analyze_docker_image") self.project1 = Project.objects.get(uuid=self.project1.uuid) self.assertTrue(self.project1.can_start_pipelines) def test_scanpipe_project_model_can_change_inputs(self): self.assertTrue(self.project1.can_change_inputs) - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") self.project1 = Project.objects.get(uuid=self.project1.uuid) self.assertTrue(self.project1.can_change_inputs) @@ -2061,7 +2062,7 @@ def test_scanpipe_project_model_add_pipeline(self, mock_execute_task): project1.add_pipeline(pipeline_name) self.assertEqual("Unknown pipeline: not_available", str(error.exception)) - pipeline_name = "inspect_manifest" + pipeline_name = 
"inspect_packages" project1.add_pipeline(pipeline_name) pipeline_class = scanpipe_app.pipelines.get(pipeline_name) @@ -2078,7 +2079,7 @@ def test_scanpipe_project_model_add_pipeline(self, mock_execute_task): @mock.patch("scanpipe.models.Run.execute_task_async") def test_scanpipe_project_model_add_pipeline_run_can_start(self, mock_execute_task): project1 = Project.objects.create(name="Analysis") - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" run1 = project1.add_pipeline(pipeline_name, execute_now=False) run2 = project1.add_pipeline(pipeline_name, execute_now=True) self.assertEqual(Run.Status.NOT_STARTED, run1.status) @@ -2090,7 +2091,7 @@ def test_scanpipe_project_model_add_pipeline_run_can_start(self, mock_execute_ta @mock.patch("scanpipe.models.Run.execute_task_async") def test_scanpipe_project_model_add_pipeline_start_method(self, mock_execute_task): project1 = Project.objects.create(name="Analysis") - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" run1 = project1.add_pipeline(pipeline_name, execute_now=False) run2 = project1.add_pipeline(pipeline_name, execute_now=False) self.assertEqual(Run.Status.NOT_STARTED, run1.status) diff --git a/scanpipe/tests/test_pipelines.py b/scanpipe/tests/test_pipelines.py index 0f28efbd1..b097d2b0c 100644 --- a/scanpipe/tests/test_pipelines.py +++ b/scanpipe/tests/test_pipelines.py @@ -42,7 +42,7 @@ from scanpipe.pipelines import InputFileError from scanpipe.pipelines import Pipeline from scanpipe.pipelines import is_pipeline -from scanpipe.pipelines import root_filesystems +from scanpipe.pipelines import root_filesystem from scanpipe.pipes import output from scanpipe.pipes import scancode from scanpipe.pipes.input import copy_input @@ -288,8 +288,8 @@ def test_scanpipe_pipelines_class_flag_ignored_resources(self): class RootFSPipelineTest(TestCase): def test_scanpipe_rootfs_pipeline_extract_input_files_errors(self): project1 = Project.objects.create(name="Analysis") - run = 
project1.add_pipeline("root_filesystems") - pipeline_instance = root_filesystems.RootFS(run) + run = project1.add_pipeline("analyze_root_filesystem_or_vm_image") + pipeline_instance = root_filesystem.RootFS(run) # Create 2 files in the input/ directory to generate error twice project1.move_input_from(tempfile.mkstemp()[1]) @@ -451,7 +451,7 @@ def assertPipelineResultEqual( @skipIf(from_docker_image, "Random failure in the Docker context.") def test_scanpipe_scan_package_pipeline_integration(self): - pipeline_name = "scan_package" + pipeline_name = "scan_single_package" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "is-npm-1.0.0.tgz" @@ -485,7 +485,7 @@ def test_scanpipe_scan_package_pipeline_integration(self): @skipIf(from_docker_image, "Random failure in the Docker context.") def test_scanpipe_scan_package_pipeline_integration_multiple_packages(self): - pipeline_name = "scan_package" + pipeline_name = "scan_single_package" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "multiple-is-npm-1.0.0.tar.gz" @@ -513,7 +513,7 @@ def test_scanpipe_scan_package_pipeline_integration_multiple_packages(self): self.assertPipelineResultEqual(expected_file, summary_file) def test_scanpipe_scan_package_single_file(self): - pipeline_name = "scan_package" + pipeline_name = "scan_single_package" project1 = Project.objects.create(name="Analysis") input_location = ( @@ -605,7 +605,7 @@ def test_scanpipe_scan_codebase_can_process_wheel(self): @skipIf(sys.platform != "linux", "Expected results are inconsistent across OS") def test_scanpipe_docker_pipeline_alpine_integration(self): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" project1 = Project.objects.create(name="Analysis") filename = "alpine_3_15_4.tar.gz" @@ -628,7 +628,7 @@ def test_scanpipe_docker_pipeline_alpine_integration(self): self.assertPipelineResultEqual(expected_file, result_file) def 
test_scanpipe_docker_pipeline_does_not_report_errors_for_broken_symlinks(self): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" project1 = Project.objects.create(name="Analysis") filename = "minitag.tar" @@ -656,7 +656,7 @@ def test_scanpipe_docker_pipeline_does_not_report_errors_for_broken_symlinks(sel @skipIf(sys.platform != "linux", "RPM related features only supported on Linux.") def test_scanpipe_docker_pipeline_rpm_integration(self): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" project1 = Project.objects.create(name="Analysis") filename = "centos.tar.gz" @@ -679,7 +679,7 @@ def test_scanpipe_docker_pipeline_rpm_integration(self): self.assertPipelineResultEqual(expected_file, result_file) def test_scanpipe_docker_pipeline_debian_integration(self): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" project1 = Project.objects.create(name="Analysis") filename = "debian.tar.gz" @@ -702,7 +702,7 @@ def test_scanpipe_docker_pipeline_debian_integration(self): self.assertPipelineResultEqual(expected_file, result_file) def test_scanpipe_docker_pipeline_distroless_debian_integration(self): - pipeline_name = "docker" + pipeline_name = "analyze_docker_image" project1 = Project.objects.create(name="Analysis") filename = "gcr_io_distroless_base.tar.gz" @@ -725,7 +725,7 @@ def test_scanpipe_docker_pipeline_distroless_debian_integration(self): self.assertPipelineResultEqual(expected_file, result_file) def test_scanpipe_rootfs_pipeline_integration(self): - pipeline_name = "root_filesystems" + pipeline_name = "analyze_root_filesystem_or_vm_image" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "basic-rootfs.tar.gz" @@ -836,7 +836,7 @@ def test_scanpipe_find_vulnerabilities_pipeline_integration( self.assertEqual(expected, package1.affected_by_vulnerabilities) def test_scanpipe_inspect_manifest_pipeline_integration(self): - pipeline_name = "inspect_manifest" + pipeline_name = 
"inspect_packages" project1 = Project.objects.create(name="Analysis") run = project1.add_pipeline(pipeline_name) @@ -851,7 +851,7 @@ def test_scanpipe_inspect_manifest_pipeline_integration(self): def test_scanpipe_inspect_manifest_pipeline_pypi_integration( self, resolve_dependencies ): - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" project1 = Project.objects.create(name="Analysis") run = project1.add_pipeline(pipeline_name) @@ -875,7 +875,7 @@ def test_scanpipe_inspect_manifest_pipeline_pypi_integration( self.assertEqual(value, getattr(discoveredpackage, field_name)) def test_scanpipe_inspect_manifest_pipeline_aboutfile_integration(self): - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" project1 = Project.objects.create(name="Analysis") input_location = ( @@ -897,7 +897,7 @@ def test_scanpipe_inspect_manifest_pipeline_aboutfile_integration(self): self.assertEqual("bsd-new", discoveredpackage.declared_license_expression) def test_scanpipe_inspect_manifest_pipeline_spdx_integration(self): - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "manifests" / "toml.spdx.json" @@ -919,7 +919,7 @@ def test_scanpipe_inspect_manifest_pipeline_spdx_integration(self): self.assertEqual("mit", discoveredpackage.declared_license_expression) def test_scanpipe_inspect_manifest_pipeline_cyclonedx_integration(self): - pipeline_name = "inspect_manifest" + pipeline_name = "inspect_packages" project1 = Project.objects.create(name="Analysis") input_location = self.data_location / "cyclonedx/nested.cdx.json" @@ -996,7 +996,7 @@ def test_scanpipe_inspect_manifest_pipeline_cyclonedx_integration(self): @mock.patch("scanpipe.pipes.purldb.request_post") def test_scanpipe_deploy_to_develop_pipeline_integration(self, mock_request): mock_request.return_value = None - pipeline_name = "deploy_to_develop" + pipeline_name = 
"map_deploy_to_develop" project1 = Project.objects.create(name="Analysis") jar_location = self.data_location / "d2d" / "jars" @@ -1021,7 +1021,7 @@ def test_scanpipe_deploy_to_develop_pipeline_integration(self, mock_request): @mock.patch("scanpipe.pipes.purldb.request_post") def test_scanpipe_deploy_to_develop_pipeline_with_about_file(self, mock_request): mock_request.return_value = None - pipeline_name = "deploy_to_develop" + pipeline_name = "map_deploy_to_develop" project1 = Project.objects.create(name="Analysis") data_dir = self.data_location / "d2d" / "about_files" diff --git a/scanpipe/tests/test_views.py b/scanpipe/tests/test_views.py index 11d6cb88e..d494288cb 100644 --- a/scanpipe/tests/test_views.py +++ b/scanpipe/tests/test_views.py @@ -96,12 +96,12 @@ def test_scanpipe_views_project_list_filters(self): self.assertContains(response, is_archived_filters, html=True) pipeline_filters = [ - "?pipeline=docker", - "?pipeline=docker_windows", + "?pipeline=analyze_docker_image", + "?pipeline=analyze_windows_docker_image", "?pipeline=load_inventory", - "?pipeline=root_filesystems", + "?pipeline=analyze_root_filesystem_or_vm_image", "?pipeline=scan_codebase", - "?pipeline=scan_package", + "?pipeline=scan_single_package", ] for pipeline_filter in pipeline_filters: self.assertContains(response, pipeline_filter) @@ -313,7 +313,7 @@ def test_scanpipe_views_project_details_missing_inputs(self): def test_scanpipe_views_project_details_add_pipelines(self): url = self.project1.get_absolute_url() data = { - "pipeline": "docker", + "pipeline": "analyze_docker_image", } response = self.client.post(url, data, follow=True) self.assertEqual(404, response.status_code) @@ -322,7 +322,7 @@ def test_scanpipe_views_project_details_add_pipelines(self): response = self.client.post(url, data, follow=True) self.assertContains(response, "Pipeline added.") run = self.project1.runs.get() - self.assertEqual("docker", run.pipeline_name) + self.assertEqual("analyze_docker_image", 
run.pipeline_name) self.assertIsNone(run.task_start_date) def test_scanpipe_views_project_details_add_labels(self): @@ -550,7 +550,7 @@ def test_scanpipe_views_project_codebase_view_get_tree(self): def test_scanpipe_views_project_archive_view(self): url = reverse("project_archive", args=[self.project1.slug]) - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") run.set_task_started(run.pk) response = self.client.post(url, follow=True) @@ -569,7 +569,7 @@ def test_scanpipe_views_project_archive_view(self): def test_scanpipe_views_project_delete_view(self): url = reverse("project_delete", args=[self.project1.slug]) - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") run.set_task_started(run.pk) response = self.client.post(url, follow=True) @@ -587,7 +587,7 @@ def test_scanpipe_views_project_delete_view(self): def test_scanpipe_views_project_reset_view(self): url = reverse("project_reset", args=[self.project1.slug]) - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") run.set_task_started(run.pk) response = self.client.post(url, follow=True) @@ -651,7 +651,7 @@ def test_scanpipe_views_project_views(self): @mock.patch("scanpipe.models.Run.execute_task_async") def test_scanpipe_views_execute_pipelines_view(self, mock_execute_task): - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") url = reverse("project_execute_pipelines", args=[self.project1.slug]) response = self.client.get(url, follow=True) @@ -674,7 +674,7 @@ def test_scanpipe_views_execute_pipelines_view(self, mock_execute_task): @mock.patch("scanpipe.models.Run.stop_task") def test_scanpipe_views_stop_pipeline_view(self, mock_stop_task): - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") url = reverse("project_stop_pipeline", args=[self.project1.slug, 
run.uuid]) response = self.client.get(url) @@ -688,7 +688,7 @@ def test_scanpipe_views_stop_pipeline_view(self, mock_stop_task): @mock.patch("scanpipe.models.Run.delete_task") def test_scanpipe_views_delete_pipeline_view(self, mock_delete_task): - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") url = reverse("project_delete_pipeline", args=[self.project1.slug, run.uuid]) response = self.client.get(url, follow=True) @@ -701,7 +701,7 @@ def test_scanpipe_views_delete_pipeline_view(self, mock_delete_task): self.assertEqual(404, response.status_code) def test_scanpipe_views_run_status_view(self): - run = self.project1.add_pipeline("docker") + run = self.project1.add_pipeline("analyze_docker_image") url = reverse("run_status", args=[run.uuid]) response = self.client.get(url) diff --git a/setup.cfg b/setup.cfg index 4d8a103d0..6656208a2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -123,17 +123,17 @@ console_scripts = scanpipe = scancodeio:command_line scancodeio_pipelines = - deploy_to_develop = scanpipe.pipelines.deploy_to_develop:DeployToDevelop - docker = scanpipe.pipelines.docker:Docker - docker_windows = scanpipe.pipelines.docker_windows:DockerWindows + analyze_docker_image = scanpipe.pipelines.docker:Docker + analyze_root_filesystem_or_vm_image = scanpipe.pipelines.root_filesystem:RootFS + analyze_windows_docker_image = scanpipe.pipelines.docker_windows:DockerWindows find_vulnerabilities = scanpipe.pipelines.find_vulnerabilities:FindVulnerabilities - inspect_manifest = scanpipe.pipelines.inspect_manifest:InspectManifest + inspect_packages = scanpipe.pipelines.inspect_packages:InspectPackages load_inventory = scanpipe.pipelines.load_inventory:LoadInventory + map_deploy_to_develop = scanpipe.pipelines.deploy_to_develop:DeployToDevelop populate_purldb = scanpipe.pipelines.populate_purldb:PopulatePurlDB - root_filesystems = scanpipe.pipelines.root_filesystems:RootFS scan_codebase = 
scanpipe.pipelines.scan_codebase:ScanCodebase scan_codebase_packages = scanpipe.pipelines.scan_codebase_packages:ScanCodebasePackages - scan_package = scanpipe.pipelines.scan_package:ScanPackage + scan_single_package = scanpipe.pipelines.scan_single_package:ScanSinglePackage [isort] force_single_line = True