From 206137f35a0cb5e646e56bb83b6897a4410ae73b Mon Sep 17 00:00:00 2001 From: Robbert Date: Wed, 22 Mar 2023 07:58:49 +0100 Subject: [PATCH 1/4] Change the build system from Wheel to Hatch, switch from Flake8 to Ruff and introduce parallel matrix for GitLab CI/CD --- .codeclimate.yml | 11 +- .coveragerc | 29 --- .dockerignore | 29 +-- .editorconfig | 27 +-- .gitignore | 8 +- .gitlab-ci.yml | 281 ++++++++--------------- .gitpod-vscode-settings.json | 4 +- .gitpod.yml | 4 +- .mdlrc | 1 - .pre-commit-config.yaml | 28 +-- LICENSE | 2 +- MANIFEST.in | 7 - Makefile | 34 +-- README.md | 48 ++-- askanna/__init__.py | 12 +- askanna/cli/__init__.py | 6 +- askanna/cli/__main__.py | 1 - askanna/cli/run_utils/__init__.py | 6 +- askanna/cli/run_utils/__main__.py | 1 - askanna/cli/run_utils/get_package.py | 1 - askanna/config/server.py | 1 - askanna/config/utils.py | 3 +- askanna/core/dataclasses/run.py | 6 +- askanna/core/exceptions.py | 4 +- askanna/core/push.py | 1 - askanna/core/utils/main.py | 4 +- askanna/gateways/api_client.py | 7 +- pyproject.toml | 151 ++++++++++++ requirements-dev.txt | 18 -- requirements.txt | 15 -- setup.cfg | 21 -- setup.py | 68 ------ tests/create_fake_files.py | 31 +-- tests/fixtures/responses/api/__init__.py | 1 - tox.ini | 41 ---- 35 files changed, 369 insertions(+), 543 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .mdlrc delete mode 100644 MANIFEST.in delete mode 100644 requirements-dev.txt delete mode 100644 requirements.txt delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 tox.ini diff --git a/.codeclimate.yml b/.codeclimate.yml index 5640727..6d3009a 100644 --- a/.codeclimate.yml +++ b/.codeclimate.yml @@ -1,10 +1,9 @@ ---- version: "2" plugins: - fixme: - enabled: true pep8: enabled: true + bandit: + enabled: true markdownlint: enabled: true duplication: @@ -15,9 +14,7 @@ plugins: editorconfig: enabled: true channel: beta - config: - editorconfig: .editorconfig + fixme: + enabled: true 
exclude_patterns: - - "**/test/" - - "**/tests/" - tests/ diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 972212f..0000000 --- a/.coveragerc +++ /dev/null @@ -1,29 +0,0 @@ -[run] -include = askanna/* -branch = True -omit = - # don't cover tests - *tests* - # don't cover txt files - *.txt - # don't cover .sh files - *.sh - -[report] -precision = 2 -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - if self\.debug - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # Don't complain if non-runnable code isn't run: - if 0: - if __name__ == .__main__.: diff --git a/.dockerignore b/.dockerignore index 0b337ff..751bdaa 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,4 +1,5 @@ **/__pycache__ +**/*_cache **/.classpath **/.dockerignore **/.env @@ -6,27 +7,17 @@ **/.gitignore **/.project **/.settings -**/.toolstarget -**/.vs **/.vscode -**/*.*proj.user -**/*.dbmdl -**/*.jfm -**/azds.yaml -**/bin -**/charts -**/docker-compose* -**/Dockerfile* -**/node_modules -**/npm-debug.log -**/obj -**/secrets.dev.yaml -**/values.dev.yaml -**/requirements-dev.txt +**/Dockerfile **/.gitlab-ci.yml -**/setup.cfg -**/tox.ini **/.editorconfig **/tests **/Makefile -**/MANIFEST.in +**/htmlcov +**/.codeclimate.yml +**/.gitpod.yml +**/.gitpod-vscode-settings.json +**/.markdownlint.yaml +**/.pre-commit-config.yaml +**/coverage.xml +**/junit.xml diff --git a/.editorconfig b/.editorconfig index 8912791..704fab5 100644 --- a/.editorconfig +++ b/.editorconfig @@ -5,28 +5,13 @@ root = true [*] charset = utf-8 end_of_line = lf +indent_size = 4 +indent_style = space insert_final_newline = true -trim_trailing_whitespace = true - -[*.{py,md,txt}] line_length = 119 +trim_trailing_whitespace = true -[*.{py,rst,ini}] -indent_style = space -indent_size = 4 - -[*.py] 
-known_first_party = askanna -multi_line_output = 3 -default_section = THIRDPARTY -recursive = true -skip = venv/ -include_trailing_comma = true -force_grid_wrap = 0 -use_parentheses = true - -[*.{html,css,scss,json,yml}] -indent_style = space +[*.{json,yaml,yml,toml}] indent_size = 2 [*.md] @@ -34,7 +19,3 @@ trim_trailing_whitespace = false [Makefile] indent_style = tab - -[nginx.conf] -indent_style = space -indent_size = 2 diff --git a/.gitignore b/.gitignore index b339f5a..548e3b1 100644 --- a/.gitignore +++ b/.gitignore @@ -25,7 +25,6 @@ pip-log.txt # Unit test / coverage reports .coverage -.tox nosetests.xml htmlcov @@ -59,14 +58,11 @@ docs/_build # MacOS .DS_Store -# Running tox -.eggs/ -gl-code-quality-report.json - .venv .venv* - .env* junit.xml coverage.xml + +*_cache diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index d73b40d..68e3223 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -1,34 +1,54 @@ -include: - - template: Code-Quality.gitlab-ci.yml - stages: - test - build - - verifybuild + - verify build - publish -code_quality: - tags: - - docker +include: + - template: Code-Quality.gitlab-ci.yml .test_template: &test_template stage: test - image: python:3.9-slim + image: python:3-slim before_script: - apt-get update && apt-get install -y git - - pip install -U pip - - pip install -U -r requirements-dev.txt + - pip install hatch + tags: + - kubernetes + +.twine_template: &twine_template + stage: publish + image: python:3-slim tags: - kubernetes + before_script: + - pip install build twine + - python -m build + +code_quality: + rules: + - if: $CODE_QUALITY_DISABLED + when: never + - if: ($CI_COMMIT_BRANCH == "main" || $CI_COMMIT_TAG) + when: never + - if: $CI_COMMIT_BRANCH + before_script: + - echo -e "[pycodestyle]\nmax-line-length = 119" > setup.cfg + - echo -e "rule 'MD013', :line_length = 119" > .mdlrc + +lint: + <<: *test_template + script: + - hatch run +py=3 test:lint coverage: <<: *test_template script: - - tox -e coverage + - hatch run +py=3 
test:cov coverage: '/(?i)total.*? (100(?:\.0+)?\%|[1-9]?\d(?:\.\d+)?\%)$/' artifacts: expire_in: 2 weeks - name: "${CI_JOB_NAME}-${CI_COMMIT_REF_SLUG}" + name: "$CI_JOB_NAME-$CI_COMMIT_REF_SLUG" paths: - htmlcov/ reports: @@ -37,131 +57,63 @@ coverage: path: coverage.xml junit: junit.xml -flake8: +pytest: <<: *test_template + parallel: + matrix: + - PY_VERSION: ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] + image: python:$PY_VERSION-slim script: - - tox -e flake8 - artifacts: - reports: - codequality: gl-code-quality-report.json - -twine-check: - <<: *test_template - script: - - tox -e twine-check - -python 3: - <<: *test_template - image: python:3-slim - script: - - tox -e py3 + - hatch run +py=$PY_VERSION test:pytest -python 3.7: - <<: *test_template - image: python:3.7-slim - script: - - tox -e py37 - -python 3.8: - <<: *test_template - image: python:3.8-slim - script: - - tox -e py38 - -python 3.9: - <<: *test_template - image: python:3.9-slim - script: - - tox -e py39 - -python 3.10: - <<: *test_template - image: python:3.10-slim - script: - - tox -e py310 - -python 3.11: - <<: *test_template - image: python:3.11-slim +twine check: + <<: *twine_template + stage: test script: - - tox -e py311 + - twine check dist/* -.build_docker_image_template: &build_docker_image_template +build image: stage: build + needs: [pytest] tags: - kubernetes image: name: gcr.io/kaniko-project/executor:debug entrypoint: [""] - variables: - PY_VERSION: "3-slim" + parallel: + matrix: + - PY_VERSION: ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] rules: - - if: ($CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger") - when: never - if: '$CI_COMMIT_BRANCH == "main"' variables: - DESTINATION: ${CI_REGISTRY_IMAGE}:${PY_VERSION}-${CI_COMMIT_REF_SLUG} + DESTINATION: $CI_REGISTRY_IMAGE:python-$PY_VERSION-$CI_COMMIT_REF_SLUG - if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' variables: - DESTINATION: ${CI_REGISTRY_IMAGE}/tag:${CI_COMMIT_TAG}-${PY_VERSION} + DESTINATION: 
$CI_REGISTRY_IMAGE/tag:$CI_COMMIT_TAG-python-$PY_VERSION - if: '$CI_PIPELINE_SOURCE == "push"' variables: - DESTINATION: ${CI_REGISTRY_IMAGE}/review:${CI_COMMIT_REF_SLUG}-${PY_VERSION} + DESTINATION: $CI_REGISTRY_IMAGE/review:$CI_COMMIT_REF_SLUG-python-$PY_VERSION script: - echo "Building image to " $DESTINATION - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json - - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination ${DESTINATION} --build-arg PY_VERSION=${PY_VERSION} - -image python 3: - <<: *build_docker_image_template - needs: [python 3] - -image python 3.7: - <<: *build_docker_image_template - needs: [python 3.7] - variables: - PY_VERSION: "3.7-slim" - -image python 3.8: - <<: *build_docker_image_template - needs: [python 3.8] - variables: - PY_VERSION: "3.8-slim" - -image python 3.9: - <<: *build_docker_image_template - needs: [python 3.9] - variables: - PY_VERSION: "3.9-slim" - -image python 3.10: - <<: *build_docker_image_template - needs: [python 3.10] - variables: - PY_VERSION: "3.10-slim" - -image python 3.11: - <<: *build_docker_image_template - needs: [python 3.11] - variables: - PY_VERSION: "3.11-slim" - -.verify_docker_build_template: &verify_docker_build_template - stage: verifybuild - variables: - PY_VERSION: "3-slim" + - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $DESTINATION --build-arg PY_VERSION=$PY_VERSION-slim + +verify image: + stage: verify build + needs: [build image] + parallel: + matrix: + - PY_VERSION: ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] rules: - - if: ($CI_PIPELINE_SOURCE == "schedule" || $CI_PIPELINE_SOURCE == "trigger") - when: never - if: '$CI_COMMIT_BRANCH == "main"' variables: - DOCKER_IMAGE: ${CI_REGISTRY_IMAGE}:${PY_VERSION}-${CI_COMMIT_REF_SLUG} + DOCKER_IMAGE: $CI_REGISTRY_IMAGE:python-$PY_VERSION-$CI_COMMIT_REF_SLUG - if: 
'$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' variables: - DOCKER_IMAGE: ${CI_REGISTRY_IMAGE}/tag:${CI_COMMIT_TAG}-${PY_VERSION} + DOCKER_IMAGE: $CI_REGISTRY_IMAGE/tag:$CI_COMMIT_TAG-python-$PY_VERSION - if: '$CI_PIPELINE_SOURCE == "push"' variables: - DOCKER_IMAGE: ${CI_REGISTRY_IMAGE}/review:${CI_COMMIT_REF_SLUG}-${PY_VERSION} + DOCKER_IMAGE: $CI_REGISTRY_IMAGE/review:$CI_COMMIT_REF_SLUG-python-$PY_VERSION image: $DOCKER_IMAGE tags: - kubernetes @@ -173,92 +125,51 @@ image python 3.11: - askanna-run-utils --version - askanna-run-utils -verify image python 3: - <<: *verify_docker_build_template - needs: [image python 3] - -verify image python 3.7: - <<: *verify_docker_build_template - needs: [image python 3.7] - variables: - PY_VERSION: "3.7-slim" - -verify image python 3.8: - <<: *verify_docker_build_template - needs: [image python 3.8] - variables: - PY_VERSION: "3.8-slim" - -verify image python 3.9: - <<: *verify_docker_build_template - needs: [image python 3.9] - variables: - PY_VERSION: "3.9-slim" - -verify image python 3.10: - <<: *verify_docker_build_template - needs: [image python 3.10] - variables: - PY_VERSION: "3.10-slim" - -verify image python 3.11: - <<: *verify_docker_build_template - needs: [image python 3.11] - variables: - PY_VERSION: "3.11-slim" - -publish_to_pypi: - stage: publish - image: python:3-slim - tags: - - kubernetes - before_script: - - echo $PYPIRC | base64 -d > ~/.pypirc - - pip install -U pip wheel twine +publish to pypi: + <<: *twine_template + needs: [twine check, pytest] + rules: + - if: '$CI_COMMIT_BRANCH == "main"' script: - - python3 setup.py sdist bdist_wheel - - twine upload --verbose --non-interactive --config-file ~/.pypirc -r pypi dist/* - only: - refs: - - main + - echo $PYPIRC | base64 -d > ~/.pypirc + - python -m twine upload --verbose --non-interactive --config-file ~/.pypirc -r pypi dist/* -publish_to_testpypi: - stage: publish - image: python:3-slim - tags: - - kubernetes - allow_failure: true - before_script: - - 
echo $PYPITESTRC | base64 -d > ~/.pypirc - - pip install -U pip wheel twine +publish to testpypi: + <<: *twine_template + needs: [twine check] + rules: + - if: '$CI_COMMIT_BRANCH == "main"' + when: never + - if: '$CI_COMMIT_BRANCH' + when: manual + allow_failure: true script: - - python3 setup.py sdist bdist_wheel - - twine upload --verbose --non-interactive --config-file ~/.pypirc -r testpypi dist/* - when: manual - except: - refs: - - main + - echo $PYPITESTRC | base64 -d > ~/.pypirc + - python -m twine upload --verbose --non-interactive --config-file ~/.pypirc -r testpypi dist/* -publish_to_docker: +publish to docker: stage: publish + needs: [verify image] image: docker:latest services: - docker:dind tags: - docker - allow_failure: true + rules: + - if: '$CI_COMMIT_BRANCH == "main"' + allow_failure: true before_script: - echo "$DOCKER_TOKEN" | docker login --username $DOCKER_USER --password-stdin - echo "$CI_REGISTRY_PASSWORD" | docker login $CI_REGISTRY --username $CI_REGISTRY_USER --password-stdin script: - docker pull gitlab.askanna.io:4567/askanna/askanna-cli --all-tags - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3-slim-main askanna/python:3 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3-slim-main askanna/python:latest - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3.7-slim-main askanna/python:3.7 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3.8-slim-main askanna/python:3.8 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3.9-slim-main askanna/python:3.9 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3.10-slim-main askanna/python:3.10 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:3.11-slim-main askanna/python:3.11 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3-main askanna/python:3 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3-main askanna/python:latest + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.7-main 
askanna/python:3.7 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.8-main askanna/python:3.8 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.9-main askanna/python:3.9 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.10-main askanna/python:3.10 + - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.11-main askanna/python:3.11 - docker push askanna/python:latest - docker push askanna/python:3 - docker push askanna/python:3.7 @@ -268,12 +179,10 @@ publish_to_docker: - docker push askanna/python:3.11 after_script: - docker logout - only: - refs: - - main -publish_to_public_repos: +publish to public repos: stage: publish + needs: [twine check, pytest] tags: - kubernetes allow_failure: true @@ -287,8 +196,8 @@ publish_to_public_repos: before_script: - apt-get update -y && apt-get install openssh-client git curl -y - eval $(ssh-agent -s) - - echo "${SSH_PRIVATE_KEY_GITLAB_COM}" | tr -d '\r' | ssh-add - > /dev/null - - echo "${SSH_PRIVATE_KEY_GITHUB_COM}" | tr -d '\r' | ssh-add - > /dev/null + - echo "$SSH_PRIVATE_KEY_GITLAB_COM" | tr -d '\r' | ssh-add - > /dev/null + - echo "$SSH_PRIVATE_KEY_GITHUB_COM" | tr -d '\r' | ssh-add - > /dev/null - mkdir -p ~/.ssh - chmod 700 ~/.ssh - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts diff --git a/.gitpod-vscode-settings.json b/.gitpod-vscode-settings.json index 8d661ee..b16e47b 100644 --- a/.gitpod-vscode-settings.json +++ b/.gitpod-vscode-settings.json @@ -1,6 +1,5 @@ { "python.defaultInterpreterPath": "/workspace/askanna-cli/.venv/bin/python", - "python.languageServer": "Pylance", "python.testing.unittestEnabled": false, "python.testing.pytestEnabled": true, "python.formatting.provider": "black", @@ -9,6 +8,5 @@ "python.linting.enabled": true, "python.linting.pylintEnabled": true, "python.linting.banditPath": "/workspace/askanna-cli/.venv/bin/bandit", - "python.linting.flake8Path": "/workspace/askanna-cli/.venv/bin/flake8", - "python.formatting.blackPath": 
"/workspace/askanna-cli/.venv/bin/black", + "python.formatting.blackPath": "/workspace/askanna-cli/.venv/bin/black" } diff --git a/.gitpod.yml b/.gitpod.yml index 50ee60c..faa41c4 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -3,8 +3,7 @@ tasks: before: | python -m venv .venv && source .venv/bin/activate pip install --upgrade pip - pip install -r requirements-dev.txt - pip install --editable . + make install-dev pre-commit install --install-hooks init: | mkdir .vscode && ln -n .gitpod-vscode-settings.json .vscode/settings.json @@ -15,6 +14,7 @@ vscode: - eamodio.gitlens - GitLab.gitlab-workflow - ms-python.python + - charliermarsh.ruff - njpwerner.autodocstring - DavidAnson.vscode-markdownlint - yzhang.markdown-all-in-one diff --git a/.mdlrc b/.mdlrc deleted file mode 100644 index ff5a62f..0000000 --- a/.mdlrc +++ /dev/null @@ -1 +0,0 @@ -rule 'MD013', :line_length = 119 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4825af7..531ef8b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,14 +8,15 @@ repos: - id: check-merge-conflict - id: debug-statements - id: check-symlinks + - id: check-json - id: check-yaml + - id: check-toml - id: detect-private-key - id: end-of-file-fixer - id: fix-encoding-pragma args: [--remove] - id: no-commit-to-branch args: [--branch, main, --pattern, release/.*] - - id: requirements-txt-fixer - id: trailing-whitespace - repo: https://github.com/igorshubovych/markdownlint-cli @@ -23,34 +24,23 @@ repos: hooks: - id: markdownlint - - repo: https://github.com/pycqa/flake8 - rev: 6.0.0 + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: v0.0.257 hooks: - - id: flake8 + - id: ruff args: - [ - "--config=setup.cfg", - "--ignore=W391,W503,E203", - "--max-line-length=119", - ] + - --fix + - --exit-non-zero-on-fix - repo: https://github.com/ambv/black rev: 23.1.0 hooks: - id: black - args: ["--line-length=119"] - repo: https://github.com/pycqa/isort rev: 5.12.0 hooks: - id: isort - name: isort 
(python) - - id: isort - name: isort (cython) - types: [cython] - - id: isort - name: isort (pyi) - types: [pyi] - repo: https://github.com/PyCQA/bandit rev: 1.7.4 @@ -63,4 +53,6 @@ repos: rev: v3.3.1 hooks: - id: pyupgrade - args: [--py37-plus] + args: + - --py37-plus + - --keep-runtime-typing diff --git a/LICENSE b/LICENSE index 1009eb7..3f0a6f6 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2022, AskAnna IP B.V. +Copyright (c) 2019 - present, AskAnna IP B.V. All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index a6380bb..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,7 +0,0 @@ -include requirements.txt -include README.md -include CHANGELOG.md -include AUTHORS.md - -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] diff --git a/Makefile b/Makefile index 90684f2..4abbb71 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,7 @@ .DEFAULT_GOAL := help clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts + rm -fr .ruff_cache/ clean-build: ## remove build artifacts rm -fr build/ @@ -16,41 +17,44 @@ clean-pyc: ## remove Python file artifacts find . -name '__pycache__' -exec rm -fr {} + clean-test: ## remove test and coverage artifacts - rm -fr .tox/ rm -f .coverage rm -fr htmlcov/ - rm -fr .pytest_cache + rm -fr .pytest_cache/ rm -f gl-code-quality-report.json rm -f coverage.xml rm -f junit.xml lint: ## check style with flake8 - tox -e flake8 + ruff check . 
-test: ## run tests quickly with the default Python - tox -e py3 +test: ## run tests with the default Python version + hatch run +py=3 test:pytest -test-all: ## run tests on every Python version with tox - tox +test-all: ## run tests on every Python version that is supported + hatch run test:pytest -coverage: ## check code coverage quickly with the default Python - pytest --cov=askanna - coverage html - $(BROWSER) htmlcov/index.html +coverage: ## check code coverage with the default Python version + hatch run +py=3 test:cov + open htmlcov/index.html dist: clean ## builds source and wheel package - python setup.py sdist - python setup.py bdist_wheel + pip install build + python -m build install: clean ## install the package to the active Python's site-packages pip install . +install-dev: clean ## install the package to the active Python's site-packages + pip install -e ."[dev]" + uninstall: clean ## uninstall the AskAnna package - pip uninstall askanna -y + pip freeze | grep -v "^-e" | xargs pip uninstall -y reinstall: uninstall install ## uninstall and install the AskAnna package -help: ## show this message +reinstall-dev: uninstall install-dev ## uninstall and install a development environment + +help: ## show this message @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) define PRINT_HELP_PYSCRIPT diff --git a/README.md b/README.md index a88c920..c21d449 100644 --- a/README.md +++ b/README.md @@ -1,14 +1,13 @@ # AskAnna CLI & Python SDK -The AskAnna CLI offers a command-line interface to the AskAnna platform. With the Python SDK you can run AskAnna +The AskAnna CLI offers a command line interface to the AskAnna platform. With the Python SDK you can run AskAnna functions directly from your Python script. The CLI & Python SDK simplifies the communication with the AskAnna platform and provides facilities for supporting every part of a data science project. 
-[![PyPi](https://img.shields.io/pypi/v/askanna.svg)](https://pypi.org/project/askanna/) -[![License](https://img.shields.io/badge/License-BSD_3--Clause-brightgreen.svg)](https://gitlab.com/askanna/askanna-python/-/blob/master/LICENSE) -[![Documentation](https://img.shields.io/badge/docs-latest-success.svg)](https://docs.askanna.io/) -[![Downloads](https://pepy.tech/badge/askanna)](https://pepy.tech/project/askanna) -[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) +[![PyPI](https://img.shields.io/pypi/v/askanna.svg?color=%2334D058)](https://pypi.org/project/askanna/) +[![Python](https://img.shields.io/pypi/pyversions/askanna.svg?color=%2334D058)](https://pypi.org/project/askanna/) +[![License](https://img.shields.io/badge/License-BSD_3--Clause-brightgreen.svg?label=license)](https://gitlab.com/askanna/askanna-python/-/blob/master/LICENSE) +[![Documentation](https://img.shields.io/badge/docs-available-success.svg)](https://docs.askanna.io/) ## Documentation @@ -26,7 +25,7 @@ documentation for: pip install askanna ``` -## Update AskAnna +### Update AskAnna ```bash pip install -U askanna @@ -38,8 +37,7 @@ askanna login ``` -This will create a `.askanna.yml` in your home folder. - +This will create a `.askanna.yml` in your home directory. ### Authorization token @@ -55,28 +53,44 @@ The API token can be found in the created `~/.askanna.yml` file after you logged ### How to push code to AskAnna +Run the init-command on the main directory of your project. The command will create a new project on AskAnna and will +add a `askanna.yml` file to your local project directory. In the `askanna.yml` file you can configure jobs. For more +information, see the [askanna.yml documentation](https://docs.askanna.io/code/#askannayml). + ```bash askanna init ``` -Run this command on the main directory of your project.
The command will create a new project on AskAnna and will -add a `askanna.yml` file to your local project directory. +If you want to start a project from scratch (or a template), you can run the create-command. ```bash askanna create ``` -If you want to start a project from scratch (or a template), you can run this command. +With the push-command your code will be uploaded to the project in AskAnna. ```bash askanna push ``` -Run `askanna push` and your code will be uploaded to the project in AskAnna. +### Track metrics + +To track metrics for your runs, you can use the function `track_metric`. For more information, see the +[metrics documentation](https://docs.askanna.io/metrics/). + +```python +from askanna import track_metric + +track_metric("name", "value") +``` + +### Track variables -## Credits +To track variables for your runs, you can use the function `track_variable`. For more information, see the +[variables documentation](https://docs.askanna.io/variable/tracking/). -Tools used in the AskAnna package: +```python +from askanna import track_variable -* [Cookiecutter](https://github.com/audreyr/cookiecutter) -* [cookiecutter-pypackage](https://github.com/audreyr/cookiecutter-pypackage) +track_variable("name", "value") +``` diff --git a/askanna/__init__.py b/askanna/__init__.py index 8fdb5b9..68eac02 100644 --- a/askanna/__init__.py +++ b/askanna/__init__.py @@ -1,17 +1,21 @@ -__author__ = "AskAnna Team" -__email__ = "support@askanna.io" -__version__ = "0.22.0" +""" +The AskAnna CLI & Python SDK is part of the AskAnna platform to kickstart your data science projects. 
+""" + +__version__ = "0.23.0.dev2" import re import sys +ASKANNA_VERSION = __version__ + # Determine whether we are in the CLI or using the SDK by chekiking for the `askanna` command USING_ASKANNA_CLI: bool = any([re.match(".+bin/askanna$", sys.argv[0])]) try: import click # noqa: F401 except ModuleNotFoundError as e: - # We are propably within an installation for tox or pip, skip the rest of the initialization + # We are probably within an installation for hatch or pip, skip the rest of the initialization print(e) else: # We use dotenv for development but it's not a requirement for the CLI or SDK diff --git a/askanna/cli/__init__.py b/askanna/cli/__init__.py index 42bc88f..65e7068 100644 --- a/askanna/cli/__init__.py +++ b/askanna/cli/__init__.py @@ -1,10 +1,8 @@ -from __future__ import absolute_import - import importlib import click -from askanna import __version__ as askanna_version +from askanna import ASKANNA_VERSION HELP = """ The AskAnna CLI helps you running data science projects on AskAnna.
@@ -24,7 +22,7 @@ short_help=SHORT_HELP, epilog=EPILOG, ) -@click.version_option(version=askanna_version, prog_name="AskAnna CLI") +@click.version_option(version=ASKANNA_VERSION, prog_name="AskAnna CLI") def cli(): """ Initialize the AskAnna CLI commands diff --git a/askanna/cli/__main__.py b/askanna/cli/__main__.py index 7406fe4..4a59657 100644 --- a/askanna/cli/__main__.py +++ b/askanna/cli/__main__.py @@ -2,5 +2,4 @@ """Allow askanna.cli to be executable through `python -m askanna.cli`""" if __name__ == "__main__": - cli() diff --git a/askanna/cli/run_utils/__init__.py b/askanna/cli/run_utils/__init__.py index f391511..9ac8140 100644 --- a/askanna/cli/run_utils/__init__.py +++ b/askanna/cli/run_utils/__init__.py @@ -1,10 +1,8 @@ -from __future__ import absolute_import - import importlib import click -from askanna import __version__ as askanna_version +from askanna import ASKANNA_VERSION HELP = """ The run util is used to support AskAnna runs @@ -24,7 +22,7 @@ short_help=SHORT_HELP, epilog=EPILOG, ) -@click.version_option(version=askanna_version, prog_name="AskAnna Run Utils") +@click.version_option(version=ASKANNA_VERSION, prog_name="AskAnna Run Utils") def cli(): """ Initialize the AskAnna Run Utils commands diff --git a/askanna/cli/run_utils/__main__.py b/askanna/cli/run_utils/__main__.py index 406e4a5..225a43b 100644 --- a/askanna/cli/run_utils/__main__.py +++ b/askanna/cli/run_utils/__main__.py @@ -2,5 +2,4 @@ """Allow askanna.cli.run_utils to be executable through `python -m askanna.cli.run_utils`""" if __name__ == "__main__": - cli() diff --git a/askanna/cli/run_utils/get_package.py b/askanna/cli/run_utils/get_package.py index afe041c..13a8737 100644 --- a/askanna/cli/run_utils/get_package.py +++ b/askanna/cli/run_utils/get_package.py @@ -31,7 +31,6 @@ type=click.Path(path_type=Path), ) def cli(package_suuid, output_dir): - try: package_content = PackageSDK().get(package_suuid) except Exception as e: diff --git a/askanna/config/server.py 
b/askanna/config/server.py index 26dc295..0a3830e 100644 --- a/askanna/config/server.py +++ b/askanna/config/server.py @@ -56,7 +56,6 @@ def logout_and_remove_token(self): def add_or_update_server_in_config_dict( # nosec config_dict: Optional[Dict], server: str, remote: str, token: str = "", ui: str = "" ) -> Dict: - server_dict = { "remote": remote, "ui": ui, diff --git a/askanna/config/utils.py b/askanna/config/utils.py index bb43189..761c489 100644 --- a/askanna/config/utils.py +++ b/askanna/config/utils.py @@ -10,8 +10,7 @@ from yaml import CSafeDumper as SafeDumper from yaml import CSafeLoader as SafeLoader except ImportError: # pragma: no cover - from yaml import SafeDumper - from yaml import SafeLoader as SafeLoader + from yaml import SafeDumper, SafeLoader string_format_datetime = "%Y-%m-%d %H:%M:%S %Z" diff --git a/askanna/core/dataclasses/run.py b/askanna/core/dataclasses/run.py index fa493ad..b0d2da0 100644 --- a/askanna/core/dataclasses/run.py +++ b/askanna/core/dataclasses/run.py @@ -5,7 +5,7 @@ from dateutil import parser as dateutil_parser -from askanna.core.exceptions import MultipleObjectsReturned +from askanna.core.exceptions import MultipleObjectsReturnedError from askanna.core.utils.object import json_serializer from .base import Label @@ -105,7 +105,7 @@ def get(self, name) -> Union[VariableObject, None]: if len(variables_filtered) == 1: return variables_filtered[0] if len(variables_filtered) > 1: - raise MultipleObjectsReturned( + raise MultipleObjectsReturnedError( f"Found multiple variables matching name '{name}', please use the method .filter(name=\"{name}\")." ) return None @@ -194,7 +194,7 @@ def get(self, name) -> Union[MetricObject, None]: if len(metrics_filtered) == 1: return metrics_filtered[0] if len(metrics_filtered) > 1: - raise MultipleObjectsReturned( + raise MultipleObjectsReturnedError( f"Found multiple metrics matching name '{name}', please use the method .filter(name=\"{name}\")." 
) return None diff --git a/askanna/core/exceptions.py b/askanna/core/exceptions.py index 76e025a..6028a34 100644 --- a/askanna/core/exceptions.py +++ b/askanna/core/exceptions.py @@ -38,7 +38,5 @@ class RunError(Error): pass -class MultipleObjectsReturned(Error): +class MultipleObjectsReturnedError(Error): """The query returned multiple objects when only one was expected.""" - - pass diff --git a/askanna/core/push.py b/askanna/core/push.py index 971fbde..11037fb 100644 --- a/askanna/core/push.py +++ b/askanna/core/push.py @@ -67,7 +67,6 @@ def is_project_config_push_ready() -> bool: def push(overwrite: bool = False, description: Union[str, None] = None) -> bool: - if not is_project_config_push_ready(): sys.exit(1) diff --git a/askanna/core/utils/main.py b/askanna/core/utils/main.py index 1d47eb9..22fa6fc 100644 --- a/askanna/core/utils/main.py +++ b/askanna/core/utils/main.py @@ -3,7 +3,7 @@ import click import requests -from askanna import __version__ as askanna_version +from askanna import ASKANNA_VERSION from askanna.core.dataclasses.run import Label from askanna.core.utils.object import ( get_type, @@ -30,7 +30,7 @@ def update_available() -> bool: else: return False - if askanna_version == pypi_info["info"]["version"]: + if ASKANNA_VERSION == pypi_info["info"]["version"]: return False else: click.echo("[INFO] A newer version of AskAnna is available. 
Update via: pip install -U askanna") diff --git a/askanna/gateways/api_client.py b/askanna/gateways/api_client.py index 3f0c980..e93433b 100644 --- a/askanna/gateways/api_client.py +++ b/askanna/gateways/api_client.py @@ -3,8 +3,7 @@ import requests from requests.structures import CaseInsensitiveDict -from askanna import USING_ASKANNA_CLI -from askanna import __version__ as askanna_version +from askanna import ASKANNA_VERSION, USING_ASKANNA_CLI from askanna.config import config from askanna.config.api_url import askanna_url from askanna.core.exceptions import ConnectionError @@ -27,8 +26,8 @@ def generate_authenication_header(self) -> CaseInsensitiveDict: auth_header = CaseInsensitiveDict( { "askanna-agent": askanna_agent, - "askanna-agent-version": askanna_version, - "user-agent": f"askanna-python/{askanna_version}", + "askanna-agent-version": ASKANNA_VERSION, + "user-agent": f"askanna-python/{ASKANNA_VERSION}", } ) diff --git a/pyproject.toml b/pyproject.toml index 6ed3d3e..ea1b5e9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,159 @@ +[project] +name="askanna" +dynamic = ["version"] +authors = [ + { name="AskAnna Team", email="support@askanna.io" }, +] +maintainers= [ + { name="AskAnna Team", email="support@askanna.io" }, +] +description="The AskAnna CLI & Python SDK is part of the AskAnna platform to kickstart your data science projects" +readme = "README.md" +requires-python = ">=3.7" +license = "BSD-3-Clause" +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: Education", + "Intended Audience :: Information Technology", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 
3.11", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Artificial Intelligence", + "Topic :: Scientific/Engineering :: Information Analysis", +] +keywords = [ + "askanna", + "ml", + "ai", + "data", + "datascience", + "versioncontrol", +] +dependencies = [ + "click>=7.1.2,<9.0.0", + "cookiecutter>1.7.0,<3.0.0", + "croniter>=1.0.15,<2.0.0", + "email-validator<2.0.0", + "futures~=2.2.0; python_version < '3.10'", # latest futures package cannot be installed on Python < 3.10 + "GitPython>=3.1.0,<4.0.0", + "igittigitt>=2.0.2,<3.0.0", + "python-dateutil>=2.8.0,<3.0.0", + "python-slugify>=7.0.0,<9.0.0", + "pytz>=2021,<2023", + "PyYAML>=5.3.1,<7.0.0", + "requests>=2.22.0,<3.0.0", + "resumable~=0.1.1", + "typing_extensions>=4.0.0,<5.0.0; python_version < '3.8'", + "tzlocal>=2.1,<5.0", +] + +[project.optional-dependencies] +test = [ + "faker~=16.6.1", + "numpy~=1.19.5; python_version < '3.8'", # numpy is only required during development for testing numpy support + "numpy~=1.24.1; python_version >= '3.8'", # numpy is only required during development for testing numpy support + "pytest~=7.2.1", + "pytest-cov~=4.0.0", + "responses~=0.22.0", + "ruff~=0.0.257", +] +dev = [ + "askanna[test]", + "bandit~=1.7.4", + "black~=23.1.0", + "hatch~=1.6.3", + "isort~=5.12.0; python_version >= '3.8'", + "pre-commit~=3.0.0; python_version >= '3.8'", + "pre-commit~=2.21.0; python_version < '3.8'", + "python-dotenv~=0.21.1", +] + +[project.urls] +Homepage = "https://askanna.io" +"Documentation Python SDK" = "https://docs.askanna.io/python-sdk/" +"Documentation CLI" = "https://docs.askanna.io/cli/" +"Documentation AskAnna" = "https://docs.askanna.io/" +Changelog = "https://gitlab.com/askanna/askanna-python/-/blob/master/CHANGELOG.md" +"Issue Tracker" = "https://gitlab.com/askanna/askanna-python/issues" +Source = "https://gitlab.com/askanna/askanna-python" + +[project.scripts] +askanna = "askanna.cli:cli" +ask = "askanna.cli:cli" +askanna-run-utils = 
"askanna.cli.run_utils:cli" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.version] +path = "askanna/__init__.py" + +[tool.hatch.build] +include = [ + "/askanna", + "CONTRIBUTING.md", +] + +[tool.hatch.envs.default] +features = [ + "dev", +] + +[tool.hatch.envs.test] +dev-mode = false +features = [ + "test", +] + +[[tool.hatch.envs.test.matrix]] +python = ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] + +[tool.hatch.envs.test.scripts] +cov = [ + "pytest --cov-config=pyproject.toml --cov=askanna --junitxml=junit.xml", + "coverage xml", + "coverage html", +] +lint = "ruff check ." + +[tool.coverage.run] +branch = true + +[tool.coverage.report] +precision = 2 +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "def __str__", + "raise NotImplementedError", + "if __name__ == .__main__.:", +] + [tool.black] line-length = 119 [tool.isort] profile = "black" +known_first_party = ["askanna"] [tool.bandit] skips = ["B101"] + +[tool.ruff] +target-version = "py37" +line-length = 119 +select = [ + "F", # pyflakes + "E", # pycodestyle errors + "W", # pycodestyle warnings + "UP", # pyupgrade +] diff --git a/requirements-dev.txt b/requirements-dev.txt deleted file mode 100644 index 6159f86..0000000 --- a/requirements-dev.txt +++ /dev/null @@ -1,18 +0,0 @@ --r requirements.txt -bandit~=1.7.4 -black~=23.1.0 -bumpversion~=0.6.0 -faker~=16.6.1 -flake8~=6.0.0; python_version >= '3.8' -flake8~=5.0.4; python_version < '3.8' -isort~=5.12.0; python_version >= '3.8' -numpy~=1.19.5; python_version < '3.8' # numpy is only required during development for testing numpy support -numpy~=1.24.1; python_version >= '3.8' # numpy is only required during development for testing numpy support -pre-commit~=3.0.0; python_version >= '3.8' -pre-commit~=2.21.0; python_version < '3.8' -pytest~=7.2.1 -pytest-cov~=4.0.0 -python-dotenv~=0.21.1 -responses~=0.22.0 -tox~=4.4.2; python_version >= '3.8' -tox~=3.28.0; python_version < '3.8' diff --git 
a/requirements.txt b/requirements.txt deleted file mode 100644 index 240cce4..0000000 --- a/requirements.txt +++ /dev/null @@ -1,15 +0,0 @@ -click>=7.1.2,<9.0.0 -cookiecutter>1.7.0,<3.0.0 -croniter>=1.0.15,<2.0.0 -email-validator<2.0.0 -futures~=2.2.0; python_version < '3.10' # latest futures package cannot be installed on Python < 3.10 -GitPython>=3.1.0,<4.0.0 -igittigitt>=2.0.2,<3.0.0 -python-dateutil>=2.8.0,<3.0.0 -python-slugify>=7.0.0,<9.0.0 -pytz>=2021,<2023 -PyYAML>=5.3.1,<7.0.0 -requests>=2.22.0,<3.0.0 -resumable~=0.1.1 -typing_extensions>=4.0.0,<5.0.0; python_version < '3.8' -tzlocal>=2.1,<5.0 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 0546b0b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,21 +0,0 @@ -[bumpversion] -current_version = 0.22.0 -commit = True -tag = False - -[bumpversion:file:askanna/__init__.py] -search = __version__ = "{current_version}" -replace = __version__ = "{new_version}" - -[bdist_wheel] -universal = 1 - -[flake8] -max-line-length = 119 -exclude = .tox,.git -ignore = W391,W503,E203 - -[pycodestyle] -max-line-length = 119 -exclude = .tox,.git -ignore = W391,W503,E203 diff --git a/setup.py b/setup.py deleted file mode 100644 index 81eb63c..0000000 --- a/setup.py +++ /dev/null @@ -1,68 +0,0 @@ -from setuptools import find_packages, setup - -from askanna import __author__ as askanna_author -from askanna import __email__ as askanna_email -from askanna import __version__ as askanna_version - -with open("README.md") as readme_file: - readme = readme_file.read() - -with open("CHANGELOG.md") as history_file: - history = history_file.read() - -with open("requirements.txt") as requirements_file: - requirements = requirements_file.read() - -setup_requirements = ["wheel~=0.38.4"] - - -setup( - name="askanna", - version=askanna_version, - author=askanna_author, - author_email=askanna_email, - python_requires=">=3.7", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Developers", - "Intended Audience :: 
Education", - "Intended Audience :: Information Technology", - "Intended Audience :: Science/Research", - "License :: OSI Approved :: BSD License", - "Natural Language :: English", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Artificial Intelligence", - "Topic :: Scientific/Engineering :: Information Analysis", - ], - entry_points={ - "console_scripts": [ - "askanna=askanna.cli:cli", - "askanna-run-utils=askanna.cli.run_utils:cli", - ], - }, - description="The AskAnna CLI & Python SDK is part of the AskAnna platform to kickstart your data science projects", - long_description=readme, - long_description_content_type="text/markdown", - include_package_data=True, - keywords="askanna ml ai data datascience versioncontrol", - license="BSD 3-Clause License", - packages=find_packages(exclude=["tests"]), - install_requires=requirements, - setup_requires=setup_requirements, - url="https://askanna.io", - project_urls={ - "Documentation Python SDK": "https://docs.askanna.io/python-sdk/", - "Documentation CLI": "https://docs.askanna.io/cli/", - "Documentation AskAnna": "https://docs.askanna.io/", - "Changelog": "https://gitlab.com/askanna/askanna-python/-/blob/master/CHANGELOG.md", - "Issue tracker": "https://gitlab.com/askanna/askanna-python/issues", - "Source": "https://gitlab.com/askanna/askanna-python", - }, - zip_safe=False, -) diff --git a/tests/create_fake_files.py b/tests/create_fake_files.py index c52eb27..ac381b3 100644 --- a/tests/create_fake_files.py +++ b/tests/create_fake_files.py @@ -1,29 +1,32 @@ -from faker import Faker import json import os from zipfile import ZipFile +from faker import Faker + -def create_fake_result(records : int = 1) -> list: +def 
create_fake_result(records: int = 1) -> list: fake = Faker() result = [] for i in range(records): - result.append({ - "id": i + 1, - "first_name": fake.first_name(), - "last_name": fake.last_name(), - "email": fake.email(), - "ip": fake.ipv4(), - "number": fake.pyfloat(), - "boolean": fake.boolean(), - "datetime": fake.date() + "T" + fake.time() - }) + result.append( + { + "id": i + 1, + "first_name": fake.first_name(), + "last_name": fake.last_name(), + "email": fake.email(), + "ip": fake.ipv4(), + "number": fake.pyfloat(), + "boolean": fake.boolean(), + "datetime": fake.date() + "T" + fake.time(), + } + ) return result -def create_json_file(dir : str, records: int) -> str: +def create_json_file(dir: str, records: int) -> str: result = create_fake_result(records) json_file_name = f"{dir}/random_json.json" @@ -33,7 +36,7 @@ def create_json_file(dir : str, records: int) -> str: return json_file_name -def create_zip_file(dir : str, records : int) -> str: +def create_zip_file(dir: str, records: int) -> str: result = create_fake_result(records) json_file_name = f"{dir}/random_json.json" diff --git a/tests/fixtures/responses/api/__init__.py b/tests/fixtures/responses/api/__init__.py index a84db44..12aff15 100644 --- a/tests/fixtures/responses/api/__init__.py +++ b/tests/fixtures/responses/api/__init__.py @@ -43,7 +43,6 @@ def api_response( workspace_detail, workspace_new_detail, ): - api_responses = RequestsMock() api_responses.start() diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 3124cc1..0000000 --- a/tox.ini +++ /dev/null @@ -1,41 +0,0 @@ -[tox] -envlist = py3, py37, py38, py39, py310, py311 - -[testenv] -setenv = - PYTHONPATH = {toxinidir} -deps = - -r{toxinidir}/requirements-dev.txt -commands = - pytest --basetemp={envtmpdir} - -[testenv:coverage] -basepython = python3 -deps = - -r{toxinidir}/requirements-dev.txt -commands = - pytest --basetemp={envtmpdir} --cov=askanna --junitxml=junit.xml - coverage html - coverage xml - -[testenv:flake8] 
-basepython = python3 -deps = - flake8-gl-codeclimate -commands = - flake8 --exit-zero --format=gl-codeclimate --output-file gl-code-quality-report.json - -[flake8] -max-line-length = 119 -exclude = - .tox/ - -[testenv:twine-check] -basepython = python3 -skipsdist = True -deps = - -r{toxinidir}/requirements.txt - twine -commands = - python3 setup.py sdist bdist_wheel - twine check dist/* From dd2fb69d936201a2eb4598f72000a79764efb188 Mon Sep 17 00:00:00 2001 From: Robbert Date: Wed, 22 Mar 2023 08:47:03 +0100 Subject: [PATCH 2/4] Remove requirement pytz and use zoneinfo to validate timezone --- askanna/core/utils/validate.py | 9 ++++++--- pyproject.toml | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/askanna/core/utils/validate.py b/askanna/core/utils/validate.py index 52ceabb..760661c 100644 --- a/askanna/core/utils/validate.py +++ b/askanna/core/utils/validate.py @@ -1,8 +1,11 @@ +try: + import zoneinfo +except ImportError: + from backports import zoneinfo from typing import Dict, List, Union import click import croniter -import pytz from email_validator import EmailNotValidError, validate_email from tzlocal import get_localzone @@ -243,7 +246,7 @@ def validate_askanna_yml(config): global_timezone = config.get("timezone") # validate the global timezone if global_timezone: - if global_timezone not in pytz.all_timezones: + if global_timezone not in zoneinfo.available_timezones(): click.echo( "Invalid timezone setting found in askanna.yml:\n" + f"timezone: {global_timezone}", err=True, @@ -286,7 +289,7 @@ def validate_askanna_yml(config): return False # validate the timezone if set timezone = job.get("timezone") - if timezone and timezone not in pytz.all_timezones: + if timezone and timezone not in zoneinfo.available_timezones(): click.echo( f"Invalid timezone setting found in job `{jobname}`:\n" + f"timezone: {timezone}", err=True, diff --git a/pyproject.toml b/pyproject.toml index ea1b5e9..3b1e3d7 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -38,6 +38,7 @@ keywords = [ "versioncontrol", ] dependencies = [ + "backports.zoneinfo>=0.2.1; python_version < '3.9'", "click>=7.1.2,<9.0.0", "cookiecutter>1.7.0,<3.0.0", "croniter>=1.0.15,<2.0.0", @@ -47,7 +48,6 @@ dependencies = [ "igittigitt>=2.0.2,<3.0.0", "python-dateutil>=2.8.0,<3.0.0", "python-slugify>=7.0.0,<9.0.0", - "pytz>=2021,<2023", "PyYAML>=5.3.1,<7.0.0", "requests>=2.22.0,<3.0.0", "resumable~=0.1.1", From 5962d1325b5691b2d030d41b4dd36e761fbacd82 Mon Sep 17 00:00:00 2001 From: Robbert Date: Wed, 5 Apr 2023 08:56:48 +0200 Subject: [PATCH 3/4] Added new run filters Added Literal for status + trigger values Added more tests Added test utils a.o. with date_time helpers Removed deprecated GetRunsSDK --- askanna/__init__.py | 10 +- askanna/cli/run.py | 16 +- askanna/core/dataclasses/base.py | 2 +- askanna/core/dataclasses/run.py | 14 +- askanna/core/utils/at_exit.py | 2 +- askanna/core/utils/object.py | 2 +- askanna/core/utils/validate.py | 2 +- askanna/gateways/run.py | 76 +++- askanna/init.py | 5 +- askanna/sdk/run.py | 105 +++--- tests/conftest.py | 2 +- tests/fixtures/responses/api/__init__.py | 6 + tests/fixtures/responses/api/run.py | 144 ++++++++ tests/fixtures/responses/run.py | 125 +++++++ tests/test_cli/test_artifact.py | 4 +- tests/test_cli/test_run.py | 51 +++ .../test_run_utils/test_get_package.py | 4 +- .../test_run_utils/test_get_payload.py | 4 +- .../test_run_utils/test_get_run_manifest.py | 4 +- tests/test_core/test_dataclasses/test_job.py | 19 +- .../test_dataclasses/test_package.py | 11 +- .../test_dataclasses/test_project.py | 11 +- tests/test_core/test_dataclasses/test_run.py | 182 ++++++++-- .../test_dataclasses/test_variable.py | 11 +- .../test_dataclasses/test_workspace.py | 11 +- .../test_core/test_utils/test_validate_yml.py | 2 +- tests/test_gateways/test_job.py | 2 +- tests/test_gateways/test_package.py | 2 +- tests/test_gateways/test_run.py | 339 ++++++++++++++++++ tests/test_sdk/test_run.py | 80 +++++ 
tests/test_sdk/test_sdk_import.py | 15 +- tests/utils.py | 18 + 32 files changed, 1124 insertions(+), 157 deletions(-) create mode 100644 tests/test_gateways/test_run.py create mode 100644 tests/test_sdk/test_run.py create mode 100644 tests/utils.py diff --git a/askanna/__init__.py b/askanna/__init__.py index 68eac02..3115e81 100644 --- a/askanna/__init__.py +++ b/askanna/__init__.py @@ -9,22 +9,22 @@ ASKANNA_VERSION = __version__ -# Determine whether we are in the CLI or using the SDK by chekiking for the `askanna` command +# Determine whether we are in the CLI or using the SDK by checking for the `askanna` command USING_ASKANNA_CLI: bool = any([re.match(".+bin/askanna$", sys.argv[0])]) try: import click # noqa: F401 -except ModuleNotFoundError as e: +except ModuleNotFoundError as e: # pragma: no cover # We are propably within an installation for hatch or pip, skip the rest of the initialization print(e) else: # We use dotenv for development but it's not a requirement for the CLI or SDK try: from dotenv import find_dotenv, load_dotenv - except ImportError: + except ImportError: # pragma: no cover pass - else: + else: # pragma: no cover load_dotenv(find_dotenv()) # Initialize AskAnna after everything is loaded successfully - from .init import * # noqa + from .init import * # noqa: F403 diff --git a/askanna/cli/run.py b/askanna/cli/run.py index e1a2e3c..eb901a1 100644 --- a/askanna/cli/run.py +++ b/askanna/cli/run.py @@ -11,9 +11,15 @@ ) from askanna.config import config from askanna.config.utils import string_format_datetime +from askanna.core.dataclasses.run import STATUS from askanna.core.exceptions import GetError, PatchError from askanna.sdk.run import RunSDK +try: + from typing import get_args +except ImportError: # pragma: no cover + from typing_extensions import get_args + HELP = """ This command will allow you to start a run in AskAnna. 
""" @@ -88,6 +94,13 @@ def cli( @cli.command(help="List runs available in AskAnna", short_help="List runs") +@click.option( + "--status", + "status", + required=False, + type=click.Choice(list(get_args(STATUS)), case_sensitive=False), + help="Show runs with a specific run status", +) @click.option( "--job", "-j", @@ -113,11 +126,12 @@ def cli( help="Workspace SUUID to list runs for a workspace", ) @click.option("--search", "-s", required=False, type=str, help="Search for a specific run") -def list(job_suuid, project_suuid, workspace_suuid, search): +def list(status, job_suuid, project_suuid, workspace_suuid, search): run_sdk = RunSDK() try: runs = run_sdk.list( number_of_results=100, + status=status, job_suuid=job_suuid, project_suuid=project_suuid, workspace_suuid=workspace_suuid, diff --git a/askanna/core/dataclasses/base.py b/askanna/core/dataclasses/base.py index 6cde9f2..ba5d98d 100644 --- a/askanna/core/dataclasses/base.py +++ b/askanna/core/dataclasses/base.py @@ -4,7 +4,7 @@ try: from typing import Literal -except ImportError: +except ImportError: # pragma: no cover from typing_extensions import Literal VISIBILITY = Literal["private", "public", "PRIVATE", "PUBLIC"] diff --git a/askanna/core/dataclasses/run.py b/askanna/core/dataclasses/run.py index b0d2da0..401aa18 100644 --- a/askanna/core/dataclasses/run.py +++ b/askanna/core/dataclasses/run.py @@ -19,6 +19,14 @@ WorkspaceRelation, ) +try: + from typing import Literal +except ImportError: # pragma: no cover + from typing_extensions import Literal + +STATUS = Literal["queued", "running", "finished", "failed"] +TRIGGER = Literal["api", "cli", "python-sdk", "webui", "schedule", "worker"] + @dataclass class Variable: @@ -216,10 +224,10 @@ class Run: name: str description: str - status: str + status: STATUS duration: int - trigger: dict + trigger: TRIGGER created_by: CreatedByWithAvatarRelation package: dict @@ -286,7 +294,7 @@ def from_dict(cls, data: Dict) -> "Run": @dataclass class RunStatus: suuid: str - 
status: str + status: STATUS name: str next_url: str created_by: CreatedByRelation diff --git a/askanna/core/utils/at_exit.py b/askanna/core/utils/at_exit.py index c9bd1be..ddf83f7 100644 --- a/askanna/core/utils/at_exit.py +++ b/askanna/core/utils/at_exit.py @@ -4,7 +4,7 @@ ip = get_ipython() # type: ignore except NameError: from atexit import register as exit_register # noqa: F401 -else: +else: # pragma: no cover from functools import wraps def exit_register(func, *args, **kwargs): diff --git a/askanna/core/utils/object.py b/askanna/core/utils/object.py index 977f541..f2cd271 100644 --- a/askanna/core/utils/object.py +++ b/askanna/core/utils/object.py @@ -19,7 +19,7 @@ # If numpy as installed, we add the numpy types to the supported types try: import numpy as np # noqa: F401 -except ImportError: +except ImportError: # pragma: no cover NUMPY_INSTALLED = False else: NUMPY_INSTALLED = True diff --git a/askanna/core/utils/validate.py b/askanna/core/utils/validate.py index 760661c..eb9c248 100644 --- a/askanna/core/utils/validate.py +++ b/askanna/core/utils/validate.py @@ -1,6 +1,6 @@ try: import zoneinfo -except ImportError: +except ImportError: # pragma: no cover from backports import zoneinfo from typing import Dict, List, Union diff --git a/askanna/gateways/run.py b/askanna/gateways/run.py index 2b29a31..909e1f5 100644 --- a/askanna/gateways/run.py +++ b/askanna/gateways/run.py @@ -3,6 +3,8 @@ from askanna.core.dataclasses.job import Payload from askanna.core.dataclasses.run import ( + STATUS, + TRIGGER, ArtifactInfo, MetricList, MetricObject, @@ -41,10 +43,22 @@ class RunGateway: def list( self, + status: Optional[STATUS] = None, + status__exclude: Optional[STATUS] = None, run_suuid_list: Optional[List[str]] = None, + run_suuid__exclude: Optional[str] = None, job_suuid: Optional[str] = None, + job_suuid__exclude: Optional[str] = None, project_suuid: Optional[str] = None, + project_suuid__exclude: Optional[str] = None, workspace_suuid: Optional[str] = None, + 
workspace_suuid__exclude: Optional[str] = None, + created_by_suuid: Optional[str] = None, + created_by_suuid__exclude: Optional[str] = None, + trigger: Optional[Union[TRIGGER, List[TRIGGER]]] = None, + trigger__exclude: Optional[Union[TRIGGER, List[TRIGGER]]] = None, + package_suuid: Optional[str] = None, + package_suuid__exclude: Optional[str] = None, page_size: Optional[int] = None, cursor: Optional[str] = None, order_by: Optional[str] = None, @@ -53,13 +67,38 @@ def list( """List all runs with filter and order options Args: + status (STATUS, optional): Status of the run to filter on. Defaults to None. + status__exclude (str, optional): Status of the run to exclude. Defaults to None. + STATUS values: queued, running, finished, failed + run_suuid_list (List[str], optional): List of run SUUIDs to filter on. Defaults to None. - job_suuid (str, optional): Job SUUID to filter for runs in a job. Defaults to None. - project_suuid (str, optional): Project SUUID to filter for runs in a project. Defaults to None. - workspace_suuid (str, optional): Workspace SUUID to filter for runs in a workspace. Defaults to None. + run_suuid__exclude (str, optional): SUUID of the run to exclude. Defaults to None. + + job_name (str, optional): Name of the job to filter on. Defaults to None. + job_suuid (str, optional): SUUID of the job to filter on. Defaults to None. + job_suuid__exclude (str, optional): SUUID of the job to exclude. Defaults to None. + + project_suuid (str, optional): SUUID of the project to filter on. Defaults to None. + project_suuid__exclude (str, optional): SUUID of the project to exclude. Defaults to None. + + workspace_suuid (str, optional): SUUID of the workspace to filter on. Defaults to None. + workspace_suuid__exclude (str, optional): SUUID of the workspace to exclude. Defaults to None. + + created_by_suuid (str, optional): SUUID of the workspace member to filter on. Defaults to None. 
+ created_by_suuid__exclude (str, optional): SUUID of the workspace member to exclude. + Defaults to None. + + trigger (TRIGGER, optional): Trigger of the run to filter on. Defaults to None. + trigger__exclude (TRIGGER, optional): Trigger of the run to exclude. Defaults to None. + TRIGGER values: api, cli, python-sdk, webui, schedule, worker + + package_suuid (str, optional): SUUID of the package to filter on. Defaults to None. + package_suuid__exclude (str, optional): SUUID of the package to exclude. Defaults to None. + page_size (int, optional): Number of results per page. Defaults to the default value of the backend. cursor (str, optional): Cursor to start the page from. Defaults to None. order_by (str, optional): Order by a field. Defaults to None. + search (str, optional): Search for a specific run. Defaults to None. Raises: @@ -68,7 +107,12 @@ def list( Returns: RunListResponse: The response from the API with a list of runs and pagination information """ - assert page_size is None or page_size > 0, "page_size must be a positive integer" + if page_size is not None and ( + (isinstance(page_size, int) and page_size <= 0) + or isinstance(page_size, bool) + or not isinstance(page_size, int) + ): + raise ValueError("page_size must be a positive integer") run_suuid = None if run_suuid_list and len(run_suuid_list) > 0: @@ -77,10 +121,22 @@ def list( response = client.get( url=client.askanna_url.run.run_list(), params={ + "status": status, + "status__exclude": status__exclude, "run_suuid": run_suuid, + "run_suuid__exclude": run_suuid__exclude, "job_suuid": job_suuid, + "job_suuid__exclude": job_suuid__exclude, "project_suuid": project_suuid, + "project_suuid__exclude": project_suuid__exclude, "workspace_suuid": workspace_suuid, + "workspace_suuid__exclude": workspace_suuid__exclude, + "created_by_suuid": created_by_suuid, + "created_by_suuid__exclude": created_by_suuid__exclude, + "trigger": trigger, + "trigger__exclude": trigger__exclude, + "package_suuid": 
package_suuid, + "package_suuid__exclude": package_suuid__exclude, "page_size": page_size, "cursor": cursor, "order_by": order_by, @@ -177,10 +233,10 @@ def delete(self, run_suuid: str) -> bool: ) if response.status_code == 404: - raise DeleteError(f"404 - The job SUUID '{run_suuid}' was not found") + raise DeleteError(f"404 - The run SUUID '{run_suuid}' was not found") if response.status_code != 204: raise DeleteError( - f"{response.status_code} - Something went wrong while deleting the job SUUID '{run_suuid}': " + f"{response.status_code} - Something went wrong while deleting the run SUUID '{run_suuid}': " f"{response.json()}" ) @@ -301,7 +357,10 @@ def metric_update(self, run_suuid: str, metrics: MetricList) -> None: if response.status_code == 404: raise PutError(f"404 - The run SUUID '{run_suuid}' was not found") if response.status_code != 200: - raise PutError(f"{response.status_code} - Something went wrong while updating metrics: {response.json()}") + raise PutError( + f"{response.status_code} - Something went wrong while updating metrics of run SUUID '{run_suuid}': " + f"{response.json()}" + ) def variable(self, run_suuid: str) -> VariableList: """Get the variables of a run @@ -345,7 +404,8 @@ def variable_update(self, run_suuid: str, variables: VariableList) -> None: raise PatchError(f"404 - The run SUUID '{run_suuid}' was not found") if response.status_code != 200: raise PatchError( - f"{response.status_code} - Something went wrong while updating variables: {response.json()}" + f"{response.status_code} - Something went wrong while updating variables of run SUUID '{run_suuid}': " + f"{response.json()}" ) def log(self, run_suuid: str, limit: Optional[int] = -1, offset: Optional[int] = None) -> List: diff --git a/askanna/init.py b/askanna/init.py index eeb6a52..2a0074a 100644 --- a/askanna/init.py +++ b/askanna/init.py @@ -5,7 +5,7 @@ from askanna.core.utils.main import update_available from askanna.sdk.job import JobSDK from askanna.sdk.project import 
ProjectSDK -from askanna.sdk.run import GetRunsSDK, ResultSDK, RunSDK +from askanna.sdk.run import ResultSDK, RunSDK from askanna.sdk.track import ( # noqa: F401 track_metric, track_metrics, @@ -15,7 +15,7 @@ from askanna.sdk.variable import VariableSDK from askanna.sdk.workspace import WorkspaceSDK -if USING_ASKANNA_CLI: +if USING_ASKANNA_CLI: # pragma: no cover try: update_available() except Exception as e: @@ -27,6 +27,5 @@ project = ProjectSDK() result = ResultSDK() run = RunSDK() -runs = GetRunsSDK() variable = VariableSDK() workspace = WorkspaceSDK() diff --git a/askanna/sdk/run.py b/askanna/sdk/run.py index 28db5df..0852ff2 100644 --- a/askanna/sdk/run.py +++ b/askanna/sdk/run.py @@ -1,10 +1,11 @@ -import warnings from pathlib import Path from typing import List, Optional, Union from askanna.config import config from askanna.core.dataclasses.job import Payload from askanna.core.dataclasses.run import ( + STATUS, + TRIGGER, ArtifactInfo, MetricList, Run, @@ -19,7 +20,8 @@ __all__ = [ "RunSDK", - "GetRunsSDK", + "ResultSDK", + "ArtifactSDK", ] @@ -39,11 +41,23 @@ def _get_run_suuid(self) -> str: def list( self, + status: Optional[STATUS] = None, + status__exclude: Optional[STATUS] = None, run_suuid_list: Optional[List[str]] = None, + run_suuid__exclude: Optional[str] = None, job_name: Optional[str] = None, job_suuid: Optional[str] = None, + job_suuid__exclude: Optional[str] = None, project_suuid: Optional[str] = None, + project_suuid__exclude: Optional[str] = None, workspace_suuid: Optional[str] = None, + workspace_suuid__exclude: Optional[str] = None, + created_by_suuid: Optional[str] = None, + created_by_suuid__exclude: Optional[str] = None, + trigger: Optional[Union[TRIGGER, List[TRIGGER]]] = None, + trigger__exclude: Optional[Union[TRIGGER, List[TRIGGER]]] = None, + package_suuid: Optional[str] = None, + package_suuid__exclude: Optional[str] = None, include_metrics: bool = False, include_variables: bool = False, number_of_results: int = 100, @@ -53,14 
+67,40 @@ def list( """List all runs with filter and order options Args: + status (STATUS, optional): Status of the run to filter on. Defaults to None. + status__exclude (str, optional): Status of the run to exclude. Defaults to None. + STATUS values: queued, running, finished, failed + run_suuid_list (List[str], optional): List of run SUUIDs to filter on. Defaults to None. + run_suuid__exclude (str, optional): SUUID of the run to exclude. Defaults to None. + job_name (str, optional): Name of the job to filter on. Defaults to None. job_suuid (str, optional): SUUID of the job to filter on. Defaults to None. + job_suuid__exclude (str, optional): SUUID of the job to exclude. Defaults to None. + project_suuid (str, optional): SUUID of the project to filter on. Defaults to None. + project_suuid__exclude (str, optional): SUUID of the project to exclude. Defaults to None. + + workspace_suuid (str, optional): SUUID of the workspace to filter on. Defaults to None. + workspace_suuid__exclude (str, optional): SUUID of the workspace to exclude. Defaults to None. + + created_by_suuid (str, optional): SUUID of the workspace member to filter on. Defaults to None. + created_by_suuid__exclude (str, optional): SUUID of the workspace member to exclude. + Defaults to None. + + trigger (TRIGGER, optional): Trigger of the run to filter on. Defaults to None. + trigger__exclude (TRIGGER, optional): Trigger of the run to exclude. Defaults to None. + TRIGGER values: api, cli, python-sdk, webui, schedule, worker + + package_suuid (str, optional): SUUID of the package to filter on. Defaults to None. + package_suuid__exclude (str, optional): SUUID of the package to exclude. Defaults to None. + include_metrics (bool, optional): Include the metrics in the Run dataclass. Defaults to False. include_variables (bool, optional): Include the variables in the Run dataclass. Defaults to False. + number_of_results (int): Number of runs to return. Defaults to 100. 
order_by (str, optional): Order by field(s). + search (str, optional): Search for a specific run. Raises: @@ -80,10 +120,22 @@ def list( number_of_results=number_of_results, order_by=order_by, other_query_params={ + "status": status, + "status__exclude": status__exclude, "run_suuid_list": run_suuid_list, + "run_suuid__exclude": run_suuid__exclude, "job_suuid": job_suuid, + "job_suuid__exclude": job_suuid__exclude, "project_suuid": project_suuid, + "project_suuid__exclude": project_suuid__exclude, "workspace_suuid": workspace_suuid, + "workspace_suuid__exclude": workspace_suuid__exclude, + "created_by_suuid": created_by_suuid, + "created_by_suuid__exclude": created_by_suuid__exclude, + "trigger": trigger, + "trigger__exclude": trigger__exclude, + "package_suuid": package_suuid, + "package_suuid__exclude": package_suuid__exclude, "search": search, }, ) @@ -367,55 +419,6 @@ def artifact_info(self, run_suuid: Optional[str] = None) -> ArtifactInfo: return self.gateway.artifact_info(run_suuid) -# TODO: remove the GetRunsSDK after release 0.21.0 -class GetRunsSDK: - """Get runs SDK""" - - def get( - self, - run_suuid_list: Optional[List[str]] = None, - job_name: Optional[str] = None, - job_suuid: Optional[str] = None, - project_suuid: Optional[str] = None, - workspace_suuid: Optional[str] = None, - include_metrics: bool = False, - include_variables: bool = False, - number_of_results: int = 100, - order_by: Optional[str] = None, - search: Optional[str] = None, - ) -> List[Run]: - """Get a list of runs - - Args: - run_suuid_list (List[str], optional): List of run SUUIDs to filter on. Defaults to None. - job_name (str, optional): Name of the job to filter on. Defaults to None. - job_suuid (str, optional): SUUID of the job to filter on. Defaults to None. - project_suuid (str, optional): SUUID of the project to filter on. Defaults to None. - workspace_suuid (str, optional): SUUID of the workspace to filter on. Defaults to None. 
- include_metrics (bool, optional): Include the metrics in the Run dataclass. Defaults to False. - include_variables (bool, optional): Include the variables in the Run dataclass. Defaults to False. - number_of_results (int): Number of runs to return. Defaults to 100. - order_by (str, optional): Order by field(s). - search (str, optional): Search for a specific run. - - Returns: - List[Run]: List of runs. List items are of type Run dataclass. - """ - warnings.warn("GetRunsSDK is deprecated, use RunSDK().list instead.", DeprecationWarning) - return RunSDK().list( - run_suuid_list=run_suuid_list, - job_name=job_name, - job_suuid=job_suuid, - project_suuid=project_suuid, - workspace_suuid=workspace_suuid, - include_metrics=include_metrics, - include_variables=include_variables, - number_of_results=number_of_results, - order_by=order_by, - search=search, - ) - - class ResultSDK: """Get result SDK""" diff --git a/tests/conftest.py b/tests/conftest.py index 3684b83..05d8e7f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -16,7 +16,7 @@ @pytest.fixture() def temp_dir(tmp_path_factory): - temp_dir = tmp_path_factory.mktemp("askanna-test-") + temp_dir = str(tmp_path_factory.mktemp("askanna-test-")) yield temp_dir shutil.rmtree(temp_dir, ignore_errors=True) diff --git a/tests/fixtures/responses/api/__init__.py b/tests/fixtures/responses/api/__init__.py index 12aff15..da3660f 100644 --- a/tests/fixtures/responses/api/__init__.py +++ b/tests/fixtures/responses/api/__init__.py @@ -35,6 +35,9 @@ def api_response( run_artifact_list, run_artifact_list_not_found, run_log, + run_metric_list, + run_variable_list, + run_artifact_item, variable_list, variable_list_limit, variable_detail, @@ -75,6 +78,9 @@ def api_response( run_payload, job_run_request, # as run status response run_log, + run_metric_list, + run_variable_list, + run_artifact_item, ) api_responses = project_response( diff --git a/tests/fixtures/responses/api/run.py b/tests/fixtures/responses/api/run.py index 
da78bc8..ea484e8 100644 --- a/tests/fixtures/responses/api/run.py +++ b/tests/fixtures/responses/api/run.py @@ -12,6 +12,9 @@ def run_response( run_payload, run_status, run_log, + run_metric_list, + run_variable_list, + run_artifact_item, ) -> RequestsMock: # Run list api_responses.add( @@ -21,6 +24,18 @@ def run_response( content_type="application/json", json=run_list, ) + api_responses.add( + "GET", + url=f"{askanna_url.run.run_list()}?cursor=999", + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + api_responses.add( + "GET", + url=f"{askanna_url.run.run_list()}?cursor=888", + status=503, + ) api_responses.add( "GET", url=f"{askanna_url.run.run_list()}?page_size=100&order_by=job.name,name&project_suuid=5678-5678-5678-5678", @@ -97,6 +112,18 @@ def run_response( content_type="application/json", json=run_status, ) + api_responses.add( + "GET", + url=askanna_url.run.status("7890-7890-7890-7890"), + status=404, + ) + api_responses.add( + "GET", + url=askanna_url.run.status("0987-0987-0987-0987"), + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) # Run change info new_detail = run_detail.copy() @@ -118,6 +145,14 @@ def run_response( content_type="application/json", json=new_detail, ) + api_responses.add( + "PATCH", + url=askanna_url.run.run_detail("1234-1234-1234-1234"), + match=[matchers.json_params_matcher({"description": "new description"})], + status=200, + content_type="application/json", + json=new_detail, + ) api_responses.add( "PATCH", url=askanna_url.run.run_detail("7890-7890-7890-7890"), @@ -270,4 +305,113 @@ def run_response( json=run_log, ) + # Run metric + api_responses.add( + "GET", + url=askanna_url.run.metric("1234-1234-1234-1234"), + status=200, + content_type="application/json", + json=run_metric_list, + ) + api_responses.add( + "GET", + url=f"{askanna_url.run.metric('wxyz-wxyz-wxyz-wxyz')}", + status=404, + content_type="application/json", + json={"detail": 
"Not found."}, + ) + api_responses.add( + "GET", + url=f"{askanna_url.run.metric('zyxw-zyxw-zyxw-zyxw')}", + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + api_responses.add( + "PUT", + url=askanna_url.run.metric_detail("1234-1234-1234-1234"), + status=200, + content_type="application/json", + ) + api_responses.add( + "PUT", + url=f"{askanna_url.run.metric_detail('wxyz-wxyz-wxyz-wxyz')}", + status=404, + content_type="application/json", + json={"detail": "Not found."}, + ) + api_responses.add( + "PUT", + url=f"{askanna_url.run.metric_detail('zyxw-zyxw-zyxw-zyxw')}", + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + + # Run variable + api_responses.add( + "GET", + url=askanna_url.run.variable("1234-1234-1234-1234"), + status=200, + content_type="application/json", + json=run_variable_list, + ) + api_responses.add( + "GET", + url=f"{askanna_url.run.variable('wxyz-wxyz-wxyz-wxyz')}", + status=404, + content_type="application/json", + json={"detail": "Not found."}, + ) + api_responses.add( + "GET", + url=f"{askanna_url.run.variable('zyxw-zyxw-zyxw-zyxw')}", + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + api_responses.add( + "PATCH", + url=askanna_url.run.variable_detail("1234-1234-1234-1234"), + status=200, + content_type="application/json", + ) + api_responses.add( + "PATCH", + url=f"{askanna_url.run.variable_detail('wxyz-wxyz-wxyz-wxyz')}", + status=404, + content_type="application/json", + json={"detail": "Not found."}, + ) + api_responses.add( + "PATCH", + url=f"{askanna_url.run.variable_detail('zyxw-zyxw-zyxw-zyxw')}", + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + + # Ruin artifact + api_responses.add( + "GET", + url=askanna_url.run.artifact_detail("1234-1234-1234-1234", "abcd-abcd-abcd-abcd"), + status=200, + content_type="application/json", + json=run_artifact_item, 
+ ) + api_responses.add( + "GET", + url=askanna_url.run.artifact_detail("wxyz-wxyz-wxyz-wxyz", "abcd-abcd-abcd-abcd"), + status=404, + content_type="application/json", + json={"detail": "Not found."}, + ) + api_responses.add( + "GET", + url=askanna_url.run.artifact_detail("zyxw-zyxw-zyxw-zyxw", "abcd-abcd-abcd-abcd"), + status=500, + content_type="application/json", + json={"error": "Internal Server Error"}, + ) + return api_responses diff --git a/tests/fixtures/responses/run.py b/tests/fixtures/responses/run.py index 32d4a5b..d254c15 100644 --- a/tests/fixtures/responses/run.py +++ b/tests/fixtures/responses/run.py @@ -2,6 +2,8 @@ import pytest +from tests.utils import faker + @pytest.fixture def run_detail() -> dict: @@ -231,6 +233,57 @@ def run_artifact_list() -> list: ] +@pytest.fixture +def run_artifact_item() -> dict: + return { + "suuid": "abcd-abcd-abcd-abcd", + "size": 198, + "count_dir": 0, + "count_files": 1, + "run": { + "relation": "run", + "suuid": "HyoP-rfIs-mghs-4YYm", + "name": "", + }, + "job": { + "relation": "job", + "suuid": "HyoP-rfIs-mghs-4YYm", + "name": "a job", + }, + "project": { + "relation": "project", + "suuid": "4YYm-HyoP-rfIs-mghs", + "name": "a project", + }, + "workspace": { + "relation": "workspace", + "suuid": "7aYw-rkCA-wdMo-1Gi6", + "name": "a workspace", + }, + "created_at": "2022-08-23T07:22:58.778753Z", + "modified_at": "2022-08-23T07:22:58.778794Z", + "cdn_base_url": "https://cdn-api.askanna.eu/files/blob/e123456c-a123-12c3-a1b2-12345abc89", + "files": [ + { + "path": "output", + "parent": "/", + "name": "output", + "size": 1456271, + "type": "directory", + "last_modified": "2023-01-30T03:27:36", + }, + { + "path": "output/report.html", + "parent": "output", + "name": "report.html", + "size": 1456271, + "type": "file", + "last_modified": "2023-01-30T03:27:36", + }, + ], + } + + @pytest.fixture def run_artifact_list_not_found() -> list: return [ @@ -299,3 +352,75 @@ def run_payload_detail() -> dict: "created_at": 
"2023-01-26T09:47:41.077335Z", "modified_at": "2023-01-26T09:48:15.774587Z", } + + +@pytest.fixture +def run_variable() -> dict: + return { + "variable": { + "name": faker.fake.name(), + "value": faker.fake.random_letters(faker.fake.random_int(min=1, max=10)), + "type": "string", + }, + "label": [ + { + "name": faker.fake.name(), + "value": None, + "type": "tag", + } + ], + "run_suuid": "1234-1234-1234-1234", + "created_at": faker.date_time_str(), + } + + +@pytest.fixture +def run_variable_list(run_variable) -> dict: + return { + "count": 1, + "next": None, + "previous": None, + "results": [run_variable], + } + + +@pytest.fixture +def run_metric() -> dict: + return { + "metric": { + "name": faker.fake.name(), + "value": faker.fake.random_letters(faker.fake.random_int(min=1, max=10)), + "type": "string", + }, + "label": [ + { + "name": faker.fake.name(), + "value": None, + "type": "tag", + } + ], + "run_suuid": "1234-1234-1234-1234", + "created_at": faker.date_time_str(), + } + + +@pytest.fixture +def run_metric_list(run_metric) -> dict: + return { + "count": 1, + "next": None, + "previous": None, + "results": [run_metric], + } + + +@pytest.fixture +def run_artifact_file_dict() -> dict: + return { + "name": "test.zip", + "type": "zip", + "size": 1234, + "path": "./demo", + "parent": "./", + "last_modified": "2023-03-23T14:02:00.000000Z", + } diff --git a/tests/test_cli/test_artifact.py b/tests/test_cli/test_artifact.py index 0089310..3021d36 100644 --- a/tests/test_cli/test_artifact.py +++ b/tests/test_cli/test_artifact.py @@ -112,7 +112,7 @@ def test_command_artifact_get_already_exist(self): ) def test_command_artifact_get_argument_output_file(self, temp_dir): - output_file = str(temp_dir) + "/artifact-1234-1234-12344-1244.zip" + output_file = temp_dir + "/artifact-1234-1234-12344-1244.zip" result = CliRunner().invoke(cli, f"artifact get --id 1234-1234-1234-1234 --output {output_file}") assert not result.exception @@ -123,7 +123,7 @@ def 
test_command_artifact_get_argument_output_file(self, temp_dir): assert os.stat(output_file).st_size == 198 def test_command_artifact_get_argument_output_file_short(self, temp_dir): - output_file = str(temp_dir) + "/artifact-1234-1234-12344-1244.zip" + output_file = temp_dir + "/artifact-1234-1234-12344-1244.zip" result = CliRunner().invoke(cli, f"artifact get -i 1234-1234-1234-1234 -o {output_file}") assert not result.exception diff --git a/tests/test_cli/test_run.py b/tests/test_cli/test_run.py index cd643e0..4ae2f44 100644 --- a/tests/test_cli/test_run.py +++ b/tests/test_cli/test_run.py @@ -20,6 +20,12 @@ def test_command_run_help(self): assert result.exit_code == 0 assert "run [OPTIONS]" in result.output + def test_command_run_double_arg(self): + result = CliRunner().invoke(cli, "run info info --help") + print(result.output) + assert result.exit_code == 0 + assert "run [JOB_NAME] info [OPTIONS]" in result.output + @pytest.mark.usefixtures("api_response") class TestCliRunRequest: @@ -56,6 +62,16 @@ def test_command_run_list(self): assert result.exit_code == 0 assert "1234-1234-1234-1234" in result.output + def test_command_run_list_status(self): + result = CliRunner().invoke(cli, "run list --status queued") + assert result.exit_code == 0 + assert "1234-1234-1234-1234" in result.output + + def test_command_run_list_false_status(self): + result = CliRunner().invoke(cli, "run list --status fals") + assert result.exit_code != 0 + assert "Invalid value for '--status'" in result.output + def test_command_run_list_job(self): result = CliRunner().invoke(cli, "run list -j 1234-1234-1234-1234") assert result.exit_code == 0 @@ -131,6 +147,15 @@ def test_command_run_info_ask_run(self): assert "Selected run" in result.output assert "Created: 2023-01-26 09:47:41 UTC" in result.output + def test_command_run_info_ask_run_with_project_suuid(self): + config.project.clean_config() + config.project.project_suuid = "1234-1234-1234-1234" + result = CliRunner().invoke(cli, "run info") 
+ config.project.clean_config() + assert result.exit_code == 0 + assert "Selected run" in result.output + assert "Created: 2023-01-26 09:47:41 UTC" in result.output + @pytest.mark.usefixtures("api_response") class TestCliRunStatus: @@ -187,6 +212,24 @@ def test_command_run_change_no_input(self): assert result.exit_code == 0 assert "You succesfully changed run 'new name' with SUUID '1234-1234-1234-1234'" in result.output + def test_command_run_change_no_input_with_project_suuid(self): + config.project.clean_config() + config.project.project_suuid = "1234-1234-1234-1234" + result = CliRunner().invoke(cli, "run change", input="y\nnew name\ny\nnew description\ny") + config.project.clean_config() + assert result.exit_code == 0 + assert "You succesfully changed run 'new name' with SUUID '1234-1234-1234-1234'" in result.output + + def test_command_run_change_no_input_skip_name(self): + result = CliRunner().invoke(cli, "run change", input="n\ny\nnew description\ny") + assert result.exit_code == 0 + assert "You succesfully changed run 'new name' with SUUID '1234-1234-1234-1234'" in result.output + + def test_command_run_change_no_input_skip_description(self): + result = CliRunner().invoke(cli, "run change", input="y\nnew name\nn\ny") + assert result.exit_code == 0 + assert "You succesfully changed run 'new name' with SUUID '1234-1234-1234-1234'" in result.output + def test_command_run_change_not_found(self): result = CliRunner().invoke(cli, "run change --id 7890-7890-7890-7890 --description 'new description'") assert result.exit_code == 1 @@ -224,6 +267,14 @@ def test_command_run_remove_no_input(self): assert result.exit_code == 0 assert "You removed run SUUID '1234-1234-1234-1234'" in result.output + def test_command_run_remove_no_input_with_project_suuid(self): + config.project.clean_config() + config.project.project_suuid = "1234-1234-1234-1234" + result = CliRunner().invoke(cli, "run remove", input="y") + config.project.clean_config() + assert result.exit_code == 0 + 
assert "You removed run SUUID '1234-1234-1234-1234'" in result.output + def test_command_run_remove_abort(self): result = CliRunner().invoke(cli, "run remove --id 1234-1234-1234-1234", input="n") assert result.exit_code == 0 diff --git a/tests/test_cli/test_run_utils/test_get_package.py b/tests/test_cli/test_run_utils/test_get_package.py index bdf1d5e..7cbc3ff 100644 --- a/tests/test_cli/test_run_utils/test_get_package.py +++ b/tests/test_cli/test_run_utils/test_get_package.py @@ -31,7 +31,7 @@ def test_command_get_package_no_package_suuid(self): def test_command_get_package_success(self, temp_dir): package_suuid = "1234-1234-1234-1234" - code_dir = str(temp_dir) + "/code-1234" + code_dir = temp_dir + "/code-1234" result = CliRunner().invoke(cli, f"{self.verb} --package {package_suuid} --output {code_dir}") @@ -43,7 +43,7 @@ def test_command_get_package_success(self, temp_dir): def test_command_get_package_environment_variables(self, temp_dir): package_suuid = "1234-1234-1234-1234" - code_dir = str(temp_dir) + "/code-1234" + code_dir = temp_dir + "/code-1234" os.environ["AA_PACKAGE_SUUID"] = package_suuid os.environ["AA_CODE_DIR"] = code_dir diff --git a/tests/test_cli/test_run_utils/test_get_payload.py b/tests/test_cli/test_run_utils/test_get_payload.py index 1db9321..bd9daae 100644 --- a/tests/test_cli/test_run_utils/test_get_payload.py +++ b/tests/test_cli/test_run_utils/test_get_payload.py @@ -38,7 +38,7 @@ def test_command_get_payload_no_payload_suuid(self): def test_command_get_payload_successs(self, temp_dir, run_payload): run_suuid = "1234-1234-1234-1234" payload_suuid = "abcd-abcd-abcd-abcd" - payload_path = str(temp_dir) + "/payload-abcd/payload.json" + payload_path = temp_dir + "/payload-abcd/payload.json" result = CliRunner().invoke( cli, f"{self.verb} --run {run_suuid} --payload {payload_suuid} --output {payload_path}" @@ -53,7 +53,7 @@ def test_command_get_payload_successs(self, temp_dir, run_payload): def 
test_command_get_payload_successs_environment_variables(self, temp_dir, run_payload): run_suuid = "1234-1234-1234-1234" payload_suuid = "abcd-abcd-abcd-abcd" - payload_path = str(temp_dir) + "/payload-abcd/payload.json" + payload_path = temp_dir + "/payload-abcd/payload.json" os.environ["AA_RUN_SUUID"] = run_suuid os.environ["AA_PAYLOAD_SUUID"] = payload_suuid diff --git a/tests/test_cli/test_run_utils/test_get_run_manifest.py b/tests/test_cli/test_run_utils/test_get_run_manifest.py index 5c302e1..a48bd08 100644 --- a/tests/test_cli/test_run_utils/test_get_run_manifest.py +++ b/tests/test_cli/test_run_utils/test_get_run_manifest.py @@ -31,7 +31,7 @@ def test_command_get_run_manifest_no_run_suuid(self): def test_command_get_run_manifest_successs(self, temp_dir, run_manifest): run_suuid = "1234-1234-1234-1234" - run_manifest_path = str(temp_dir) + "/run-manifest-1234/entrypoint.sh" + run_manifest_path = temp_dir + "/run-manifest-1234/entrypoint.sh" result = CliRunner().invoke(cli, f"{self.verb} --run {run_suuid} --output {run_manifest_path}") @@ -42,7 +42,7 @@ def test_command_get_run_manifest_successs(self, temp_dir, run_manifest): def test_command_get_run_manifest_successs_environment_variables(self, temp_dir, run_manifest): run_suuid = "1234-1234-1234-1234" - run_manifest_path = str(temp_dir) + "/run-manifest-1234/entrypoint.sh" + run_manifest_path = temp_dir + "/run-manifest-1234/entrypoint.sh" os.environ["AA_RUN_SUUID"] = run_suuid os.environ["AA_RUN_MANIFEST_PATH"] = run_manifest_path diff --git a/tests/test_core/test_dataclasses/test_job.py b/tests/test_core/test_dataclasses/test_job.py index 10aebd0..1f1e3b4 100644 --- a/tests/test_core/test_dataclasses/test_job.py +++ b/tests/test_core/test_dataclasses/test_job.py @@ -1,6 +1,5 @@ -from datetime import datetime, timezone - from askanna.core.dataclasses.job import Job, Payload +from tests.utils import str_to_datetime def test_job(job_detail): @@ -15,12 +14,8 @@ def test_job(job_detail): assert 
job.notifications == job_detail["notifications"] assert job.project.suuid == job_detail["project"]["suuid"] assert job.workspace.suuid == job_detail["workspace"]["suuid"] - assert job.created_at == datetime.strptime(job_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert job.modified_at == datetime.strptime(job_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) + assert job.created_at == str_to_datetime(job_detail["created_at"]) + assert job.modified_at == str_to_datetime(job_detail["modified_at"]) def test_payload(run_payload_detail): @@ -29,11 +24,7 @@ def test_payload(run_payload_detail): assert payload.suuid == run_payload_detail["suuid"] assert payload.size == run_payload_detail["size"] assert payload.lines == run_payload_detail["lines"] - assert payload.created_at == datetime.strptime(run_payload_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert payload.modified_at == datetime.strptime( - run_payload_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) + assert payload.created_at == str_to_datetime(run_payload_detail["created_at"]) + assert payload.modified_at == str_to_datetime(run_payload_detail["modified_at"]) assert str(payload) == "Payload: 1234-1234-1234-1234 (184 bytes & 30 lines)" diff --git a/tests/test_core/test_dataclasses/test_package.py b/tests/test_core/test_dataclasses/test_package.py index 7022af8..043f7da 100644 --- a/tests/test_core/test_dataclasses/test_package.py +++ b/tests/test_core/test_dataclasses/test_package.py @@ -1,6 +1,5 @@ -from datetime import datetime, timezone - from askanna.core.dataclasses.package import Package +from tests.utils import str_to_datetime def test_package(package_detail_for_list): @@ -9,9 +8,5 @@ def test_package(package_detail_for_list): assert package.suuid == package_detail_for_list["suuid"] assert package.name == package_detail_for_list["name"] assert package.description 
== package_detail_for_list["description"] - assert package.created_at == datetime.strptime( - package_detail_for_list["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) - assert package.modified_at == datetime.strptime( - package_detail_for_list["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) + assert package.created_at == str_to_datetime(package_detail_for_list["created_at"]) + assert package.modified_at == str_to_datetime(package_detail_for_list["modified_at"]) diff --git a/tests/test_core/test_dataclasses/test_project.py b/tests/test_core/test_dataclasses/test_project.py index 83ba6a9..3ec1477 100644 --- a/tests/test_core/test_dataclasses/test_project.py +++ b/tests/test_core/test_dataclasses/test_project.py @@ -1,6 +1,5 @@ -from datetime import datetime, timezone - from askanna.core.dataclasses.project import Project +from tests.utils import str_to_datetime def test_project(project_detail): @@ -9,11 +8,7 @@ def test_project(project_detail): assert project.suuid == project_detail["suuid"] assert project.name == project_detail["name"] assert project.description == project_detail["description"] - assert project.created_at == datetime.strptime(project_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert project.modified_at == datetime.strptime(project_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) + assert project.created_at == str_to_datetime(project_detail["created_at"]) + assert project.modified_at == str_to_datetime(project_detail["modified_at"]) assert str(project) == "Project: a project (1234-1234-1234-1234)" diff --git a/tests/test_core/test_dataclasses/test_run.py b/tests/test_core/test_dataclasses/test_run.py index 80487e8..b998f09 100644 --- a/tests/test_core/test_dataclasses/test_run.py +++ b/tests/test_core/test_dataclasses/test_run.py @@ -1,6 +1,22 @@ -from datetime import datetime, timezone - -from askanna.core.dataclasses.run import 
Run, RunStatus +from datetime import datetime + +import pytest + +from askanna.core.dataclasses.base import Label +from askanna.core.dataclasses.run import ( + ArtifactFile, + ArtifactFileList, + ArtifactInfo, + MetricList, + MetricObject, + Run, + RunStatus, + Variable, + VariableList, + VariableObject, +) +from askanna.core.exceptions import MultipleObjectsReturnedError +from tests.utils import str_to_datetime def test_run(run_detail): @@ -10,12 +26,8 @@ def test_run(run_detail): assert run.name == run_detail["name"] assert run.description == run_detail["description"] assert run.created_by.name == run_detail["created_by"]["name"] - assert run.created_at == datetime.strptime(run_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert run.modified_at == datetime.strptime(run_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) + assert run.created_at == str_to_datetime(run_detail["created_at"]) + assert run.modified_at == str_to_datetime(run_detail["modified_at"]) assert run.job.suuid == run_detail["job"]["suuid"] assert ( @@ -28,6 +40,18 @@ def test_run(run_detail): assert repr(run) == "Run(suuid='1234-1234-1234-1234', status='finished')" +def test_run_without_started_and_finished_date(run_detail): + run_without_dates = run_detail.copy() + run_without_dates["started_at"] = None + run_without_dates["finished_at"] = None + + run = Run.from_dict(run_without_dates) + + assert run.suuid == run_detail["suuid"] + assert run.started_at is None + assert run.finished_at is None + + def test_run_status(job_run_request): run_status = RunStatus.from_dict(job_run_request.copy()) @@ -39,18 +63,38 @@ def test_run_status(job_run_request): assert run_status.job.suuid == job_run_request["job"]["suuid"] assert run_status.project.suuid == job_run_request["project"]["suuid"] assert run_status.workspace.suuid == job_run_request["workspace"]["suuid"] - assert run_status.created_at == 
datetime.strptime(job_run_request["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert run_status.modified_at == datetime.strptime( - job_run_request["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) + assert run_status.created_at == str_to_datetime(job_run_request["created_at"]) + assert run_status.modified_at == str_to_datetime(job_run_request["modified_at"]) assert str(run_status) == "a run (abcd-abcd-abcd-abcd): queued" assert repr(run_status) == "RunStatus(suuid='abcd-abcd-abcd-abcd', status='queued')" +def test_run_status_without_started_and_finished_date(job_run_request): + run_status_without_dates = job_run_request.copy() + run_status_without_dates["started_at"] = None + run_status_without_dates["finished_at"] = None + + run_status = RunStatus.from_dict(run_status_without_dates) + + assert run_status.suuid == job_run_request["suuid"] + assert run_status.started_at is None + assert run_status.finished_at is None + + +def test_run_status_with_started_and_finished_date(job_run_request): + run_status_without_dates = job_run_request.copy() + run_status_without_dates["started_at"] = "2023-03-23T14:02:00.000000Z" + run_status_without_dates["finished_at"] = "2023-03-23T14:03:00.000000Z" + + run_status = RunStatus.from_dict(run_status_without_dates) + + assert run_status.suuid == job_run_request["suuid"] + assert run_status.started_at == str_to_datetime("2023-03-23T14:02:00.000000Z") + assert run_status.finished_at == str_to_datetime("2023-03-23T14:03:00.000000Z") + + def test_run_status_no_name(job_run_request): job_run_request["name"] = None run_status = RunStatus.from_dict(job_run_request.copy()) @@ -63,13 +107,109 @@ def test_run_status_no_name(job_run_request): assert run_status.job.suuid == job_run_request["job"]["suuid"] assert run_status.project.suuid == job_run_request["project"]["suuid"] assert run_status.workspace.suuid == job_run_request["workspace"]["suuid"] - assert run_status.created_at == 
datetime.strptime(job_run_request["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert run_status.modified_at == datetime.strptime( - job_run_request["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) + assert run_status.created_at == str_to_datetime(job_run_request["created_at"]) + assert run_status.modified_at == str_to_datetime(job_run_request["modified_at"]) assert str(run_status) == "Run abcd-abcd-abcd-abcd: queued" assert repr(run_status) == "RunStatus(suuid='abcd-abcd-abcd-abcd', status='queued')" + + +def test_variable_object_with_created_at(): + variable = Variable(name="test", value="test", type="string") + label = Label(name="test", value=None, type="tag") + created_at = datetime.now() + + variable_object = VariableObject(variable=variable, label=[label], created_at=created_at) + + assert variable_object.created_at == created_at + + +def test_variable_object_fom_dict(run_variable): + variable_object = VariableObject.from_dict(run_variable) + + assert variable_object.variable.name == run_variable["variable"]["name"] + assert variable_object.run_suuid == run_variable["run_suuid"] + + +def test_variable_list(run_variable): + variable_object = VariableObject.from_dict(run_variable) + variable_list = VariableList(variables=[variable_object]) + + assert len(variable_list) == 1 + + for variable in variable_list: + assert variable.variable.name == run_variable["variable"]["name"] + assert variable.run_suuid == run_variable["run_suuid"] + + assert variable_list[0].variable.name == run_variable["variable"]["name"] + + assert variable_list.get(run_variable["variable"]["name"]).variable.name == run_variable["variable"]["name"] + + assert variable_list.get("not_exist") is None + + variable_list = VariableList(variables=[variable_object, variable_object]) + + with pytest.raises(MultipleObjectsReturnedError): + variable_list.get(run_variable["variable"]["name"]) + + assert 
len(variable_list.filter(run_variable["variable"]["name"])) == 2 + + +def test_metric_object_fom_dict(run_metric): + metric_object = MetricObject.from_dict(run_metric) + + assert metric_object.metric.name == run_metric["metric"]["name"] + assert metric_object.run_suuid == run_metric["run_suuid"] + + +def test_metric_list(run_metric): + metric_object = MetricObject.from_dict(run_metric) + metric_list = MetricList(metrics=[metric_object]) + + assert len(metric_list) == 1 + + for metric in metric_list: + assert metric.metric.name == run_metric["metric"]["name"] + assert metric.run_suuid == run_metric["run_suuid"] + + assert metric_list[0].metric.name == run_metric["metric"]["name"] + + assert metric_list.get(run_metric["metric"]["name"]).metric.name == run_metric["metric"]["name"] + assert metric_list.get("not_exist") is None + + metric_list = MetricList(metrics=[metric_object, metric_object]) + + with pytest.raises(MultipleObjectsReturnedError): + metric_list.get(run_metric["metric"]["name"]) + + assert len(metric_list.filter(run_metric["metric"]["name"])) == 2 + + +def test_artifact_file(run_artifact_file_dict): + artifact_file = ArtifactFile.from_dict(run_artifact_file_dict.copy()) + + assert artifact_file.name == "test.zip" + assert artifact_file.size == 1234 + assert artifact_file.path == "./demo" + assert artifact_file.type == "zip" + + +def test_artifact_file_list(run_artifact_file_dict): + artifact_file = ArtifactFile.from_dict(run_artifact_file_dict.copy()) + artifact_file_list = ArtifactFileList(files=[artifact_file]) + + assert len(artifact_file_list) == 1 + assert artifact_file_list[0].name == "test.zip" + + for file in artifact_file_list: + assert file.name == "test.zip" + assert file.size == 1234 + assert file.path == "./demo" + assert file.type == "zip" + + +def test_artifact_info(run_artifact_item): + artifact_info = ArtifactInfo.from_dict(run_artifact_item.copy()) + + assert artifact_info.suuid == "abcd-abcd-abcd-abcd" diff --git 
a/tests/test_core/test_dataclasses/test_variable.py b/tests/test_core/test_dataclasses/test_variable.py index e423f71..40759f3 100644 --- a/tests/test_core/test_dataclasses/test_variable.py +++ b/tests/test_core/test_dataclasses/test_variable.py @@ -1,6 +1,5 @@ -from datetime import datetime, timezone - from askanna.core.dataclasses.variable import Variable +from tests.utils import str_to_datetime def test_variable(variable_detail): @@ -12,9 +11,5 @@ def test_variable(variable_detail): assert variable.is_masked == variable_detail["is_masked"] assert variable.project.suuid == variable_detail["project"]["suuid"] assert variable.workspace.suuid == variable_detail["workspace"]["suuid"] - assert variable.created_at == datetime.strptime(variable_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert variable.modified_at == datetime.strptime(variable_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) + assert variable.created_at == str_to_datetime(variable_detail["created_at"]) + assert variable.modified_at == str_to_datetime(variable_detail["modified_at"]) diff --git a/tests/test_core/test_dataclasses/test_workspace.py b/tests/test_core/test_dataclasses/test_workspace.py index 8100981..e7456de 100644 --- a/tests/test_core/test_dataclasses/test_workspace.py +++ b/tests/test_core/test_dataclasses/test_workspace.py @@ -1,6 +1,5 @@ -from datetime import datetime, timezone - from askanna.core.dataclasses.workspace import Workspace +from tests.utils import str_to_datetime def test_workspace(workspace_detail): @@ -13,11 +12,7 @@ def test_workspace(workspace_detail): assert workspace.created_by.suuid == workspace_detail["created_by"]["suuid"] assert workspace.permission == workspace_detail["permission"] assert workspace.is_member == workspace_detail["is_member"] - assert workspace.created_at == datetime.strptime(workspace_detail["created_at"], "%Y-%m-%dT%H:%M:%S.%fZ").replace( - tzinfo=timezone.utc - ) - assert 
workspace.modified_at == datetime.strptime( - workspace_detail["modified_at"], "%Y-%m-%dT%H:%M:%S.%fZ" - ).replace(tzinfo=timezone.utc) + assert workspace.created_at == str_to_datetime(workspace_detail["created_at"]) + assert workspace.modified_at == str_to_datetime(workspace_detail["modified_at"]) assert str(workspace) == "Workspace: test-workspace (1234-1234-1234-1234)" diff --git a/tests/test_core/test_utils/test_validate_yml.py b/tests/test_core/test_utils/test_validate_yml.py index 5a936f4..0fe92b0 100644 --- a/tests/test_core/test_utils/test_validate_yml.py +++ b/tests/test_core/test_utils/test_validate_yml.py @@ -4,7 +4,7 @@ try: from yaml import CLoader as Loader -except ImportError: +except ImportError: # pragma: no cover from yaml import Loader from askanna.core.utils.validate import ( diff --git a/tests/test_gateways/test_job.py b/tests/test_gateways/test_job.py index a0f9e1f..8158268 100644 --- a/tests/test_gateways/test_job.py +++ b/tests/test_gateways/test_job.py @@ -94,7 +94,7 @@ def test_job_delete_not_found(self): assert "404 - The job SUUID '7890-7890-7890-7890' was not found" in e.value.args[0] - def test_project_delete_error(self): + def test_job_delete_error(self): job_gateway = JobGateway() with pytest.raises(DeleteError) as e: job_gateway.delete("0987-0987-0987-0987") diff --git a/tests/test_gateways/test_package.py b/tests/test_gateways/test_package.py index e1e8a55..6672862 100644 --- a/tests/test_gateways/test_package.py +++ b/tests/test_gateways/test_package.py @@ -42,7 +42,7 @@ def test_download_package(self, package_zip_file): def test_download_package_to_file(self, package_zip_file, temp_dir): package_suuid = "1234-1234-1234-1234" - package_path = str(temp_dir) + "/artifact-1233/test.zip" + package_path = temp_dir + "/artifact-1233/test.zip" package = PackageGateway().download(package_suuid, package_path) assert package is None diff --git a/tests/test_gateways/test_run.py b/tests/test_gateways/test_run.py new file mode 100644 index 
0000000..664f968 --- /dev/null +++ b/tests/test_gateways/test_run.py @@ -0,0 +1,339 @@ +from pathlib import Path + +import pytest + +from askanna.core.dataclasses.run import ( + ArtifactInfo, + MetricList, + MetricObject, + VariableList, + VariableObject, +) +from askanna.core.exceptions import DeleteError, GetError, PatchError, PutError +from askanna.gateways.run import RunGateway +from tests.utils import str_to_datetime + + +@pytest.mark.usefixtures("api_response") +class TestGatewayRun: + def test_run_list(self): + run_gateway = RunGateway() + result = run_gateway.list() + + assert len(result.runs) == 1 + assert result.runs[0].suuid == "1234-1234-1234-1234" + + def test_run_list_cursor(self): + run_gateway = RunGateway() + result = run_gateway.list(page_size=1, cursor="123") + + assert len(result.runs) == 1 + assert result.runs[0].suuid == "1234-1234-1234-1234" + + def test_run_list_error(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.list(cursor="999") + + assert ( + "500 - Something went wrong while retrieving the run list:\n {'error': 'Internal Server Error'}" + in e.value.args[0] + ) + + def test_run_list_error_2(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.list(cursor="888") + + assert "503 - Something went wrong while retrieving the run list" in e.value.args[0] + + def test_run_list_page_size_validation(self): + run_gateway = RunGateway() + + with pytest.raises(ValueError) as exc: + run_gateway.list(page_size=0) + assert "page_size must be a positive integer" in exc.value.args[0] + + with pytest.raises(ValueError) as exc: + run_gateway.list(page_size=-1) + assert "page_size must be a positive integer" in exc.value.args[0] + + with pytest.raises(ValueError) as exc: + run_gateway.list(page_size=True) + assert "page_size must be a positive integer" in exc.value.args[0] + + with pytest.raises(ValueError) as exc: + run_gateway.list(page_size="test") # type: ignore + assert 
"page_size must be a positive integer" in exc.value.args[0] + + run = run_gateway.list(page_size=1) + assert len(run.runs) == 1 + + def test_run_list_run_suuid(self): + run_gateway = RunGateway() + result = run_gateway.list(run_suuid_list=["1234-1234-1234-1234", "5678-5678-5678-5678"]) + + assert len(result.runs) == 1 + assert result.runs[0].suuid == "1234-1234-1234-1234" + + def test_run_list_project(self): + run_gateway = RunGateway() + result = run_gateway.list(project_suuid="1234-1234-1234-1234") + + assert len(result.runs) == 1 + assert result.runs[0].suuid == "1234-1234-1234-1234" + + def test_run_detail(self): + run_gateway = RunGateway() + result = run_gateway.detail("1234-1234-1234-1234") + + assert result.suuid == "1234-1234-1234-1234" + + def test_run_detail_not_found(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.detail("7890-7890-7890-7890") + + assert "404 - The run SUUID '7890-7890-7890-7890' was not found" in e.value.args[0] + + def test_run_detail_error(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.detail("0987-0987-0987-0987") + + assert ( + "500 - Something went wrong while retrieving run SUUID '0987-0987-0987-0987': " + + "{'error': 'Internal Server Error'}" + in e.value.args[0] + ) + + def test_run_status(self): + run_gateway = RunGateway() + result = run_gateway.status("1234-1234-1234-1234") + + assert result.suuid == "abcd-abcd-abcd-abcd" + + def test_run_status_404(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.status("7890-7890-7890-7890") + + assert "404 - The run SUUID '7890-7890-7890-7890' was not found" in e.value.args[0] + + def test_run_status_500(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.status("0987-0987-0987-0987") + + assert ( + "500 - Something went wrong while retrieving the status of run SUUID '0987-0987-0987-0987': " + + "{'error': 'Internal Server Error'}" + in 
e.value.args[0] + ) + + def test_run_change_no_changes(self): + run_gateway = RunGateway() + with pytest.raises(ValueError) as e: + run_gateway.change("1234-1234-1234-1234") + + assert "At least one of the parameters 'name' or 'description' must be set" in e.value.args[0] + + def test_run_change(self): + run_gateway = RunGateway() + result = run_gateway.change("1234-1234-1234-1234", name="new name") + + assert result.name == "new name" + + def test_run_change_error(self): + run_gateway = RunGateway() + with pytest.raises(PatchError) as e: + run_gateway.change("0987-0987-0987-0987", name="new name") + + assert "500 - Something went wrong while updating the run SUUID '0987-0987-0987-0987'" in e.value.args[0] + + def test_run_delete(self): + run_gateway = RunGateway() + result = run_gateway.delete("1234-1234-1234-1234") + + assert result is True + + def test_run_delete_not_found(self): + run_gateway = RunGateway() + with pytest.raises(DeleteError) as e: + run_gateway.delete("7890-7890-7890-7890") + + assert "404 - The run SUUID '7890-7890-7890-7890' was not found" in e.value.args[0] + + def test_run_delete_error(self): + run_gateway = RunGateway() + with pytest.raises(DeleteError) as e: + run_gateway.delete("0987-0987-0987-0987") + + assert "500 - Something went wrong while deleting the run SUUID '0987-0987-0987-0987'" in e.value.args[0] + + def test_run_manifest(self, run_manifest): + run_gateway = RunGateway() + result = run_gateway.manifest("1234-1234-1234-1234") + + assert result is not None + assert result.decode() == run_manifest + + def test_run_manifest_404(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.manifest("wxyz-wxyz-wxyz-wxyz") + + assert "404 - The manifest for run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in e.value.args[0] + + def test_run_manifest_not_200_not_404(self): + run_gateway = RunGateway() + with pytest.raises(GetError) as e: + run_gateway.manifest("zyxw-zyxw-zyxw-zyxw") + + assert ( + "500 - 
Something went wrong while retrieving the manifest for run SUUID 'zyxw-zyxw-zyxw-zyxw'" + in e.value.args[0] + ) + + def test_run_manifest_output_path(self, temp_dir, run_manifest): + run_gateway = RunGateway() + run_manifest_path = temp_dir + "/run-manifest-1234.sh" + result = run_gateway.manifest("1234-1234-1234-1234", run_manifest_path) + + assert result is None + assert Path(run_manifest_path).exists() + assert Path(run_manifest_path).is_file() + assert Path(run_manifest_path).read_text() == run_manifest + + def test_run_manifest_output_path_404(self, temp_dir): + run_gateway = RunGateway() + run_manifest_path = temp_dir + "/run-manifest-1234.sh" + with pytest.raises(GetError) as e: + run_gateway.manifest("wxyz-wxyz-wxyz-wxyz", run_manifest_path) + + assert "404 - The manifest for run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in e.value.args[0] + + def test_run_manifest_output_path_not_200_not_404(self, temp_dir): + run_gateway = RunGateway() + run_manifest_path = temp_dir + "/run-manifest-1234.sh" + with pytest.raises(GetError) as e: + run_gateway.manifest("zyxw-zyxw-zyxw-zyxw", run_manifest_path) + + assert ( + "500 - Something went wrong while retrieving the manifest for run SUUID 'zyxw-zyxw-zyxw-zyxw'" + in e.value.args[0] + ) + + def test_run_metric(self, run_metric): + run_gateway = RunGateway() + result = run_gateway.metric("1234-1234-1234-1234") + + assert result is not None + assert isinstance(result, MetricList) + assert result[0].metric.name == run_metric["metric"]["name"] + assert result[0].created_at == str_to_datetime(run_metric["created_at"]) + + def test_run_metric_404(self): + run_gateway = RunGateway() + + with pytest.raises(GetError) as e: + run_gateway.metric("wxyz-wxyz-wxyz-wxyz") + + assert "404 - The run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in e.value.args[0] + + def test_run_metric_500(self): + run_gateway = RunGateway() + + with pytest.raises(GetError) as e: + run_gateway.metric("zyxw-zyxw-zyxw-zyxw") + + assert ( + "500 - 
Something went wrong while retrieving the metrics of run SUUID 'zyxw-zyxw-zyxw-zyxw'" + in e.value.args[0] + ) + + def test_run_metric_update(self, run_metric): + run_gateway = RunGateway() + metric_object = MetricObject.from_dict(run_metric) + metric_list = MetricList(metrics=[metric_object]) + + assert run_gateway.metric_update("1234-1234-1234-1234", metric_list) is None + + def test_run_metric_update_404(self, run_metric): + run_gateway = RunGateway() + metric_object = MetricObject.from_dict(run_metric) + metric_list = MetricList(metrics=[metric_object]) + + with pytest.raises(PutError) as exc: + run_gateway.metric_update("wxyz-wxyz-wxyz-wxyz", metric_list) + + assert "404 - The run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in exc.value.args[0] + + def test_run_metric_update_500(self, run_metric): + run_gateway = RunGateway() + metric_object = MetricObject.from_dict(run_metric) + metric_list = MetricList(metrics=[metric_object]) + + with pytest.raises(PutError) as exc: + run_gateway.metric_update("zyxw-zyxw-zyxw-zyxw", metric_list) + + assert ( + "500 - Something went wrong while updating metrics of run SUUID 'zyxw-zyxw-zyxw-zyxw'" in exc.value.args[0] + ) + + def test_run_variable_update(self, run_variable): + run_gateway = RunGateway() + variable_object = VariableObject.from_dict(run_variable) + variable_list = VariableList(variables=[variable_object]) + + assert run_gateway.variable_update("1234-1234-1234-1234", variable_list) is None + + def test_run_variable_update_404(self, run_variable): + run_gateway = RunGateway() + variable_object = VariableObject.from_dict(run_variable) + variable_list = VariableList(variables=[variable_object]) + + with pytest.raises(PatchError) as e: + run_gateway.variable_update("wxyz-wxyz-wxyz-wxyz", variable_list) + + assert "404 - The run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in e.value.args[0] + + def test_run_variable_update_500(self, run_variable): + run_gateway = RunGateway() + variable_object = 
VariableObject.from_dict(run_variable) + variable_list = VariableList(variables=[variable_object]) + + with pytest.raises(PatchError) as e: + run_gateway.variable_update("zyxw-zyxw-zyxw-zyxw", variable_list) + + assert ( + "500 - Something went wrong while updating variables of run SUUID 'zyxw-zyxw-zyxw-zyxw'" in e.value.args[0] + ) + + def test_run_artifiact_info(self, run_artifact_item): + run_gateway = RunGateway() + result = run_gateway.artifact_info("1234-1234-1234-1234", "abcd-abcd-abcd-abcd") + + assert result is not None + assert isinstance(result, ArtifactInfo) + assert result.files is not None + assert result.files[0].name == run_artifact_item["files"][0]["name"] + + def test_run_artifiact_info_404(self): + run_gateway = RunGateway() + + with pytest.raises(GetError) as e: + run_gateway.artifact_info("wxyz-wxyz-wxyz-wxyz", "abcd-abcd-abcd-abcd") + + assert "404 - The artifact for run SUUID 'wxyz-wxyz-wxyz-wxyz' was not found" in e.value.args[0] + + def test_run_artifiact_info_500(self): + run_gateway = RunGateway() + + with pytest.raises(GetError) as e: + run_gateway.artifact_info("zyxw-zyxw-zyxw-zyxw", "abcd-abcd-abcd-abcd") + + assert ( + "500 - Something went wrong while retrieving the artifact for run SUUID 'zyxw-zyxw-zyxw-zyxw'" + in e.value.args[0] + ) diff --git a/tests/test_sdk/test_run.py b/tests/test_sdk/test_run.py new file mode 100644 index 0000000..7dc6c76 --- /dev/null +++ b/tests/test_sdk/test_run.py @@ -0,0 +1,80 @@ +import pytest + +from askanna.core.dataclasses.run import ArtifactInfo, MetricList, VariableList +from askanna.sdk.run import RunSDK +from tests.utils import str_to_datetime + + +@pytest.mark.usefixtures("api_response") +class TestSDKRun: + def test_run_list(self): + run_sdk = RunSDK() + result = run_sdk.list() + + assert len(result) == 1 + assert result[0].suuid == "1234-1234-1234-1234" + + def test_run_list_run_suuid(self): + run_sdk = RunSDK() + result = run_sdk.list(run_suuid_list=["1234-1234-1234-1234", 
"5678-5678-5678-5678"]) + + assert len(result) == 1 + assert result[0].suuid == "1234-1234-1234-1234" + + def test_run_list_project(self): + run_sdk = RunSDK() + result = run_sdk.list(project_suuid="1234-1234-1234-1234") + + assert len(result) == 1 + assert result[0].suuid == "1234-1234-1234-1234" + + def test_run_get(self): + run_sdk = RunSDK() + result = run_sdk.get("1234-1234-1234-1234") + + assert result.suuid == "1234-1234-1234-1234" + + def test_run_status(self): + run_sdk = RunSDK() + result = run_sdk.status("1234-1234-1234-1234") + + assert result.suuid == "abcd-abcd-abcd-abcd" + + def test_run_change(self): + run_sdk = RunSDK() + result = run_sdk.change("1234-1234-1234-1234", name="new name") + + assert result.name == "new name" + + def test_run_delete(self): + run_sdk = RunSDK() + result = run_sdk.delete("1234-1234-1234-1234") + + assert result is True + + def test_run_metric(self, run_metric): + run_sdk = RunSDK() + result = run_sdk.get_metric("1234-1234-1234-1234") + + assert result is not None + assert isinstance(result, MetricList) + assert result[0].metric.name == run_metric["metric"]["name"] + assert result[0].created_at == str_to_datetime(run_metric["created_at"]) + + def test_run_variable(self, run_variable): + run_sdk = RunSDK() + result = run_sdk.get_variable("1234-1234-1234-1234") + + assert result is not None + assert isinstance(result, VariableList) + assert result[0].variable.name == run_variable["variable"]["name"] + assert result[0].created_at == str_to_datetime(run_variable["created_at"]) + + def test_run_artifiact_info(self, run_artifact_item): + run_sdk = RunSDK() + result = run_sdk.artifact_info("1234-1234-1234-1234") + + assert result is not None + assert isinstance(result, ArtifactInfo) + assert result.files is not None + assert result.files[0].name == run_artifact_item["files"][0]["name"] diff --git a/tests/test_sdk/test_sdk_import.py b/tests/test_sdk/test_sdk_import.py index bc217fb..a8f9976 100644 --- 
a/tests/test_sdk/test_sdk_import.py +++ b/tests/test_sdk/test_sdk_import.py @@ -8,11 +8,20 @@ def test_project(self): def test_run(self): from askanna import run # noqa - def test_runs(self): - from askanna import runs # noqa - def test_variable(self): from askanna import variable # noqa def test_workspace(self): from askanna import workspace # noqa + + def test_track_metric(self): + from askanna import track_metric # noqa + + def test_track_metrics(self): + from askanna import track_metrics # noqa + + def test_track_variable(self): + from askanna import track_variable # noqa + + def test_track_variables(self): + from askanna import track_variables # noqa diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 0000000..e0f5a27 --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,18 @@ +from datetime import datetime, timezone + +from faker import Faker + + +class AskAnnaFaker: + def __init__(self): + self.fake = Faker() + + def date_time_str(self) -> str: + return f"{self.fake.date_time(tzinfo=timezone.utc):%Y-%m-%dT%H:%M:%S.%fZ}" + + +faker = AskAnnaFaker() + + +def str_to_datetime(datetime_str: str) -> datetime: + return datetime.strptime(datetime_str, "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=timezone.utc) From d45cb42814cf432f6d9bd55df4999a6aa3a011cf Mon Sep 17 00:00:00 2001 From: Robbert Date: Wed, 5 Apr 2023 11:07:22 +0200 Subject: [PATCH 4/4] Prep release v0.23.0 and add release jobs to GitLab CI/CD --- .gitlab-ci.yml | 164 +++++++++++++++++++++++++++++++------------- CHANGELOG.md | 14 ++++ askanna/__init__.py | 2 +- 3 files changed, 131 insertions(+), 49 deletions(-) diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 68e3223..479f651 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -2,6 +2,7 @@ stages: - test - build - verify build + - release - publish include: @@ -25,6 +26,35 @@ include: - pip install build twine - python -m build +.publish_to_git_template: &publish_to_git_template + stage: publish + needs: + - job: twine check + - job: pytest + 
tags: + - kubernetes + allow_failure: true + rules: + - if: $CI_COMMIT_BRANCH == "main" + variables: + TARGET: HEAD:main + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ + variables: + TARGET: refs/tags/$CI_COMMIT_TAG + before_script: + - apt-get update -y && apt-get install openssh-client git curl -y + - eval $(ssh-agent -s) + - echo "$SSH_PRIVATE_KEY_GIT" | tr -d '\r' | ssh-add - > /dev/null + - mkdir -p ~/.ssh + - chmod 700 ~/.ssh + - ssh-keyscan $GIT_HOST >> ~/.ssh/known_hosts + - chmod 644 ~/.ssh/known_hosts + - git config --global user.email "hello@askanna.io" + - git config --global user.name "AskAnna Robot" + script: + - git remote add public $GIT_REPO + - git push public $TARGET + code_quality: rules: - if: $CODE_QUALITY_DISABLED @@ -74,7 +104,8 @@ twine check: build image: stage: build - needs: [pytest] + needs: + - job: pytest tags: - kubernetes image: @@ -84,13 +115,13 @@ build image: matrix: - PY_VERSION: ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] rules: - - if: '$CI_COMMIT_BRANCH == "main"' + - if: $CI_COMMIT_BRANCH == "main" variables: - DESTINATION: $CI_REGISTRY_IMAGE:python-$PY_VERSION-$CI_COMMIT_REF_SLUG - - if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' + DESTINATION: $CI_REGISTRY_IMAGE:python-$PY_VERSION + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ variables: DESTINATION: $CI_REGISTRY_IMAGE/tag:$CI_COMMIT_TAG-python-$PY_VERSION - - if: '$CI_PIPELINE_SOURCE == "push"' + - if: $CI_COMMIT_BRANCH variables: DESTINATION: $CI_REGISTRY_IMAGE/review:$CI_COMMIT_REF_SLUG-python-$PY_VERSION script: @@ -100,18 +131,19 @@ build image: verify image: stage: verify build - needs: [build image] + needs: + - job: build image parallel: matrix: - PY_VERSION: ["3", "3.7", "3.8", "3.9", "3.10", "3.11"] rules: - - if: '$CI_COMMIT_BRANCH == "main"' + - if: $CI_COMMIT_BRANCH == "main" variables: - DOCKER_IMAGE: $CI_REGISTRY_IMAGE:python-$PY_VERSION-$CI_COMMIT_REF_SLUG - - if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' + DOCKER_IMAGE: 
$CI_REGISTRY_IMAGE:python-$PY_VERSION + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ variables: DOCKER_IMAGE: $CI_REGISTRY_IMAGE/tag:$CI_COMMIT_TAG-python-$PY_VERSION - - if: '$CI_PIPELINE_SOURCE == "push"' + - if: $CI_COMMIT_BRANCH variables: DOCKER_IMAGE: $CI_REGISTRY_IMAGE/review:$CI_COMMIT_REF_SLUG-python-$PY_VERSION image: $DOCKER_IMAGE @@ -125,22 +157,36 @@ verify image: - askanna-run-utils --version - askanna-run-utils +release: + stage: release + image: registry.gitlab.com/gitlab-org/release-cli:latest + rules: + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ + script: + - echo "Creating a new release..." + release: + tag_name: $CI_COMMIT_TAG + description: $CI_COMMIT_TAG_MESSAGE + publish to pypi: <<: *twine_template - needs: [twine check, pytest] + needs: + - job: twine check + - job: pytest rules: - - if: '$CI_COMMIT_BRANCH == "main"' + - if: $CI_COMMIT_BRANCH == "main" script: - echo $PYPIRC | base64 -d > ~/.pypirc - python -m twine upload --verbose --non-interactive --config-file ~/.pypirc -r pypi dist/* publish to testpypi: <<: *twine_template - needs: [twine check] + needs: + - job: twine check rules: - - if: '$CI_COMMIT_BRANCH == "main"' + - if: $CI_COMMIT_BRANCH == "main" when: never - - if: '$CI_COMMIT_BRANCH' + - if: $CI_COMMIT_BRANCH when: manual allow_failure: true script: @@ -149,27 +195,28 @@ publish to testpypi: publish to docker: stage: publish - needs: [verify image] + needs: + - job: verify image image: docker:latest services: - docker:dind tags: - docker rules: - - if: '$CI_COMMIT_BRANCH == "main"' + - if: $CI_COMMIT_BRANCH == "main" allow_failure: true before_script: - echo "$DOCKER_TOKEN" | docker login --username $DOCKER_USER --password-stdin - echo "$CI_REGISTRY_PASSWORD" | docker login $CI_REGISTRY --username $CI_REGISTRY_USER --password-stdin script: - - docker pull gitlab.askanna.io:4567/askanna/askanna-cli --all-tags - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3-main askanna/python:3 - - docker 
tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3-main askanna/python:latest - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.7-main askanna/python:3.7 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.8-main askanna/python:3.8 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.9-main askanna/python:3.9 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.10-main askanna/python:3.10 - - docker tag gitlab.askanna.io:4567/askanna/askanna-cli:python-3.11-main askanna/python:3.11 + - docker pull $CI_REGISTRY_IMAGE --all-tags + - docker tag $CI_REGISTRY_IMAGE:python-3 askanna/python:3 + - docker tag $CI_REGISTRY_IMAGE:python-3 askanna/python:latest + - docker tag $CI_REGISTRY_IMAGE:python-3.7 askanna/python:3.7 + - docker tag $CI_REGISTRY_IMAGE:python-3.8 askanna/python:3.8 + - docker tag $CI_REGISTRY_IMAGE:python-3.9 askanna/python:3.9 + - docker tag $CI_REGISTRY_IMAGE:python-3.10 askanna/python:3.10 + - docker tag $CI_REGISTRY_IMAGE:python-3.11 askanna/python:3.11 - docker push askanna/python:latest - docker push askanna/python:3 - docker push askanna/python:3.7 @@ -180,33 +227,54 @@ publish to docker: after_script: - docker logout -publish to public repos: +publish to gitlab: + <<: *publish_to_git_template + variables: + SSH_PRIVATE_KEY_GIT: $SSH_PRIVATE_KEY_GITLAB_COM + GIT_HOST: gitlab.com + GIT_REPO: git@gitlab.com:askanna/askanna-python.git + +publish to github: + <<: *publish_to_git_template + variables: + SSH_PRIVATE_KEY_GIT: $SSH_PRIVATE_KEY_GITHUB_COM + GIT_HOST: github.com + GIT_REPO: git@github.com:askanna-io/askanna-python.git + +publish release to gitlab: stage: publish - needs: [twine check, pytest] + image: registry.gitlab.com/gitlab-org/release-cli:latest + needs: + - job: release + - job: publish to gitlab tags: - kubernetes allow_failure: true rules: - - if: '$CI_COMMIT_BRANCH == "main"' - variables: - TARGET: HEAD:main - - if: '$CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/' - 
variables: - TARGET: refs/tags/$CI_COMMIT_TAG + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ + variables: + GITLAB_PRIVATE_TOKEN: $GITLAB_COM_API_TOKEN + script: + - release-cli --server-url https://gitlab.com --project-id 33066876 create --tag-name $CI_COMMIT_TAG --description "$CI_COMMIT_TAG_MESSAGE" + +publish release to github: + stage: publish + image: ubuntu:22.04 + needs: + - job: release + - job: publish to github + tags: + - kubernetes + allow_failure: true + rules: + - if: $CI_COMMIT_TAG =~ /^v[0-9]+\.[0-9]+\.[0-9]+$/ + variables: + GH_TOKEN: $GITHUB_COM_API_TOKEN before_script: - - apt-get update -y && apt-get install openssh-client git curl -y - - eval $(ssh-agent -s) - - echo "$SSH_PRIVATE_KEY_GITLAB_COM" | tr -d '\r' | ssh-add - > /dev/null - - echo "$SSH_PRIVATE_KEY_GITHUB_COM" | tr -d '\r' | ssh-add - > /dev/null - - mkdir -p ~/.ssh - - chmod 700 ~/.ssh - - ssh-keyscan gitlab.com >> ~/.ssh/known_hosts - - ssh-keyscan github.com >> ~/.ssh/known_hosts - - chmod 644 ~/.ssh/known_hosts - - git config --global user.email "hello@askanna.io" - - git config --global user.name "AskAnna Robot" + - apt-get update -y && apt-get install curl -y + - curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg + - chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg + - echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | tee /etc/apt/sources.list.d/github-cli.list > /dev/null + - apt-get update -y && apt-get install gh -y script: - - git remote add gitlab git@gitlab.com:askanna/askanna-python.git - - git remote add github git@github.com:askanna-io/askanna-python.git - - git push gitlab $TARGET - - git push github $TARGET + - gh release create $CI_COMMIT_TAG --verify-tag --title $CI_COMMIT_TAG --notes "$CI_COMMIT_TAG_MESSAGE" --repo askanna-io/askanna-python diff --git 
a/CHANGELOG.md b/CHANGELOG.md index abd3d7f..cb37195 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,19 @@ # Changelog +## 0.23.0 (2023-04-05) + +- Added new run filters: `status`, `trigger`, `created_by` and `package_suuid` +- Implemented option for exclude version of run filters (e.g. `status__exclude`) +- CLI command `askanna run list` now includes a filter option for `status` +- Defined `status` and `trigger` values using a `Literal` type +- Expanded test coverage and added test utils with date-time helpers +- Removed deprecated `GetRunsSDK` +- Switch from `pytz` to `zoneinfo` and removed [pytz](http://pythonhosted.org/pytz) from dependencies +- Change the build system from [Wheel](https://wheel.readthedocs.io/) to [Hatch](https://hatch.pypa.io/) +- Switch from [Flake8](https://flake8.pycqa.org/) to [Ruff](https://beta.ruff.rs/docs/) +- Introduce parallel matrix for GitLab CI/CD to simplify the config +- Add release related jobs to GitLab CI/CD + ## 0.22.0 (2023-03-06) - Add '_at' suffix to datetime fields in dataclasses to support the new AskAnna Backend API response diff --git a/askanna/__init__.py b/askanna/__init__.py index 3115e81..5df3d6e 100644 --- a/askanna/__init__.py +++ b/askanna/__init__.py @@ -2,7 +2,7 @@ The AskAnna CLI & Python SDK is part of the AskAnna platform to kickstart your data science projects. """ -__version__ = "0.23.0.dev2" +__version__ = "0.23.0" import re import sys