diff --git a/BUILD b/BUILD index 571715e2..a90c0184 100644 --- a/BUILD +++ b/BUILD @@ -41,7 +41,6 @@ stardoc( "//npm/deploy:lib", "//packer:lib", "//pip:lib", - "//rpm:lib", ], symbol_names = [ # From: //apt:rules.bzl @@ -106,9 +105,5 @@ stardoc( # From: //pip:rules.bzl "assemble_pip", "deploy_pip", - - # From: //rpm:rules.bzl - "assemble_rpm", - "deploy_rpm", ], ) diff --git a/README.md b/README.md index a148b505..b57541fe 100755 --- a/README.md +++ b/README.md @@ -275,9 +275,9 @@ deploy_npm(name, re ## How to generate an auth token ### Using the command line (`npm adduser`) - 1. Run `npm adduser <repo_url>` (example: `npm adduser --registry=https://repo.vaticle.com/repository/npm-private`) + 1. Run `npm adduser <repo_url>` (example: `npm adduser --registry=https://npm.cloudsmith.io/typedb/private/`) 2. When prompted, provide login credentials to sign in to the user account that is used in your CI and has permissions to publish the package - 3. If successful, a line will be added to your `.npmrc` file (`$HOME/.npmrc` on Unix) which looks like: `//repo.vaticle.com/repository/npm-snapshot/:_authToken=NpmToken.00000000-0000-0000-0000-000000000000`. The token is the value of `_authToken`, in this case `NpmToken.00000000-0000-0000-0000-000000000000`. + 3. If successful, a line will be added to your `.npmrc` file (`$HOME/.npmrc` on Unix) which looks like: `//npm.cloudsmith.io/typedb/private/:_authToken=NpmToken.00000000-0000-0000-0000-000000000000`. The token is the value of `_authToken`, in this case `NpmToken.00000000-0000-0000-0000-000000000000`. 4. Save the auth token somewhere safe and then delete it from your `.npmrc` file ### Using a UI @@ -321,30 +321,6 @@ Execute Packer to perform deployment | target | assemble_packer label to be deployed. | Label | optional | None | - - -## deploy_rpm - -
-deploy_rpm(name, release, snapshot, target)
-
- -Deploy package built with `assemble_rpm` to RPM repository. - - Select deployment to `snapshot` or `release` repository with `bazel run //:some-deploy-rpm -- [snapshot|release] - - -**ATTRIBUTES** - - -| Name | Description | Type | Mandatory | Default | -| :------------- | :------------- | :------------- | :------------- | :------------- | -| name | A unique name for this target. | Name | required | | -| release | Remote repository to deploy rpm release to | String | required | | -| snapshot | Remote repository to deploy rpm snapshot to | String | required | | -| target | assemble_rpm target to deploy | Label | optional | None | - - ## doxygen_docs @@ -651,36 +627,6 @@ Assemble files for HashiCorp Packer deployment | files | Files to include into deployment | {} | - - -## assemble_rpm - -
-assemble_rpm(name, package_name, spec_file, version_file, workspace_refs, installation_dir,
-             archives, empty_dirs, files, permissions, symlinks, tags)
-
- -Assemble package for installation with RPM - -**PARAMETERS** - - -| Name | Description | Default Value | -| :------------- | :------------- | :------------- | -| name | A unique name for this target. | none | -| package_name | Package name for built .rpm package | none | -| spec_file | The RPM spec file to use | none | -| version_file | File containing version number of a package. Alternatively, pass --define version=VERSION to Bazel invocation. Not specifying version defaults to '0.0.0' | None | -| workspace_refs | JSON file with other Bazel workspace references | None | -| installation_dir | directory into which .rpm package is unpacked at installation | None | -| archives | Bazel labels of archives that go into .rpm package | [] | -| empty_dirs | list of empty directories created at package installation | [] | -| files | mapping between Bazel labels of archives that go into .rpm package and their resulting location on .rpm package installation | {} | -| permissions | mapping between paths and UNIX permissions | {} | -| symlinks | mapping between source and target of symbolic links created at installation | {} | -| tags | additional tags passed to all wrapped rules | [] | - - ## assemble_targz diff --git a/WORKSPACE b/WORKSPACE index 919ab313..1ac3defd 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -95,3 +95,9 @@ http_archive( load("@bazel_stardoc//:setup.bzl", "stardoc_repositories") stardoc_repositories() + +# Load @vaticle_bazel_distribution_uploader +load("//common/uploader:deps.bzl", uploader_deps = "deps") +uploader_deps() +load("@vaticle_bazel_distribution_uploader//:requirements.bzl", install_uploader_deps = "install_deps") +install_uploader_deps() diff --git a/apt/rules.bzl b/apt/rules.bzl index 3d16b62e..db9432c2 100644 --- a/apt/rules.bzl +++ b/apt/rules.bzl @@ -187,27 +187,25 @@ def assemble_apt(name, def _deploy_apt_impl(ctx): + _deploy_script = ctx.actions.declare_file(ctx.attr.deploy_script_name) + package_path = ctx.files.target[0].short_path ctx.actions.expand_template( template = ctx.file._deployment_script, - output = ctx.outputs.deployment_script, + output = _deploy_script, substitutions = { '{snapshot}' : ctx.attr.snapshot, - '{release}' : ctx.attr.release + '{release}' : ctx.attr.release, + '{package_path}' : package_path, }, is_executable = True ) - symlinks = { - 'package.deb': ctx.files.target[0], - } + deployment_lib_files = ctx.attr._deployment_wrapper_lib[DefaultInfo].default_runfiles.files.to_list() + return DefaultInfo(executable = _deploy_script, + runfiles = ctx.runfiles(files=[ctx.files.target[0]] + deployment_lib_files)) - return DefaultInfo(executable = ctx.outputs.deployment_script, - runfiles = ctx.runfiles( - files=[ctx.files.target[0]], - symlinks = symlinks)) - -deploy_apt = rule( +_deploy_apt = rule( attrs = { "target": attr.label( doc = 'assemble_apt label to deploy' @@ -220,13 +218,17 @@ deploy_apt = rule( mandatory = True, doc = 'Release repository to deploy apt artifact to' ), + "_deployment_wrapper_lib": attr.label( + default = "//common/uploader:uploader", + ), "_deployment_script": attr.label( allow_single_file = True, default = "//apt/templates:deploy.py" ), - }, - outputs = { - "deployment_script": "%{name}.sh", + "deploy_script_name": attr.string( + mandatory = True, + doc = 'Name of instantiated deployment script' + ), }, implementation = _deploy_apt_impl, executable = True, @@ -235,3 +237,22 @@ deploy_apt = rule( Select deployment to `snapshot` or `release` repository with `bazel run //:some-deploy-apt -- [snapshot|release] """ ) + +def 
deploy_apt(name, target, snapshot, release, **kwargs): + deploy_script_target_name = name + "__deploy" + deploy_script_name = deploy_script_target_name + "-deploy.py" + + _deploy_apt( + name = deploy_script_target_name, + target = target, + snapshot = snapshot, + release = release, + deploy_script_name = deploy_script_name, + **kwargs + ) + + native.py_binary( + name = name, + srcs = [deploy_script_target_name], + main = deploy_script_name, + ) diff --git a/apt/templates/deploy.py b/apt/templates/deploy.py index c8c2bf54..0a9f6f43 100644 --- a/apt/templates/deploy.py +++ b/apt/templates/deploy.py @@ -31,18 +31,25 @@ import tempfile from runpy import run_path +import sys, glob +# Prefer using the runfile dependency than system dependency +runfile_deps = [path for path in map(os.path.abspath, glob.glob('external/*/*'))] +sys.path = runfile_deps + sys.path + +from common.uploader.uploader import Uploader + parser = argparse.ArgumentParser() parser.add_argument('repo_type') args = parser.parse_args() repo_type_key = args.repo_type -apt_deployments = { +apt_repositories = { 'snapshot' : "{snapshot}", 'release' : "{release}" } -apt_registry = apt_deployments[repo_type_key] +repo_url = apt_repositories[repo_type_key] apt_username, apt_password = ( os.getenv('DEPLOY_APT_USERNAME'), @@ -61,19 +68,9 @@ '$DEPLOY_APT_PASSWORD env variable' ) -upload_status_code = subprocess.check_output([ - 'curl', - '--silent', - '--output', '/dev/stderr', - '--write-out', '%{http_code}', - '-u', '{}:{}'.format(apt_username, apt_password), - '-X', 'POST', - '-H', 'Content-Type: multipart/form-data', - '--data-binary', '@package.deb', - apt_registry -]).decode().strip() +package_path = "{package_path}" -if upload_status_code != '201': - raise Exception('upload failed, got HTTP status code {}'.format(upload_status_code)) +uploader = Uploader.create(apt_username, apt_password, repo_url) +uploader.apt(package_path) print('Deployment completed.') diff --git a/artifact/rules.bzl b/artifact/rules.bzl index 684acc1c..cc4d2c49 100644 --- a/artifact/rules.bzl +++ b/artifact/rules.bzl @@ -51,11 +51,11 @@ def _deploy_artifact_impl(ctx): symlinks = { 'VERSION': version_file, } - + deployment_lib_files = ctx.attr._deployment_wrapper_lib[DefaultInfo].default_runfiles.files.to_list() return DefaultInfo( executable = _deploy_script, runfiles = ctx.runfiles( - files = files, + files = files + deployment_lib_files, symlinks = symlinks, ), ) @@ -84,6 +84,9 @@ _deploy_artifact = rule( doc = "The artifact filename, automatic from the target file if not specified", default = '', ), + "_deployment_wrapper_lib": attr.label( + default = "//common/uploader:uploader", + ), "_deploy_script_template": attr.label( allow_single_file = True, default = "//artifact/templates:deploy.py", @@ -165,7 +168,7 @@ def artifact_file(name, http_file( name = name, - urls = ["{}/{}/{}/{}".format(repository_url, group_name, version, artifact_name)], + urls = ["{}/names/{}/versions/{}/{}".format(repository_url.rstrip("/"), group_name, version, artifact_name)], downloaded_file_path = artifact_name, sha = sha, tags = tags + ["{}={}".format(versiontype, version)], diff --git a/artifact/templates/deploy.py b/artifact/templates/deploy.py index 5219b3fa..910e3bbf 100644 --- a/artifact/templates/deploy.py +++ b/artifact/templates/deploy.py @@ -27,20 +27,12 @@ import sys from posixpath import join as urljoin +import glob +# Prefer using the runfile dependency than system dependency +runfile_deps = [path for path in map(os.path.abspath, glob.glob('external/*/*'))] +sys.path 
= runfile_deps + sys.path -def upload(url, username, password, local_fn, remote_fn): - upload_status_code = sp.check_output([ - 'curl', '--silent', - '--write-out', '%{http_code}', - '-u', '{}:{}'.format(username, password), - '--upload-file', local_fn, - urljoin(url, remote_fn) - ]).decode().strip() - - if upload_status_code != '201': - raise Exception('upload of {} failed, got HTTP status code {}'.format( - local_fn, upload_status_code)) - +from common.uploader.uploader import Uploader if len(sys.argv) != 2: raise ValueError('Should pass only as arguments') @@ -75,9 +67,6 @@ def upload(url, username, password, local_fn, remote_fn): .format(version, repo_type, version_snapshot_regex)) filename = '{artifact_filename}' -if filename == '': - filename = os.path.basename('{artifact_path}') - filename = filename.format(version = version) base_url = None @@ -86,6 +75,5 @@ def upload(url, username, password, local_fn, remote_fn): else: base_url = '{snapshot}' -dir_url = '{base_url}/{artifact_group}/{version}'.format(version=version, base_url=base_url) - -upload(dir_url, username, password, '{artifact_path}', filename) +uploader = Uploader.create(username, password, base_url) +uploader.artifact("{artifact_group}", version, '{artifact_path}', filename) diff --git a/rpm/BUILD b/common/uploader/BUILD similarity index 66% rename from rpm/BUILD rename to common/uploader/BUILD index 7a6e8a06..bc903e11 100644 --- a/rpm/BUILD +++ b/common/uploader/BUILD @@ -16,23 +16,11 @@ # specific language governing permissions and limitations # under the License. # +load("@vaticle_bazel_distribution_uploader//:requirements.bzl", cloudsmith_requirement = "requirement") -load("@bazel_skylib//:bzl_library.bzl", "bzl_library") - -bzl_library( - name = "lib", - srcs = [ - "rules.bzl", - "@rules_pkg//:rpm.bzl", - ], - deps = [ - "@rules_pkg//doc_build:rules_pkg_lib", - ], - visibility = ["//visibility:public"] -) - -py_binary( - name = "generate_spec_file", - srcs = ["generate_spec_file.py"], - visibility = ["//visibility:public"] +py_library( + name = "uploader", + srcs = glob(["*.py"]), + deps = [cloudsmith_requirement("requests")], + visibility = ["//visibility:public"], ) diff --git a/common/uploader/__init__.py b/common/uploader/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/common/uploader/cloudsmith.py b/common/uploader/cloudsmith.py new file mode 100644 index 00000000..59d4fc9d --- /dev/null +++ b/common/uploader/cloudsmith.py @@ -0,0 +1,158 @@ +import os +import re +import requests +import time +from .uploader import Uploader, DeploymentException + +class CloudsmithUploader(Uploader): + COMMON_OPTS = {"tags"} + _WAIT_FOR_SYNC_ATTEMPTS = 100 + _WAIT_FOR_SYNC_SLEEP_SEC = 3 + + # Interface with the cloudsmith api + def __init__(self, username, password, cloudsmith_url): + self.auth = requests.auth.HTTPBasicAuth(username, password) + res = re.search(r"cloudsmith:\/\/([^\/]+)/([^\/]+)\/?", cloudsmith_url) + if res is None: + raise DeploymentException( + "Invalid cloudsmith_url. 
Expected cloudsmith:/// but was: %s" % cloudsmith_url) + self.repo_owner = res.group(1) + self.repo = res.group(2) + + def _upload_file_impl(self, file, filename): + headers = {} + url = "https://upload.cloudsmith.io/%s/%s/%s" % (self.repo_owner, self.repo, filename) + return requests.put(url, auth=self.auth, headers=headers, data=open(file, 'rb').read()) + + def _post_metadata_impl(self, package_type, data): + headers = {} + url = "https://api-prd.cloudsmith.io/v1/packages/%s/%s/upload/%s/" % (self.repo_owner, self.repo, package_type) + return requests.post(url, auth=self.auth, headers=headers, json=data) + + def _wait_for_sync_impl(self, slug): + url = "https://api.cloudsmith.io/v1/packages/%s/%s/%s/status/" % (self.repo_owner, self.repo, slug) + syncing = True + response = None + ctr = 0 + while syncing: + if ctr >= CloudsmithUploader._WAIT_FOR_SYNC_ATTEMPTS: + raise DeploymentException("Sync still in progress after %d attempts. Failing..." % CloudsmithUploader._WAIT_FOR_SYNC_ATTEMPTS) + response = requests.get(url, auth=self.auth) + self._check_status_code("sync status", response) + json = response.json() + syncing = json["is_sync_in_progress"] or not (json["is_sync_completed"] or json["is_sync_failed"]) + ctr += 1 + time.sleep(CloudsmithUploader._WAIT_FOR_SYNC_SLEEP_SEC) + return response + + def _upload_file(self, file, filename): + print("Uploading file: %s" % filename) + resp = self._upload_file_impl(file, filename) + self._check_status_code("file upload", resp) + print("- Success!") + return resp.json()["identifier"] + + def _post_metadata(self, package_type, data): + print("Creating package: %s" % package_type) + resp = self._post_metadata_impl(package_type, data) + self._check_status_code("metadata post", resp) + print("- Success!") + return self._get_slug(resp) + + def _wait_for_sync(self, slug): + print("Checking sync status for slug: %s" % slug) + resp = self._wait_for_sync_impl(slug) + self._check_status_code("sync status", resp) + success = resp.json()["is_sync_completed"] + if success: + print("- Success!") + else: + raise DeploymentException("Syncing failed", resp) + return success + + def _check_status_code(self, stage, response): + if (response.status_code // 100) != 2: + raise DeploymentException("HTTP request for %s failed" % stage, response) + else: + return True + + def _validate_opts(self, opts, accepted_opts): + unrecognised_fields = [f for f in opts if f not in accepted_opts.union(CloudsmithUploader.COMMON_OPTS)] + if len(unrecognised_fields) != 0: + raise ValueError("Unrecognised option: " + str(unrecognised_fields)) + + def _get_slug(self, metadata_post_response): + return metadata_post_response.json()["slug_perm"] + + def _pick_filename(self, path, preferred_filename): + return preferred_filename if preferred_filename else os.path.basename(path) + + # Specific + def apt(self, deb_file, distro="any-distro/any-version", opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + # The uploaded filename is irrelevant. Cloudsmith sync will take care of it. 
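+        # Two-step Cloudsmith flow: first PUT the raw file to upload.cloudsmith.io to
+        # obtain an identifier, then POST "deb" package metadata referencing that
+        # identifier and poll the package status until the repository sync completes.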
+ uploaded_id = self._upload_file(deb_file, os.path.basename(deb_file)) + data = { + "package_file": uploaded_id, + "distribution": distro, + } + slug = self._post_metadata("deb", data) + sync_success = self._wait_for_sync(slug) + assert (sync_success) + return sync_success, slug + + def artifact(self, artifact_group, version, artifact_path, filename, opts={}): + accepted_opts = {"description", "summary"} + self._validate_opts(opts, accepted_opts) + uploaded_id = self._upload_file(artifact_path, filename) + data = { + "package_file": uploaded_id, + "name": artifact_group, + "version": version + } + slug = self._post_metadata("raw", data) + sync_success = self._wait_for_sync(slug) + assert (sync_success) + return sync_success, slug + + def helm(self, tar_path, opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + uploaded_id = self._upload_file(tar_path, os.path.basename(tar_path)) + data = { + "package_file": uploaded_id, + } + slug = self._post_metadata("helm", data) + sync_success = self._wait_for_sync(slug) + assert (sync_success) + return sync_success, slug + + def maven(self, group_id, artifact_id, version, + jar_path, pom_path, + sources_path=None, javadoc_path=None, tests_path = None, + should_sign = True, + opts={}): + accepted_opts = {} + jar_filename, pom_filename, sources_filename, javadoc_filename, tests_filename = \ + Uploader._maven_names(artifact_id, version, sources_path, javadoc_path, tests_path) + self._validate_opts(opts, accepted_opts) + jar_id = self._upload_file(jar_path, self._pick_filename(jar_path, jar_filename)) + pom_id = self._upload_file(pom_path, self._pick_filename(pom_path, pom_filename)) + data = { + "group_id": group_id, + "artifact_id": artifact_id, + "package_file": jar_id, + "pom_file": pom_id + } + if sources_path is not None: + data["sources_file"] = self._upload_file(sources_path, self._pick_filename(sources_path, sources_filename)) + if javadoc_path is not None: + data["javadoc_file"] = self._upload_file(javadoc_path, self._pick_filename(javadoc_path, javadoc_filename)) + if tests_path is not None: + data["tests_file"] = self._upload_file(tests_path, self._pick_filename(tests_path, tests_filename)) + + slug = self._post_metadata("maven", data) + sync_success = self._wait_for_sync(slug) + assert (sync_success) + return sync_success, slug diff --git a/common/uploader/deps.bzl b/common/uploader/deps.bzl new file mode 100644 index 00000000..11d62467 --- /dev/null +++ b/common/uploader/deps.bzl @@ -0,0 +1,7 @@ +load("@rules_python//python:pip.bzl", "pip_parse") + +def deps(): + pip_parse( + name = "vaticle_bazel_distribution_uploader", + requirements_lock = "@vaticle_bazel_distribution//common/uploader:requirements.txt", + ) diff --git a/common/uploader/nexus.py b/common/uploader/nexus.py new file mode 100644 index 00000000..f2543875 --- /dev/null +++ b/common/uploader/nexus.py @@ -0,0 +1,133 @@ +import hashlib +import os +import requests + +from .uploader import Uploader, DeploymentException + +class NexusUploader(Uploader): + COMMON_OPTS = set() + + def __init__(self, username, password, url): + self.auth = requests.auth.HTTPBasicAuth(username, password) + + if not url.startswith("http"): + raise ValueError( + "Invalid url for repository. Expected http or https. 
Received:" % url) + self.repo_url = url.rstrip("/") + "/" + + def _upload_file_impl(self, file, url, use_post): + if use_post: + headers = {"Content-Type" : "multipart/form-data"} + return requests.post(url, auth = self.auth, data = open(file, "rb").read() , headers = headers) + else: + return requests.put(url, auth = self.auth, data = open(file, "rb").read()) + + def _upload_string_impl(self, data, url): + return requests.put(url, auth = self.auth, data = data) + + def _upload_file(self, file, url, use_post = False): + # self._upload_file_impl(file, url, use_post) + response = self._upload_file_impl(file, url, use_post) + success = (response.status_code // 100) == 2 + if not success: + raise DeploymentException("HTTP request for %s failed" % "upload", response) # TODO: Fix type + else: + return True + + def _upload_file_and_may_sign(self, file, url, should_sign): + use_post = False + stage = "upload" + response = self._upload_file_impl(file, url, use_post) + success = (response.status_code // 100)== 2 + if success and should_sign: + stage = "sign" + response = self._upload_file_impl(self._sign(file), url + ".asc", use_post) + success = (response.status_code // 100)== 2 + if success: + stage = "md5" + md5 = hashlib.md5(open(file, 'rb').read()).hexdigest() + response = self._upload_string_impl(md5, url + ".md5") + success = (response.status_code // 100)== 2 + if success: + stage = "sha1" + sha1 = hashlib.sha1(open(file, 'rb').read()).hexdigest() + response = self._upload_string_impl(sha1, url + ".sha1") + success = (response.status_code // 100)== 2 + + if not success: + from .cloudsmith import DeploymentException + raise DeploymentException("HTTP request for %s failed" % stage, response) + else: + return True + + def _validate_opts(self, opts, accepted_opts): + unrecognised_fields = [f for f in opts if f not in accepted_opts.union(NexusUploader.COMMON_OPTS)] + if len(unrecognised_fields) != 0: + raise ValueError("Unrecognised option: " + str(unrecognised_fields)) + + def _sign(self, fn): + import tempfile + import subprocess as sp + # TODO(vmax): current limitation of this functionality + # is that gpg key should already be present in keyring + # and should not require passphrase + asc_file = tempfile.mktemp() + sp.check_call([ + 'gpg', + '--detach-sign', + '--armor', + '--output', + asc_file, + fn + ]) + return asc_file + + #Impl + def apt(self, deb_file, distro="ignored", opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + upload_url = self.repo_url + success = self._upload_file(deb_file, upload_url, use_post = True) + return success, upload_url + + def artifact(self, artifact_group, version, artifact_path, filename, opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + upload_url = "%s/%s/%s/%s" %(self.repo_url.rstrip("/"), artifact_group, version, filename) + success = self._upload_file(artifact_path, upload_url) + return success, upload_url + + def helm(self, tar_path, opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + upload_url = "%s/api/charts/%s"%(self.repo_url.rstrip("/"), os.path.basename(tar_path)) + success = self._upload_file(tar_path, upload_url, use_post=False) + return success, upload_url + + def maven(self, group_id, artifact_id, version, + jar_path, pom_path, + sources_path=None, javadoc_path=None, tests_path = None, + should_sign = True, + opts={}): + accepted_opts = set() + self._validate_opts(opts, accepted_opts) + jar_filename, pom_filename, sources_filename, javadoc_filename, 
tests_filename = \ + Uploader._maven_names(artifact_id, version, sources_path, javadoc_path, tests_path) + base_url = "{repo_url}/{coordinates}/{artifact}/{version}/".format( + repo_url = self.repo_url.rstrip("/"), coordinates=group_id.text.replace('.', '/'), version=version, artifact=artifact_id) + jar_url = base_url + jar_filename + pom_url = base_url + pom_filename + success = True + success = success and self._upload_file_and_may_sign(jar_path, jar_url, should_sign) + success = success and self._upload_file_and_may_sign(pom_path, pom_url, should_sign) + if javadoc_path is not None: + javadoc_url = base_url + javadoc_filename + success = success and self._upload_file_and_may_sign(javadoc_path, javadoc_url, should_sign) + if sources_path is not None: + sources_url = base_url + sources_filename + success = success and self._upload_file_and_may_sign(sources_path, sources_url, should_sign) + if tests_path is not None: + tests_url = base_url + tests_filename + success = success and self._upload_file_and_may_sign(tests_path, tests_url, should_sign) + + return success, pom_url diff --git a/common/uploader/requirements.txt b/common/uploader/requirements.txt new file mode 100644 index 00000000..88648bd8 --- /dev/null +++ b/common/uploader/requirements.txt @@ -0,0 +1,5 @@ +requests==2.28.2 +certifi==2022.12.7 +charset-normalizer==3.0.1 +idna==3.4 +urllib3==1.26.14 diff --git a/common/uploader/uploader.py b/common/uploader/uploader.py new file mode 100644 index 00000000..169463ea --- /dev/null +++ b/common/uploader/uploader.py @@ -0,0 +1,56 @@ +import os +from abc import ABC,abstractmethod + +class DeploymentException(Exception): + def __init__(self, msg, response=None): + self.msg = msg + self.response = response + + def __str__(self): + ret = "DeploymentException: %s" % (self.msg) + if self.response is not None: + ret += ". 
HTTP response was [%d]: %s" % (self.response.status_code, self.response.text) + return ret + +class Uploader(ABC): + @staticmethod + def create(username, password, repo_url): + if repo_url.startswith("cloudsmith"): + from .cloudsmith import CloudsmithUploader + return CloudsmithUploader(username, password, repo_url) + elif repo_url.startswith("http"): + from .nexus import NexusUploader + return NexusUploader(username, password, repo_url) + else: + raise ValueError("Unrecognised url: ", repo_url) + + @staticmethod + def _maven_names(artifact_id, version, sources_path, javadoc_path, tests_path): + filename_base = '{artifact}-{version}'.format(artifact=artifact_id, version=version) + jar_filename = filename_base + ".jar" + pom_filename = filename_base + ".pom" + sources_filename = filename_base + "-sources.jar" if sources_path and os.path.exists(sources_path) else None + javadoc_filename = filename_base + "-javadoc.jar" if javadoc_path and os.path.exists(javadoc_path) else None + tests_path = filename_base + "-tests.jar" if tests_path and os.path.exists(tests_path) else None + return jar_filename, pom_filename, sources_filename, javadoc_filename, tests_path + + # Specific + @abstractmethod + def apt(self, deb_file, distro, opts={}): + raise NotImplementedError("Abstract") + + @abstractmethod + def artifact(self, artifact_group, version, artifact_path, filename, opts={}): + raise NotImplementedError("Abstract") + + @abstractmethod + def helm(self, tar_path, opts={}): + raise NotImplementedError("Abstract") + + @abstractmethod + def maven(self, group_id, artifact_id, version, + jar_path, pom_path, + sources_path=None, javadoc_path=None, tests_path = None, + should_sign = True, + opts={}): + raise NotImplementedError("Abstract") diff --git a/crates/CrateDeployer.kt b/crates/CrateDeployer.kt index d3113418..1e264168 100644 --- a/crates/CrateDeployer.kt +++ b/crates/CrateDeployer.kt @@ -60,8 +60,8 @@ class CrateDeployer : Callable { private val repoUrl: String get() = when (releaseMode) { - CrateRepoType.Snapshot -> snapshotRepo - CrateRepoType.Release -> releaseRepo + CrateRepoType.Snapshot -> snapshotRepo.trim('/') + CrateRepoType.Release -> releaseRepo.trim('/') } + "/api/v1/crates/new" private val token = System.getenv("DEPLOY_CRATE_TOKEN") ?: throw RuntimeException( @@ -99,7 +99,7 @@ class CrateDeployer : Callable { private fun httpPut(url: String, token: String, content: ByteArray): HttpResponse { return NetHttpTransport() .createRequestFactory() - .buildPutRequest(GenericUrl(url), ByteArrayContent("application/json", content)) + .buildPutRequest(GenericUrl(url), ByteArrayContent(null, content)) // TODO: Verify it works with crates.io .setHeaders( HttpHeaders().setAuthorization(token) ) diff --git a/doc_hub.bzl b/doc_hub.bzl index 2e54e82a..0b2677ff 100644 --- a/doc_hub.bzl +++ b/doc_hub.bzl @@ -67,8 +67,6 @@ load("//packer:rules.bzl", _assemble_packer = "assemble_packer", _deploy_packer load("//pip:rules.bzl", _assemble_pip = "assemble_pip", _deploy_pip = "deploy_pip") -load("//rpm:rules.bzl", _assemble_rpm = "assemble_rpm", _deploy_rpm = "deploy_rpm") - assemble_apt = _assemble_apt deploy_apt = _deploy_apt @@ -117,6 +115,3 @@ deploy_packer = _deploy_packer assemble_pip = _assemble_pip deploy_pip = _deploy_pip - -assemble_rpm = _assemble_rpm -deploy_rpm = _deploy_rpm diff --git a/helm/BUILD b/helm/BUILD new file mode 100644 index 00000000..e69de29b diff --git a/helm/rules.bzl b/helm/rules.bzl new file mode 100644 index 00000000..ae5bb909 --- /dev/null +++ b/helm/rules.bzl @@ -0,0 +1,91 @@ 
+# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_file") + +def _deploy_helm_impl(ctx): + _deploy_script = ctx.actions.declare_file(ctx.attr.deploy_script_name) + ctx.actions.expand_template( + template = ctx.file._deploy_script_template, + output = _deploy_script, + substitutions = { + "{chart_path}": ctx.file.chart.short_path, + "{release}": ctx.attr.release, + "{snapshot}": ctx.attr.snapshot, + }, + ) + + deployment_lib_files = ctx.attr._deployment_wrapper_lib[DefaultInfo].default_runfiles.files.to_list() + return DefaultInfo( + executable = _deploy_script, + runfiles = ctx.runfiles( + files = [ctx.file.chart] + deployment_lib_files + ), + ) + +_deploy_helm = rule( + attrs = { + "chart": attr.label( + allow_single_file = True, + mandatory = True, + doc = "Chart to deploy to repo", + ), + "_deployment_wrapper_lib": attr.label( + default = "//common/uploader:uploader", + ), + "_deploy_script_template": attr.label( + allow_single_file = True, + default = "//helm/templates:deploy.py", + ), + "deploy_script_name": attr.string( + mandatory = True, + doc = 'Name of instantiated deployment script' + ), + "release": attr.string( + mandatory = True, + doc = "Repository that the release chart will be uploaded to" + ), + "snapshot": attr.string( + mandatory = True, + doc = "Repository that the snapshot chart will be uploaded to" + ), + }, + executable = True, + implementation = _deploy_helm_impl, + doc = "Deploy helm chart into a raw repo", +) + +def deploy_helm(name, chart, snapshot, release, **kwargs): + deploy_script_target_name = name + "__deploy" + deploy_script_name = deploy_script_target_name + "-deploy.py" + + _deploy_helm( + name = deploy_script_target_name, + chart = chart, + deploy_script_name = deploy_script_name, + snapshot = snapshot, + release = release, + **kwargs + ) + + native.py_binary( + name = name, + srcs = [deploy_script_target_name], + main = deploy_script_name, + ) diff --git a/rpm/templates/BUILD b/helm/templates/BUILD similarity index 100% rename from rpm/templates/BUILD rename to helm/templates/BUILD diff --git a/helm/templates/deploy.py b/helm/templates/deploy.py new file mode 100644 index 00000000..3fe78817 --- /dev/null +++ b/helm/templates/deploy.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 + +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +from __future__ import print_function + +import os +import re +import subprocess as sp +import sys +from posixpath import join as urljoin + +import glob +# Prefer using the runfile dependency than system dependency +runfile_deps = [path for path in map(os.path.abspath, glob.glob('external/*/*'))] +sys.path = runfile_deps + sys.path + +from common.uploader.uploader import Uploader + +if len(sys.argv) != 2: + raise ValueError('Should pass only as arguments') + +_, repo_type = sys.argv + +username, password = os.getenv('DEPLOY_HELM_USERNAME'), os.getenv('DEPLOY_HELM_PASSWORD') + +if not username: + raise ValueError('Error: username should be passed via $DEPLOY_HELM_USERNAME env variable') + +if not password: + raise ValueError('Error: password should be passed via $DEPLOY_HELM_PASSWORD env variable') + +chart_path = '{chart_path}' +filename = os.path.basename('{chart_path}') + +snapshot = 'snapshot' +version_snapshot_regex = '.*-0.0.0-[0-9|a-f|A-F]{40}.*' +release = 'release' +version_release_regex = '.*-[0-9]+.[0-9]+.[0-9]+(-[a-zA-Z0-9]+)*.*' + +if repo_type not in [snapshot, release]: + raise ValueError("Invalid repository type: {}. It should be one of these: {}" + .format(repo_type, [snapshot, release])) +if repo_type == 'snapshot' and len(re.findall(version_snapshot_regex, filename)) == 0: + raise ValueError('Invalid version: {}. A helm chart uploaded to a {} repository ' + 'must contain a version in its filename which complies to this regex: {}' + .format(filename, repo_type, version_snapshot_regex)) +if repo_type == 'release' and len(re.findall(version_release_regex, filename)) == 0: + raise ValueError('Invalid version: {}. 
An helm chart uploaded to a {} repository ' + 'must contain a version in its filename which complies to this regex: {}' + .format(filename, repo_type, version_snapshot_regex)) + +base_url = None +if repo_type == 'release': + base_url = '{release}' +else: + base_url = '{snapshot}' + +uploader = Uploader.create(username, password, base_url) +uploader.helm(chart_path) diff --git a/maven/rules.bzl b/maven/rules.bzl index 3e746b92..9aa19573 100644 --- a/maven/rules.bzl +++ b/maven/rules.bzl @@ -374,9 +374,10 @@ def _deploy_maven_impl(ctx): files.append(ctx.attr.target[MavenDeploymentInfo].srcjar) symlinks[src_jar_link] = ctx.attr.target[MavenDeploymentInfo].srcjar + deployment_lib_files = ctx.attr._deployment_wrapper_lib[DefaultInfo].default_runfiles.files.to_list() return DefaultInfo( executable = deploy_maven_script, - runfiles = ctx.runfiles(files=files, symlinks = symlinks) + runfiles = ctx.runfiles(files=files + deployment_lib_files, symlinks = symlinks) ) _deploy_maven = rule( @@ -394,6 +395,9 @@ _deploy_maven = rule( mandatory = True, doc = 'Release repository to release maven artifact to' ), + "_deployment_wrapper_lib": attr.label( + default = "//common/uploader:uploader", + ), "_deploy_script_template": attr.label( allow_single_file = True, default = "//maven/templates:deploy.py", diff --git a/maven/templates/deploy.py b/maven/templates/deploy.py index b194ee3b..6dd4b494 100644 --- a/maven/templates/deploy.py +++ b/maven/templates/deploy.py @@ -30,57 +30,13 @@ import tempfile from posixpath import join as urljoin +import sys, glob -def sha1(fn): - return hashlib.sha1(open(fn, 'rb').read()).hexdigest() +# Prefer using the runfile dependency than system dependency +runfile_deps = [path for path in map(os.path.abspath, glob.glob('external/*/*'))] +sys.path = runfile_deps + sys.path - -def md5(fn): - return hashlib.md5(open(fn, 'rb').read()).hexdigest() - - -def upload_file(url, username, password, local_fn, remote_fn): - upload_status_code = sp.check_output([ - 'curl', '--silent', - '--write-out', '%{http_code}', - '-u', '{}:{}'.format(username, password), - '--upload-file', local_fn, - urljoin(url, remote_fn) - ]).decode().strip() - - if upload_status_code not in {'200', '201'}: - raise Exception('upload_file of {} failed, got HTTP status code {}'.format( - local_fn, upload_status_code)) - - -def upload_str(url, username, password, string, remote_fn): - upload_status_code = sp.check_output([ - 'curl', '--silent', - '--write-out', '%{http_code}', - '-u', '{}:{}'.format(username, password), - '--upload-file', '-', - urljoin(url, remote_fn) - ], input=string.encode()).decode().strip() - - if upload_status_code not in {'200', '201'}: - raise Exception('upload_str of "{}" failed, got HTTP status code {}'.format( - string, upload_status_code)) - - -def sign(fn): - # TODO(vmax): current limitation of this functionality - # is that gpg key should already be present in keyring - # and should not require passphrase - asc_file = tempfile.mktemp() - sp.check_call([ - 'gpg', - '--detach-sign', - '--armor', - '--output', - asc_file, - fn - ]) - return asc_file +from common.uploader.uploader import Uploader def unpack_args(_, a, b=False): @@ -90,9 +46,10 @@ def unpack_args(_, a, b=False): if len(sys.argv) < 2: raise ValueError('Should pass [--gpg] as arguments') - repo_type, should_sign = unpack_args(*sys.argv) +if should_sign: raise NotImplementedError("Signing is not implemented yet") + username, password = os.getenv('DEPLOY_MAVEN_USERNAME'), os.getenv('DEPLOY_MAVEN_PASSWORD') if not username: @@ 
-110,7 +67,7 @@ def unpack_args(_, a, b=False): pom_file_path = "$POM_PATH" srcjar_path = "$SRCJAR_PATH" -namespace = { 'namespace': 'http://maven.apache.org/POM/4.0.0' } +namespace = {'namespace': 'http://maven.apache.org/POM/4.0.0'} root = ElementTree.parse(pom_file_path).getroot() group_id = root.find('namespace:groupId', namespace) artifact_id = root.find('namespace:artifactId', namespace) @@ -141,32 +98,10 @@ def unpack_args(_, a, b=False): 'must have a version which complies to this regex: {}' .format(version, repo_type, version_release_regex)) -filename_base = '{coordinates}/{artifact}/{version}/{artifact}-{version}'.format( - coordinates=group_id.text.replace('.', '/'), version=version, artifact=artifact_id.text) - -upload_file(maven_url, username, password, jar_path, filename_base + '.jar') -if should_sign: - upload_file(maven_url, username, password, sign(jar_path), filename_base + '.jar.asc') -upload_file(maven_url, username, password, pom_file_path, filename_base + '.pom') -if should_sign: - upload_file(maven_url, username, password, sign(pom_file_path), filename_base + '.pom.asc') -if os.path.exists(srcjar_path): - upload_file(maven_url, username, password, srcjar_path, filename_base + '-sources.jar') - if should_sign: - upload_file(maven_url, username, password, sign(srcjar_path), filename_base + '-sources.jar.asc') - # TODO(vmax): use real Javadoc instead of srcjar - upload_file(maven_url, username, password, srcjar_path, filename_base + '-javadoc.jar') - if should_sign: - upload_file(maven_url, username, password, sign(srcjar_path), filename_base + '-javadoc.jar.asc') - -upload_str(maven_url, username, password, md5(pom_file_path), filename_base + '.pom.md5') -upload_str(maven_url, username, password, sha1(pom_file_path), filename_base + '.pom.sha1') -upload_str(maven_url, username, password, md5(jar_path), filename_base + '.jar.md5') -upload_str(maven_url, username, password, sha1(jar_path), filename_base + '.jar.sha1') - -if os.path.exists(srcjar_path): - upload_str(maven_url, username, password, md5(srcjar_path), filename_base + '-sources.jar.md5') - upload_str(maven_url, username, password, sha1(srcjar_path), filename_base + '-sources.jar.sha1') - - upload_str(maven_url, username, password, md5(srcjar_path), filename_base + '-javadoc.jar.md5') - upload_str(maven_url, username, password, sha1(srcjar_path), filename_base + '-javadoc.jar.sha1') +uploader = Uploader.create(username, password, maven_url) +uploader.maven(group_id, artifact_id.text, version, + jar_path=jar_path, pom_path=pom_file_path, + sources_path=srcjar_path if os.path.exists(srcjar_path) else None, + javadoc_path=srcjar_path if os.path.exists(srcjar_path) else None, + tests_path = None +) diff --git a/npm/deploy/rules.bzl b/npm/deploy/rules.bzl index ef5c9819..76a535c4 100644 --- a/npm/deploy/rules.bzl +++ b/npm/deploy/rules.bzl @@ -80,9 +80,9 @@ deploy_npm = rule( ## How to generate an auth token ### Using the command line (`npm adduser`) - 1. Run `npm adduser ` (example: `npm adduser --registry=https://repo.vaticle.com/repository/npm-private`) + 1. Run `npm adduser ` (example: `npm adduser --registry=https://npm.cloudsmith.io/typedb/private/`) 2. When prompted, provide login credentials to sign in to the user account that is used in your CI and has permissions to publish the package - 3. If successful, a line will be added to your `.npmrc` file (`$HOME/.npmrc` on Unix) which looks like: `//repo.vaticle.com/repository/npm-snapshot/:_authToken=NpmToken.00000000-0000-0000-0000-000000000000`. 
The token is the value of `_authToken`, in this case `NpmToken.00000000-0000-0000-0000-000000000000`. + 3. If successful, a line will be added to your `.npmrc` file (`$HOME/.npmrc` on Unix) which looks like: `//npm.cloudsmith.io/typedb/private/:_authToken=NpmToken.00000000-0000-0000-0000-000000000000`. The token is the value of `_authToken`, in this case `NpmToken.00000000-0000-0000-0000-000000000000`. 4. Save the auth token somewhere safe and then delete it from your `.npmrc` file ### Using a UI diff --git a/rpm/generate_spec_file.py b/rpm/generate_spec_file.py deleted file mode 100644 index 027bcaa6..00000000 --- a/rpm/generate_spec_file.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python3 - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -import argparse -import json -import re - -WORKSPACE_REF_PATTERN = re.compile(r'.*%{@(?P.*)}.*') - -parser = argparse.ArgumentParser() -parser.add_argument('--output', required=True, help='Output .spec file') -parser.add_argument('--spec_file', required=True, help='Input .spec file') -parser.add_argument('--workspace_refs', help='Optional file with workspace references') -args = parser.parse_args() - -workspace_refs = { - 'commits': {}, - 'tags': {} -} - -replacements = {} - -if args.workspace_refs: - with open(args.workspace_refs) as f: - workspace_refs = json.load(f) - -all_workspaces = set() - -for ws, commit in workspace_refs['commits'].items(): - replacements["%{{@{}}}".format(ws)] = '0.0.0_' + commit - all_workspaces.add(ws) - -for ws, tag in workspace_refs['tags'].items(): - replacements["%{{@{}}}".format(ws)] = tag - all_workspaces.add(ws) - -with open(args.spec_file) as spec, open(args.output, 'w') as output: - lines = spec.readlines() - for line in lines: - match = WORKSPACE_REF_PATTERN.match(line) - if match: - workspace_ref = match.group('workspace_ref') - if workspace_ref not in all_workspaces: - raise Exception('invalid workspace was referenced: `{}`; valid workspaces to reference are: {}'.format( - workspace_ref, list(all_workspaces) - )) - for replacement_key, replacement_value in replacements.items(): - line = line.replace(replacement_key, replacement_value) - output.write(line) diff --git a/rpm/rules.bzl b/rpm/rules.bzl deleted file mode 100644 index f7deaf4d..00000000 --- a/rpm/rules.bzl +++ /dev/null @@ -1,231 +0,0 @@ -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# - -load("@rules_pkg//:pkg.bzl", "pkg_tar") -load("@rules_pkg//:rpm.bzl", "pkg_rpm") - - -def _assemble_rpm_version_file_impl(ctx): - version = ctx.var.get('version', '0.0.0') - - if len(version) == 40: - # this is a commit SHA, most likely - version = "0.0.0_{}".format(version) - - ctx.actions.run_shell( - inputs = [], - outputs = [ctx.outputs.version_file], - command = "echo {} > {}".format(version, ctx.outputs.version_file.path) - ) - - -_assemble_rpm_version_file = rule( - outputs = { - "version_file": "%{name}.version" - }, - implementation = _assemble_rpm_version_file_impl -) - -def assemble_rpm(name, - package_name, - spec_file, - version_file = None, - workspace_refs = None, - installation_dir = None, - archives = [], - empty_dirs = [], - files = {}, - permissions = {}, - symlinks = {}, - tags = []): - """Assemble package for installation with RPM - - Args: - name: A unique name for this target. - package_name: Package name for built .rpm package - spec_file: The RPM spec file to use - version_file: File containing version number of a package. - Alternatively, pass --define version=VERSION to Bazel invocation. - Not specifying version defaults to '0.0.0' - workspace_refs: JSON file with other Bazel workspace references - installation_dir: directory into which .rpm package is unpacked at installation - archives: Bazel labels of archives that go into .rpm package - empty_dirs: list of empty directories created at package installation - files: mapping between Bazel labels of archives that go into .rpm package - and their resulting location on .rpm package installation - permissions: mapping between paths and UNIX permissions - symlinks: mapping between source and target of symbolic links - created at installation - tags: additional tags passed to all wrapped rules - """ - tag = "rpm_package_name={}".format(spec_file.split(':')[-1].replace('.spec', '')) - tar_name = "_{}-rpm-tar".format(package_name) - - rpm_data = [] - - if installation_dir: - pkg_tar( - name = tar_name, - extension = "tar.gz", - deps = archives, - package_dir = installation_dir, - empty_dirs = empty_dirs, - files = files, - mode = "0755", - symlinks = symlinks, - modes = permissions, - tags = tags, - ) - rpm_data.append(tar_name) - - if "osx_build" not in native.existing_rules(): - native.config_setting( - name = "osx_build", - constraint_values = [ - "@bazel_tools//platforms:osx", - "@bazel_tools//platforms:x86_64", - ] - ) - - if "linux_build" not in native.existing_rules(): - native.config_setting( - name = "linux_build", - constraint_values = [ - "@bazel_tools//platforms:linux", - "@bazel_tools//platforms:x86_64", - ], - tags = tags, - ) - - if workspace_refs: - modified_spec_target_name = name + "__spec_do_not_reference" - modified_spec_filename = name + '.spec' - args = [ - "$(location @vaticle_bazel_distribution//rpm:generate_spec_file)", - "--output", "$(location {})".format(modified_spec_filename), - "--spec_file", "$(location {})".format(spec_file), - "--workspace_refs", "$(location {})".format(workspace_refs), - ] - native.genrule( - name = modified_spec_target_name, - srcs = [spec_file, 
workspace_refs], - outs = [modified_spec_filename], - cmd = " ".join(args), - tools = ["@vaticle_bazel_distribution//rpm:generate_spec_file"], - tags = tags, - ) - spec_file = modified_spec_target_name - - if not version_file: - version_file = name + "__version__do_not_reference" - _assemble_rpm_version_file( - name = version_file - ) - - pkg_rpm( - name = "{}__do_not_reference__rpm".format(name), - architecture = "x86_64", - spec_file = spec_file, - version_file = version_file, - release = "1", - data = rpm_data, - rpmbuild_path = select({ - ":linux_build": "/usr/bin/rpmbuild", - ":osx_build": "/usr/local/bin/rpmbuild", - "//conditions:default": "" - }), - tags = tags, - ) - - native.genrule( - name = name, - srcs = ["{}__do_not_reference__rpm".format(name)], - cmd = "cp $$(echo $(SRCS) | awk '{print $$1}') $@", - outs = [package_name + ".rpm"], - tags = [tag] + tags, - ) - - -RpmInfo = provider( - fields = { - "package_name": "RPM package name" - } -) - -def _collect_rpm_package_name(target, ctx): - rpm_tag = ctx.rule.attr.tags[0] - package_name = rpm_tag.replace('rpm_package_name=', '') - return RpmInfo(package_name=package_name) - - -collect_rpm_package_name = aspect( - implementation = _collect_rpm_package_name -) - -def _deploy_rpm_impl(ctx): - ctx.actions.expand_template( - template = ctx.file._deployment_script, - output = ctx.outputs.deployment_script, - substitutions = { - "{RPM_PKG}": ctx.attr.target[RpmInfo].package_name, - "{snapshot}": ctx.attr.snapshot, - "{release}": ctx.attr.release, - }, - is_executable = True - ) - - symlinks = { - 'package.rpm': ctx.files.target[0], - } - - return DefaultInfo(executable = ctx.outputs.deployment_script, - runfiles = ctx.runfiles( - files=[ctx.files.target[0]], - symlinks = symlinks)) - - -deploy_rpm = rule( - attrs = { - "target": attr.label( - aspects = [collect_rpm_package_name], - doc = "`assemble_rpm` target to deploy" - ), - "snapshot": attr.string( - mandatory = True, - doc = "Remote repository to deploy rpm snapshot to" - ), - "release": attr.string( - mandatory = True, - doc = "Remote repository to deploy rpm release to" - ), - "_deployment_script": attr.label( - allow_single_file = True, - default = "//rpm/templates:deploy.py" - ), - }, - outputs = { - "deployment_script": "%{name}.py", - }, - implementation = _deploy_rpm_impl, - executable = True, - doc = """Deploy package built with `assemble_rpm` to RPM repository. - - Select deployment to `snapshot` or `release` repository with `bazel run //:some-deploy-rpm -- [snapshot|release] - """ -) diff --git a/rpm/templates/deploy.py b/rpm/templates/deploy.py deleted file mode 100644 index ea658801..00000000 --- a/rpm/templates/deploy.py +++ /dev/null @@ -1,80 +0,0 @@ -#!/usr/bin/env python3 - -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
-# - -import argparse -import os -import subprocess - - -rpm_pkg="{RPM_PKG}" - -parser = argparse.ArgumentParser() -parser.add_argument('repo_type') -args = parser.parse_args() - -repo_type_key = args.repo_type - -rpm_repositories = { - 'snapshot' : "{snapshot}", - 'release' : "{release}" -} - -rpm_registry = rpm_repositories[repo_type_key] - -rpm_username, rpm_password = ( - os.getenv('DEPLOY_RPM_USERNAME'), - os.getenv('DEPLOY_RPM_PASSWORD'), -) - -if not rpm_username: - raise Exception( - 'username should be passed via ' - '$DEPLOY_RPM_USERNAME env variable' - ) - -if not rpm_password: - raise Exception( - 'password should be passed via ' - '$DEPLOY_RPM_PASSWORD env variable' - ) - -package_name = '{}.rpm'.format( - subprocess.check_output([ - 'rpm', - '-qp', - 'package.rpm' -]).decode().strip()) - -upload_status_code = subprocess.check_output([ - 'curl', - '--silent', - '--output', '/dev/stderr', - '--write-out', '%{http_code}', - '-u', '{}:{}'.format(rpm_username, rpm_password), - '-X', 'PUT', - '--upload-file', 'package.rpm', - '{}/{}/{}'.format(rpm_registry, rpm_pkg, package_name) -]).decode().strip() - -if upload_status_code != '200': - raise Exception('upload failed, got HTTP status code {}'.format(upload_status_code)) - -print('Deployment completed.')
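
For reference, the generated deploy scripts in this change all reduce to the same calls into the new `common/uploader` library. The snippet below is a minimal sketch of that flow driven by hand, assuming `common/uploader` is importable from the working directory; the repository URL, credentials and package path are illustrative placeholders, not real endpoints.

```python
# Minimal sketch of the common uploader API introduced in this change.
# Assumes common/uploader is on the import path (the generated deploy scripts
# achieve this by prepending their runfiles to sys.path); the URL, credentials
# and package path below are placeholders for illustration only.
import os

from common.uploader.uploader import Uploader, DeploymentException

# Same environment variables the generated apt deploy script reads.
username = os.getenv('DEPLOY_APT_USERNAME')
password = os.getenv('DEPLOY_APT_PASSWORD')

# Uploader.create dispatches on the URL scheme: a "cloudsmith://owner/repo" URL
# selects CloudsmithUploader, an http(s) URL selects NexusUploader, and
# anything else raises ValueError.
uploader = Uploader.create(username, password, 'cloudsmith://example-owner/example-repo')

try:
    # CloudsmithUploader.apt uploads the file, posts "deb" metadata and then
    # polls until the repository sync completes (or fails).
    uploader.apt('example-package.deb')
except DeploymentException as e:
    # Carries the HTTP status code and response body of the failed request.
    print(e)
    raise
```

Keeping the repository-specific details behind `Uploader.create` is what lets the apt, artifact, helm and maven deploy templates share a single code path for both Cloudsmith and Nexus repositories.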