From 42f266293995c13f106a96bacb0f81823c3b8ce6 Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:10:44 +0100 Subject: [PATCH 1/7] chore: rm unused ffi files --- src/ffi/internals/index.spec.ts | 34 ------------------ src/ffi/internals/index.ts | 63 --------------------------------- 2 files changed, 97 deletions(-) delete mode 100644 src/ffi/internals/index.spec.ts delete mode 100644 src/ffi/internals/index.ts diff --git a/src/ffi/internals/index.spec.ts b/src/ffi/internals/index.spec.ts deleted file mode 100644 index b3a1c2dc..00000000 --- a/src/ffi/internals/index.spec.ts +++ /dev/null @@ -1,34 +0,0 @@ -import chai = require('chai'); -import chaiAsPromised = require('chai-as-promised'); -import { libName } from '.'; - -const { expect } = chai; -chai.use(chaiAsPromised); - -describe('ffi names', () => { - it('has the correct name for windows', () => { - expect(libName('pact_ffi', 'v0.0.1', 'x64', 'win32')).to.be.equal( - 'v0.0.1-pact_ffi-windows-x86_64.dll' - ); - }); - it('has the correct name for linux intel', () => { - expect(libName('pact_ffi', 'v0.0.1', 'x64', 'linux')).to.be.equal( - 'v0.0.1-libpact_ffi-linux-x86_64.so' - ); - }); - it('has the correct name for linux arm', () => { - expect(libName('pact_ffi', 'v0.0.1', 'arm64', 'linux')).to.be.equal( - 'v0.0.1-libpact_ffi-linux-aarch64.so' - ); - }); - it('has the correct name for osx intel', () => { - expect(libName('pact_ffi', 'v0.0.1', 'x64', 'darwin')).to.be.equal( - 'v0.0.1-libpact_ffi-osx-x86_64.dylib' - ); - }); - it('has the correct name for osx arm', () => { - expect(libName('pact_ffi', 'v0.0.1', 'arm64', 'darwin')).to.be.equal( - 'v0.0.1-libpact_ffi-osx-aarch64.dylib' - ); - }); -}); diff --git a/src/ffi/internals/index.ts b/src/ffi/internals/index.ts deleted file mode 100644 index 0b34f6d2..00000000 --- a/src/ffi/internals/index.ts +++ /dev/null @@ -1,63 +0,0 @@ -// This is a lookup between process.platform and -// the platform names used in pact-reference -const PLATFORM_LOOKUP = { - linux: 'linux', - darwin: 'osx', - win32: 'windows', // yes, 'win32' is what process.platform returns on windows 64 bit -}; - -// This is a lookup between process.platform and -// the prefixes for the library name -const LIBNAME_PREFIX_LOOKUP = { - linux: 'lib', - darwin: 'lib', - win32: '', // yes, 'win32' is what process.platform returns on windows 64 bit -}; - -// This is a lookup between process.arch and -// the architecture names used in pact-reference -const ARCH_LOOKUP = { x64: 'x86_64', arm64: 'aarch64' }; - -// This is a lookup between "${platform}-${arch}" and -// the file extensions to link on that platform/arch combination -const EXTENSION_LOOKUP = { - 'osx-x86_64': 'dylib', - 'osx-aarch64': 'dylib', - 'linux-x86_64': 'so', - 'linux-aarch64': 'so', - 'windows-x86_64': 'dll', -}; - -export const libName = ( - library: string, - version: string, - processArch = process.arch, - processPlatform = process.platform -): string => { - const arch = ARCH_LOOKUP[processArch]; - const platform = PLATFORM_LOOKUP[processPlatform]; - - if (!arch || !platform) { - throw new Error( - `Pact does not currently support the operating system and architecture combination '${processPlatform}/${processArch}'` - ); - } - - const target = `${platform}-${arch}`; - - const extension = EXTENSION_LOOKUP[target]; - if (!extension) { - throw new Error( - `Pact doesn't know what extension to use for the libraries in the architecture combination '${target}'` - ); - } - - const libnamePrefix = LIBNAME_PREFIX_LOOKUP[processPlatform]; - if 
(libnamePrefix === undefined) { - throw new Error( - `Pact doesn't know what prefix to use for the libraries on '${processPlatform}'` - ); - } - - return `${version}-${libnamePrefix}${library}-${target}.${extension}`; -}; From 813d9a404072f2cc0833037e7e7a8154b9a35054 Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:11:40 +0100 Subject: [PATCH 2/7] test: renable plugin tests --- script/ci/unpack-and-test.sh | 1 + script/download-libs.sh | 3 +- script/download-plugins.sh | 6 + script/install-plugin-cli.sh | 55 +++ script/lib/export-binary-versions.sh | 3 +- test/consumer.integration.spec.ts | 167 +++++---- test/integration/grpc/grpc.json | 4 +- test/integration/plugin.proto | 421 +++++++++++++++++++++++ test/matt.consumer.integration.spec.ts | 9 +- test/matt.provider.integration.spec.ts | 3 +- test/message.integration.spec.ts | 84 +++-- test/plugin-verifier.integration.spec.ts | 65 ++-- test/verifier.integration.spec.ts | 2 +- 13 files changed, 647 insertions(+), 176 deletions(-) create mode 100755 script/download-plugins.sh create mode 100755 script/install-plugin-cli.sh create mode 100644 test/integration/plugin.proto diff --git a/script/ci/unpack-and-test.sh b/script/ci/unpack-and-test.sh index 23b28453..b21ad2c3 100755 --- a/script/ci/unpack-and-test.sh +++ b/script/ci/unpack-and-test.sh @@ -10,4 +10,5 @@ ls -1 artifact mkdir -p prebuilds mv artifact*/*.tar.gz . || echo "no mac prebuilds" ls *.gz |xargs -n1 tar -xzf +"$SCRIPT_DIR"/../download-plugins.sh "$SCRIPT_DIR"/build-and-test.sh \ No newline at end of file diff --git a/script/download-libs.sh b/script/download-libs.sh index d37ebd2c..e0459a1c 100755 --- a/script/download-libs.sh +++ b/script/download-libs.sh @@ -2,4 +2,5 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)" # Figure out where the script is running . "${SCRIPT_DIR}/lib/export-binary-versions.sh" -"${SCRIPT_DIR}/lib/download-ffi.sh" \ No newline at end of file +"${SCRIPT_DIR}/lib/download-ffi.sh" +"${SCRIPT_DIR}/lib/download-plugins.sh" \ No newline at end of file diff --git a/script/download-plugins.sh b/script/download-plugins.sh new file mode 100755 index 00000000..76b850e3 --- /dev/null +++ b/script/download-plugins.sh @@ -0,0 +1,6 @@ +#!/bin/bash -eu +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)" # Figure out where the script is running + +. "${SCRIPT_DIR}/lib/export-binary-versions.sh" +"${SCRIPT_DIR}/install-plugin-cli.sh" +$HOME/.pact/bin/pact-plugin-cli install -y https://github.com/mefellows/pact-matt-plugin/releases/tag/$PACT_PLUGIN_MATT_VERSION \ No newline at end of file diff --git a/script/install-plugin-cli.sh b/script/install-plugin-cli.sh new file mode 100755 index 00000000..4abdd5be --- /dev/null +++ b/script/install-plugin-cli.sh @@ -0,0 +1,55 @@ +#!/bin/sh -e +# +# Usage: +# $ curl -fsSL https://raw.githubusercontent.com/pact-foundation/pact-plugins/master/install-cli.sh | bash +# or +# $ wget -q https://raw.githubusercontent.com/pact-foundation/pact-plugins/master/install-cli.sh -O- | bash +# +set -e # Needed for Windows bash, which doesn't read the shebang + +detect_osarch() { + # detect_musl + case $(uname -sm) in + 'Linux x86_64') + os='linux' + arch='x86_64' + ;; + 'Linux aarch64') + os='linux' + arch='aarch64' + ;; + 'Darwin x86' | 'Darwin x86_64') + os='osx' + arch='x86_64' + ;; + 'Darwin arm64') + os='osx' + arch='aarch64' + ;; + CYGWIN*|MINGW32*|MSYS*|MINGW*) + os="windows" + arch='x86_64' + ext='.exe' + ;; + *) + echo "Sorry, you'll need to install the plugin CLI manually." 
+ exit 1 + ;; + esac +} + + +VERSION="0.1.2" +detect_osarch + +if [ ! -f ~/.pact/bin/pact-plugin-cli ]; then + echo "--- 🐿 Installing plugins CLI version '${VERSION}' (from tag ${TAG})" + mkdir -p ~/.pact/bin + DOWNLOAD_LOCATION=https://github.com/pact-foundation/pact-plugins/releases/download/pact-plugin-cli-v${VERSION}/pact-plugin-cli-${os}-${arch}${ext}.gz + echo " Downloading from: ${DOWNLOAD_LOCATION}" + curl -L -o ~/.pact/bin/pact-plugin-cli-${os}-${arch}.gz "${DOWNLOAD_LOCATION}" + echo " Downloaded $(file ~/.pact/bin/pact-plugin-cli-${os}-${arch}.gz)" + gunzip -f ~/.pact/bin/pact-plugin-cli-${os}-${arch}.gz + mv ~/.pact/bin/pact-plugin-cli-${os}-${arch} ~/.pact/bin/pact-plugin-cli + chmod +x ~/.pact/bin/pact-plugin-cli +fi \ No newline at end of file diff --git a/script/lib/export-binary-versions.sh b/script/lib/export-binary-versions.sh index 9cc2b95f..b8ceef48 100644 --- a/script/lib/export-binary-versions.sh +++ b/script/lib/export-binary-versions.sh @@ -2,4 +2,5 @@ LIB_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)" # Figure out where the script is running PROJECT_DIR="${LIB_DIR}"/../../ -export FFI_VERSION=v$(grep "PACT_FFI_VERSION = '" "$PROJECT_DIR"/src/ffi/index.ts | grep -E -o "'(.*)'" | cut -d"'" -f2) \ No newline at end of file +export FFI_VERSION=v$(grep "PACT_FFI_VERSION = '" "$PROJECT_DIR"/src/ffi/index.ts | grep -E -o "'(.*)'" | cut -d"'" -f2) +export PACT_PLUGIN_MATT_VERSION=v0.1.1 \ No newline at end of file diff --git a/test/consumer.integration.spec.ts b/test/consumer.integration.spec.ts index 9f1e3caf..0eb949fa 100644 --- a/test/consumer.integration.spec.ts +++ b/test/consumer.integration.spec.ts @@ -20,16 +20,6 @@ const { expect } = chai; const HOST = '127.0.0.1'; -const isWin = process.platform === 'win32'; -const isLinux = process.platform === 'linux'; -const isDarwinArm64 = process.platform === 'darwin' && process.arch === 'arm64'; -const isDarwinX64 = process.platform === 'darwin' && process.arch === 'x64'; -const isCirrusCi = process.env['CIRRUS_CI'] === 'true'; -const usesOctetStream = - isWin || - isDarwinArm64 || - (isCirrusCi && isLinux) || - (isCirrusCi && isDarwinX64); describe('FFI integration test for the HTTP Consumer API', () => { setLogLevel('trace'); @@ -82,9 +72,7 @@ describe('FFI integration test for the HTTP Consumer API', () => { .request({ baseURL: `http://${HOST}:${port}`, headers: { - 'content-type': usesOctetStream - ? 'application/octet-stream' - : 'application/gzip', + 'content-type': 'application/gzip', Accept: 'application/json', 'x-special-header': 'header', }, @@ -202,7 +190,7 @@ describe('FFI integration test for the HTTP Consumer API', () => { interaction.withQuery('someParam', 0, 'someValue'); interaction.withRequestBinaryBody( bytes, - usesOctetStream ? 'application/octet-stream' : 'application/gzip' + 'application/gzip' ); interaction.withResponseBody( JSON.stringify({ @@ -218,16 +206,12 @@ describe('FFI integration test for the HTTP Consumer API', () => { port = pact.createMockServer(HOST); }); - // TODO: find out what's going on here. Suspect binary matching has changed in the core? - // See https://github.com/pact-foundation/pact-reference/issues/171 it('generates a pact with success', () => axios .request({ baseURL: `http://${HOST}:${port}`, headers: { - 'content-type': usesOctetStream - ? 
'application/octet-stream' - : 'application/gzip', + 'content-type': 'application/gzip', Accept: 'application/json', 'x-special-header': 'header', }, @@ -262,78 +246,81 @@ describe('FFI integration test for the HTTP Consumer API', () => { }); // Should only run this if the plugin is installed - describe.skip('using a plugin (protobufs)', () => { - const protoFile = `${__dirname}/integration/plugin.proto`; - - beforeEach(() => { - pact = makeConsumerPact( - 'foo-consumer', - 'bar-provider', - FfiSpecificationVersion['SPECIFICATION_VERSION_V3'] - ); - pact.addPlugin('protobuf', '0.1.14'); - - const interaction = pact.newInteraction('some description'); - const protobufContents = { - 'pact:proto': protoFile, - 'pact:message-type': 'InitPluginRequest', - 'pact:content-type': 'application/protobuf', - implementation: "notEmpty('pact-js-driver')", - version: "matching(semver, '0.0.0')", - }; - - interaction.uponReceiving('a request to get a protobuf'); - interaction.given('protobuf state'); - interaction.withRequest('GET', '/protobuf'); - interaction.withPluginResponseInteractionContents( - 'application/protobuf', - JSON.stringify(protobufContents) - ); - interaction.withStatus(200); - - port = pact.createMockServer(HOST); - }); - - afterEach(() => { - pact.cleanupPlugins(); - }); - - it('generates a pact with success', async () => { - const root = await load(protoFile); - - // Obtain a message type - const InitPluginRequest = root.lookupType( - 'io.pact.plugin.InitPluginRequest' - ); - - return axios - .request({ - baseURL: `http://${HOST}:${port}`, - method: 'GET', - url: '/protobuf', - responseType: 'arraybuffer', - }) - .then((res) => { - const message: any = InitPluginRequest.decode(res.data); - expect(message.implementation).to.equal('pact-js-driver'); - expect(message.version).to.equal('0.0.0'); - }) - .then(() => { - expect(pact.mockServerMatchedSuccessfully(port)).to.be.true; - }) - .then(() => { - // You don't have to call this, it's just here to check it works - const mismatches = pact.mockServerMismatches(port); - expect(mismatches).to.have.length(0); - }) - .then(() => { - pact.writePactFile(path.join(__dirname, '__testoutput__')); - }) - .then(() => { - pact.cleanupMockServer(port); - }); + const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; + (skipPluginTests ? 
describe.skip : describe)( + 'using a plugin (protobufs)', + () => { + const protoFile = `${__dirname}/integration/plugin.proto`; + + beforeEach(() => { + pact = makeConsumerPact( + 'foo-consumer', + 'bar-provider', + FfiSpecificationVersion['SPECIFICATION_VERSION_V3'] + ); + pact.addPlugin('protobuf', '0.3.15'); + + const interaction = pact.newInteraction('some description'); + const protobufContents = { + 'pact:proto': protoFile, + 'pact:message-type': 'InitPluginRequest', + 'pact:content-type': 'application/protobuf', + implementation: "notEmpty('pact-js-driver')", + version: "matching(semver, '0.0.0')", + }; + + interaction.uponReceiving('a request to get a protobuf'); + interaction.given('protobuf state'); + interaction.withRequest('GET', '/protobuf'); + interaction.withPluginResponseInteractionContents( + 'application/protobuf', + JSON.stringify(protobufContents) + ); + interaction.withStatus(200); + + port = pact.createMockServer(HOST); + }); + + afterEach(() => { + pact.cleanupPlugins(); + }); + + it('generates a pact with success', async () => { + const root = await load(protoFile); + + // Obtain a message type + const InitPluginRequest = root.lookupType( + 'io.pact.plugin.InitPluginRequest' + ); + + return axios + .request({ + baseURL: `http://${HOST}:${port}`, + method: 'GET', + url: '/protobuf', + responseType: 'arraybuffer', + }) + .then((res) => { + const message: any = InitPluginRequest.decode(res.data); + expect(message.implementation).to.equal('pact-js-driver'); + expect(message.version).to.equal('0.0.0'); + }) + .then(() => { + expect(pact.mockServerMatchedSuccessfully(port)).to.be.true; + }) + .then(() => { + // You don't have to call this, it's just here to check it works + const mismatches = pact.mockServerMismatches(port); + expect(mismatches).to.have.length(0); + }) + .then(() => { + pact.writePactFile(path.join(__dirname, '__testoutput__')); + }) + .then(() => { + pact.cleanupMockServer(port); + }); + }); }); - }); describe('with multipart data', () => { const form = new FormData(); diff --git a/test/integration/grpc/grpc.json b/test/integration/grpc/grpc.json index 74850ce0..d11983f7 100644 --- a/test/integration/grpc/grpc.json +++ b/test/integration/grpc/grpc.json @@ -5,7 +5,6 @@ "interactions": [ { "description": "A request to do a foo", - "key": "539a26be10e0124e", "pending": false, "request": { "body": { @@ -47,7 +46,6 @@ "markup": "```protobuf\nmessage Feature {\n string name = 1;\n message .routeguide.Point location = 2;\n}\n```\n", "markupType": "COMMON_MARK" }, - "key": "d81a62841ce862db", "pending": false, "pluginConfiguration": { "protobuf": { @@ -143,7 +141,7 @@ } }, "name": "protobuf", - "version": "0.1.14" + "version": "0.3.15" } ] }, diff --git a/test/integration/plugin.proto b/test/integration/plugin.proto new file mode 100644 index 00000000..5ba54f1a --- /dev/null +++ b/test/integration/plugin.proto @@ -0,0 +1,421 @@ +// Proto file for Pact plugin interface V1 + +syntax = "proto3"; + +import "google/protobuf/struct.proto"; +import "google/protobuf/wrappers.proto"; +import "google/protobuf/empty.proto"; + +package io.pact.plugin; +option go_package = "io.pact.plugin"; + +// Request to verify the plugin has loaded OK +message InitPluginRequest { + // Implementation calling the plugin + string implementation = 1; + // Version of the implementation + string version = 2; +} + +// Entry to be added to the core catalogue. Each entry describes one of the features the plugin provides. 
+// Entries will be stored in the catalogue under the key "plugin/$name/$type/$key". +message CatalogueEntry { + enum EntryType { + // Matcher for contents of messages, requests or response bodies + CONTENT_MATCHER = 0; + // Generator for contents of messages, requests or response bodies + CONTENT_GENERATOR = 1; + // Transport for a network protocol + TRANSPORT = 2; + // Matching rule for content field/values + MATCHER = 3; + // Type of interaction + INTERACTION = 4; + } + // Entry type + EntryType type = 1; + // Entry key + string key = 2; + // Associated data required for the entry. For CONTENT_MATCHER and CONTENT_GENERATOR types, a "content-types" + // value (separated by semi-colons) is required for all the content types the plugin supports. + map values = 3; +} + +// Response to init plugin, providing the catalogue entries the plugin provides +message InitPluginResponse { + // List of entries the plugin supports + repeated CatalogueEntry catalogue = 1; +} + +// Catalogue of Core Pact + Plugin features +message Catalogue { + // List of entries from the core catalogue + repeated CatalogueEntry catalogue = 1; +} + +// Message representing a request, response or message body +message Body { + // The content type of the body in MIME format (i.e. application/json) + string contentType = 1; + // Bytes of the actual content + google.protobuf.BytesValue content = 2; + // Enum of content type override. This is a hint on how the content type should be treated. + enum ContentTypeHint { + // Determine the form of the content using the default rules of the Pact implementation + DEFAULT = 0; + // Contents must always be treated as a text form + TEXT = 1; + // Contents must always be treated as a binary form + BINARY = 2; + } + // Content type override to apply (if required). If omitted, the default rules of the Pact implementation + // will be used + ContentTypeHint contentTypeHint = 3; +} + +// Request to preform a comparison on an actual body given the expected one +message CompareContentsRequest { + // Expected body from the Pact interaction + Body expected = 1; + // Actual received body + Body actual = 2; + // If unexpected keys or attributes should be allowed. Setting this to false results in additional keys or fields + // will cause a mismatch + bool allow_unexpected_keys = 3; + // Map of expressions to matching rules. The expressions follow the documented Pact matching rule expressions + map rules = 4; + // Additional data added to the Pact/Interaction by the plugin + PluginConfiguration pluginConfiguration = 5; +} + +// Indicates that there was a mismatch with the content type +message ContentTypeMismatch { + // Expected content type (MIME format) + string expected = 1; + // Actual content type received (MIME format) + string actual = 2; +} + +// A mismatch for an particular item of content +message ContentMismatch { + // Expected data bytes + google.protobuf.BytesValue expected = 1; + // Actual data bytes + google.protobuf.BytesValue actual = 2; + // Description of the mismatch + string mismatch = 3; + // Path to the item that was matched. This is the value as per the documented Pact matching rule expressions. + string path = 4; + // Optional diff of the contents + string diff = 5; + // Part of the interaction that the mismatch is for: body, headers, metadata, etc. 
+ string mismatchType = 6; +} + +// List of content mismatches +message ContentMismatches { + repeated ContentMismatch mismatches = 1; +} + +// Response to the CompareContentsRequest with the results of the comparison +message CompareContentsResponse { + // Error message if an error occurred. If this field is set, the remaining fields will be ignored and the + // verification marked as failed + string error = 1; + // There was a mismatch with the types of content. If this is set, the results may not be set. + ContentTypeMismatch typeMismatch = 2; + // Results of the match, keyed by matching rule expression + map results = 3; +} + +// Request to configure/setup an interaction so that it can be verified later +message ConfigureInteractionRequest { + // Content type of the interaction (MIME format) + string contentType = 1; + // This is data specified by the user in the consumer test + google.protobuf.Struct contentsConfig = 2; +} + +// Represents a matching rule +message MatchingRule { + // Type of the matching rule + string type = 1; + // Associated data for the matching rule + google.protobuf.Struct values = 2; +} + +// List of matching rules +message MatchingRules { + repeated MatchingRule rule = 1; +} + +// Example generator +message Generator { + // Type of generator + string type = 1; + // Associated data for the generator + google.protobuf.Struct values = 2; +} + +// Plugin configuration added to the pact file by the ConfigureInteraction step +message PluginConfiguration { + // Data to be persisted against the interaction + google.protobuf.Struct interactionConfiguration = 1; + // Data to be persisted in the Pact file metadata (Global data) + google.protobuf.Struct pactConfiguration = 2; +} + +// Response to the configure/setup an interaction request +message InteractionResponse { + // Contents for the interaction + Body contents = 1; + // All matching rules to apply + map rules = 2; + // Generators to apply + map generators = 3; + // For message interactions, any metadata to be applied + google.protobuf.Struct messageMetadata = 4; + // Plugin specific data to be persisted in the pact file + PluginConfiguration pluginConfiguration = 5; + // Markdown/HTML formatted text representation of the interaction + string interactionMarkup = 6; + // Type of markup used + enum MarkupType { + // CommonMark format + COMMON_MARK = 0; + // HTML format + HTML = 1; + } + MarkupType interactionMarkupType = 7; + // Description of what part this interaction belongs to (in the case of there being more than one, for instance, + // request/response messages) + string partName = 8; + // All matching rules to apply to any message metadata + map metadata_rules = 9; + // Generators to apply to any message metadata + map metadata_generators = 10; +} + +// Response to the configure/setup an interaction request +message ConfigureInteractionResponse { + // If an error occurred. In this case, the other fields will be ignored/not set + string error = 1; + // The actual response if no error occurred. 
+ repeated InteractionResponse interaction = 2; + // Plugin specific data to be persisted in the pact file + PluginConfiguration pluginConfiguration = 3; +} + +// Request to generate the contents using any defined generators +message GenerateContentRequest { + // Original contents + Body contents = 1; + // Generators to apply + map generators = 2; + // Additional data added to the Pact/Interaction by the plugin + PluginConfiguration pluginConfiguration = 3; + // Context data provided by the test framework + google.protobuf.Struct testContext = 4; + + // The mode of the generation, if running from a consumer test or during provider verification + enum TestMode { + Unknown = 0; + // Running on the consumer side + Consumer = 1; + // Running on the provider side + Provider = 2; + } + TestMode testMode = 5; + + // Which part the content is for + enum ContentFor { + Request = 0; + Response = 1; + } + ContentFor contentFor = 6; +} + +// Generated body/message response +message GenerateContentResponse { + Body contents = 1; +} + +// Request to start a mock server +message StartMockServerRequest { + // Interface to bind to. Will default to the loopback adapter + string hostInterface = 1; + // Port to bind to. Default (or a value of 0) get the OS to open a random port + uint32 port = 2; + // If TLS should be used (if supported by the mock server) + bool tls = 3; + // Pact as JSON to use for the mock server behaviour + string pact = 4; + // Context data provided by the test framework + google.protobuf.Struct testContext = 5; +} + +// Response to the start mock server request +message StartMockServerResponse { + oneof response { + // If an error occurred + string error = 1; + + // Mock server details + MockServerDetails details = 2; + } +} + +// Details on a running mock server +message MockServerDetails { + // Mock server unique ID + string key = 1; + // Port the mock server is running on + uint32 port = 2; + // IP address the mock server is bound to. Probably an IP6 address, but may be IP4 + string address = 3; +} + +// Request to shut down a running mock server +// TODO: replace this with MockServerRequest in the next major version +message ShutdownMockServerRequest { + // The server ID to shutdown + string serverKey = 1; +} + +// Request for a running mock server by ID +message MockServerRequest { + // The server ID to shutdown + string serverKey = 1; +} + +// Result of a request that the mock server received +message MockServerResult { + // service + method that was requested + string path = 1; + // If an error occurred trying to handle the request + string error = 2; + // Any mismatches that occurred + repeated ContentMismatch mismatches = 3; +} + +// Response to the shut down mock server request +// TODO: replace this with MockServerResults in the next major version +message ShutdownMockServerResponse { + // If the mock status is all ok + bool ok = 1; + // The results of the test run, will contain an entry for each request received by the mock server + repeated MockServerResult results = 2; +} + +// Matching results of the mock server. 
+message MockServerResults { + // If the mock status is all ok + bool ok = 1; + // The results of the test run, will contain an entry for each request received by the mock server + repeated MockServerResult results = 2; +} + +// Request to prepare an interaction for verification +message VerificationPreparationRequest { + // Pact as JSON to use for the verification + string pact = 1; + // Interaction key for the interaction from the Pact that is being verified + string interactionKey = 2; + // Any data supplied by the user to verify the interaction + google.protobuf.Struct config = 3; +} + +// Request metadata value. Will either be a JSON-like value, or binary data +message MetadataValue { + oneof value { + google.protobuf.Value nonBinaryValue = 1; + bytes binaryValue = 2; + } +} + +// Interaction request data to be sent or received for verification +message InteractionData { + // Request/Response body as bytes + Body body = 1; + // Metadata associated with the request/response + map metadata = 2; +} + +// Response for the prepare an interaction for verification request +message VerificationPreparationResponse { + oneof response { + // If an error occurred + string error = 1; + + // Interaction data required to construct any request + InteractionData interactionData = 2; + } +} + +// Request data to verify an interaction +message VerifyInteractionRequest { + // Interaction data required to construct the request + InteractionData interactionData = 1; + // Any data supplied by the user to verify the interaction + google.protobuf.Struct config = 2; + // Pact as JSON to use for the verification + string pact = 3; + // Interaction key for the interaction from the Pact that is being verified + string interactionKey = 4; +} + +message VerificationResultItem { + oneof result { + string error = 1; + ContentMismatch mismatch = 2; + } +} + +// Result of running the verification +message VerificationResult { + // Was the verification successful? + bool success = 1; + // Interaction data retrieved from the provider (optional) + InteractionData responseData = 2; + // Any mismatches that occurred + repeated VerificationResultItem mismatches = 3; + // Output for the verification to display to the user + repeated string output = 4; +} + +// Result of running the verification +message VerifyInteractionResponse { + oneof response { + // If an error occurred trying to run the verification + string error = 1; + + VerificationResult result = 2; + } +} + +service PactPlugin { + // Check that the plugin loaded OK. Returns the catalogue entries describing what the plugin provides + rpc InitPlugin(InitPluginRequest) returns (InitPluginResponse); + // Updated catalogue. This will be sent when the core catalogue has been updated (probably by a plugin loading). + rpc UpdateCatalogue(Catalogue) returns (google.protobuf.Empty); + // Request to perform a comparison of some contents (matching request) + rpc CompareContents(CompareContentsRequest) returns (CompareContentsResponse); + // Request to configure/setup the interaction for later verification. Data returned will be persisted in the pact file. 
+ rpc ConfigureInteraction(ConfigureInteractionRequest) returns (ConfigureInteractionResponse); + // Request to generate the content using any defined generators + rpc GenerateContent(GenerateContentRequest) returns (GenerateContentResponse); + + // Start a mock server + rpc StartMockServer(StartMockServerRequest) returns (StartMockServerResponse); + // Shutdown a running mock server + // TODO: Replace the message types with MockServerRequest and MockServerResults in the next major version + rpc ShutdownMockServer(ShutdownMockServerRequest) returns (ShutdownMockServerResponse); + // Get the matching results from a running mock server + rpc GetMockServerResults(MockServerRequest) returns (MockServerResults); + + // Prepare an interaction for verification. This should return any data required to construct any request + // so that it can be amended before the verification is run + rpc PrepareInteractionForVerification(VerificationPreparationRequest) returns (VerificationPreparationResponse); + // Execute the verification for the interaction. + rpc VerifyInteraction(VerifyInteractionRequest) returns (VerifyInteractionResponse); +} \ No newline at end of file diff --git a/test/matt.consumer.integration.spec.ts b/test/matt.consumer.integration.spec.ts index 512bb2f9..fa439829 100644 --- a/test/matt.consumer.integration.spec.ts +++ b/test/matt.consumer.integration.spec.ts @@ -42,7 +42,8 @@ const sendMattMessageTCP = ( }); }; -describe.skip('MATT protocol test', () => { +const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; +(skipPluginTests ? describe.skip : describe)('MATT protocol test', () => { setLogLevel('trace'); let provider: ConsumerPact; @@ -60,7 +61,7 @@ describe.skip('MATT protocol test', () => { 'matt-provider', FfiSpecificationVersion['SPECIFICATION_VERSION_V4'] ); - provider.addPlugin('matt', '0.0.2'); + provider.addPlugin('matt', '0.1.1'); const interaction = provider.newInteraction(''); interaction.uponReceiving('A request to communicate via MATT'); @@ -131,7 +132,7 @@ describe.skip('MATT protocol test', () => { beforeEach(() => { const mattMessage = `{"request": {"body": "hellotcp"}, "response":{"body":"tcpworld"}}`; - tcpProvider.addPlugin('matt', '0.0.2'); + tcpProvider.addPlugin('matt', '0.1.1'); const message = tcpProvider.newSynchronousMessage('a MATT message'); message.withPluginRequestResponseInteractionContents( @@ -148,7 +149,7 @@ describe.skip('MATT protocol test', () => { }); it('generates a pact with success', async () => { - const message = await sendMattMessageTCP('hello', HOST, port); + const message = await sendMattMessageTCP('hellotcp', HOST, port); expect(message).to.eq('tcpworld'); const res = tcpProvider.mockServerMatchedSuccessfully(port); diff --git a/test/matt.provider.integration.spec.ts b/test/matt.provider.integration.spec.ts index adca3474..ad0097ed 100644 --- a/test/matt.provider.integration.spec.ts +++ b/test/matt.provider.integration.spec.ts @@ -52,7 +52,8 @@ const startTCPServer = (host: string, port: number) => { }); }; -describe.skip('MATT protocol test', () => { +const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; +(skipPluginTests ? 
describe.skip : describe)('MATT protocol test', () => { setLogLevel('info'); describe('HTTP and TCP Provider', () => { diff --git a/test/message.integration.spec.ts b/test/message.integration.spec.ts index 27c0f2d8..4ec1f829 100644 --- a/test/message.integration.spec.ts +++ b/test/message.integration.spec.ts @@ -13,12 +13,6 @@ import { setLogLevel } from '../src/logger'; chai.use(chaiAsPromised); const { expect } = chai; -const isWin = process.platform === 'win32'; -const isLinux = process.platform === 'linux'; -const isDarwinArm64 = process.platform === 'darwin' && process.arch === 'arm64'; -const isCirrusCi = process.env['CIRRUS_CI'] === 'true'; -const usesOctetStream = isWin || isDarwinArm64 || (isCirrusCi && isLinux); - const getFeature = async (address: string, protoFile: string) => { const def = await load(protoFile); const { routeguide } = grpc.loadPackageDefinition(def); @@ -89,11 +83,6 @@ describe('FFI integration test for the Message Consumer API', () => { }); }); - // See https://github.com/pact-foundation/pact-reference/issues/171 for why we have an OS switch here - // Windows: does not have magic mime matcher, uses content-type - // OSX on CI: does not magic mime matcher, uses content-type - // OSX: has magic mime matcher, sniffs content - // Linux: has magic mime matcher, sniffs content describe('with binary data', () => { it('generates a pact with success', () => { const message = pact.newAsynchronousMessage(''); @@ -102,7 +91,7 @@ describe('FFI integration test for the Message Consumer API', () => { message.givenWithParam('some state 2', 'state2 key', 'state2 val'); message.withBinaryContents( bytes, - usesOctetStream ? 'application/octet-stream' : 'application/gzip' + 'application/gzip' ); message.withMetadata('meta-key', 'meta-val'); @@ -144,17 +133,24 @@ describe('FFI integration test for the Message Consumer API', () => { }); }); - describe.skip('with plugin contents (gRPC)', () => { - const protoFile = `${__dirname}/integration/grpc/route_guide.proto`; + const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; + (skipPluginTests ? 
describe.skip : describe)( + 'with plugin contents (gRPC)', + () => { + let protoFile = `${__dirname}/integration/grpc/route_guide.proto`; + if (process.platform === 'win32') { + const escapedProtoFile = protoFile.replace(/\\/g, '\\\\'); + protoFile = escapedProtoFile; + } - let port: number; + let port: number; - afterEach(() => { - pact.cleanupPlugins(); - }); + afterEach(() => { + pact.cleanupPlugins(); + }); - beforeEach(() => { - const grpcInteraction = `{ + beforeEach(() => { + const grpcInteraction = `{ "pact:proto": "${protoFile}", "pact:proto-service": "RouteGuide/GetFeature", "pact:content-type": "application/protobuf", @@ -171,36 +167,36 @@ describe('FFI integration test for the Message Consumer API', () => { } }`; - pact.addMetadata('pact-node', 'meta-key', 'meta-val'); - pact.addPlugin('protobuf', '0.1.14'); + pact.addMetadata('pact-node', 'meta-key', 'meta-val'); + pact.addPlugin('protobuf', '0.3.15'); - const message = pact.newSynchronousMessage('a grpc test 1'); - message.given('some state 1'); - message.withPluginRequestResponseInteractionContents( - 'application/protobuf', - grpcInteraction - ); - message.withMetadata('meta-key 1', 'meta-val 2'); + const message = pact.newSynchronousMessage('a grpc test 1'); + message.given('some state 1'); + message.withPluginRequestResponseInteractionContents( + 'application/protobuf', + grpcInteraction + ); + message.withMetadata('meta-key 1', 'meta-val 2'); - port = pact.pactffiCreateMockServerForTransport( - '127.0.0.1', - 'grpc', - '' - ); - }); + port = pact.pactffiCreateMockServerForTransport( + '127.0.0.1', + 'grpc', + '' + ); + }); - it('generates a pact with success', async () => { - const feature: any = await getFeature(`127.0.0.1:${port}`, protoFile); - expect(feature.name).to.eq('Big Tree'); + it('generates a pact with success', async () => { + const feature: any = await getFeature(`127.0.0.1:${port}`, protoFile); + expect(feature.name).to.eq('Big Tree'); - const res = pact.mockServerMatchedSuccessfully(port); - expect(res).to.eq(true); + const res = pact.mockServerMatchedSuccessfully(port); + expect(res).to.eq(true); - const mismatches = pact.mockServerMismatches(port); - expect(mismatches.length).to.eq(0); + const mismatches = pact.mockServerMismatches(port); + expect(mismatches.length).to.eq(0); - pact.writePactFile(path.join(__dirname, '__testoutput__')); + pact.writePactFile(path.join(__dirname, '__testoutput__')); + }); }); - }); }); }); diff --git a/test/plugin-verifier.integration.spec.ts b/test/plugin-verifier.integration.spec.ts index 47c46de4..617a8083 100644 --- a/test/plugin-verifier.integration.spec.ts +++ b/test/plugin-verifier.integration.spec.ts @@ -99,37 +99,40 @@ const getFeature = async (address: string, protoFile: string) => { }); }; -describe.skip('Plugin Verifier Integration Spec', () => { - context('plugin tests', () => { - describe('grpc interaction', () => { - before(async () => { - const server = getGRPCServer(); - startGRPCServer(server, GRPC_PORT); - await startHTTPServer(HTTP_PORT); - }); - - it('should verify the gRPC interactions', async () => { - await verifierFactory({ - providerBaseUrl: `http://127.0.0.1:${HTTP_PORT}`, - transports: [ - { - port: GRPC_PORT, - protocol: 'grpc', - }, - ], - logLevel: 'debug', - pactUrls: [`${__dirname}/integration/grpc/grpc.json`], - }).verify(); - - expect('').to.eq(''); - }); - - it('runs the grpc client', async () => { - const protoFile = `${__dirname}/integration/grpc/route_guide.proto`; - const feature = await getFeature(`127.0.0.1:${GRPC_PORT}`, 
protoFile); - - console.log(feature); +const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; +(skipPluginTests ? describe.skip : describe)( + 'Plugin Verifier Integration Spec', + () => { + context('plugin tests', () => { + describe('grpc interaction', () => { + before(async () => { + const server = getGRPCServer(); + startGRPCServer(server, GRPC_PORT); + await startHTTPServer(HTTP_PORT); + }); + + it('should verify the gRPC interactions', async () => { + await verifierFactory({ + providerBaseUrl: `http://127.0.0.1:${HTTP_PORT}`, + transports: [ + { + port: GRPC_PORT, + protocol: 'grpc', + }, + ], + logLevel: 'debug', + pactUrls: [`${__dirname}/integration/grpc/grpc.json`], + }).verify(); + + expect('').to.eq(''); + }); + + it('runs the grpc client', async () => { + const protoFile = `${__dirname}/integration/grpc/route_guide.proto`; + const feature = await getFeature(`127.0.0.1:${GRPC_PORT}`, protoFile); + + console.log(feature); + }); }); }); }); -}); diff --git a/test/verifier.integration.spec.ts b/test/verifier.integration.spec.ts index 3d7ce917..9a74f165 100644 --- a/test/verifier.integration.spec.ts +++ b/test/verifier.integration.spec.ts @@ -175,7 +175,7 @@ describe('Verifier Integration Spec', () => { // thread '' panicked at 'Cannot drop a runtime in a context where blocking is not allowed. This happens when a runtime is dropped from within an asynchronous context // with RUST_BACKTRACE=1 it seems that it relates to fetching from the broker, and something bad // is happening in reqwest - context.skip('from a Pact Broker', () => { + context('from a Pact Broker', () => { context('without authentication', () => { it('should return a successful promise', () => expect( From 34c7612a4b4c12c324a076d5c0234a9520509d83 Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:11:55 +0100 Subject: [PATCH 3/7] fix: update to libpact_ffi 0.4.21 --- src/ffi/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ffi/index.ts b/src/ffi/index.ts index 403a1173..ac217c2f 100644 --- a/src/ffi/index.ts +++ b/src/ffi/index.ts @@ -4,7 +4,7 @@ import logger, { DEFAULT_LOG_LEVEL } from '../logger'; import { LogLevel } from '../logger/types'; import { Ffi } from './types'; -export const PACT_FFI_VERSION = '0.4.20'; +export const PACT_FFI_VERSION = '0.4.21'; // supported prebuilds // darwin-arm64 From 2d4db3918b28afe25e770ed3fbadca025faed20d Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:12:24 +0100 Subject: [PATCH 4/7] feat: provide musl arm64/x86_64 prebuilds --- binding.gyp | 325 ++++++++++++++++++-------------- script/ci/check-release-libs.sh | 10 + script/ci/prebuild-alpine.sh | 34 ++++ script/lib/download-ffi.sh | 23 ++- 4 files changed, 239 insertions(+), 153 deletions(-) create mode 100755 script/ci/prebuild-alpine.sh diff --git a/binding.gyp b/binding.gyp index bb0c7998..033e05e1 100644 --- a/binding.gyp +++ b/binding.gyp @@ -1,150 +1,185 @@ { - "targets": [ - { - "target_name": "pact", - "sources": [ - "native/addon.cc", - "native/ffi.cc", - "native/consumer.cc", - "native/provider.cc", - "native/plugin.cc" - ], - "include_dirs": [ - " Date: Wed, 19 Jun 2024 17:12:53 +0100 Subject: [PATCH 5/7] test(ci): refactor build-and-test workflow to cover musl builds --- .github/workflows/build-and-test.yml | 146 ++++++++++++++++++++------- 1 file changed, 110 insertions(+), 36 deletions(-) diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 49d4bb18..b042a1e7 100644 --- 
a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -15,12 +15,12 @@ jobs: with: fetch-depth: 0 - run: GH_CREATE_PRE_RELEASE=true ./script/ci/release.sh - if: github.ref == 'refs/heads/master' && env.ACT != 'true' && runner.os == 'Linux' + if: github.ref == 'refs/heads/master' && env.ACT != true && runner.os == 'Linux' env: GITHUB_TOKEN: ${{ github.token }} - + prebuild: - needs: [ create_pre_release ] + needs: [create_pre_release] runs-on: ${{ matrix.os }} defaults: run: @@ -30,11 +30,31 @@ jobs: matrix: node-version: [20] os: [ - macos-14, - macos-12, - ubuntu-latest, - windows-latest + macos-12, + ubuntu-latest, + windows-latest, ] + docker: [false] + alpine: [false] + arch: ['amd64'] + include: + - os: ubuntu-latest + docker: true + alpine: false + arch: arm64 + - os: ubuntu-latest + docker: true + alpine: true + arch: arm64 + - os: ubuntu-latest + docker: true + alpine: true + arch: amd64 + - os: macos-14 + docker: false + alpine: false + arch: arm64 + name: Prebuild ${{ matrix.docker == true && matrix.alpine == true && 'linux-musl' || matrix.docker == true && matrix.alpine == false && 'linux' || matrix.os }}-${{ matrix.arch }} env: NODE_VERSION: ${{ matrix.node-version }} @@ -43,7 +63,7 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - + - name: Use Node.js ${{ env.NODE_VERSION }} uses: actions/setup-node@v4 with: @@ -52,27 +72,32 @@ jobs: - if: runner.os == 'Windows' run: echo "ONLY_DOWNLOAD_PACT_FOR_WINDOWS=true" >> $GITHUB_ENV - - run: ./script/ci/prebuild.sh - - name: Set up QEMU - if: runner.os == 'Linux' + if: ${{ matrix.docker == true && matrix.arch == 'arm64' }} uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx - if: runner.os == 'Linux' - uses: docker/setup-buildx-action@v3 - - if: runner.os == 'Linux' - name: prebuild arm64 - run: docker run -v $PWD:/home --platform linux/arm64 --rm node:20 bin/bash -c 'cd /home && /home/script/ci/prebuild.sh' + - if: ${{ matrix.docker == true && matrix.alpine == true }} + name: prebuild linux ${{ matrix.arch }} musl + run: docker run -v $PWD:/home --platform linux/${{ matrix.arch }} --rm node:20-alpine bin/sh -c 'apk add bash && cd /home && bash -c "/home/script/ci/prebuild-alpine.sh" && rm -rf ffi node_modules' + + - if: ${{ matrix.docker == true && matrix.alpine != true }} + name: prebuild linux ${{ matrix.arch }} + run: docker run -v $PWD:/home --platform linux/${{ matrix.arch }} --rm node:20 bin/bash -c 'cd /home && /home/script/ci/prebuild.sh && rm -rf ffi node_modules' + + - run: sudo chown -R $(id -u):$(id -g) prebuilds + if: ${{ matrix.docker == true }} + + - run: ./script/ci/prebuild.sh + if: ${{ matrix.docker != true }} - name: Upload prebuild for ${{ runner.os }}-${{ runner.arch }} uses: actions/upload-artifact@v3 with: path: prebuilds/*.tar.gz - + - run: GH_PRE_RELEASE_UPLOAD=true ./script/ci/release.sh - if: github.ref == 'refs/heads/master' && env.ACT != 'true' + if: github.ref == 'refs/heads/master' && env.ACT != true env: - GITHUB_TOKEN: ${{ github.token }} + GITHUB_TOKEN: ${{ github.token }} test: runs-on: ${{ matrix.os }} @@ -83,13 +108,53 @@ jobs: strategy: fail-fast: false matrix: - node-version: [16,18,20] - os: [ - macos-14, - macos-12, - ubuntu-latest, - windows-latest - ] + node-version: [16, 18, 20, 22] + os: [macos-14, macos-12, ubuntu-latest, windows-latest] + docker: [false] + include: + - os: ubuntu-latest + docker: true + alpine: false + arch: arm64 + node-version: 22 + - os: ubuntu-latest + docker: true + alpine: true + arch: arm64 + node-version: 22 + 
- os: ubuntu-latest + docker: true + alpine: true + arch: amd64 + node-version: 22 + - os: ubuntu-latest + docker: true + alpine: false + arch: arm64 + node-version: 20 + - os: ubuntu-latest + docker: true + alpine: true + arch: arm64 + node-version: 20 + - os: ubuntu-latest + docker: true + alpine: true + arch: amd64 + node-version: 20 + - os: ubuntu-latest + docker: true + alpine: true + arch: arm64 + node-version: 18 + - os: ubuntu-latest + docker: true + alpine: true + arch: amd64 + node-version: 18 + + name: Test ${{ matrix.docker == true && matrix.alpine == true && 'linux-musl' || matrix.docker == true && matrix.alpine == false && 'linux' || matrix.os }}-${{ matrix.arch }}-node-${{ matrix.node-version }} + env: NODE_VERSION: ${{ matrix.node-version }} LOG_LEVEL: debug @@ -109,22 +174,31 @@ jobs: - if: runner.os == 'Windows' run: echo "ONLY_DOWNLOAD_PACT_FOR_WINDOWS=true" >> $GITHUB_ENV - - run: LOG_LEVEL=debug ./script/ci/unpack-and-test.sh + - if: matrix.os == 'macos-14' + run: brew install protobuf + - run: LOG_LEVEL=debug ./script/ci/unpack-and-test.sh + if: ${{ matrix.docker != true }} - name: Set up QEMU - if: runner.os == 'Linux' + if: ${{ matrix.docker == true && matrix.arch == 'arm64' }} uses: docker/setup-qemu-action@v3 - - name: Set up Docker Buildx - if: runner.os == 'Linux' - uses: docker/setup-buildx-action@v3 - - if: runner.os == 'Linux' + + - if: ${{ matrix.docker == true && matrix.alpine != true && matrix.arch == 'arm64' }} name: test arm64 - run: docker run -v $PWD:/home --platform linux/arm64 --rm node:20 bin/bash -c 'cd /home && npm test' + run: docker run -v $PWD:/home --platform linux/${{ matrix.arch }} --rm node:${{ matrix.node-version }} bin/bash -c 'cd /home && /home/script/ci/unpack-and-test.sh' + + - if: ${{ matrix.docker == true && matrix.alpine == true && matrix.arch == 'amd64' }} + name: test linux amd64 musl + run: docker run -v $PWD:/home --platform linux/${{ matrix.arch }} --rm node:${{ matrix.node-version }}-alpine bin/sh -c 'apk add bash curl gcompat file && cd /home && /home/script/ci/unpack-and-test.sh' + + - if: ${{ matrix.docker == true && matrix.alpine == true && matrix.arch == 'arm64' }} + name: test linux arm64 musl + run: docker run -v $PWD:/home --platform linux/${{ matrix.arch }} --rm node:${{ matrix.node-version }}-alpine bin/sh -c 'apk add bash curl file protoc protobuf-dev && cd /home && /home/script/ci/unpack-and-test.sh' release_dry_run: runs-on: ubuntu-latest - needs: [ create_pre_release, prebuild ] + needs: [create_pre_release, prebuild] if: github.ref == 'refs/heads/master' env: @@ -141,7 +215,7 @@ jobs: node-version: ${{ env.NODE_VERSION }} registry-url: 'https://registry.npmjs.org' - - name: "release - dry run: ${{ env.DRY_RUN }}" + - name: 'release - dry run: ${{ env.DRY_RUN }}' id: publish run: script/ci/release.sh env: From 465416918d68f9ea83d2ce1b4a500231d0a2513d Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:17:36 +0100 Subject: [PATCH 6/7] chore: update osx binary names for macos --- binding.gyp | 4 ++-- script/download-libs.sh | 2 +- script/lib/download-ffi.sh | 8 ++++---- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/binding.gyp b/binding.gyp index 033e05e1..b719727e 100644 --- a/binding.gyp +++ b/binding.gyp @@ -56,7 +56,7 @@ ] }, "copies": [{ - "files": ["<(module_root_dir)/ffi/osx-x86_64/libpact_ffi.dylib"], + "files": ["<(module_root_dir)/ffi/macos-x86_64/libpact_ffi.dylib"], "destination": "<(PRODUCT_DIR)" }], } @@ -79,7 +79,7 @@ ] }, "copies": [{ - "files": 
["<(module_root_dir)/ffi/osx-aarch64/libpact_ffi.dylib"], + "files": ["<(module_root_dir)/ffi/macos-aarch64/libpact_ffi.dylib"], "destination": "<(PRODUCT_DIR)" }], } diff --git a/script/download-libs.sh b/script/download-libs.sh index e0459a1c..8402cbac 100755 --- a/script/download-libs.sh +++ b/script/download-libs.sh @@ -3,4 +3,4 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")"; pwd)" # Figure out where the . "${SCRIPT_DIR}/lib/export-binary-versions.sh" "${SCRIPT_DIR}/lib/download-ffi.sh" -"${SCRIPT_DIR}/lib/download-plugins.sh" \ No newline at end of file +"${SCRIPT_DIR}/download-plugins.sh" \ No newline at end of file diff --git a/script/lib/download-ffi.sh b/script/lib/download-ffi.sh index cd4aef00..a58a8b6f 100755 --- a/script/lib/download-ffi.sh +++ b/script/lib/download-ffi.sh @@ -18,11 +18,11 @@ fi warn "Cleaning ffi directory $FFI_DIR" rm -rf "${FFI_DIR:?}" -mkdir -p "$FFI_DIR/osx-x86_64" +mkdir -p "$FFI_DIR/macos-x86_64" mkdir -p "$FFI_DIR/linux-x86_64" mkdir -p "$FFI_DIR/linux-musl-x86_64" mkdir -p "$FFI_DIR/windows-x86_64" -mkdir -p "$FFI_DIR/osx-aarch64" +mkdir -p "$FFI_DIR/macos-aarch64" mkdir -p "$FFI_DIR/linux-aarch64" mkdir -p "$FFI_DIR/linux-musl-aarch64" @@ -69,8 +69,8 @@ if [ -z "${ONLY_DOWNLOAD_PACT_FOR_WINDOWS:-}" ]; then download_ffi "linux-aarch64.so.gz" "lib" "linux-aarch64/libpact_ffi.so.gz" download_ffi "linux-x86_64-musl.so.gz" "lib" "linux-musl-x86_64/libpact_ffi_musl.so.gz" download_ffi "linux-aarch64-musl.so.gz" "lib" "linux-musl-aarch64/libpact_ffi_musl.so.gz" - download_ffi "osx-x86_64.dylib.gz" "lib" "osx-x86_64/libpact_ffi.dylib.gz" - download_ffi "osx-aarch64.dylib.gz" "lib" "osx-aarch64/libpact_ffi.dylib.gz" + download_ffi "macos-x86_64.dylib.gz" "lib" "macos-x86_64/libpact_ffi.dylib.gz" + download_ffi "macos-aarch64.dylib.gz" "lib" "macos-aarch64/libpact_ffi.dylib.gz" else warn "Skipped download of non-windows FFI libs because ONLY_DOWNLOAD_PACT_FOR_WINDOWS is set" fi From 2fb19feadd87d15e4e2fdbc38b7f711728683698 Mon Sep 17 00:00:00 2001 From: Yousaf Nabi Date: Wed, 19 Jun 2024 17:25:20 +0100 Subject: [PATCH 7/7] chore: lint corrections --- test/consumer.integration.spec.ts | 9 +++------ test/message.integration.spec.ts | 8 +++----- test/plugin-verifier.integration.spec.ts | 3 ++- 3 files changed, 8 insertions(+), 12 deletions(-) diff --git a/test/consumer.integration.spec.ts b/test/consumer.integration.spec.ts index 0eb949fa..c76fa114 100644 --- a/test/consumer.integration.spec.ts +++ b/test/consumer.integration.spec.ts @@ -20,7 +20,6 @@ const { expect } = chai; const HOST = '127.0.0.1'; - describe('FFI integration test for the HTTP Consumer API', () => { setLogLevel('trace'); @@ -188,10 +187,7 @@ describe('FFI integration test for the HTTP Consumer API', () => { interaction.withRequest('POST', '/dogs/1234'); interaction.withRequestHeader('x-special-header', 0, 'header'); interaction.withQuery('someParam', 0, 'someValue'); - interaction.withRequestBinaryBody( - bytes, - 'application/gzip' - ); + interaction.withRequestBinaryBody(bytes, 'application/gzip'); interaction.withResponseBody( JSON.stringify({ name: like('fido'), @@ -320,7 +316,8 @@ describe('FFI integration test for the HTTP Consumer API', () => { pact.cleanupMockServer(port); }); }); - }); + } + ); describe('with multipart data', () => { const form = new FormData(); diff --git a/test/message.integration.spec.ts b/test/message.integration.spec.ts index 4ec1f829..429eb1a6 100644 --- a/test/message.integration.spec.ts +++ b/test/message.integration.spec.ts @@ -89,10 +89,7 @@ 
describe('FFI integration test for the Message Consumer API', () => { message.expectsToReceive('a binary event'); message.given('some state'); message.givenWithParam('some state 2', 'state2 key', 'state2 val'); - message.withBinaryContents( - bytes, - 'application/gzip' - ); + message.withBinaryContents(bytes, 'application/gzip'); message.withMetadata('meta-key', 'meta-val'); const reified = message.reifyMessage(); @@ -197,6 +194,7 @@ describe('FFI integration test for the Message Consumer API', () => { pact.writePactFile(path.join(__dirname, '__testoutput__')); }); - }); + } + ); }); }); diff --git a/test/plugin-verifier.integration.spec.ts b/test/plugin-verifier.integration.spec.ts index 617a8083..40f2d671 100644 --- a/test/plugin-verifier.integration.spec.ts +++ b/test/plugin-verifier.integration.spec.ts @@ -135,4 +135,5 @@ const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true'; }); }); }); - }); + } +);
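
A usage sketch of the test-gating pattern introduced in this series: plugin-dependent suites now switch between describe and describe.skip based on a SKIP_PLUGIN_TESTS environment variable instead of being hard-skipped. The snippet below is a minimal TypeScript illustration assuming the Mocha/Chai globals already used by these spec files; the suite name and assertion are hypothetical placeholders, not part of the change set.

    import chai = require('chai');

    const { expect } = chai;

    // Plugins are installed by script/download-plugins.sh; CI jobs that cannot
    // install them opt out by exporting SKIP_PLUGIN_TESTS=true before the run.
    const skipPluginTests = process.env['SKIP_PLUGIN_TESTS'] === 'true';

    (skipPluginTests ? describe.skip : describe)('a plugin-backed suite', () => {
      it('runs only when plugins are available', () => {
        // Hypothetical assertion standing in for a real plugin interaction.
        expect(skipPluginTests).to.equal(false);
      });
    });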