From 6be1820306ce6b91c762633dbcc264990cc569d4 Mon Sep 17 00:00:00 2001
From: Ryan Fox-Tyler <60440289+ryanfoxtyler@users.noreply.github.com>
Date: Fri, 10 Jan 2025 07:54:33 -0500
Subject: [PATCH 01/16] chore: init trunk
---
.github/CODEOWNERS | 2 +-
.github/PULL_REQUEST_TEMPLATE.md | 4 +-
.github/actionlint.yml | 7 +
.github/labeler.yml | 108 +-
.github/workflows/cd-dgraph.yml | 77 +-
.../ci-aqua-security-trivy-tests.yml | 45 -
.github/workflows/ci-dgraph-code-coverage.yml | 14 +-
.github/workflows/ci-dgraph-core-tests.yml | 15 +-
.../ci-dgraph-core-upgrade-tests.yml | 11 +-
.github/workflows/ci-dgraph-fuzz.yml | 11 +-
.../ci-dgraph-integration2-tests.yml | 11 +-
.github/workflows/ci-dgraph-jepsen-tests.yml | 10 +-
.github/workflows/ci-dgraph-ldbc-tests.yml | 13 +-
.github/workflows/ci-dgraph-load-tests.yml | 11 +-
.github/workflows/ci-dgraph-oss-build.yml | 11 +-
.../ci-dgraph-system-upgrade-tests.yml | 11 +-
.github/workflows/ci-dgraph-systest-tests.yml | 17 +-
.github/workflows/ci-dgraph-tests-arm64.yml | 12 +-
...ci-dgraph-upgrade-fixed-versions-tests.yml | 7 +-
.github/workflows/ci-dgraph-vector-tests.yml | 15 +-
.github/workflows/ci-golang-lint.yml | 33 -
.github/workflows/codeql.yml | 55 +-
.github/workflows/labeler.yml | 8 +-
.golangci.yml | 41 -
.trunk/.gitignore | 9 +
.trunk/configs/.checkov.yaml | 2 +
.trunk/configs/.golangci.json | 5 +
.trunk/configs/.hadolint.yaml | 4 +
.trunk/configs/.markdownlint.json | 8 +
.trunk/configs/.prettierrc | 5 +
.trunk/configs/.shellcheckrc | 7 +
.trunk/configs/.yamllint.yaml | 7 +
.trunk/trunk.yaml | 50 +
.vscode/extensions.json | 3 +
.vscode/settings.json | 6 +
CHANGELOG.md | 3690 +-
CONTRIBUTING.md | 220 +-
LICENSE.md | 14 +-
README.md | 134 +-
compose/run.sh | 126 +-
contrib/README.md | 7 +-
contrib/config/backups/README.md | 23 +-
contrib/config/backups/azure/README.md | 65 +-
.../config/backups/azure/azure_cli/README.md | 26 +-
.../backups/azure/azure_cli/create_blob.sh | 163 +-
.../backups/azure/azure_cli/create_secrets.sh | 111 +-
.../config/backups/azure/terraform/README.md | 21 +-
contrib/config/backups/client/README.md | 119 +-
.../config/backups/client/backup_helper.sh | 339 +-
.../config/backups/client/compose-setup.sh | 400 +-
.../config/backups/client/dgraph-backup.sh | 307 +-
contrib/config/backups/gcp/README.md | 70 +-
.../config/backups/gcp/terraform/README.md | 43 +-
contrib/config/backups/nfs/README.md | 166 +-
.../backups/nfs/charts/rook/fetch-operator.sh | 13 +-
.../config/backups/nfs/charts/rook/helmify.sh | 52 +-
.../backups/nfs/efs-terraform/README.md | 71 +-
contrib/config/backups/nfs/gcfs-cli/README.md | 30 +-
.../backups/nfs/gcfs-cli/create_gcfs.sh | 103 +-
.../backups/nfs/gcfs-terraform/README.md | 42 +-
.../config/backups/nfs/vagrant/provision.sh | 219 +-
contrib/config/backups/s3/README.md | 53 +-
contrib/config/backups/s3/terraform/README.md | 28 +-
contrib/config/monitoring/jaeger/README.md | 10 +-
.../config/monitoring/jaeger/chart/README.md | 20 +-
.../monitoring/jaeger/operator/README.md | 19 +-
.../monitoring/jaeger/operator/helmify.sh | 52 +-
.../config/monitoring/prometheus/README.md | 33 +-
.../prometheus/chart-values/README.md | 66 +-
contrib/config/terraform/aws/ha/README.md | 32 +-
.../aws/ha/dgraph/ratel/variables.tf | 7 +-
.../terraform/aws/ha/dgraph/zero/variables.tf | 12 +-
.../config/terraform/aws/standalone/README.md | 29 +-
.../config/terraform/gcp/standalone/README.md | 18 +-
.../terraform/gcp/standalone/outputs.tf | 2 +-
.../terraform/gcp/standalone/variables.tf | 8 +-
contrib/config/terraform/kubernetes/README.md | 103 +-
.../modules/aws/modules/eks/outputs.tf | 4 +-
.../modules/aws/modules/eks/variables.tf | 8 +-
.../modules/aws/modules/vpc/nacl-config.tf | 6 +-
.../modules/aws/modules/vpc/outputs.tf | 6 +-
.../modules/alpha/templates/alpha_init.sh | 2 +-
.../modules/dgraph/modules/zero/variables.tf | 8 +-
contrib/config/vault/README.md | 2 +-
contrib/config/vault/docker/README.md | 91 +-
contrib/docker-build/README.md | 15 +-
contrib/docker-build/build.sh | 2 +-
contrib/local-test/README.md | 310 +-
contrib/manual_tests/README.md | 4 +-
contrib/manual_tests/log.sh | 10 +-
contrib/manual_tests/test.sh | 888 +-
contrib/scripts/cover.sh | 33 +-
contrib/scripts/functions.sh | 86 +-
contrib/scripts/goldendata-queries.sh | 77 +-
contrib/scripts/install-dependencies.sh | 1 -
contrib/scripts/load-test.sh | 14 +-
contrib/scripts/loader.sh | 36 +-
contrib/scripts/transactions.sh | 12 +-
contrib/standalone/hooks/build | 2 +-
contrib/standalone/run.sh | 3 +-
contrib/systemd/centos/README.md | 7 +-
contrib/systemd/centos/add_dgraph_account.sh | 26 +-
contrib/systemd/ha_cluster/README.md | 44 +-
contrib/systemd/ha_cluster/tests/README.md | 54 +-
.../ha_cluster/tests/centos8/provision.sh | 229 +-
.../ha_cluster/tests/ubuntu1804/provision.sh | 229 +-
contrib/tlstest/README.md | 21 +-
contrib/tlstest/alpha_notls.sh | 2 +-
contrib/tlstest/alpha_tls.sh | 2 +-
contrib/tlstest/alpha_tls_auth.sh | 2 +-
contrib/tlstest/live_notls.sh | 2 +-
contrib/tlstest/live_tls.sh | 2 +-
contrib/tlstest/live_tls_auth.sh | 2 +-
contrib/tlstest/run.sh | 2 +-
contrib/tlstest/server_nopass.sh | 2 +-
contrib/tlstest/server_pass.sh | 2 +-
contrib/tlstest/test.sh | 26 +-
contrib/tlstest/test_reload.sh | 16 +-
contrib/wait-for-it.sh | 267 +-
.../alpha/mutations_mode/docker-compose.yml | 115 +-
dgraph/cmd/alpha/testrun.sh | 49 +-
dgraph/cmd/alpha/testrun/conf1.yaml | 1 -
dgraph/cmd/alpha/testrun/conf2.yaml | 1 -
dgraph/cmd/alpha/testrun/conf3.yaml | 1 -
dgraph/cmd/alpha/thoughts.md | 20 +-
dgraph/cmd/bulk/speed_tests/run.sh | 31 +-
dgraph/cmd/bulk/split_gz.sh | 22 +-
dgraph/cmd/bulk/systest/run.sh | 21 +-
dgraph/cmd/bulk/systest/test-bulk-schema.sh | 257 +-
dgraph/cmd/migrate/README.md | 22 +-
dgraph/docker-compose.yml | 67 +-
dgraphtest/README.md | 11 +-
dql/README.md | 8 +-
ee/README.md | 5 +-
go.mod | 3 +-
graphql/bench/README.md | 42 +-
.../poorman_auth/docker-compose.yml | 13 +-
.../poorman_auth_with_acl/docker-compose.yml | 15 +-
graphql/e2e/auth/debug_off/docker-compose.yml | 13 +-
graphql/e2e/auth/docker-compose.yml | 14 +-
.../auth_closed_by_default/docker-compose.yml | 14 +-
graphql/e2e/common/README.md | 4 +-
graphql/e2e/common/error_test.yaml | 289 +-
graphql/e2e/custom_logic/README.md | 7 +-
graphql/e2e/custom_logic/cmd/go.mod | 4 +-
.../e2e/custom_logic/cmd/graphqlresponse.yaml | 339 +-
graphql/e2e/custom_logic/cmd/index.js | 8 +-
graphql/e2e/custom_logic/docker-compose.yml | 38 +-
graphql/e2e/directives/docker-compose.yml | 15 +-
graphql/e2e/directives/script.js | 101 +-
graphql/e2e/multi_tenancy/docker-compose.yml | 108 +-
graphql/e2e/normal/docker-compose.yml | 15 +-
graphql/e2e/normal/script.js | 101 +-
graphql/e2e/schema/docker-compose.yml | 78 +-
graphql/e2e/subscription/docker-compose.yml | 78 +-
graphql/resolve/add_mutation_test.yaml | 753 +-
graphql/resolve/auth_add_test.yaml | 165 +-
.../auth_closed_by_default_add_test.yaml | 20 +-
.../auth_closed_by_default_delete_test.yaml | 30 +-
.../auth_closed_by_default_query_test.yaml | 10 +-
.../auth_closed_by_default_update_test.yaml | 14 +-
graphql/resolve/auth_delete_test.yaml | 126 +-
graphql/resolve/auth_query_test.yaml | 322 +-
graphql/resolve/auth_tests.yaml | 124 +-
graphql/resolve/auth_update_test.yaml | 130 +-
graphql/resolve/custom_mutation_test.yaml | 12 +-
graphql/resolve/custom_query_test.yaml | 10 +-
graphql/resolve/delete_mutation_test.yaml | 43 +-
graphql/resolve/mutation_query_test.yaml | 48 +-
graphql/resolve/query_test.yaml | 340 +-
graphql/resolve/resolver_error_test.yaml | 342 +-
graphql/resolve/update_mutation_test.yaml | 330 +-
graphql/resolve/validate_mutation_test.yaml | 36 +-
graphql/schema/auth_schemas_test.yaml | 165 +-
graphql/schema/custom_http_config_test.yaml | 27 +-
graphql/schema/dgraph_schemagen_test.yml | 67 +-
graphql/schema/gqlschema_test.yml | 434 +-
.../schemagen/input/password-type.graphql | 4 +-
.../input/searchables-references.graphql | 16 +-
.../schemagen/input/searchables.graphql | 58 +-
.../input/single-type-with-enum.graphql | 2 +-
.../schemagen/input/single-type.graphql | 10 +-
...ype-implements-multiple-interfaces.graphql | 12 +-
.../type-with-arguments-on-field.graphql | 10 +-
...e-with-custom-field-on-dgraph-type.graphql | 23 +-
...-with-custom-fields-on-remote-type.graphql | 23 +-
.../input/type-without-orderables.graphql | 10 +-
.../testdata/schemagen/input/union.graphql | 39 +-
.../output/apollo-federation.graphql | 767 +-
.../output/auth-on-interfaces.graphql | 583 +-
.../schemagen/output/authorization.graphql | 542 +-
.../output/comments-and-descriptions.graphql | 477 +-
.../output/created-updated-directives.graphql | 503 +-
...custom-dql-query-with-subscription.graphql | 483 +-
.../schemagen/output/custom-mutation.graphql | 342 +-
.../output/custom-nested-types.graphql | 294 +-
.../output/custom-query-mixed-types.graphql | 343 +-
.../custom-query-not-dgraph-type.graphql | 278 +-
.../custom-query-with-dgraph-type.graphql | 339 +-
.../schemagen/output/deprecated.graphql | 345 +-
...e-on-concrete-type-with-interfaces.graphql | 536 +-
...-reverse-directive-with-interfaces.graphql | 534 +-
...ing-directive-with-similar-queries.graphql | 603 +-
.../output/field-with-id-directive.graphql | 529 +-
.../field-with-multiple-@id-fields.graphql | 531 +-
...erse-predicate-in-dgraph-directive.graphql | 451 +-
.../filter-cleanSchema-all-empty.graphql | 367 +-
.../filter-cleanSchema-circular.graphql | 441 +-
...filter-cleanSchema-custom-mutation.graphql | 342 +-
.../filter-cleanSchema-directLink.graphql | 407 +-
.../output/generate-directive.graphql | 535 +-
.../schemagen/output/geo-type.graphql | 403 +-
...se-with-interface-having-directive.graphql | 654 +-
.../output/hasInverse-with-interface.graphql | 667 +-
...Inverse-with-type-having-directive.graphql | 654 +-
.../schemagen/output/hasInverse.graphql | 385 +-
.../hasInverse_withSubscription.graphql | 390 +-
.../schemagen/output/hasfilter.graphql | 423 +-
.../ignore-unsupported-directive.graphql | 383 +-
.../output/interface-with-dgraph-pred.graphql | 527 +-
.../interface-with-id-directive.graphql | 525 +-
.../output/interface-with-no-ids.graphql | 459 +-
...interfaces-with-types-and-password.graphql | 712 +-
.../output/interfaces-with-types.graphql | 692 +-
.../schemagen/output/lambda-directive.graphql | 359 +-
.../schemagen/output/language-tags.graphql | 575 +-
.../no-id-field-with-searchables.graphql | 331 +-
.../schemagen/output/no-id-field.graphql | 493 +-
.../schemagen/output/password-type.graphql | 357 +-
.../testdata/schemagen/output/random.graphql | 347 +-
.../output/searchables-references.graphql | 481 +-
.../schemagen/output/searchables.graphql | 763 +-
.../output/single-type-with-enum.graphql | 369 +-
.../schemagen/output/single-type.graphql | 389 +-
...ype-implements-multiple-interfaces.graphql | 542 +-
.../schemagen/output/type-reference.graphql | 433 +-
.../type-with-arguments-on-field.graphql | 445 +-
...e-with-custom-field-on-dgraph-type.graphql | 418 +-
...-with-custom-fields-on-remote-type.graphql | 352 +-
.../output/type-without-orderables.graphql | 345 +-
.../testdata/schemagen/output/union.graphql | 820 +-
graphql/testdata/custom_bench/README.md | 108 +-
.../profiling/benchmarks/0/schema.graphql | 80 +-
.../benchmarks/1_batch/schema.graphql | 93 +-
.../benchmarks/1_single/schema.graphql | 93 +-
.../benchmarks/2_batch/schema.graphql | 106 +-
.../benchmarks/2_single/schema.graphql | 106 +-
.../benchmarks/3_batch/schema.graphql | 93 +-
.../benchmarks/3_single/schema.graphql | 93 +-
.../benchmarks/4_batch/schema.graphql | 106 +-
.../benchmarks/4_single/schema.graphql | 106 +-
.../benchmarks/5_batch/schema.graphql | 93 +-
.../benchmarks/5_single/schema.graphql | 93 +-
.../benchmarks/6_batch/schema.graphql | 106 +-
.../benchmarks/6_single/schema.graphql | 106 +-
.../benchmarks/7_batch/schema.graphql | 132 +-
.../benchmarks/7_single/schema.graphql | 132 +-
.../benchmarks/8_batch/schema.graphql | 145 +-
.../benchmarks/8_single/schema.graphql | 145 +-
graphql/testdata/datagen/README.md | 64 +-
graphql/testdata/datagen/schema.graphql | 82 +-
logo-dark.png | Bin 13481 -> 6102 bytes
logo.png | Bin 7234 -> 5747 bytes
ocagent/ocagent-config.yaml | 6 +-
paper/architecture.png | Bin 47000 -> 31993 bytes
paper/datasharding.png | Bin 76148 -> 62249 bytes
paper/integerstorage.png | Bin 29689 -> 23460 bytes
paper/maxassigned-derivation.png | Bin 21503 -> 16790 bytes
paper/maxassigned.png | Bin 20423 -> 15250 bytes
paper/mvcc.png | Bin 35526 -> 28549 bytes
paper/posting.png | Bin 65722 -> 49960 bytes
posting/size_test.sh | 16 +-
protos/depcheck.sh | 19 +-
protos/patch_pb.sh | 18 +-
protos/pb/pb.pb.go | 22 +-
query/benchmark/README.md | 39 +-
query/benchmark/run.sh | 28 +-
query/thoughts.md | 25 +-
systest/1million/test-reindex.sh | 50 +-
systest/21million/test-21million.sh | 170 +-
.../acl/restore/data/backups/manifest.json | 64 +-
.../dgraph.20201125.173944.587/manifest.json | 26 +-
.../dgraph.20210517.094442.220/manifest.json | 24 +-
.../dgraph.20210517.094634.533/manifest.json | 24 +-
.../dgraph.20210517.094648.003/manifest.json | 24 +-
.../data/to_restore/3/manifest.json | 87 +-
systest/backup/nfs-backup/docker-compose.yml | 254 +-
systest/bgindex/test-bgindex.sh | 24 +-
systest/group-delete/README.md | 4 +-
systest/ldbc/test_cases.yaml | 156 +-
systest/loader-benchmark/loader-benchmark.sh | 48 +-
.../dgraph.20200617.210005.792/manifest.json | 9 +-
.../dgraph.20200617.210012.038/manifest.json | 9 +-
.../dgraph.20200617.210106.464/manifest.json | 9 +-
.../dgraph.20200617.210343.124/manifest.json | 31 +-
.../dgraph.20200617.212208.094/manifest.json | 12 +-
.../dgraph.20210706.130427.532/manifest.json | 23 +-
.../dgraph.20210706.130444.413/manifest.json | 23 +-
.../dgraph.20210706.130453.552/manifest.json | 23 +-
.../dgraph.20210706.130519.083/manifest.json | 23 +-
.../dgraph.20210706.130544.666/manifest.json | 23 +-
.../dgraph.20210706.130634.385/manifest.json | 23 +-
t/README.md | 17 +-
test.sh | 302 +-
testutil/README.md | 2 +-
tok/options/README.md | 66 +-
types/testdata/aruba.json | 104 +-
types/testdata/sudan.json | 166 +-
types/testdata/us.json | 37514 +++++++++++++++-
types/testdata/zip.json | 1866 +-
worker/README.md | 2 +-
311 files changed, 63202 insertions(+), 21483 deletions(-)
create mode 100644 .github/actionlint.yml
delete mode 100644 .github/workflows/ci-aqua-security-trivy-tests.yml
delete mode 100644 .github/workflows/ci-golang-lint.yml
delete mode 100644 .golangci.yml
create mode 100644 .trunk/.gitignore
create mode 100644 .trunk/configs/.checkov.yaml
create mode 100644 .trunk/configs/.golangci.json
create mode 100644 .trunk/configs/.hadolint.yaml
create mode 100644 .trunk/configs/.markdownlint.json
create mode 100644 .trunk/configs/.prettierrc
create mode 100644 .trunk/configs/.shellcheckrc
create mode 100644 .trunk/configs/.yamllint.yaml
create mode 100644 .trunk/trunk.yaml
create mode 100644 .vscode/extensions.json
create mode 100644 .vscode/settings.json
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 8ef236d6d31..908cced9a26 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -1,4 +1,4 @@
# CODEOWNERS info: https://help.github.com/en/articles/about-code-owners
 # Owners are automatically requested for review for PRs that change code
# that they own.
-* @hypermodeinc/database
+* @hypermodeinc/database
\ No newline at end of file
diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md
index cae77b76401..e8c683fd5a7 100644
--- a/.github/PULL_REQUEST_TEMPLATE.md
+++ b/.github/PULL_REQUEST_TEMPLATE.md
@@ -8,8 +8,8 @@ Please explain the changes you made here.
- [ ] For all _code_ changes, an entry added to the `CHANGELOG.md` file describing and linking to
this PR
- [ ] Tests added for new functionality, or regression tests for bug fixes added as applicable
-- [ ] For public APIs, new features, etc., PR on [docs repo](https://github.com/dgraph-io/dgraph-docs)
- staged and linked here
+- [ ] For public APIs, new features, etc., PR on
+ [docs repo](https://github.com/dgraph-io/dgraph-docs) staged and linked here
**Instructions**
diff --git a/.github/actionlint.yml b/.github/actionlint.yml
new file mode 100644
index 00000000000..03454752e8b
--- /dev/null
+++ b/.github/actionlint.yml
@@ -0,0 +1,7 @@
+self-hosted-runner:
+ # Labels of the self-hosted runners, as an array of strings
+ labels:
+ - warp-ubuntu-latest-arm64-4x
+ - warp-ubuntu-latest-x64-4x
+ - warp-ubuntu-latest-arm64-16x
+ - warp-ubuntu-latest-x64-16x
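
These runner labels let actionlint validate the `runs-on` values used by the workflows below. A
minimal sketch of a job targeting one of them (the job name is illustrative):

  jobs:
    build:
      runs-on: warp-ubuntu-latest-x64-16x
      steps:
        - uses: actions/checkout@v4
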
diff --git a/.github/labeler.yml b/.github/labeler.yml
index cd3b29fce2a..5d26187207e 100644
--- a/.github/labeler.yml
+++ b/.github/labeler.yml
@@ -1,82 +1,82 @@
area/graphql:
-- changed-files:
- - any-glob-to-any-file: graphql/**
+ - changed-files:
+ - any-glob-to-any-file: graphql/**
area/documentation:
-- changed-files:
- - any-glob-to-any-file:
- - '**/*.md'
- - '**/*.pdf'
- - '**/*.tex'
+ - changed-files:
+ - any-glob-to-any-file:
+ - "**/*.md"
+ - "**/*.pdf"
+ - "**/*.tex"
area/bulk-loader:
-- changed-files:
- - any-glob-to-any-file: dgraph/cmd/bulk/**
+ - changed-files:
+ - any-glob-to-any-file: dgraph/cmd/bulk/**
area/live-loader:
-- changed-files:
- - any-glob-to-any-file: dgraph/cmd/live/**
+ - changed-files:
+ - any-glob-to-any-file: dgraph/cmd/live/**
area/querylang:
-- changed-files:
- - any-glob-to-any-file: dql/**
+ - changed-files:
+ - any-glob-to-any-file: dql/**
area/integrations:
-- changed-files:
- - any-glob-to-any-file:
- - contrib/**
- - .github/**
- - .travis/**
+ - changed-files:
+ - any-glob-to-any-file:
+ - contrib/**
+ - .github/**
+ - .travis/**
area/testing/jepsen:
-- changed-files:
- - any-glob-to-any-file: contrib/jepsen/**
+ - changed-files:
+ - any-glob-to-any-file: contrib/jepsen/**
area/enterprise:
-- changed-files:
- - any-glob-to-any-file: ee/**
+ - changed-files:
+ - any-glob-to-any-file: ee/**
area/enterprise/backup:
-- changed-files:
- - any-glob-to-any-file: ee/backup/**
+ - changed-files:
+ - any-glob-to-any-file: ee/backup/**
area/enterprise/acl:
-- changed-files:
- - any-glob-to-any-file: ee/acl/**
-
+ - changed-files:
+ - any-glob-to-any-file: ee/acl/**
+
area/schema:
-- changed-files:
- - any-glob-to-any-file: schema/**
+ - changed-files:
+ - any-glob-to-any-file: schema/**
area/testing:
-- changed-files:
- - any-glob-to-any-file:
- - systest/**
- - '**/*test.go'
- - graphql/e2e/**
- - '**/*test.yaml'
- - t/**
- - testutil/**
- - tlstest/**
+ - changed-files:
+ - any-glob-to-any-file:
+ - systest/**
+ - "**/*test.go"
+ - graphql/e2e/**
+ - "**/*test.yaml"
+ - t/**
+ - testutil/**
+ - tlstest/**
area/core:
-- changed-files:
- - any-glob-to-any-file:
- - protos/**
- - posting/**
- - raftwal/**
- - query/**
- - schema/**
- - protos/**
- - x/**
- - xidmap/**
- - worker/**
- - graphql/**
+ - changed-files:
+ - any-glob-to-any-file:
+ - protos/**
+ - posting/**
+ - raftwal/**
+ - query/**
+ - schema/**
+ - protos/**
+ - x/**
+ - xidmap/**
+ - worker/**
+ - graphql/**
go:
-- changed-files:
- - any-glob-to-any-file: '**/*.go'
+ - changed-files:
+ - any-glob-to-any-file: "**/*.go"
python:
-- changed-files:
- - any-glob-to-any-file: '**/*.py'
+ - changed-files:
+ - any-glob-to-any-file: "**/*.py"
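
The re-indented config above follows the actions/labeler v5 schema: each label maps to
changed-files globs. As a sketch, a hypothetical PR touching only dql/parser.go would pick up two
labels:

  area/querylang:
    - changed-files:
        - any-glob-to-any-file: dql/**      # dql/parser.go matches this glob
  go:
    - changed-files:
        - any-glob-to-any-file: "**/*.go"   # any Go file matches
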
diff --git a/.github/workflows/cd-dgraph.yml b/.github/workflows/cd-dgraph.yml
index dbb09ede123..d5fe6f8ea6b 100644
--- a/.github/workflows/cd-dgraph.yml
+++ b/.github/workflows/cd-dgraph.yml
@@ -1,4 +1,5 @@
name: cd-dgraph
+
on:
workflow_dispatch:
inputs:
@@ -13,14 +14,18 @@ on:
custom-build:
type: boolean
default: false
- description: if checked, images will be pushed to dgraph-custom repo in Dockerhub
+ description: if checked, images will be pushed to dgraph-custom repo in Dockerhub
+
+permissions:
+ contents: read
+
jobs:
dgraph-build-amd64:
runs-on: warp-ubuntu-latest-x64-16x
steps:
- uses: actions/checkout@v4
with:
- ref: '${{ github.event.inputs.releasetag }}'
+ ref: "${{ github.event.inputs.releasetag }}"
- name: Set up Go
uses: actions/setup-go@v5
with:
@@ -119,7 +124,7 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
- ref: '${{ github.event.inputs.releasetag }}'
+ ref: "${{ github.event.inputs.releasetag }}"
- name: Set up Go
uses: actions/setup-go@v5
with:
@@ -195,8 +200,9 @@ jobs:
with:
name: dgraph-docker-arm64
path: dgraph-docker-arm64.tar
- - name: Make Dgraph Standalone Docker Image with Version
- #No need to build and push Standalone Image when its a custom build
+ - name:
+ Make Dgraph Standalone Docker Image with Version
+          # No need to build and push Standalone Image when it's a custom build
if: inputs.custom-build == false
run: |
set -e
@@ -217,7 +223,7 @@ jobs:
steps:
- uses: actions/checkout@v4
with:
- ref: '${{ github.event.inputs.releasetag }}'
+ ref: "${{ github.event.inputs.releasetag }}"
- name: Set Dgraph Release Version
run: |
#!/bin/bash
@@ -237,7 +243,7 @@ jobs:
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_PASSWORD_TOKEN }}
-
+
# Download AMD64 Tar File
- name: Download Dgraph AMD64 Tar
uses: actions/download-artifact@v4
@@ -259,7 +265,7 @@ jobs:
- name: Load ARM64 Docker Image
run: |
docker load -i dgraph-docker-arm64.tar
-
+
# Download Standalone AMD64 Tar File
- name: Download Standalone AMD64 Tar
if: inputs.custom-build == false
@@ -272,7 +278,7 @@ jobs:
if: inputs.custom-build == false
run: |
docker load -i dgraph-standalone-amd64.tar
-
+
# Download Standalone ARM64 Tar File
- name: Download Standalone ARM64 Tar
if: inputs.custom-build == false
@@ -285,36 +291,33 @@ jobs:
if: inputs.custom-build == false
run: |
docker load -i dgraph-standalone-arm64.tar
-
-
+
- name: Docker Manifest
run: |
- if [ "${{ github.event.inputs.custom-build }}" == "true" ]; then
- #Push AMD and ARM images to dgraph-custom repo
- docker push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
- docker push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest create dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}
- else
- # Push standalone Images and manifest
- docker push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
- docker push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest create dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}
-
- # Push Dgraph Images and Manifest
- docker push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
- docker push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest create dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}
+ if [ "${{ github.event.inputs.custom-build }}" == "true" ]; then
+ #Push AMD and ARM images to dgraph-custom repo
+ docker push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
+ docker push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest create dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest push dgraph/dgraph-custom:${{ env.DGRAPH_RELEASE_VERSION }}
+ else
+ # Push standalone Images and manifest
+ docker push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
+ docker push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest create dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest push dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}
-
- if [ "${{ github.event.inputs.latest }}" == "true" ]; then
- docker manifest create dgraph/standalone:latest --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest create dgraph/dgraph:latest --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
- docker manifest push dgraph/standalone:latest
- docker manifest push dgraph/dgraph:latest
- fi
- fi
+ # Push Dgraph Images and Manifest
+ docker push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64
+ docker push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest create dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }} --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest push dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}
+ if [ "${{ github.event.inputs.latest }}" == "true" ]; then
+ docker manifest create dgraph/standalone:latest --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/standalone:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest create dgraph/dgraph:latest --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-amd64 --amend dgraph/dgraph:${{ env.DGRAPH_RELEASE_VERSION }}-arm64
+ docker manifest push dgraph/standalone:latest
+ docker manifest push dgraph/dgraph:latest
+ fi
+ fi
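
The restructured Docker Manifest step follows the standard multi-arch pattern: push one tag per
architecture, then stitch the tags together with `docker manifest`. A self-contained sketch using a
hypothetical image name:

  - name: Multi-arch publish (illustrative sketch only)
    run: |
      docker push example/app:v1-amd64
      docker push example/app:v1-arm64
      docker manifest create example/app:v1 \
        --amend example/app:v1-amd64 --amend example/app:v1-arm64
      docker manifest push example/app:v1
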
diff --git a/.github/workflows/ci-aqua-security-trivy-tests.yml b/.github/workflows/ci-aqua-security-trivy-tests.yml
deleted file mode 100644
index 26295e1d520..00000000000
--- a/.github/workflows/ci-aqua-security-trivy-tests.yml
+++ /dev/null
@@ -1,45 +0,0 @@
-name: ci-aqua-security-trivy-tests
-on:
- pull_request:
- paths:
- - '**/*.go'
- - '**/go.mod'
- types:
- - opened
- - reopened
- - synchronize
- - ready_for_review
- branches:
- - main
- - 'release/**'
- schedule:
- - cron: "0 0 * * *"
-
-permissions:
- security-events: write
-
-jobs:
- build:
- name: trivy-tests
- if: github.event.pull_request.draft == false
- runs-on: warp-ubuntu-latest-x64-4x
- steps:
- - name: Checkout code
- uses: actions/checkout@v4
- - name: Set up Go
- uses: actions/setup-go@v5
- with:
- go-version-file: go.mod
- - name: Build Docker image
- run: |
- make docker-image
- - name: Run Trivy vulnerability scanner
- uses: aquasecurity/trivy-action@master
- with:
- image-ref: 'dgraph/dgraph:local'
- format: 'sarif'
- output: 'trivy-results.sarif'
- - name: Upload Trivy scan results to GitHub Security tab
- uses: github/codeql-action/upload-sarif@v3
- with:
- sarif_file: 'trivy-results.sarif'
diff --git a/.github/workflows/ci-dgraph-code-coverage.yml b/.github/workflows/ci-dgraph-code-coverage.yml
index 663dba9fe15..1bc85ab99eb 100644
--- a/.github/workflows/ci-dgraph-code-coverage.yml
+++ b/.github/workflows/ci-dgraph-code-coverage.yml
@@ -1,12 +1,17 @@
name: ci-dgraph-code-coverage
+
on:
push:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-code-coverage:
runs-on: warp-ubuntu-latest-x64-4x
@@ -48,4 +53,5 @@ jobs:
- name: Install Goveralls
run: go install github.com/mattn/goveralls@latest
- name: Send Coverage Results
- run: cd t && goveralls -repotoken ${{ secrets.COVERALLSIO_TOKEN }} -coverprofile=coverage.out
+ run:
+ cd t && goveralls -repotoken ${{ secrets.COVERALLSIO_TOKEN }} -coverprofile=coverage.out
diff --git a/.github/workflows/ci-dgraph-core-tests.yml b/.github/workflows/ci-dgraph-core-tests.yml
index 1182308e5ad..d74b51150ed 100644
--- a/.github/workflows/ci-dgraph-core-tests.yml
+++ b/.github/workflows/ci-dgraph-core-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-core-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,9 +12,13 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
schedule:
- - cron: "0 0 * * *" # 1 run per day
+ - cron: 0 0 * * * # 1 run per day
+
+permissions:
+ contents: read
+
jobs:
dgraph-core-tests:
if: github.event.pull_request.draft == false
@@ -66,6 +71,6 @@ jobs:
continue-on-error: true # don't fail this job if the upload fails
uses: trunk-io/analytics-uploader@main
with:
- junit-paths: "./test-results.xml"
+ junit-paths: ./test-results.xml
org-slug: hypermode
token: ${{ secrets.TRUNK_TOKEN }}
diff --git a/.github/workflows/ci-dgraph-core-upgrade-tests.yml b/.github/workflows/ci-dgraph-core-upgrade-tests.yml
index 7b1230f87fe..ec6353ae1ca 100644
--- a/.github/workflows/ci-dgraph-core-upgrade-tests.yml
+++ b/.github/workflows/ci-dgraph-core-upgrade-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-core-upgrade-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-upgrade-tests:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-fuzz.yml b/.github/workflows/ci-dgraph-fuzz.yml
index d8b46f53c3a..10f312499de 100644
--- a/.github/workflows/ci-dgraph-fuzz.yml
+++ b/.github/workflows/ci-dgraph-fuzz.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-fuzz
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
fuzz-test:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-integration2-tests.yml b/.github/workflows/ci-dgraph-integration2-tests.yml
index 722952a7478..44a5e246be5 100644
--- a/.github/workflows/ci-dgraph-integration2-tests.yml
+++ b/.github/workflows/ci-dgraph-integration2-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-integration2-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-integration2-tests:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-jepsen-tests.yml b/.github/workflows/ci-dgraph-jepsen-tests.yml
index 18b0e57c879..e85992d7f76 100644
--- a/.github/workflows/ci-dgraph-jepsen-tests.yml
+++ b/.github/workflows/ci-dgraph-jepsen-tests.yml
@@ -1,9 +1,15 @@
name: ci-dgraph-jepsen-tests
+
on:
schedule:
- - cron: "0 4 * * 3,6" # run twice weekly
+ - cron: 0 4 * * 3,6 # run twice weekly
+
env:
- GOPATH: /home/ubuntu/go
+ GOPATH: /home/ubuntu/go
+
+permissions:
+ contents: read
+
jobs:
dgraph-jepsen-tests:
runs-on: warp-ubuntu-latest-x64-4x
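
For reference, the now-unquoted cron expression above breaks down as follows (GitHub Actions
evaluates schedules in UTC):

  # minute hour day-of-month month day-of-week
  #   0     4        *         *       3,6      ->  04:00 UTC on Wednesday (3) and Saturday (6)
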
diff --git a/.github/workflows/ci-dgraph-ldbc-tests.yml b/.github/workflows/ci-dgraph-ldbc-tests.yml
index a039263fc38..bb98a0bf9ed 100644
--- a/.github/workflows/ci-dgraph-ldbc-tests.yml
+++ b/.github/workflows/ci-dgraph-ldbc-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-ldbc-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-ldbc-tests:
if: github.event.pull_request.draft == false
@@ -28,7 +33,7 @@ jobs:
- name: Install gotestsum
run: go install gotest.tools/gotestsum@latest
- name: Build Test Binary
- run : |
+ run: |
#!/bin/bash
# build the test binary
cd t; go build .
diff --git a/.github/workflows/ci-dgraph-load-tests.yml b/.github/workflows/ci-dgraph-load-tests.yml
index 74b545b8f9e..f5822e7d642 100644
--- a/.github/workflows/ci-dgraph-load-tests.yml
+++ b/.github/workflows/ci-dgraph-load-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-load-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-load-tests:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-oss-build.yml b/.github/workflows/ci-dgraph-oss-build.yml
index 4eb1d510595..771a7c01d45 100644
--- a/.github/workflows/ci-dgraph-oss-build.yml
+++ b/.github/workflows/ci-dgraph-oss-build.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-oss-build
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-oss-build:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-system-upgrade-tests.yml b/.github/workflows/ci-dgraph-system-upgrade-tests.yml
index 5a1812fc232..766877595bd 100644
--- a/.github/workflows/ci-dgraph-system-upgrade-tests.yml
+++ b/.github/workflows/ci-dgraph-system-upgrade-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-system-upgrade-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-upgrade-tests:
if: github.event.pull_request.draft == false
diff --git a/.github/workflows/ci-dgraph-systest-tests.yml b/.github/workflows/ci-dgraph-systest-tests.yml
index fe114150cdd..a50d0e868e1 100644
--- a/.github/workflows/ci-dgraph-systest-tests.yml
+++ b/.github/workflows/ci-dgraph-systest-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-system-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,9 +12,13 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
schedule:
- - cron: "0 0 * * *" # 1 run per day
+ - cron: 0 0 * * * # 1 run per day
+
+permissions:
+ contents: read
+
jobs:
dgraph-systest-tests:
if: github.event.pull_request.draft == false
@@ -66,6 +71,6 @@ jobs:
continue-on-error: true # don't fail this job if the upload fails
uses: trunk-io/analytics-uploader@main
with:
- junit-paths: "./test-results.xml"
+ junit-paths: ./test-results.xml
org-slug: hypermode
- token: ${{ secrets.TRUNK_TOKEN }}
\ No newline at end of file
+ token: ${{ secrets.TRUNK_TOKEN }}
diff --git a/.github/workflows/ci-dgraph-tests-arm64.yml b/.github/workflows/ci-dgraph-tests-arm64.yml
index 60d9f45be8a..824bcde9582 100644
--- a/.github/workflows/ci-dgraph-tests-arm64.yml
+++ b/.github/workflows/ci-dgraph-tests-arm64.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-tests-arm64
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,7 +12,11 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+
jobs:
dgraph-tests:
if: github.event.pull_request.draft == false
@@ -59,4 +64,3 @@ jobs:
./t -r
# sleep
sleep 5
-
diff --git a/.github/workflows/ci-dgraph-upgrade-fixed-versions-tests.yml b/.github/workflows/ci-dgraph-upgrade-fixed-versions-tests.yml
index 1cc5fdb6bab..23105cbdf2c 100644
--- a/.github/workflows/ci-dgraph-upgrade-fixed-versions-tests.yml
+++ b/.github/workflows/ci-dgraph-upgrade-fixed-versions-tests.yml
@@ -1,7 +1,12 @@
name: ci-dgraph-upgrade-fixed-versions-tests
+
on:
schedule:
- - cron: "00 20 * * *" # 1 run per day
+ - cron: 00 20 * * * # 1 run per day
+
+permissions:
+ contents: read
+
jobs:
dgraph-upgrade-fixed-versions-tests:
runs-on: warp-ubuntu-latest-x64-4x
diff --git a/.github/workflows/ci-dgraph-vector-tests.yml b/.github/workflows/ci-dgraph-vector-tests.yml
index 50561aa29bf..737421fee43 100644
--- a/.github/workflows/ci-dgraph-vector-tests.yml
+++ b/.github/workflows/ci-dgraph-vector-tests.yml
@@ -1,9 +1,10 @@
name: ci-dgraph-vector-tests
+
on:
pull_request:
paths:
- - '**/*.go'
- - '**/go.mod'
+ - "**/*.go"
+ - "**/go.mod"
types:
- opened
- reopened
@@ -11,9 +12,13 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
schedule:
- - cron: "0 0 * * *" # 1 run per day
+ - cron: 0 0 * * * # 1 run per day
+
+permissions:
+ contents: read
+
jobs:
dgraph-vector-tests:
if: github.event.pull_request.draft == false
@@ -66,6 +71,6 @@ jobs:
continue-on-error: true # don't fail this job if the upload fails
uses: trunk-io/analytics-uploader@main
with:
- junit-paths: "./test-results.xml"
+ junit-paths: ./test-results.xml
org-slug: hypermode
token: ${{ secrets.TRUNK_TOKEN }}
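
This tail section, shared by the core, systest, and vector test workflows, publishes a JUnit report
to Trunk's test analytics. A condensed sketch of the producer/consumer pair, assuming gotestsum
writes the report:

  - name: Run tests
    run: gotestsum --junitfile ./test-results.xml ./...
  - name: Upload test results
    continue-on-error: true # don't fail this job if the upload fails
    uses: trunk-io/analytics-uploader@main
    with:
      junit-paths: ./test-results.xml
      org-slug: hypermode
      token: ${{ secrets.TRUNK_TOKEN }}
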
diff --git a/.github/workflows/ci-golang-lint.yml b/.github/workflows/ci-golang-lint.yml
deleted file mode 100644
index 6b935d52dc4..00000000000
--- a/.github/workflows/ci-golang-lint.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-name: ci-golang-lint
-on:
- pull_request:
- paths:
- - '**/*.go'
- - '**/go.mod'
- types:
- - opened
- - reopened
- - synchronize
- - ready_for_review
- branches:
- - main
- - 'release/**'
-jobs:
- golang-lint:
- if: github.event.pull_request.draft == false
- name: lint
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v4
- - name: Setup Go
- uses: actions/setup-go@v5
- with:
- go-version-file: go.mod
- - name: golang-lint
- uses: golangci/golangci-lint-action@v6
- with:
- # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version.
- version: latest
- only-new-issues: true
- args: --timeout=10m
- skip-cache: true
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 0f4298f94f2..a73c5d1935b 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -1,4 +1,4 @@
-name: "CodeQL"
+name: CodeQL
on:
push:
@@ -10,7 +10,10 @@ on:
- main
- release/**
schedule:
- - cron: '0 0 * * *'
+ - cron: 0 0 * * *
+
+permissions:
+ contents: read
jobs:
analyze:
@@ -25,29 +28,29 @@ jobs:
fail-fast: false
matrix:
include:
- - language: go
- build-mode: autobuild
+ - language: go
+ build-mode: autobuild
steps:
- - name: Checkout repository
- uses: actions/checkout@v4
-
- - name: Initialize CodeQL
- uses: github/codeql-action/init@v3
- with:
- languages: ${{ matrix.language }}
- build-mode: ${{ matrix.build-mode }}
-
- - if: matrix.build-mode == 'manual'
- run: |
- echo 'If you are using a "manual" build mode for one or more of the' \
- 'languages you are analyzing, replace this with the commands to build' \
- 'your code, for example:'
- echo ' make bootstrap'
- echo ' make release'
- exit 1
-
- - name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v3
- with:
- category: "/language:${{matrix.language}}"
+ - name: Checkout repository
+ uses: actions/checkout@v4
+
+ - name: Initialize CodeQL
+ uses: github/codeql-action/init@v3
+ with:
+ languages: ${{ matrix.language }}
+ build-mode: ${{ matrix.build-mode }}
+
+ - if: matrix.build-mode == 'manual'
+ run: |
+ echo 'If you are using a "manual" build mode for one or more of the' \
+ 'languages you are analyzing, replace this with the commands to build' \
+ 'your code, for example:'
+ echo ' make bootstrap'
+ echo ' make release'
+ exit 1
+
+ - name: Perform CodeQL Analysis
+ uses: github/codeql-action/analyze@v3
+ with:
+ category: "/language:${{matrix.language}}"
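
The matrix keeps this workflow extensible: analyzing another language is one more include entry.
Sketch (the second entry is hypothetical):

  matrix:
    include:
      - language: go
        build-mode: autobuild
      - language: javascript # hypothetical second language
        build-mode: none     # interpreted languages need no build step
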
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index 566d8acfa8d..c25112c363f 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -1,4 +1,5 @@
name: labeler
+
on:
pull_request:
types:
@@ -8,7 +9,12 @@ on:
- ready_for_review
branches:
- main
- - 'release/**'
+ - release/**
+
+permissions:
+ contents: read
+ pull-requests: write
+
jobs:
label:
permissions:
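
A note on the permissions blocks this patch adds throughout: the workflow-level block sets a
least-privilege default, and a job-level block replaces it entirely for that job. Sketch of the
combined effect in this workflow:

  permissions:        # workflow-level default
    contents: read
    pull-requests: write
  jobs:
    label:
      permissions:    # job-level block overrides the workflow default
        contents: read
        pull-requests: write # required to apply labels
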
diff --git a/.golangci.yml b/.golangci.yml
deleted file mode 100644
index a387ca1cd43..00000000000
--- a/.golangci.yml
+++ /dev/null
@@ -1,41 +0,0 @@
-run:
- skip-dirs:
- skip-files:
-
-linters-settings:
- lll:
- line-length: 120
- staticcheck:
- checks:
- - all
- - '-SA1019' # it is okay to use math/rand at times.
- gosec:
- excludes: # these are not relevant for us right now
- - G101
- - G107
- - G108
- - G110
- - G112
- - G114
- - G201
- - G204
- - G306
- - G402
- - G404
-
-linters:
- disable-all: true
- enable:
- - errcheck
- - gofmt
- - goimports
- - gosec
- - gosimple
- - govet
- - ineffassign
- - lll
- - staticcheck
- - unconvert
- - unused
- - intrange
- - bodyclose
diff --git a/.trunk/.gitignore b/.trunk/.gitignore
new file mode 100644
index 00000000000..15966d087eb
--- /dev/null
+++ b/.trunk/.gitignore
@@ -0,0 +1,9 @@
+*out
+*logs
+*actions
+*notifications
+*tools
+plugins
+user_trunk.yaml
+user.yaml
+tmp
diff --git a/.trunk/configs/.checkov.yaml b/.trunk/configs/.checkov.yaml
new file mode 100644
index 00000000000..521b240fe2c
--- /dev/null
+++ b/.trunk/configs/.checkov.yaml
@@ -0,0 +1,2 @@
+skip-check:
+ - CKV_GHA_7
diff --git a/.trunk/configs/.golangci.json b/.trunk/configs/.golangci.json
new file mode 100644
index 00000000000..58c035f24c5
--- /dev/null
+++ b/.trunk/configs/.golangci.json
@@ -0,0 +1,5 @@
+{
+ "run": {
+ "build-tags": ["integration"]
+ }
+}
diff --git a/.trunk/configs/.hadolint.yaml b/.trunk/configs/.hadolint.yaml
new file mode 100644
index 00000000000..98bf0cd2ee9
--- /dev/null
+++ b/.trunk/configs/.hadolint.yaml
@@ -0,0 +1,4 @@
+# Following sourced files doesn't work in most setups
+ignored:
+ - SC1090
+ - SC1091
diff --git a/.trunk/configs/.markdownlint.json b/.trunk/configs/.markdownlint.json
new file mode 100644
index 00000000000..449148d135e
--- /dev/null
+++ b/.trunk/configs/.markdownlint.json
@@ -0,0 +1,8 @@
+{
+ "line-length": { "line_length": 150, "tables": false },
+ "no-inline-html": false,
+ "no-bare-urls": false,
+ "no-space-in-emphasis": false,
+ "no-emphasis-as-heading": false,
+ "first-line-heading": false
+}
diff --git a/.trunk/configs/.prettierrc b/.trunk/configs/.prettierrc
new file mode 100644
index 00000000000..577642c89aa
--- /dev/null
+++ b/.trunk/configs/.prettierrc
@@ -0,0 +1,5 @@
+{
+ "semi": false,
+ "proseWrap": "always",
+ "printWidth": 100
+}
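
`proseWrap: always` with `printWidth: 100` makes prettier hard-wrap prose in markdown files, which
is what drives the wholesale re-wrapping of CHANGELOG.md and the other .md files later in this
patch. Illustrative effect:

  # input : a single 140-column sentence in a .md file
  # output: the same sentence, hard-wrapped at column 100
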
diff --git a/.trunk/configs/.shellcheckrc b/.trunk/configs/.shellcheckrc
new file mode 100644
index 00000000000..8c7b1ada8a3
--- /dev/null
+++ b/.trunk/configs/.shellcheckrc
@@ -0,0 +1,7 @@
+enable=all
+source-path=SCRIPTDIR
+disable=SC2154
+
+# If you're having issues with shellcheck following source, disable the errors via:
+# disable=SC1090
+# disable=SC1091
diff --git a/.trunk/configs/.yamllint.yaml b/.trunk/configs/.yamllint.yaml
new file mode 100644
index 00000000000..184e251f8de
--- /dev/null
+++ b/.trunk/configs/.yamllint.yaml
@@ -0,0 +1,7 @@
+rules:
+ quoted-strings:
+ required: only-when-needed
+ extra-allowed: ["{|}"]
+ key-duplicates: {}
+ octal-values:
+ forbid-implicit-octal: true
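
The quoted-strings rule is what strips the now-redundant quotes across the workflow files while
keeping the required ones: a YAML plain scalar may not begin with `*` (it would parse as an alias
indicator), so glob patterns keep their quotes. Sketch:

  branches:
    - release/**   # plain scalar: quotes removed as unneeded
  paths:
    - "**/*.go"    # leading '*' must stay quoted
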
diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml
new file mode 100644
index 00000000000..ac5f84c284d
--- /dev/null
+++ b/.trunk/trunk.yaml
@@ -0,0 +1,50 @@
+# This file controls the behavior of Trunk: https://docs.trunk.io/cli
+# To learn more about the format of this file, see https://docs.trunk.io/reference/trunk-yaml
+version: 0.1
+cli:
+ version: 1.22.8
+
+# Trunk provides extensibility via plugins. (https://docs.trunk.io/plugins)
+plugins:
+ sources:
+ - id: trunk
+ ref: v1.6.6
+ uri: https://github.com/trunk-io/plugins
+
+# Many linters and tools depend on runtimes - configure them here. (https://docs.trunk.io/runtimes)
+runtimes:
+ enabled:
+ - go@1.23.4
+ - node@18.20.5
+ - python@3.10.8
+
+# This is the section where you manage your linters. (https://docs.trunk.io/check/configuration)
+lint:
+ ignore:
+ - linters: [ALL]
+ paths:
+ - contrib/**
+ enabled:
+ - actionlint@1.7.6
+ - checkov@3.2.351
+ - dotenv-linter@3.3.0
+ - git-diff-check
+ - gofmt@1.20.4
+ - golangci-lint@1.63.4
+ - hadolint@2.12.1-beta
+ - markdownlint@0.43.0
+ - osv-scanner@1.9.2
+ - oxipng@9.1.3
+ - prettier@3.4.2
+ - renovate@39.92.0
+ - shellcheck@0.10.0
+ - shfmt@3.6.0
+ - tflint@0.54.0
+ - trufflehog@3.88.1
+ - yamllint@1.35.1
+actions:
+ enabled:
+ - trunk-announce
+ - trunk-check-pre-push
+ - trunk-fmt-pre-commit
+ - trunk-upgrade-available
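
With this config in place, contributors run the same pinned toolchain locally. Typical commands,
assuming the trunk launcher is installed:

  trunk fmt         # apply formatters (prettier, gofmt, shfmt, ...)
  trunk check       # run the enabled linters against changed files
  trunk check --all # lint the entire repository
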
diff --git a/.vscode/extensions.json b/.vscode/extensions.json
new file mode 100644
index 00000000000..29d43383de0
--- /dev/null
+++ b/.vscode/extensions.json
@@ -0,0 +1,3 @@
+{
+ "recommendations": ["trunk.io"]
+}
diff --git a/.vscode/settings.json b/.vscode/settings.json
new file mode 100644
index 00000000000..93ff3ace95b
--- /dev/null
+++ b/.vscode/settings.json
@@ -0,0 +1,6 @@
+{
+ "editor.formatOnSave": true,
+ "editor.defaultFormatter": "trunk.io",
+ "editor.trimAutoWhitespace": true,
+ "trunk.autoInit": false
+}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2cab1071dcb..6e5c21a7638 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,24 +1,28 @@
# Changelog
+
All notable changes to this project will be documented in this file.
-The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
-and this project will adhere to [Semantic Versioning](https://semver.org) starting `v22.0.0`.
+The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project will
+adhere to [Semantic Versioning](https://semver.org) starting `v22.0.0`.
## [v24.0.5] - 2024-11-05
+
[v24.0.5]: https://github.com/hypermodeinc/dgraph/compare/v24.0.4...v24.0.5
-### Fixed
+- **Fixed**
- **Core**
+
- fix(core): fix duplicate mutation entries for count index (#9208)
- **Chore**
- Update cd-dgraph.yml to create unique artifacts (#9194)
## [v24.0.4] - 2024-10-08
+
[v24.0.4]: https://github.com/hypermodeinc/dgraph/compare/v24.0.2...v24.0.4
-### Fixed
+- **Fixed**
- **Vector**
@@ -58,198 +62,217 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- chore(deps): bump google.golang.org/grpc from 1.66.0 to 1.66.1 (#9163)
- chore(deps): bump google.golang.org/grpc from 1.65.0 to 1.66.0 (#9152)
- chore(deps): bump github.com/Masterminds/semver/v3 from 3.2.1 to 3.3.0 (#9151)
- - chore(deps): bump github.com/docker/docker from 27.1.2+incompatible to 27.2.0+incompatible (#9150)
+ - chore(deps): bump github.com/docker/docker from 27.1.2+incompatible to 27.2.0+incompatible
+ (#9150)
- chore(deps): bump the minor-and-patch group across 1 directory with 2 updates (#9174)
- - chore(deps): bump github.com/docker/docker from 24.0.5+incompatible to 25.0.6+incompatible (#9123)
+ - chore(deps): bump github.com/docker/docker from 24.0.5+incompatible to 25.0.6+incompatible
+ (#9123)
- chore(deps): bump the minor-and-patch group with 2 updates (#9178)
- - chore(deps): bump google.golang.org/grpc from 1.67.0 to 1.67.1 in the minor-and-patch group (#9178)
+ - chore(deps): bump google.golang.org/grpc from 1.67.0 to 1.67.1 in the minor-and-patch group
+ (#9178)
## [v24.0.2] - 2024-08-16
-[v24.0.1]: https://github.com/hypermodeinc/dgraph/compare/v24.0.1...v24.0.2
-
+[v24.0.2]: https://github.com/hypermodeinc/dgraph/compare/v24.0.1...v24.0.2
- **Fixed**
-
- - fix(core): Fix namespace used by unique query https://github.com/hypermodeinc/dgraph/pull/9119
- - fix(core): Fix regression in computing cond variables https://github.com/hypermodeinc/dgraph/pull/9126
- - fix(live): Fix derigster while retrying https://github.com/hypermodeinc/dgraph/pull/9121
-
+
+ - fix(core): Fix namespace used by unique query https://github.com/hypermodeinc/dgraph/pull/9119
+ - fix(core): Fix regression in computing cond variables
+ https://github.com/hypermodeinc/dgraph/pull/9126
+  - fix(live): Fix deregister while retrying https://github.com/hypermodeinc/dgraph/pull/9121
- **Chore**
- - chore(deps): bump certifi from 2023.7.22 to 2024.7.4 in /contrib/config/marketplace/aws/tests https://github.com/hypermodeinc/dgraph/pull/9110
- - chore(deps): bump idna from 2.9 to 3.7 in /contrib/config/marketplace/aws/tests https://github.com/hypermodeinc/dgraph/pull/9072
+ - chore(deps): bump certifi from 2023.7.22 to 2024.7.4 in /contrib/config/marketplace/aws/tests
+ https://github.com/hypermodeinc/dgraph/pull/9110
+ - chore(deps): bump idna from 2.9 to 3.7 in /contrib/config/marketplace/aws/tests
+ https://github.com/hypermodeinc/dgraph/pull/9072
- **Perf**
- - perf(query): Read just the latest value for scalar types https://github.com/hypermodeinc/dgraph/pull/8966
+ - perf(query): Read just the latest value for scalar types
+ https://github.com/hypermodeinc/dgraph/pull/8966
- perf(vector): Add heap to neighbour edges https://github.com/hypermodeinc/dgraph/pull/9122
## [v24.0.1] - 2024-07-30
+
[v24.0.1]: https://github.com/hypermodeinc/dgraph/compare/v24.0.0...v24.0.1
-> **Warning**
-> After upgrading to v24.0.1, vector index needs to be rebuilt as underlying data has changed.
+> **Warning** After upgrading to v24.0.1, the vector index needs to be rebuilt, as the underlying
+> data has changed.
- **Fixed**
-
- - fix(core): Fix regression in parsing json empty string #9108
- - fix(upgrade): fix failing upgrade tests #9042
- - fix(ci): fixing health endpoint issue #9116
- - Fix(graphql): issue with local variable squashing intended JWK index by @matthewmcneely in #9114
+
+ - fix(core): Fix regression in parsing json empty string #9108
+ - fix(upgrade): fix failing upgrade tests #9042
+ - fix(ci): fixing health endpoint issue #9116
+ - Fix(graphql): issue with local variable squashing intended JWK index by @matthewmcneely in #9114
- **Chore**
- - chore(deps): bump urllib3 from 1.26.18 to 1.26.19 /contrib/config/marketplace/aws/tests #9103
- - chore(deps): bump requests from 2.31.0 to 2.32.0 /contrib/config/marketplace/aws/tests #9090
+ - chore(deps): bump urllib3 from 1.26.18 to 1.26.19 /contrib/config/marketplace/aws/tests #9103
+ - chore(deps): bump requests from 2.31.0 to 2.32.0 /contrib/config/marketplace/aws/tests #9090
- **Perf**
- - perf(vector): updated marshalling of vector #9109
-
+ - perf(vector): updated marshalling of vector #9109
## [v24.0.0] - 2024-06-06
+
[v24.0.0]: https://github.com/hypermodeinc/dgraph/compare/v24.0.0...v23.1.0
-> **Warning**
-> This will be a breaking change for anyone moving from to `v.24.0.0`.
-> If you have any duplicate users or groups in Dgraph ACL, they would not be accessible from now on. Please delete any
-> duplicate users and groups before you upgrade. File format is the same, so binary can be directly replaced after.
-> deleting duplicate users and groups.
+> **Warning** This will be a breaking change for anyone moving to `v24.0.0`. If you have any
+> duplicate users or groups in Dgraph ACL, they will no longer be accessible. Please delete any
+> duplicate users and groups before you upgrade. The file format is the same, so the binary can be
+> replaced directly after deleting the duplicate users and groups.
-### Added
+**Added**
- **Vector**
- - feat(graphql): Add vector support to graphql (#9074)
- - feat(vector): add vector to schema in #9060
- - feat(vector): Added similar_to in vector in #9062
- - feat(vector): Add vector type to Dgraph in #9050
- - feat(vector): fix live loader and add tests for dropall, drop namespace, live load in #9063
- - fix(vector): show error is invalid input is provided to vector predicate in #9064
- - fix(vector):fix similar_to() error return when data is not present in #9084
- - fix(vector): Update query_rewriter to fix dotproduct and cosine query conversion in #9083
+
+ - feat(graphql): Add vector support to graphql (#9074)
+ - feat(vector): add vector to schema in #9060
+ - feat(vector): Added similar_to in vector in #9062
+ - feat(vector): Add vector type to Dgraph in #9050
+ - feat(vector): fix live loader and add tests for dropall, drop namespace, live load in #9063
+  - fix(vector): show error if invalid input is provided to vector predicate in #9064
+  - fix(vector): fix similar_to() error return when data is not present in #9084
+ - fix(vector): Update query_rewriter to fix dotproduct and cosine query conversion in #9083
- **Core**
- - feat(core): Add cache to dgraph.type predicate in #9068
- - [BREAKING]feat(core): add unique constraint support in schema for new predicates in #8827
- - feat(debug): add parse_key to debug tool in #7640
- - feat(acl): support more JWT algorithms for ACL by in #8912
- - feat(restore): add support for namespace aware restore by in #8968
+
+ - feat(core): Add cache to dgraph.type predicate in #9068
+ - [BREAKING]feat(core): add unique constraint support in schema for new predicates in #8827
+ - feat(debug): add parse_key to debug tool in #7640
+ - feat(acl): support more JWT algorithms for ACL by in #8912
+ - feat(restore): add support for namespace aware restore by in #8968
- **GraphQL**
- - feat(vector): Added lang support by in #8924
- - feat(graphql): allow updatable and nullable id fields. (#7736) in #9020
-### Fixed
+ - feat(vector): Added lang support by in #8924
+ - feat(graphql): allow updatable and nullable id fields. (#7736) in #9020
+
+**Fixed**
- **Core**
- - Fix(debug): Close file correctly before exiting on error in #9076
- - fix(restore): fix incr restore and normal restore for vector predicates in #9078
- - Fix(core): Fix deadlock in runMutation and error handling in #9085
- - fix(core): Fixed deadlock that happens due to timeout in #9007
- - fix(core): limit #edges using LimitMutationsNquad config and add logs in #9010
- - fix(core): Update math parsing function by in #9053
- - fix(restore): use different map directory for each group (#8047) in #8972
- - fix(export): Support for any s3 endpoint by in #8978
- - fix(restore): use custom type for sensitive fields by in #8969
- - fix(export): Escape MySQL column names in #8961
- - fix(debug): fix debug tool for schema keys in #7939
- - fix(restore): allow incrementalFrom to be 1 in restore API by in #8988
- - fix(raft):alpha leader fails to stream snapshot to new alpha nodes in #9022
- - fix(query): fix has function in filter in #9043
- - fix(core):Reduce x.ParsedKey memory allocation from 72 to 56 bytes by optimizing struct memory alignment in #9047
- - fix(restore): do not retry restore proposal (#8058) in #9017
+
+ - Fix(debug): Close file correctly before exiting on error in #9076
+ - fix(restore): fix incr restore and normal restore for vector predicates in #9078
+ - Fix(core): Fix deadlock in runMutation and error handling in #9085
+ - fix(core): Fixed deadlock that happens due to timeout in #9007
+ - fix(core): limit #edges using LimitMutationsNquad config and add logs in #9010
+ - fix(core): Update math parsing function by in #9053
+ - fix(restore): use different map directory for each group (#8047) in #8972
+ - fix(export): Support for any s3 endpoint by in #8978
+ - fix(restore): use custom type for sensitive fields by in #8969
+ - fix(export): Escape MySQL column names in #8961
+ - fix(debug): fix debug tool for schema keys in #7939
+ - fix(restore): allow incrementalFrom to be 1 in restore API by in #8988
+  - fix(raft): alpha leader fails to stream snapshot to new alpha nodes in #9022
+ - fix(query): fix has function in filter in #9043
+  - fix(core): Reduce x.ParsedKey memory allocation from 72 to 56 bytes by optimizing struct memory
+ alignment in #9047
+ - fix(restore): do not retry restore proposal (#8058) in #9017
- **Perf**
- - perf(core): Fix performance issue in type filter (#9065) in #9089
- - perf(core): Update postinglistCountAndLength function to improve performance in #9088
- - perf(query): use quickselect instead of sorting while pagination by in #8995
- - perf(query): Update CompressedBin IntersectionAlgo by in #9000
+
+ - perf(core): Fix performance issue in type filter (#9065) in #9089
+ - perf(core): Update postinglistCountAndLength function to improve performance in #9088
+ - perf(query): use quickselect instead of sorting while pagination by in #8995
+ - perf(query): Update CompressedBin IntersectionAlgo by in #9000
- **Chore**
- - chore(upgrade): run tests from v23.1.0 -> main in #9097
- - chore(deps): upgrade etcd/raft to v3 by in #7688
- - chore(restore): add more logs for restore request (#8050) in #8975
- - upgrade(go): update go version to 1.22 in #9058
- - chore(deps): bump github.com/apache/thrift from 0.12.0 to 0.13.0 by in #8982
- - chore(deps): bump golang.org/x/net from 0.14.0 to 0.17.0 in #9015
- - chore(deps): use bleve 2.3.10 for more languages in full text search in #9030
- - chore(deps): bump golang.org/x/crypto from 0.12.0 to 0.17.0 in #9032
- - chore(deps): bump urllib3 from 1.26.5 to 1.26.18 in /contrib/config/marketplace/aws/tests in #9018
- - chore(deps): bump google.golang.org/grpc from 1.56.2 to 1.56.3 in #9024
- - chore(deps): bump google.golang.org/protobuf from 1.31.0 to 1.33.0in #9051[
-
-## [23.1.1] - 2024-04-26
-[v23.1.1]: https://github.com/hypermodeinc/dgraph/compare/v23.1.0...v23.1.1
-
-### Fixed
+ - chore(upgrade): run tests from v23.1.0 -> main in #9097
+ - chore(deps): upgrade etcd/raft to v3 by in #7688
+ - chore(restore): add more logs for restore request (#8050) in #8975
+ - upgrade(go): update go version to 1.22 in #9058
+  - chore(deps): bump github.com/apache/thrift from 0.12.0 to 0.13.0 in #8982
+ - chore(deps): bump golang.org/x/net from 0.14.0 to 0.17.0 in #9015
+ - chore(deps): use bleve 2.3.10 for more languages in full text search in #9030
+ - chore(deps): bump golang.org/x/crypto from 0.12.0 to 0.17.0 in #9032
+ - chore(deps): bump urllib3 from 1.26.5 to 1.26.18 in /contrib/config/marketplace/aws/tests in
+ #9018
+ - chore(deps): bump google.golang.org/grpc from 1.56.2 to 1.56.3 in #9024
+  - chore(deps): bump google.golang.org/protobuf from 1.31.0 to 1.33.0 in #9051
+
+## [v23.1.1](https://github.com/hypermodeinc/dgraph/compare/v23.1.0...v23.1.1) - 2024-04-26
+
+- **Fixed**
- **Core Dgraph**
- - perf(core): Fix performance issue in type filter (#9065)
+ - perf(core): Fix performance issue in type filter (#9065)
- **CI & Testing**
- - ci/cd optimizations (#9069)
+ - ci/cd optimizations (#9069)
## [v23.1.0] - 2023-08-17
+
[v23.1.0]: https://github.com/hypermodeinc/dgraph/compare/v23.0.1...v23.1.0
-### Added
+- **Added**
- **Core Dgraph**
- - perf(query): Improve IntersectCompressedWithBin for UID Pack (#8941)
- - feat(query): add feature flag normalize-compatibility-mode (#8845) (#8929)
- - feat(alpha): support RDF response via http query request (#8004) (#8639)
- - perf(query): speed up parsing of a huge query (#8942)
- - fix(live): replace panic in live loader with errors (#7798) (#8944)
+
+ - perf(query): Improve IntersectCompressedWithBin for UID Pack (#8941)
+ - feat(query): add feature flag normalize-compatibility-mode (#8845) (#8929)
+ - feat(alpha): support RDF response via http query request (#8004) (#8639)
+ - perf(query): speed up parsing of a huge query (#8942)
+ - fix(live): replace panic in live loader with errors (#7798) (#8944)
- **GraphQL**
- - feat(graphql): This PR allows @id field in interface to be unique across all implementing types (#8876)
+ - feat(graphql): This PR allows @id field in interface to be unique across all implementing types
+ (#8876)
-### Fixed
+- **Fixed**
- **Core Dgraph**
- - docs(zero): add comments in zero and clarify naming (#8945)
- - fix(cdc): skip bad events in CDC (#8076)
- - fix(bulk): enable running bulk loader with only gql schema (#8903)
- - chore(badger): upgrade badger to v4.2.0 (#8932) (#8925)
- - doc(restore): add docs for mutations in between incremental restores (#8908)
- - chore: fix compilation on 32bit (#8895)
- - chore(raft): add debug logs to print all transactions (#8890)
- - chore(alpha): add logs for processing entries in applyCh (#8930)
- - fix(acl): allow data deletion for non-reserved predicates (#8937)
- - fix(alpha): convert numbers correctly in superflags (#7712) (#8943)
- - chore(raft): better logging message for cleaning banned ns pred (#7886)
+
+ - docs(zero): add comments in zero and clarify naming (#8945)
+ - fix(cdc): skip bad events in CDC (#8076)
+ - fix(bulk): enable running bulk loader with only gql schema (#8903)
+ - chore(badger): upgrade badger to v4.2.0 (#8932) (#8925)
+ - doc(restore): add docs for mutations in between incremental restores (#8908)
+ - chore: fix compilation on 32bit (#8895)
+ - chore(raft): add debug logs to print all transactions (#8890)
+ - chore(alpha): add logs for processing entries in applyCh (#8930)
+ - fix(acl): allow data deletion for non-reserved predicates (#8937)
+ - fix(alpha): convert numbers correctly in superflags (#7712) (#8943)
+ - chore(raft): better logging message for cleaning banned ns pred (#7886)
- **Security**
- - sec(acl): convert x.Sensitive to string type for auth hash (#8931)
- - chore(deps): bump google.golang.org/grpc from 1.52.0 to 1.53.0 (#8900)
- - chore(deps): bump certifi from 2022.12.7 to 2023.7.22 in /contrib/config/marketplace/aws/tests (#8920)
- - chore(deps): bump certifi from 2022.12.7 to 2023.7.22 in /contrib/embargo (#8921)
- - chore(deps): bump pygments from 2.7.4 to 2.15.0 in /contrib/embargo (#8913)
- - chore: upgrade bleve to v2.3.9 (#8948)
+
+ - sec(acl): convert x.Sensitive to string type for auth hash (#8931)
+ - chore(deps): bump google.golang.org/grpc from 1.52.0 to 1.53.0 (#8900)
+ - chore(deps): bump certifi from 2022.12.7 to 2023.7.22 in /contrib/config/marketplace/aws/tests
+ (#8920)
+ - chore(deps): bump certifi from 2022.12.7 to 2023.7.22 in /contrib/embargo (#8921)
+ - chore(deps): bump pygments from 2.7.4 to 2.15.0 in /contrib/embargo (#8913)
+ - chore: upgrade bleve to v2.3.9 (#8948)
- **CI & Testing**
- - chore: update cron job frequency to reset github notifications (#8956)
- - test(upgrade): add v20.11 upgrade tests in query package (#8954)
- - chore(contrib) - fixes for Vault (#7739)
- - chore(build): make build codename configurable (#8951)
- - fix(upgrade): look for version string in logs bottom up (#8926)
- - fix(upgrade): check commit SHA to find running dgraph version (#8923)
- - chore(upgrade): run upgrade tests for v23.0.1 (#8918)
- - chore(upgrade): ensure we run right version of Dgraph (#8910)
- - chore(upgrade): add workaround for multiple groot issue in export-import (#8897)
- - test(upgrade): add upgrade tests for systest/license package (#8902)
- - chore(upgrade): increase the upgrade job duration limit to 12h (#8907)
- - chore(upgrade): increase the duration of the CI workflow (#8906)
- - ci(upgrade): break down upgrade tests CI workflow (#8904)
- - test(acl): add upgrade tests for ee/acl package (#8792)
- - chore: update pull request template (#8899)
+ - chore: update cron job frequency to reset github notifications (#8956)
+ - test(upgrade): add v20.11 upgrade tests in query package (#8954)
+ - chore(contrib) - fixes for Vault (#7739)
+ - chore(build): make build codename configurable (#8951)
+ - fix(upgrade): look for version string in logs bottom up (#8926)
+ - fix(upgrade): check commit SHA to find running dgraph version (#8923)
+ - chore(upgrade): run upgrade tests for v23.0.1 (#8918)
+ - chore(upgrade): ensure we run right version of Dgraph (#8910)
+ - chore(upgrade): add workaround for multiple groot issue in export-import (#8897)
+ - test(upgrade): add upgrade tests for systest/license package (#8902)
+ - chore(upgrade): increase the upgrade job duration limit to 12h (#8907)
+ - chore(upgrade): increase the duration of the CI workflow (#8906)
+ - ci(upgrade): break down upgrade tests CI workflow (#8904)
+ - test(acl): add upgrade tests for ee/acl package (#8792)
+ - chore: update pull request template (#8899)
## [v23.0.1] - 2023-07-09
+
[v23.0.1]: https://github.com/hypermodeinc/dgraph/compare/v23.0.0...v23.0.1
-### Fixed
+- **Fixed**
- **Core Dgraph**
+
- chore(restore): add log message when restore fails (#8893)
- fix(zero): fix zero's health endpoint to return json response (#8858)
- chore(zero): improve error message while unmarshalling WAL (#8882)
@@ -259,922 +282,1154 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- upgrade dgo to v230.0.1 (#8785)
- **CI**
+
- ci(dql): add workflow for fuzz testing (#8874)
- chore(ci): add workflow for OSS build + unit tests (#8834)
- **Security**
- - chore(deps): bump requests from 2.23.0 to 2.31.0 in /contrib/config/marketplace/aws/tests (#8836)
+ - chore(deps): bump requests from 2.23.0 to 2.31.0 in /contrib/config/marketplace/aws/tests
+ (#8836)
- chore(deps): bump requests from 2.23.0 to 2.31.0 in /contrib/embargo (#8835)
- - chore(deps): bump github.com/docker/distribution from 2.8.0+incompatible to 2.8.2+incompatible (#8821)
+ - chore(deps): bump github.com/docker/distribution from 2.8.0+incompatible to 2.8.2+incompatible
+ (#8821)
- chore(deps): bump github.com/cloudflare/circl from 1.1.0 to 1.3.3 (#8822)
## [v23.0.0] - 2023-05-08
+
[v23.0.0]: https://github.com/hypermodeinc/dgraph/compare/v22.0.2...v23.0.0
-### Added
+- **Added**
- **GraphQL**
- - fix(GraphQL): pass on HTTP request headers for subscriptions (https://github.com/hypermodeinc/dgraph/pull/8574)
+
+ - fix(GraphQL): pass on HTTP request headers for subscriptions
+ (https://github.com/hypermodeinc/dgraph/pull/8574)
- **Core Dgraph**
- - feat(metrics): add badger metrics (#8034) (https://github.com/hypermodeinc/dgraph/pull/8737)
- - feat(restore): introduce incremental restore (#7942) (https://github.com/hypermodeinc/dgraph/pull/8624)
- - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size (https://github.com/hypermodeinc/dgraph/pull/8516)
- - feat(cloud): add `shared-instance` flag in limit superflag in alpha (https://github.com/hypermodeinc/dgraph/pull/8625)
- - chore(deps): update prometheus dependency, adds new metrics (https://github.com/hypermodeinc/dgraph/pull/8655)
- - feat(cdc): add superflag `tls` to enable TLS without CA or certs (https://github.com/hypermodeinc/dgraph/pull/8564)
- - feat(multitenancy): namespace aware drop data (https://github.com/hypermodeinc/dgraph/pull/8511)
+ - feat(metrics): add badger metrics (#8034) (https://github.com/hypermodeinc/dgraph/pull/8737)
+ - feat(restore): introduce incremental restore (#7942)
+ (https://github.com/hypermodeinc/dgraph/pull/8624)
+ - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size
+ (https://github.com/hypermodeinc/dgraph/pull/8516)
+ - feat(cloud): add `shared-instance` flag in limit superflag in alpha
+ (https://github.com/hypermodeinc/dgraph/pull/8625)
+ - chore(deps): update prometheus dependency, adds new metrics
+ (https://github.com/hypermodeinc/dgraph/pull/8655)
+ - feat(cdc): add superflag `tls` to enable TLS without CA or certs
+ (https://github.com/hypermodeinc/dgraph/pull/8564)
+ - feat(multitenancy): namespace aware drop data (https://github.com/hypermodeinc/dgraph/pull/8511)
-### Fixed
+- **Fixed**
- **GraphQL**
- - fix(GraphQL): nested Auth Rules not working properly (https://github.com/hypermodeinc/dgraph/pull/8571)
+
+ - fix(GraphQL): nested Auth Rules not working properly
+ (https://github.com/hypermodeinc/dgraph/pull/8571)
- **Core Dgraph**
- - Fix wal replay issue during rollup (https://github.com/hypermodeinc/dgraph/pull/8774)
- - security(logging): fix aes implementation in audit logging (https://github.com/hypermodeinc/dgraph/pull/8323)
- - chore(worker): unify mapper receiver names (https://github.com/hypermodeinc/dgraph/pull/8740)
- - fix(dql): fix panic in parsing of regexp (https://github.com/hypermodeinc/dgraph/pull/8739)
- - fix(Query): Do an error check before bubbling up nil error (https://github.com/hypermodeinc/dgraph/pull/8769)
- - chore: replace global index with local one & fix typos (https://github.com/hypermodeinc/dgraph/pull/8719)
- - chore(logs): add logs to track dropped proposals (https://github.com/hypermodeinc/dgraph/pull/8568)
- - fix(debug): check length of wal entry before parsing (https://github.com/hypermodeinc/dgraph/pull/8560)
- - opt(schema): optimize populateSchema() (https://github.com/hypermodeinc/dgraph/pull/8565)
- - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small (https://github.com/hypermodeinc/dgraph/pull/8573)
- - fix(groot): do not upsert groot for all namespaces on restart (https://github.com/hypermodeinc/dgraph/pull/8561)
- - fix(restore): set kv version to restoreTs for all keys (https://github.com/hypermodeinc/dgraph/pull/8563)
- - fix(probe): do not contend for lock in lazy load (https://github.com/hypermodeinc/dgraph/pull/8566)
- - fix(core): fixed infinite loop in CommitToDisk (https://github.com/hypermodeinc/dgraph/pull/8614)
- - fix(proposals): incremental proposal key for zero proposals (https://github.com/hypermodeinc/dgraph/pull/8567)
- - fix(zero): fix waiting for random time while rate limiting (https://github.com/hypermodeinc/dgraph/pull/8656)
- - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654, https://github.com/hypermodeinc/dgraph/pull/8658)
- - opt(schema): load schema and types using Stream framework (https://github.com/hypermodeinc/dgraph/pull/8562)
- - fix(backup): use StreamWriter instead of KVLoader during backup restore (https://github.com/hypermodeinc/dgraph/pull/8510)
- - fix(audit): fixing audit logs for websocket connections (https://github.com/hypermodeinc/dgraph/pull/8627)
- - fix(restore): consider the banned namespaces while bumping (https://github.com/hypermodeinc/dgraph/pull/8559)
- - fix(backup): create directory before writing backup (https://github.com/hypermodeinc/dgraph/pull/8638)
+
+ - Fix wal replay issue during rollup (https://github.com/hypermodeinc/dgraph/pull/8774)
+ - security(logging): fix aes implementation in audit logging
+ (https://github.com/hypermodeinc/dgraph/pull/8323)
+ - chore(worker): unify mapper receiver names (https://github.com/hypermodeinc/dgraph/pull/8740)
+ - fix(dql): fix panic in parsing of regexp (https://github.com/hypermodeinc/dgraph/pull/8739)
+ - fix(Query): Do an error check before bubbling up nil error
+ (https://github.com/hypermodeinc/dgraph/pull/8769)
+ - chore: replace global index with local one & fix typos
+ (https://github.com/hypermodeinc/dgraph/pull/8719)
+ - chore(logs): add logs to track dropped proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8568)
+ - fix(debug): check length of wal entry before parsing
+ (https://github.com/hypermodeinc/dgraph/pull/8560)
+ - opt(schema): optimize populateSchema() (https://github.com/hypermodeinc/dgraph/pull/8565)
+ - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small
+ (https://github.com/hypermodeinc/dgraph/pull/8573)
+ - fix(groot): do not upsert groot for all namespaces on restart
+ (https://github.com/hypermodeinc/dgraph/pull/8561)
+ - fix(restore): set kv version to restoreTs for all keys
+ (https://github.com/hypermodeinc/dgraph/pull/8563)
+ - fix(probe): do not contend for lock in lazy load
+ (https://github.com/hypermodeinc/dgraph/pull/8566)
+ - fix(core): fixed infinite loop in CommitToDisk
+ (https://github.com/hypermodeinc/dgraph/pull/8614)
+ - fix(proposals): incremental proposal key for zero proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8567)
+ - fix(zero): fix waiting for random time while rate limiting
+ (https://github.com/hypermodeinc/dgraph/pull/8656)
+ - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654,
+ https://github.com/hypermodeinc/dgraph/pull/8658)
+ - opt(schema): load schema and types using Stream framework
+ (https://github.com/hypermodeinc/dgraph/pull/8562)
+ - fix(backup): use StreamWriter instead of KVLoader during backup restore
+ (https://github.com/hypermodeinc/dgraph/pull/8510)
+ - fix(audit): fixing audit logs for websocket connections
+ (https://github.com/hypermodeinc/dgraph/pull/8627)
+ - fix(restore): consider the banned namespaces while bumping
+ (https://github.com/hypermodeinc/dgraph/pull/8559)
+ - fix(backup): create directory before writing backup
+    (https://github.com/hypermodeinc/dgraph/pull/8638) (sketch below)
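+
+  The "create directory before writing backup" fix above follows a common pattern: make sure the
+  destination directory exists before the first write instead of assuming it does. A minimal
+  sketch with hypothetical paths and helper names:
+
+  ```go
+  package main
+
+  import (
+  	"log"
+  	"os"
+  	"path/filepath"
+  )
+
+  // writeBackupFile creates the parent directory tree first, so
+  // the write cannot fail just because the directory is missing.
+  func writeBackupFile(path string, data []byte) error {
+  	if err := os.MkdirAll(filepath.Dir(path), 0o755); err != nil {
+  		return err
+  	}
+  	return os.WriteFile(path, data, 0o644)
+  }
+
+  func main() {
+  	err := writeBackupFile("backups/2023-05/manifest.json", []byte("{}"))
+  	if err != nil {
+  		log.Fatal(err)
+  	}
+  }
+  ```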
- **Test**
- - chore(tests): add upgrade tests in query package (https://github.com/hypermodeinc/dgraph/pull/8750)
- - simplify test setup in query package (https://github.com/hypermodeinc/dgraph/pull/8782)
- - add a test for incremental restore (https://github.com/hypermodeinc/dgraph/pull/8754)
- - chore(tests): run tests in query package against dgraph cloud (https://github.com/hypermodeinc/dgraph/pull/8726)
- - fix the backup test cluster compose file (https://github.com/hypermodeinc/dgraph/pull/8775)
- - cleanup tests to reduce the scope of err var (https://github.com/hypermodeinc/dgraph/pull/8771)
- - use t.TempDir() for using a temp dir in tests (https://github.com/hypermodeinc/dgraph/pull/8772)
- - fix(test): clan cruft from test run (https://github.com/hypermodeinc/dgraph/pull/8348)
- - chore(tests): avoid calling os.Exit in TestMain (https://github.com/hypermodeinc/dgraph/pull/8765)
- - chore: fix linter issue on main (https://github.com/hypermodeinc/dgraph/pull/8749)
- - recreate the context variable for parallel test (https://github.com/hypermodeinc/dgraph/pull/8748)
- - fix(tests): wait for license to be applied before trying to login (https://github.com/hypermodeinc/dgraph/pull/8744)
- - fix(tests): sleep longer so that ACLs are updated (https://github.com/hypermodeinc/dgraph/pull/8745)
- - chore(test): use pointer receiver for LocalCluster methods (https://github.com/hypermodeinc/dgraph/pull/8734)
- - chore(linter): fix unconvert linter issues on linux (https://github.com/hypermodeinc/dgraph/pull/8718)
- - chore(linter): add unconvert linter and address related issues (https://github.com/hypermodeinc/dgraph/pull/8685)
- - chore(ci): resolve community PR goveralls failure (https://github.com/hypermodeinc/dgraph/pull/8716)
- - chore(test): increased iterations of the health check (https://github.com/hypermodeinc/dgraph/pull/8711)
- - fix(test): avoid host volume mount in minio container (https://github.com/hypermodeinc/dgraph/pull/8569)
- - chore(test): add tests for lex/iri.go,chunker/chunk.go (https://github.com/hypermodeinc/dgraph/pull/8515)
- - chore(test): add Backup/Restore test for NFS (https://github.com/hypermodeinc/dgraph/pull/8551)
- - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed (https://github.com/hypermodeinc/dgraph/pull/8619)
- - chore(test): upgrade graphql tests to use go 1.19 (https://github.com/hypermodeinc/dgraph/pull/8662)
- - chore(test): add automated test to test multitenant --limit flag (https://github.com/hypermodeinc/dgraph/pull/8646)
- - chore(test): add restore test for more than 127 namespaces (https://github.com/hypermodeinc/dgraph/pull/8643)
- - fix(test): fix the corner case for raft entries test (https://github.com/hypermodeinc/dgraph/pull/8617)
+
+ - chore(tests): add upgrade tests in query package
+ (https://github.com/hypermodeinc/dgraph/pull/8750)
+ - simplify test setup in query package (https://github.com/hypermodeinc/dgraph/pull/8782)
+ - add a test for incremental restore (https://github.com/hypermodeinc/dgraph/pull/8754)
+ - chore(tests): run tests in query package against dgraph cloud
+ (https://github.com/hypermodeinc/dgraph/pull/8726)
+ - fix the backup test cluster compose file (https://github.com/hypermodeinc/dgraph/pull/8775)
+ - cleanup tests to reduce the scope of err var (https://github.com/hypermodeinc/dgraph/pull/8771)
+  - use t.TempDir() for using a temp dir in tests
+    (https://github.com/hypermodeinc/dgraph/pull/8772) (sketch below)
+ - fix(test): clan cruft from test run (https://github.com/hypermodeinc/dgraph/pull/8348)
+ - chore(tests): avoid calling os.Exit in TestMain
+ (https://github.com/hypermodeinc/dgraph/pull/8765)
+ - chore: fix linter issue on main (https://github.com/hypermodeinc/dgraph/pull/8749)
+ - recreate the context variable for parallel test
+ (https://github.com/hypermodeinc/dgraph/pull/8748)
+ - fix(tests): wait for license to be applied before trying to login
+ (https://github.com/hypermodeinc/dgraph/pull/8744)
+ - fix(tests): sleep longer so that ACLs are updated
+ (https://github.com/hypermodeinc/dgraph/pull/8745)
+ - chore(test): use pointer receiver for LocalCluster methods
+ (https://github.com/hypermodeinc/dgraph/pull/8734)
+ - chore(linter): fix unconvert linter issues on linux
+ (https://github.com/hypermodeinc/dgraph/pull/8718)
+ - chore(linter): add unconvert linter and address related issues
+ (https://github.com/hypermodeinc/dgraph/pull/8685)
+ - chore(ci): resolve community PR goveralls failure
+ (https://github.com/hypermodeinc/dgraph/pull/8716)
+ - chore(test): increased iterations of the health check
+ (https://github.com/hypermodeinc/dgraph/pull/8711)
+ - fix(test): avoid host volume mount in minio container
+ (https://github.com/hypermodeinc/dgraph/pull/8569)
+ - chore(test): add tests for lex/iri.go,chunker/chunk.go
+ (https://github.com/hypermodeinc/dgraph/pull/8515)
+ - chore(test): add Backup/Restore test for NFS (https://github.com/hypermodeinc/dgraph/pull/8551)
+ - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed
+ (https://github.com/hypermodeinc/dgraph/pull/8619)
+ - chore(test): upgrade graphql tests to use go 1.19
+ (https://github.com/hypermodeinc/dgraph/pull/8662)
+ - chore(test): add automated test to test multitenant --limit flag
+ (https://github.com/hypermodeinc/dgraph/pull/8646)
+ - chore(test): add restore test for more than 127 namespaces
+ (https://github.com/hypermodeinc/dgraph/pull/8643)
+ - fix(test): fix the corner case for raft entries test
+ (https://github.com/hypermodeinc/dgraph/pull/8617)
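+
+  The t.TempDir() item above refers to the standard testing pattern sketched below (an
+  illustrative test, not one from the Dgraph suite): the framework hands each test a fresh
+  directory and deletes it automatically, replacing manual os.MkdirTemp/os.RemoveAll bookkeeping.
+
+  ```go
+  package example
+
+  import (
+  	"os"
+  	"path/filepath"
+  	"testing"
+  )
+
+  func TestWritesScratchFile(t *testing.T) {
+  	// t.TempDir returns a per-test directory that the testing
+  	// framework removes once the test and its subtests finish.
+  	dir := t.TempDir()
+
+  	path := filepath.Join(dir, "scratch.txt")
+  	if err := os.WriteFile(path, []byte("data"), 0o644); err != nil {
+  		t.Fatalf("write: %v", err)
+  	}
+  	// No cleanup needed: no defer os.RemoveAll(dir).
+  }
+  ```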
- **CD**
- - fix(build): update dockerfile to use cache busting and reduce image size (https://github.com/hypermodeinc/dgraph/pull/8652)
- - chore(deps): update min go build version (https://github.com/hypermodeinc/dgraph/pull/8423)
- - chore(cd): add badger binary to dgraph docker image (https://github.com/hypermodeinc/dgraph/pull/8790)
+
+ - fix(build): update dockerfile to use cache busting and reduce image size
+ (https://github.com/hypermodeinc/dgraph/pull/8652)
+ - chore(deps): update min go build version (https://github.com/hypermodeinc/dgraph/pull/8423)
+ - chore(cd): add badger binary to dgraph docker image
+ (https://github.com/hypermodeinc/dgraph/pull/8790)
- **Security**
- - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests (https://github.com/hypermodeinc/dgraph/pull/8496)
- - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible (https://github.com/hypermodeinc/dgraph/pull/8575)
- - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo (https://github.com/hypermodeinc/dgraph/pull/8676)
- - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
- - fix(sec): CVE-2022-41721 (https://github.com/hypermodeinc/dgraph/pull/8633)
- - fix(sec): CVE & OS Patching (https://github.com/hypermodeinc/dgraph/pull/8634)
-
- -
- CVE Fixes (31 total)
-
- - CVE-2013-4235
- - CVE-2016-20013
- - CVE-2016-2781
- - CVE-2017-11164
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-39537
- - CVE-2021-44758
- - CVE-2022-28321
- - CVE-2022-29458
- - CVE-2022-3219
- - CVE-2022-3437
- - CVE-2022-3821
- - CVE-2022-41717
- - CVE-2022-41721
- - CVE-2022-41723
- - CVE-2022-42898
- - CVE-2022-4304
- - CVE-2022-43552
- - CVE-2022-4415
- - CVE-2022-4450
- - CVE-2022-44640
- - CVE-2022-48303
- - CVE-2023-0215
- - CVE-2023-0286
- - CVE-2023-0361
- - CVE-2023-0464
- - CVE-2023-0465
- - CVE-2023-0466
- - CVE-2023-23916
- - CVE-2023-26604
-
-
-### Changed
+
+ - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests
+ (https://github.com/hypermodeinc/dgraph/pull/8496)
+ - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible
+ (https://github.com/hypermodeinc/dgraph/pull/8575)
+ - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo
+ (https://github.com/hypermodeinc/dgraph/pull/8676)
+ - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
+ - fix(sec): CVE-2022-41721 (https://github.com/hypermodeinc/dgraph/pull/8633)
+ - fix(sec): CVE & OS Patching (https://github.com/hypermodeinc/dgraph/pull/8634)
+
+  - CVE Fixes (31 total)
+
+ - CVE-2013-4235
+ - CVE-2016-20013
+ - CVE-2016-2781
+ - CVE-2017-11164
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-39537
+ - CVE-2021-44758
+ - CVE-2022-28321
+ - CVE-2022-29458
+ - CVE-2022-3219
+ - CVE-2022-3437
+ - CVE-2022-3821
+ - CVE-2022-41717
+ - CVE-2022-41721
+ - CVE-2022-41723
+ - CVE-2022-42898
+ - CVE-2022-4304
+ - CVE-2022-43552
+ - CVE-2022-4415
+ - CVE-2022-4450
+ - CVE-2022-44640
+ - CVE-2022-48303
+ - CVE-2023-0215
+ - CVE-2023-0286
+ - CVE-2023-0361
+ - CVE-2023-0464
+ - CVE-2023-0465
+ - CVE-2023-0466
+ - CVE-2023-23916
+ - CVE-2023-26604
+
+- **Changed**
- **Core Dgraph**
- - upgrade badger to v4.1.0 (https://github.com/hypermodeinc/dgraph/pull/8783) (https://github.com/hypermodeinc/dgraph/pull/8709)
- - fix(multitenancy) store namespace in predicate as a hex separated by a hyphen to prevent json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601)
- - fix(query): handle bad timezone correctly (https://github.com/hypermodeinc/dgraph/pull/8657)
- - chore(ludicroud): remove ludicrous mode from the code (https://github.com/hypermodeinc/dgraph/pull/8612)
- - fix(backup): make the /admin/backup and /admin/export API asynchronous (https://github.com/hypermodeinc/dgraph/pull/8554)
- - fix(mutation): validate mutation before applying it (https://github.com/hypermodeinc/dgraph/pull/8623)
+
+ - upgrade badger to v4.1.0 (https://github.com/hypermodeinc/dgraph/pull/8783)
+ (https://github.com/hypermodeinc/dgraph/pull/8709)
+  - fix(multitenancy): store namespace in predicate as a hex separated by a hyphen to prevent
+    json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601) (sketch below)
+ - fix(query): handle bad timezone correctly (https://github.com/hypermodeinc/dgraph/pull/8657)
+ - chore(ludicroud): remove ludicrous mode from the code
+ (https://github.com/hypermodeinc/dgraph/pull/8612)
+ - fix(backup): make the /admin/backup and /admin/export API asynchronous
+ (https://github.com/hypermodeinc/dgraph/pull/8554)
+ - fix(mutation): validate mutation before applying it
+ (https://github.com/hypermodeinc/dgraph/pull/8623)
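+
+  A speculative sketch of the attribute encoding described in the multitenancy item above,
+  assuming a "<namespace in hex>-<predicate>" layout; the real helpers live in Dgraph's x package
+  and may differ in detail:
+
+  ```go
+  package main
+
+  import (
+  	"fmt"
+  	"strconv"
+  	"strings"
+  )
+
+  // namespaceAttr joins a namespace id and a predicate name with a
+  // hyphen, hex-encoding the id so the result is plain JSON-safe text.
+  func namespaceAttr(ns uint64, pred string) string {
+  	return strconv.FormatUint(ns, 16) + "-" + pred
+  }
+
+  // parseNamespaceAttr reverses namespaceAttr.
+  func parseNamespaceAttr(attr string) (uint64, string, error) {
+  	hexNs, pred, ok := strings.Cut(attr, "-")
+  	if !ok {
+  		return 0, "", fmt.Errorf("no namespace in %q", attr)
+  	}
+  	ns, err := strconv.ParseUint(hexNs, 16, 64)
+  	return ns, pred, err
+  }
+
+  func main() {
+  	attr := namespaceAttr(0x2a, "name")
+  	fmt.Println(attr) // 2a-name
+  	ns, pred, _ := parseNamespaceAttr(attr)
+  	fmt.Println(ns, pred) // 42 name
+  }
+  ```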
- **CI Enhancements**
- - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
- - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
- - chore(ci): Capture coverage from bulk load and LDBC tests (https://github.com/hypermodeinc/dgraph/pull/8478)
- - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
- - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
- - chore(linter): fix some of the warnings from gas linter (https://github.com/hypermodeinc/dgraph/pull/8664)
- - chore(linter): fix golangci config and some issues in tests (https://github.com/hypermodeinc/dgraph/pull/8669)
- - fix(linter): address gosimple linter reports & errors (https://github.com/hypermodeinc/dgraph/pull/8628)
+ - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
+ - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
+ - chore(ci): Capture coverage from bulk load and LDBC tests
+ (https://github.com/hypermodeinc/dgraph/pull/8478)
+ - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
+ - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
+ - chore(linter): fix some of the warnings from gas linter
+ (https://github.com/hypermodeinc/dgraph/pull/8664)
+ - chore(linter): fix golangci config and some issues in tests
+ (https://github.com/hypermodeinc/dgraph/pull/8669)
+ - fix(linter): address gosimple linter reports & errors
+ (https://github.com/hypermodeinc/dgraph/pull/8628)
## [v23.0.0-rc1] - 2023-04-11
+
[v23.0.0-rc1]: https://github.com/hypermodeinc/dgraph/compare/v22.0.2...v23.0.0-rc1
-### Added
+- **Added**
- **GraphQL**
- - fix(GraphQL): pass on HTTP request headers for subscriptions (https://github.com/hypermodeinc/dgraph/pull/8574)
+
+ - fix(GraphQL): pass on HTTP request headers for subscriptions
+ (https://github.com/hypermodeinc/dgraph/pull/8574)
- **Core Dgraph**
- - feat(metrics): add badger metrics (#8034) (https://github.com/hypermodeinc/dgraph/pull/8737)
- - feat(restore): introduce incremental restore (#7942) (https://github.com/hypermodeinc/dgraph/pull/8624)
- - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size (https://github.com/hypermodeinc/dgraph/pull/8516)
- - feat(cloud): add `shared-instance` flag in limit superflag in alpha (https://github.com/hypermodeinc/dgraph/pull/8625)
- - chore(deps): update prometheus dependency, adds new metrics (https://github.com/hypermodeinc/dgraph/pull/8655)
- - feat(cdc): add superflag `tls` to enable TLS without CA or certs (https://github.com/hypermodeinc/dgraph/pull/8564)
- - feat(multitenancy): namespace aware drop data (https://github.com/hypermodeinc/dgraph/pull/8511)
+ - feat(metrics): add badger metrics (#8034) (https://github.com/hypermodeinc/dgraph/pull/8737)
+ - feat(restore): introduce incremental restore (#7942)
+ (https://github.com/hypermodeinc/dgraph/pull/8624)
+ - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size
+ (https://github.com/hypermodeinc/dgraph/pull/8516)
+ - feat(cloud): add `shared-instance` flag in limit superflag in alpha
+ (https://github.com/hypermodeinc/dgraph/pull/8625)
+ - chore(deps): update prometheus dependency, adds new metrics
+ (https://github.com/hypermodeinc/dgraph/pull/8655)
+ - feat(cdc): add superflag `tls` to enable TLS without CA or certs
+ (https://github.com/hypermodeinc/dgraph/pull/8564)
+ - feat(multitenancy): namespace aware drop data (https://github.com/hypermodeinc/dgraph/pull/8511)
-### Fixed
+- **Fixed**
- **GraphQL**
- - fix(GraphQL): nested Auth Rules not working properly (https://github.com/hypermodeinc/dgraph/pull/8571)
+
+ - fix(GraphQL): nested Auth Rules not working properly
+ (https://github.com/hypermodeinc/dgraph/pull/8571)
- **Core Dgraph**
- - Fix wal replay issue during rollup (https://github.com/hypermodeinc/dgraph/pull/8774)
- - security(logging): fix aes implementation in audit logging (https://github.com/hypermodeinc/dgraph/pull/8323)
- - chore(worker): unify mapper receiver names (https://github.com/hypermodeinc/dgraph/pull/8740)
- - fix(dql): fix panic in parsing of regexp (https://github.com/hypermodeinc/dgraph/pull/8739)
- - fix(Query): Do an error check before bubbling up nil error (https://github.com/hypermodeinc/dgraph/pull/8769)
- - chore: replace global index with local one & fix typos (https://github.com/hypermodeinc/dgraph/pull/8719)
- - chore(logs): add logs to track dropped proposals (https://github.com/hypermodeinc/dgraph/pull/8568)
- - fix(debug): check length of wal entry before parsing (https://github.com/hypermodeinc/dgraph/pull/8560)
- - opt(schema): optimize populateSchema() (https://github.com/hypermodeinc/dgraph/pull/8565)
- - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small (https://github.com/hypermodeinc/dgraph/pull/8573)
- - fix(groot): do not upsert groot for all namespaces on restart (https://github.com/hypermodeinc/dgraph/pull/8561)
- - fix(restore): set kv version to restoreTs for all keys (https://github.com/hypermodeinc/dgraph/pull/8563)
- - fix(probe): do not contend for lock in lazy load (https://github.com/hypermodeinc/dgraph/pull/8566)
- - fix(core): fixed infinite loop in CommitToDisk (https://github.com/hypermodeinc/dgraph/pull/8614)
- - fix(proposals): incremental proposal key for zero proposals (https://github.com/hypermodeinc/dgraph/pull/8567)
- - fix(zero): fix waiting for random time while rate limiting (https://github.com/hypermodeinc/dgraph/pull/8656)
- - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654, https://github.com/hypermodeinc/dgraph/pull/8658)
- - opt(schema): load schema and types using Stream framework (https://github.com/hypermodeinc/dgraph/pull/8562)
- - fix(backup): use StreamWriter instead of KVLoader during backup restore (https://github.com/hypermodeinc/dgraph/pull/8510)
- - fix(audit): fixing audit logs for websocket connections (https://github.com/hypermodeinc/dgraph/pull/8627)
- - fix(restore): consider the banned namespaces while bumping (https://github.com/hypermodeinc/dgraph/pull/8559)
- - fix(backup): create directory before writing backup (https://github.com/hypermodeinc/dgraph/pull/8638)
+
+ - Fix wal replay issue during rollup (https://github.com/hypermodeinc/dgraph/pull/8774)
+ - security(logging): fix aes implementation in audit logging
+ (https://github.com/hypermodeinc/dgraph/pull/8323)
+ - chore(worker): unify mapper receiver names (https://github.com/hypermodeinc/dgraph/pull/8740)
+ - fix(dql): fix panic in parsing of regexp (https://github.com/hypermodeinc/dgraph/pull/8739)
+ - fix(Query): Do an error check before bubbling up nil error
+ (https://github.com/hypermodeinc/dgraph/pull/8769)
+ - chore: replace global index with local one & fix typos
+ (https://github.com/hypermodeinc/dgraph/pull/8719)
+ - chore(logs): add logs to track dropped proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8568)
+ - fix(debug): check length of wal entry before parsing
+ (https://github.com/hypermodeinc/dgraph/pull/8560)
+ - opt(schema): optimize populateSchema() (https://github.com/hypermodeinc/dgraph/pull/8565)
+ - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small
+ (https://github.com/hypermodeinc/dgraph/pull/8573)
+ - fix(groot): do not upsert groot for all namespaces on restart
+ (https://github.com/hypermodeinc/dgraph/pull/8561)
+ - fix(restore): set kv version to restoreTs for all keys
+ (https://github.com/hypermodeinc/dgraph/pull/8563)
+ - fix(probe): do not contend for lock in lazy load
+ (https://github.com/hypermodeinc/dgraph/pull/8566)
+ - fix(core): fixed infinite loop in CommitToDisk
+ (https://github.com/hypermodeinc/dgraph/pull/8614)
+ - fix(proposals): incremental proposal key for zero proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8567)
+ - fix(zero): fix waiting for random time while rate limiting
+ (https://github.com/hypermodeinc/dgraph/pull/8656)
+ - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654,
+ https://github.com/hypermodeinc/dgraph/pull/8658)
+ - opt(schema): load schema and types using Stream framework
+ (https://github.com/hypermodeinc/dgraph/pull/8562)
+ - fix(backup): use StreamWriter instead of KVLoader during backup restore
+ (https://github.com/hypermodeinc/dgraph/pull/8510)
+ - fix(audit): fixing audit logs for websocket connections
+ (https://github.com/hypermodeinc/dgraph/pull/8627)
+ - fix(restore): consider the banned namespaces while bumping
+ (https://github.com/hypermodeinc/dgraph/pull/8559)
+ - fix(backup): create directory before writing backup
+ (https://github.com/hypermodeinc/dgraph/pull/8638)
- **Test**
- - chore(tests): add upgrade tests in query package (https://github.com/hypermodeinc/dgraph/pull/8750)
- - simplify test setup in query package (https://github.com/hypermodeinc/dgraph/pull/8782)
- - add a test for incremental restore (https://github.com/hypermodeinc/dgraph/pull/8754)
- - chore(tests): run tests in query package against dgraph cloud (https://github.com/hypermodeinc/dgraph/pull/8726)
- - fix the backup test cluster compose file (https://github.com/hypermodeinc/dgraph/pull/8775)
- - cleanup tests to reduce the scope of err var (https://github.com/hypermodeinc/dgraph/pull/8771)
- - use t.TempDir() for using a temp dir in tests (https://github.com/hypermodeinc/dgraph/pull/8772)
- - fix(test): clan cruft from test run (https://github.com/hypermodeinc/dgraph/pull/8348)
- - chore(tests): avoid calling os.Exit in TestMain (https://github.com/hypermodeinc/dgraph/pull/8765)
- - chore: fix linter issue on main (https://github.com/hypermodeinc/dgraph/pull/8749)
- - recreate the context variable for parallel test (https://github.com/hypermodeinc/dgraph/pull/8748)
- - fix(tests): wait for license to be applied before trying to login (https://github.com/hypermodeinc/dgraph/pull/8744)
- - fix(tests): sleep longer so that ACLs are updated (https://github.com/hypermodeinc/dgraph/pull/8745)
- - chore(test): use pointer receiver for LocalCluster methods (https://github.com/hypermodeinc/dgraph/pull/8734)
- - chore(linter): fix unconvert linter issues on linux (https://github.com/hypermodeinc/dgraph/pull/8718)
- - chore(linter): add unconvert linter and address related issues (https://github.com/hypermodeinc/dgraph/pull/8685)
- - chore(ci): resolve community PR goveralls failure (https://github.com/hypermodeinc/dgraph/pull/8716)
- - chore(test): increased iterations of the health check (https://github.com/hypermodeinc/dgraph/pull/8711)
- - fix(test): avoid host volume mount in minio container (https://github.com/hypermodeinc/dgraph/pull/8569)
- - chore(test): add tests for lex/iri.go,chunker/chunk.go (https://github.com/hypermodeinc/dgraph/pull/8515)
- - chore(test): add Backup/Restore test for NFS (https://github.com/hypermodeinc/dgraph/pull/8551)
- - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed (https://github.com/hypermodeinc/dgraph/pull/8619)
- - chore(test): upgrade graphql tests to use go 1.19 (https://github.com/hypermodeinc/dgraph/pull/8662)
- - chore(test): add automated test to test multitenant --limit flag (https://github.com/hypermodeinc/dgraph/pull/8646)
- - chore(test): add restore test for more than 127 namespaces (https://github.com/hypermodeinc/dgraph/pull/8643)
- - fix(test): fix the corner case for raft entries test (https://github.com/hypermodeinc/dgraph/pull/8617)
+
+ - chore(tests): add upgrade tests in query package
+ (https://github.com/hypermodeinc/dgraph/pull/8750)
+ - simplify test setup in query package (https://github.com/hypermodeinc/dgraph/pull/8782)
+ - add a test for incremental restore (https://github.com/hypermodeinc/dgraph/pull/8754)
+ - chore(tests): run tests in query package against dgraph cloud
+ (https://github.com/hypermodeinc/dgraph/pull/8726)
+ - fix the backup test cluster compose file (https://github.com/hypermodeinc/dgraph/pull/8775)
+ - cleanup tests to reduce the scope of err var (https://github.com/hypermodeinc/dgraph/pull/8771)
+ - use t.TempDir() for using a temp dir in tests (https://github.com/hypermodeinc/dgraph/pull/8772)
+ - fix(test): clan cruft from test run (https://github.com/hypermodeinc/dgraph/pull/8348)
+ - chore(tests): avoid calling os.Exit in TestMain
+ (https://github.com/hypermodeinc/dgraph/pull/8765)
+ - chore: fix linter issue on main (https://github.com/hypermodeinc/dgraph/pull/8749)
+ - recreate the context variable for parallel test
+ (https://github.com/hypermodeinc/dgraph/pull/8748)
+ - fix(tests): wait for license to be applied before trying to login
+ (https://github.com/hypermodeinc/dgraph/pull/8744)
+ - fix(tests): sleep longer so that ACLs are updated
+ (https://github.com/hypermodeinc/dgraph/pull/8745)
+ - chore(test): use pointer receiver for LocalCluster methods
+ (https://github.com/hypermodeinc/dgraph/pull/8734)
+ - chore(linter): fix unconvert linter issues on linux
+ (https://github.com/hypermodeinc/dgraph/pull/8718)
+ - chore(linter): add unconvert linter and address related issues
+ (https://github.com/hypermodeinc/dgraph/pull/8685)
+ - chore(ci): resolve community PR goveralls failure
+ (https://github.com/hypermodeinc/dgraph/pull/8716)
+ - chore(test): increased iterations of the health check
+ (https://github.com/hypermodeinc/dgraph/pull/8711)
+ - fix(test): avoid host volume mount in minio container
+ (https://github.com/hypermodeinc/dgraph/pull/8569)
+ - chore(test): add tests for lex/iri.go,chunker/chunk.go
+ (https://github.com/hypermodeinc/dgraph/pull/8515)
+ - chore(test): add Backup/Restore test for NFS (https://github.com/hypermodeinc/dgraph/pull/8551)
+ - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed
+ (https://github.com/hypermodeinc/dgraph/pull/8619)
+ - chore(test): upgrade graphql tests to use go 1.19
+ (https://github.com/hypermodeinc/dgraph/pull/8662)
+ - chore(test): add automated test to test multitenant --limit flag
+ (https://github.com/hypermodeinc/dgraph/pull/8646)
+ - chore(test): add restore test for more than 127 namespaces
+ (https://github.com/hypermodeinc/dgraph/pull/8643)
+ - fix(test): fix the corner case for raft entries test
+ (https://github.com/hypermodeinc/dgraph/pull/8617)
- **CD**
- - fix(build): update dockerfile to use cache busting and reduce image size (https://github.com/hypermodeinc/dgraph/pull/8652)
- - chore(deps): update min go build version (https://github.com/hypermodeinc/dgraph/pull/8423)
- - chore(cd): add badger binary to dgraph docker image (https://github.com/hypermodeinc/dgraph/pull/8790)
+
+ - fix(build): update dockerfile to use cache busting and reduce image size
+ (https://github.com/hypermodeinc/dgraph/pull/8652)
+ - chore(deps): update min go build version (https://github.com/hypermodeinc/dgraph/pull/8423)
+ - chore(cd): add badger binary to dgraph docker image
+ (https://github.com/hypermodeinc/dgraph/pull/8790)
- **Security**
- - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests (https://github.com/hypermodeinc/dgraph/pull/8496)
- - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible (https://github.com/hypermodeinc/dgraph/pull/8575)
- - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo (https://github.com/hypermodeinc/dgraph/pull/8676)
- - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
- - fix(sec): CVE-2022-41721 (https://github.com/hypermodeinc/dgraph/pull/8633)
- - fix(sec): CVE & OS Patching (https://github.com/hypermodeinc/dgraph/pull/8634)
+ - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests
+ (https://github.com/hypermodeinc/dgraph/pull/8496)
+ - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible
+ (https://github.com/hypermodeinc/dgraph/pull/8575)
+ - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo
+ (https://github.com/hypermodeinc/dgraph/pull/8676)
+ - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
+ - fix(sec): CVE-2022-41721 (https://github.com/hypermodeinc/dgraph/pull/8633)
+ - fix(sec): CVE & OS Patching (https://github.com/hypermodeinc/dgraph/pull/8634)
-### Changed
+- **Changed**
- **Core Dgraph**
- - upgrade badger to v4.1.0 (https://github.com/hypermodeinc/dgraph/pull/8783) (https://github.com/hypermodeinc/dgraph/pull/8709)
- - fix(multitenancy) store namespace in predicate as a hex separated by a hyphen to prevent json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601)
- - fix(query): handle bad timezone correctly (https://github.com/hypermodeinc/dgraph/pull/8657)
- - chore(ludicroud): remove ludicrous mode from the code (https://github.com/hypermodeinc/dgraph/pull/8612)
- - fix(backup): make the /admin/backup and /admin/export API asynchronous (https://github.com/hypermodeinc/dgraph/pull/8554)
- - fix(mutation): validate mutation before applying it (https://github.com/hypermodeinc/dgraph/pull/8623)
-- **CI Enhancements**
- - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
- - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
- - chore(ci): Capture coverage from bulk load and LDBC tests (https://github.com/hypermodeinc/dgraph/pull/8478)
- - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
- - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
- - chore(linter): fix some of the warnings from gas linter (https://github.com/hypermodeinc/dgraph/pull/8664)
- - chore(linter): fix golangci config and some issues in tests (https://github.com/hypermodeinc/dgraph/pull/8669)
- - fix(linter): address gosimple linter reports & errors (https://github.com/hypermodeinc/dgraph/pull/8628)
+ - upgrade badger to v4.1.0 (https://github.com/hypermodeinc/dgraph/pull/8783)
+ (https://github.com/hypermodeinc/dgraph/pull/8709)
+  - fix(multitenancy): store namespace in predicate as a hex separated by a hyphen to prevent
+    json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601)
+ - fix(query): handle bad timezone correctly (https://github.com/hypermodeinc/dgraph/pull/8657)
+ - chore(ludicroud): remove ludicrous mode from the code
+ (https://github.com/hypermodeinc/dgraph/pull/8612)
+ - fix(backup): make the /admin/backup and /admin/export API asynchronous
+ (https://github.com/hypermodeinc/dgraph/pull/8554)
+ - fix(mutation): validate mutation before applying it
+ (https://github.com/hypermodeinc/dgraph/pull/8623)
+- **CI Enhancements**
+ - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
+ - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
+ - chore(ci): Capture coverage from bulk load and LDBC tests
+ (https://github.com/hypermodeinc/dgraph/pull/8478)
+ - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
+ - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
+ - chore(linter): fix some of the warnings from gas linter
+ (https://github.com/hypermodeinc/dgraph/pull/8664)
+ - chore(linter): fix golangci config and some issues in tests
+ (https://github.com/hypermodeinc/dgraph/pull/8669)
+ - fix(linter): address gosimple linter reports & errors
+ (https://github.com/hypermodeinc/dgraph/pull/8628)
## [v23.0.0-beta1] - 2023-03-01
+
[v23.0.0-beta1]: https://github.com/hypermodeinc/dgraph/compare/v22.0.2...v23.0.0-beta1
-### Added
+- **Added**
- **GraphQL**
- - fix(GraphQL): pass on HTTP request headers for subscriptions (https://github.com/hypermodeinc/dgraph/pull/8574)
+
+ - fix(GraphQL): pass on HTTP request headers for subscriptions
+ (https://github.com/hypermodeinc/dgraph/pull/8574)
- **Core Dgraph**
- - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size (https://github.com/hypermodeinc/dgraph/pull/8516)
- - feat(cloud): add `shared-instance` flag in limit superflag in alpha (https://github.com/hypermodeinc/dgraph/pull/8625)
- - chore(deps): update prometheus dependency, adds new metrics (https://github.com/hypermodeinc/dgraph/pull/8655)
- - feat(cdc): add superflag `tls` to enable TLS without CA or certs (https://github.com/hypermodeinc/dgraph/pull/8564)
+ - chore(debug): add `only-summary` flag in `dgraph debug` to show LSM tree and namespace size
+ (https://github.com/hypermodeinc/dgraph/pull/8516)
+ - feat(cloud): add `shared-instance` flag in limit superflag in alpha
+ (https://github.com/hypermodeinc/dgraph/pull/8625)
+ - chore(deps): update prometheus dependency, adds new metrics
+ (https://github.com/hypermodeinc/dgraph/pull/8655)
+ - feat(cdc): add superflag `tls` to enable TLS without CA or certs
+ (https://github.com/hypermodeinc/dgraph/pull/8564)
- chore(deps): bump badger up to v4 (https://github.com/hypermodeinc/dgraph/pull/8709)
- feat(multitenancy): namespace aware drop data (https://github.com/hypermodeinc/dgraph/pull/8511)
-### Fixed
+- **Fixed**
- **GraphQL**
- - fix(GraphQL): nested Auth Rules not working properly (https://github.com/hypermodeinc/dgraph/pull/8571)
+
+ - fix(GraphQL): nested Auth Rules not working properly
+ (https://github.com/hypermodeinc/dgraph/pull/8571)
- **Core Dgraph**
- - chore(logs): add logs to track dropped proposals (https://github.com/hypermodeinc/dgraph/pull/8568)
- - fix(debug): check length of wal entry before parsing (https://github.com/hypermodeinc/dgraph/pull/8560)
+
+ - chore(logs): add logs to track dropped proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8568)
+ - fix(debug): check length of wal entry before parsing
+ (https://github.com/hypermodeinc/dgraph/pull/8560)
- opt(schema): optimize populateSchema() (https://github.com/hypermodeinc/dgraph/pull/8565)
- - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small (https://github.com/hypermodeinc/dgraph/pull/8573)
- - fix(groot): do not upsert groot for all namespaces on restart (https://github.com/hypermodeinc/dgraph/pull/8561)
- - fix(restore): set kv version to restoreTs for all keys (https://github.com/hypermodeinc/dgraph/pull/8563)
- - fix(probe): do not contend for lock in lazy load (https://github.com/hypermodeinc/dgraph/pull/8566)
- - fix(core): fixed infinite loop in CommitToDisk (https://github.com/hypermodeinc/dgraph/pull/8614)
- - fix(proposals): incremental proposal key for zero proposals (https://github.com/hypermodeinc/dgraph/pull/8567)
- - fix(zero): fix waiting for random time while rate limiting (https://github.com/hypermodeinc/dgraph/pull/8656)
- - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654, https://github.com/hypermodeinc/dgraph/pull/8658)
- - opt(schema): load schema and types using Stream framework (https://github.com/hypermodeinc/dgraph/pull/8562)
- - fix(backup): use StreamWriter instead of KVLoader during backup restore (https://github.com/hypermodeinc/dgraph/pull/8510)
- - fix(audit): fixing audit logs for websocket connections (https://github.com/hypermodeinc/dgraph/pull/8627)
- - fix(restore): consider the banned namespaces while bumping (https://github.com/hypermodeinc/dgraph/pull/8559)
- - fix(backup): create directory before writing backup (https://github.com/hypermodeinc/dgraph/pull/8638)
+ - fix(zero): fix update membership to make bulk tablet proposal instead of multiple small
+ (https://github.com/hypermodeinc/dgraph/pull/8573)
+ - fix(groot): do not upsert groot for all namespaces on restart
+ (https://github.com/hypermodeinc/dgraph/pull/8561)
+ - fix(restore): set kv version to restoreTs for all keys
+ (https://github.com/hypermodeinc/dgraph/pull/8563)
+ - fix(probe): do not contend for lock in lazy load
+ (https://github.com/hypermodeinc/dgraph/pull/8566)
+ - fix(core): fixed infinite loop in CommitToDisk
+ (https://github.com/hypermodeinc/dgraph/pull/8614)
+ - fix(proposals): incremental proposal key for zero proposals
+ (https://github.com/hypermodeinc/dgraph/pull/8567)
+ - fix(zero): fix waiting for random time while rate limiting
+ (https://github.com/hypermodeinc/dgraph/pull/8656)
+ - chore(deps): upgrade badger (https://github.com/hypermodeinc/dgraph/pull/8654,
+ https://github.com/hypermodeinc/dgraph/pull/8658)
+ - opt(schema): load schema and types using Stream framework
+ (https://github.com/hypermodeinc/dgraph/pull/8562)
+ - fix(backup): use StreamWriter instead of KVLoader during backup restore
+ (https://github.com/hypermodeinc/dgraph/pull/8510)
+ - fix(audit): fixing audit logs for websocket connections
+ (https://github.com/hypermodeinc/dgraph/pull/8627)
+ - fix(restore): consider the banned namespaces while bumping
+ (https://github.com/hypermodeinc/dgraph/pull/8559)
+ - fix(backup): create directory before writing backup
+ (https://github.com/hypermodeinc/dgraph/pull/8638)
- **Test**
- - fix(test): avoid host volume mount in minio container (https://github.com/hypermodeinc/dgraph/pull/8569)
- - chore(test): add tests for lex/iri.go,chunker/chunk.go (https://github.com/hypermodeinc/dgraph/pull/8515)
+
+ - fix(test): avoid host volume mount in minio container
+ (https://github.com/hypermodeinc/dgraph/pull/8569)
+ - chore(test): add tests for lex/iri.go,chunker/chunk.go
+ (https://github.com/hypermodeinc/dgraph/pull/8515)
- chore(test): add Backup/Restore test for NFS (https://github.com/hypermodeinc/dgraph/pull/8551)
- - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed (https://github.com/hypermodeinc/dgraph/pull/8619)
- - chore(test): upgrade graphql tests to use go 1.19 (https://github.com/hypermodeinc/dgraph/pull/8662)
- - chore(test): add automated test to test multitenant --limit flag (https://github.com/hypermodeinc/dgraph/pull/8646)
- - chore(test): add restore test for more than 127 namespaces (https://github.com/hypermodeinc/dgraph/pull/8643)
- - fix(test): fix the corner case for raft entries test (https://github.com/hypermodeinc/dgraph/pull/8617)
+ - chore(test): add test that after snapshot is applied, GraphQL schema is refreshed
+ (https://github.com/hypermodeinc/dgraph/pull/8619)
+ - chore(test): upgrade graphql tests to use go 1.19
+ (https://github.com/hypermodeinc/dgraph/pull/8662)
+ - chore(test): add automated test to test multitenant --limit flag
+ (https://github.com/hypermodeinc/dgraph/pull/8646)
+ - chore(test): add restore test for more than 127 namespaces
+ (https://github.com/hypermodeinc/dgraph/pull/8643)
+ - fix(test): fix the corner case for raft entries test
+ (https://github.com/hypermodeinc/dgraph/pull/8617)
- **CD**
- - fix(build): update dockerfile to use cache busting and reduce image size (https://github.com/hypermodeinc/dgraph/pull/8652)
+
+ - fix(build): update dockerfile to use cache busting and reduce image size
+ (https://github.com/hypermodeinc/dgraph/pull/8652)
- chore(deps): update min go build version (https://github.com/hypermodeinc/dgraph/pull/8423)
- **Security**
- - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests (https://github.com/hypermodeinc/dgraph/pull/8496)
- - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible (https://github.com/hypermodeinc/dgraph/pull/8575)
- - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo (https://github.com/hypermodeinc/dgraph/pull/8676)
- - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
+ - chore(deps): bump certifi from 2020.4.5.1 to 2022.12.7 in /contrib/config/marketplace/aws/tests
+ (https://github.com/hypermodeinc/dgraph/pull/8496)
+ - chore(deps): bump github.com/docker/distribution from 2.7.1+incompatible to 2.8.0+incompatible
+ (https://github.com/hypermodeinc/dgraph/pull/8575)
+ - chore(deps): bump werkzeug from 0.16.1 to 2.2.3 in /contrib/embargo
+ (https://github.com/hypermodeinc/dgraph/pull/8676)
+ - fix(sec): upgrade networkx to (https://github.com/hypermodeinc/dgraph/pull/8613)
- fix(sec): CVE-2022-41721 (https://github.com/hypermodeinc/dgraph/pull/8633)
- fix(sec): CVE & OS Patching (https://github.com/hypermodeinc/dgraph/pull/8634)
-### Changed
+- **Changed**
- **Core Dgraph**
- - fix(multitenancy) store namespace in predicate as a hex separated by a hyphen to prevent json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601)
+
+  - fix(multitenancy): store namespace in predicate as a hex separated by a hyphen to prevent
+    json marshal issues (https://github.com/hypermodeinc/dgraph/pull/8601)
- fix(query): handle bad timezone correctly (https://github.com/hypermodeinc/dgraph/pull/8657)
- - chore(ludicroud): remove ludicrous mode from the code (https://github.com/hypermodeinc/dgraph/pull/8612)
- - fix(backup): make the /admin/backup and /admin/export API asynchronous (https://github.com/hypermodeinc/dgraph/pull/8554)
- - fix(mutation): validate mutation before applying it (https://github.com/hypermodeinc/dgraph/pull/8623)
+ - chore(ludicroud): remove ludicrous mode from the code
+ (https://github.com/hypermodeinc/dgraph/pull/8612)
+ - fix(backup): make the /admin/backup and /admin/export API asynchronous
+ (https://github.com/hypermodeinc/dgraph/pull/8554)
+ - fix(mutation): validate mutation before applying it
+ (https://github.com/hypermodeinc/dgraph/pull/8623)
- **CI Enhancements**
- - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
- - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
- - chore(ci): Capture coverage from bulk load and LDBC tests (https://github.com/hypermodeinc/dgraph/pull/8478)
- - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
- - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
- - chore(linter): fix some of the warnings from gas linter (https://github.com/hypermodeinc/dgraph/pull/8664)
- - chore(linter): fix golangci config and some issues in tests (https://github.com/hypermodeinc/dgraph/pull/8669)
- - fix(linter): address gosimple linter reports & errors (https://github.com/hypermodeinc/dgraph/pull/8628)
+ - fix(ci): unpin curl (https://github.com/hypermodeinc/dgraph/pull/8577)
+ - fix(ci): adjust cron schedules (https://github.com/hypermodeinc/dgraph/pull/8592)
+ - chore(ci): Capture coverage from bulk load and LDBC tests
+ (https://github.com/hypermodeinc/dgraph/pull/8478)
+ - chore(linter): enable gosec linter (https://github.com/hypermodeinc/dgraph/pull/8678)
+ - chore: apply go vet improvements (https://github.com/hypermodeinc/dgraph/pull/8620)
+ - chore(linter): fix some of the warnings from gas linter
+ (https://github.com/hypermodeinc/dgraph/pull/8664)
+ - chore(linter): fix golangci config and some issues in tests
+ (https://github.com/hypermodeinc/dgraph/pull/8669)
+ - fix(linter): address gosimple linter reports & errors
+ (https://github.com/hypermodeinc/dgraph/pull/8628)
## [v22.0.2] - 2022-12-16
+
[v22.0.2]: https://github.com/hypermodeinc/dgraph/compare/v22.0.1...v22.0.2
-### Added
+- **Added**
-- **ARM Support** - Dgraph now supports ARM64 Architecture for development (https://github.com/hypermodeinc/dgraph/pull/8543 https://github.com/hypermodeinc/dgraph/pull/8520 https://github.com/hypermodeinc/dgraph/pull/8503 https://github.com/hypermodeinc/dgraph/pull/8436 https://github.com/hypermodeinc/dgraph/pull/8405 https://github.com/hypermodeinc/dgraph/pull/8395)
+- **ARM Support** - Dgraph now supports ARM64 Architecture for development
+ (https://github.com/hypermodeinc/dgraph/pull/8543 https://github.com/hypermodeinc/dgraph/pull/8520
+ https://github.com/hypermodeinc/dgraph/pull/8503 https://github.com/hypermodeinc/dgraph/pull/8436
+ https://github.com/hypermodeinc/dgraph/pull/8405 https://github.com/hypermodeinc/dgraph/pull/8395)
- Additional logging and trace tags for debugging (https://github.com/hypermodeinc/dgraph/pull/8490)
-### Fixed
+- **Fixed**
- **EDgraph**
- - fix(ACL): Prevents permissions overrride and merges acl cache to persist permissions across different namespaces (https://github.com/hypermodeinc/dgraph/pull/8506)
+
+  - fix(ACL): Prevents permissions override and merges ACL cache to persist permissions across
+    different namespaces (https://github.com/hypermodeinc/dgraph/pull/8506)
- **Core Dgraph**
- - Fix(badger): Upgrade badger version to fix manifest corruption (https://github.com/hypermodeinc/dgraph/pull/8365)
- - fix(pagination): Fix after for regexp, match functions (https://github.com/hypermodeinc/dgraph/pull/8471)
- - fix(query): Do not execute filters if there are no source uids(https://github.com/hypermodeinc/dgraph/pull/8452)
- - fix(admin): make config changes to pass through gog middlewares (https://github.com/hypermodeinc/dgraph/pull/8442)
- - fix(sort): Only filter out nodes with positive offsets (https://github.com/hypermodeinc/dgraph/pull/8441)
- - fix(fragment): merge the nested fragments fields (https://github.com/hypermodeinc/dgraph/pull/8435)
+
+ - Fix(badger): Upgrade badger version to fix manifest corruption
+ (https://github.com/hypermodeinc/dgraph/pull/8365)
+ - fix(pagination): Fix after for regexp, match functions
+ (https://github.com/hypermodeinc/dgraph/pull/8471)
+  - fix(query): Do not execute filters if there are no source uids
+    (https://github.com/hypermodeinc/dgraph/pull/8452)
+ - fix(admin): make config changes to pass through gog middlewares
+ (https://github.com/hypermodeinc/dgraph/pull/8442)
+ - fix(sort): Only filter out nodes with positive offsets
+ (https://github.com/hypermodeinc/dgraph/pull/8441)
+ - fix(fragment): merge the nested fragments fields
+ (https://github.com/hypermodeinc/dgraph/pull/8435)
- Fix(lsbackup): Fix profiler in lsBackup (https://github.com/hypermodeinc/dgraph/pull/8432)
- - fix(DQL): optimize query for has function with offset (https://github.com/hypermodeinc/dgraph/pull/8431)
+ - fix(DQL): optimize query for has function with offset
+ (https://github.com/hypermodeinc/dgraph/pull/8431)
- **GraphQL**
- - Fix(GraphQL): Make mutation rewriting tests more robust (https://github.com/hypermodeinc/dgraph/pull/8449)
+
+ - Fix(GraphQL): Make mutation rewriting tests more robust
+ (https://github.com/hypermodeinc/dgraph/pull/8449)
- **Security**
- -
- CVE Fixes (35 total)
-
- #### CVE Fixes (35 total)
- - CVE-2013-4235
- - CVE-2016-20013
- - CVE-2016-2781
- - CVE-2017-11164
- - CVE-2018-16886
- - CVE-2019-0205
- - CVE-2019-0210
- - CVE-2019-11254
- - CVE-2019-16167
- - CVE-2020-29652
- - CVE-2021-31525
- - CVE-2021-33194
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-38561
- - CVE-2021-39537
- - CVE-2021-43565
- - CVE-2021-44716
- - CVE-2021-44758
- - CVE-2022-21698
- - CVE-2022-27191
- - CVE-2022-27664
- - CVE-2022-29458
- - CVE-2022-29526
- - CVE-2022-3219
- - CVE-2022-32221
- - CVE-2022-3437
- - CVE-2022-35737
- - CVE-2022-3715
- - CVE-2022-3821
- - CVE-2022-39377
- - CVE-2022-41916
- - CVE-2022-42800
- - CVE-2022-42898
- - CVE-2022-44640
- -
- GHSA Fixes (2 total)
-
- #### GHSE Fixes (2 total)
- - GHSA-69ch-w2m2-3vjp
- - GHSA-m332-53r6-2w93
-
-### Changed
+
+  - **CVE Fixes** (35 total)
+
+ - CVE-2013-4235
+ - CVE-2016-20013
+ - CVE-2016-2781
+ - CVE-2017-11164
+ - CVE-2018-16886
+ - CVE-2019-0205
+ - CVE-2019-0210
+ - CVE-2019-11254
+ - CVE-2019-16167
+ - CVE-2020-29652
+ - CVE-2021-31525
+ - CVE-2021-33194
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-38561
+ - CVE-2021-39537
+ - CVE-2021-43565
+ - CVE-2021-44716
+ - CVE-2021-44758
+ - CVE-2022-21698
+ - CVE-2022-27191
+ - CVE-2022-27664
+ - CVE-2022-29458
+ - CVE-2022-29526
+ - CVE-2022-3219
+ - CVE-2022-32221
+ - CVE-2022-3437
+ - CVE-2022-35737
+ - CVE-2022-3715
+ - CVE-2022-3821
+ - CVE-2022-39377
+ - CVE-2022-41916
+ - CVE-2022-42800
+ - CVE-2022-42898
+ - CVE-2022-44640
+
+  - **GHSA Fixes** (2 total)
+
+ - GHSA-69ch-w2m2-3vjp
+ - GHSA-m332-53r6-2w93
+
+**Changed**
- **CI Enhancements**
- - Added more unit tests (https://github.com/hypermodeinc/dgraph/pull/8470 https://github.com/hypermodeinc/dgraph/pull/8489 https://github.com/hypermodeinc/dgraph/pull/8479 https://github.com/hypermodeinc/dgraph/pull/8488 https://github.com/hypermodeinc/dgraph/pull/8433)
- - [Coveralls](https://coveralls.io/github/dgraph-io/dgraph?branch=main) on CI is enhanced to measure code coverage for integration tests (https://github.com/hypermodeinc/dgraph/pull/8494)
- - [**LDBC Benchmarking**](https://ldbcouncil.org) in enabled on [CI](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-ldbc-tests.yml)
-- **CD Enhancements**
- - Enhanced our [CD Pipeline](https://github.com/hypermodeinc/dgraph/actions/workflows/cd-dgraph.yml) to support ARM64 binaries and docker-images (https://github.com/hypermodeinc/dgraph/pull/8520)
- - Enhanced [dgraph-lambda](https://github.com/hypermodeinc/dgraph-lambda) to support arm64 (https://github.com/hypermodeinc/dgraph-lambda/pull/39 https://github.com/hypermodeinc/dgraph-lambda/pull/38 https://github.com/hypermodeinc/dgraph-lambda/pull/37)
- - Enhanced [badger](https://github.com/dgraph-io/badger) to support arm64 (https://github.com/dgraph-io/badger/pull/1838)
+ - Added more unit tests (https://github.com/hypermodeinc/dgraph/pull/8470
+ https://github.com/hypermodeinc/dgraph/pull/8489
+ https://github.com/hypermodeinc/dgraph/pull/8479
+ https://github.com/hypermodeinc/dgraph/pull/8488
+ https://github.com/hypermodeinc/dgraph/pull/8433)
+ - [Coveralls](https://coveralls.io/github/dgraph-io/dgraph?branch=main) on CI is enhanced to
+ measure code coverage for integration tests (https://github.com/hypermodeinc/dgraph/pull/8494)
+  - [**LDBC Benchmarking**](https://ldbcouncil.org) is enabled on
+ [CI](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-ldbc-tests.yml)
+- **CD Enhancements**
+ - Enhanced our
+ [CD Pipeline](https://github.com/hypermodeinc/dgraph/actions/workflows/cd-dgraph.yml) to support
+    ARM64 binaries and Docker images (https://github.com/hypermodeinc/dgraph/pull/8520)
+ - Enhanced [dgraph-lambda](https://github.com/hypermodeinc/dgraph-lambda) to support arm64
+ (https://github.com/hypermodeinc/dgraph-lambda/pull/39
+ https://github.com/hypermodeinc/dgraph-lambda/pull/38
+ https://github.com/hypermodeinc/dgraph-lambda/pull/37)
+ - Enhanced [badger](https://github.com/dgraph-io/badger) to support arm64
+ (https://github.com/dgraph-io/badger/pull/1838)
## [v22.0.1] - 2022-11-10
+
[v22.0.1]: https://github.com/hypermodeinc/dgraph/compare/v22.0.0...v22.0.1
-### Fixed
+- **Fixed**
+
- **CD Release Pipeline**
- - Badger Binary fetch steps added to the release CD pipeline (https://github.com/hypermodeinc/dgraph/pull/8425)
+ - Badger Binary fetch steps added to the release CD pipeline
+ (https://github.com/hypermodeinc/dgraph/pull/8425)
- Corresponding Badger artifacts will be fetched & uploaded from v22.0.1 onwards
## [v22.0.0] - 2022-10-21
+
[v22.0.0]: https://github.com/hypermodeinc/dgraph/compare/v21.03.2...v22.0.0
-> **Note**
-> `v22.0.0` release is based of `v21.03.2` release.
-> https://discuss.dgraph.io/t/dgraph-v22-0-0-rc1-20221003-release-candidate/17839
+> **Note** The `v22.0.0` release is based on the `v21.03.2` release.
+> https://discuss.dgraph.io/t/dgraph-v22-0-0-rc1-20221003-release-candidate/17839
-> **Warning**
-> We are discontinuing support for `v21.12.0`.
-> This will be a breaking change for anyone moving from `v21.12.0` to `v.22.0.0`.
+> **Warning** We are discontinuing support for `v21.12.0`. This will be a breaking change for anyone
+> moving from `v21.12.0` to `v22.0.0`.
+
+- **Fixed**
-### Fixed
- **GraphQL**
- fix(GraphQL): optimize eq filter queries (https://github.com/hypermodeinc/dgraph/pull/7895)
- - fix(GraphQL): add validation of null values with correct order of graphql rule validation (https://github.com/hypermodeinc/dgraph/pull/8333)
- - fix(GraphQL) fix auth query rewriting with ID filter (https://github.com/hypermodeinc/dgraph/pull/8157)
+ - fix(GraphQL): add validation of null values with correct order of graphql rule validation
+ (https://github.com/hypermodeinc/dgraph/pull/8333)
+  - fix(GraphQL): fix auth query rewriting with ID filter
+ (https://github.com/hypermodeinc/dgraph/pull/8157)
- **EDgraph**
- - fix(query): Prevent multiple entries for same predicate in mutations (https://github.com/hypermodeinc/dgraph/pull/8332)
+ - fix(query): Prevent multiple entries for same predicate in mutations
+ (https://github.com/hypermodeinc/dgraph/pull/8332)
- **Posting**
- fix(rollups): Fix splits in roll-up (https://github.com/hypermodeinc/dgraph/pull/8297)
- **Security**
- -
- CVE Fixes (417 total)
-
- #### CVE Fixes (417 total)
- - CVE-2019-0210
- - CVE-2019-0205
- - CVE-2021-43565
- - CVE-2022-27664
- - CVE-2021-38561
- - CVE-2021-44716
- - CVE-2021-33194
- - CVE-2022-27191
- - CVE-2020-29652
- - CVE-2018-16886
- - CVE-2022-21698
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-3116
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2022-37434
- - CVE-2020-16156
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2021-37750
- - CVE-2021-36222
- - CVE-2020-35525
- - CVE-2020-35527
- - CVE-2021-20223
- - CVE-2020-9794
- - CVE-2022-29526
- - CVE-2021-31525
- - CVE-2019-11254
- - CVE-2022-3219
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2017-11164
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2016-2781
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2022-3219
- - CVE-2016-2781
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2017-11164
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2022-3219
- - CVE-2016-2781
- - CVE-2021-3671
- - CVE-2022-3219
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2021-3671
- - CVE-2022-3219
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2022-3219
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2019-16167
- - CVE-2013-4235
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2013-4235
- - CVE-2021-3671
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2017-11164
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2022-29458
- - CVE-2021-39537
- - CVE-2021-3671
- - CVE-2021-43618
- - CVE-2016-20013
- - CVE-2021-3671
- - CVE-2016-2781
- - CVE-2021-3671
- - CVE-2022-1587
- - CVE-2022-1586
- - CVE-2021-3671
- - CVE-2020-9991
- - CVE-2020-9849
-
- -
- GHSA Fixes (5 total)
-
- #### GHSA Fixes (5 total)
- - GHSA-jq7p-26h5-w78r
- - GHSA-8c26-wmh5-6g9v
- - GHSA-h6xx-pmxh-3wgp
- - GHSA-cg3q-j54f-5p7p
- - GHSA-wxc4-f4m6-wwqv
-
- - fix(sec): fixing HIGH CVEs (https://github.com/hypermodeinc/dgraph/pull/8289)
- - fix(sec): CVE High Vulnerability (https://github.com/hypermodeinc/dgraph/pull/8277)
- - fix(sec): Fixing CVE-2021-31525 (https://github.com/hypermodeinc/dgraph/pull/8274)
- - fix(sec): CVE-2019-11254 (https://github.com/hypermodeinc/dgraph/pull/8270)
-
-### Changed
+
+  - **CVE Fixes** (417 total)
+
+ - CVE-2019-0210
+ - CVE-2019-0205
+ - CVE-2021-43565
+ - CVE-2022-27664
+ - CVE-2021-38561
+ - CVE-2021-44716
+ - CVE-2021-33194
+ - CVE-2022-27191
+ - CVE-2020-29652
+ - CVE-2018-16886
+ - CVE-2022-21698
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-3116
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2022-37434
+ - CVE-2020-16156
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2021-37750
+ - CVE-2021-36222
+ - CVE-2020-35525
+ - CVE-2020-35527
+ - CVE-2021-20223
+ - CVE-2020-9794
+ - CVE-2022-29526
+ - CVE-2021-31525
+ - CVE-2019-11254
+ - CVE-2022-3219
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2017-11164
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2016-2781
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2022-3219
+ - CVE-2016-2781
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2017-11164
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2022-3219
+ - CVE-2016-2781
+ - CVE-2021-3671
+ - CVE-2022-3219
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2021-3671
+ - CVE-2022-3219
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2022-3219
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2019-16167
+ - CVE-2013-4235
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2013-4235
+ - CVE-2021-3671
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2017-11164
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2022-29458
+ - CVE-2021-39537
+ - CVE-2021-3671
+ - CVE-2021-43618
+ - CVE-2016-20013
+ - CVE-2021-3671
+ - CVE-2016-2781
+ - CVE-2021-3671
+ - CVE-2022-1587
+ - CVE-2022-1586
+ - CVE-2021-3671
+ - CVE-2020-9991
+ - CVE-2020-9849
+
+  - **GHSA Fixes** (5 total)
+
+ - GHSA-jq7p-26h5-w78r
+ - GHSA-8c26-wmh5-6g9v
+ - GHSA-h6xx-pmxh-3wgp
+ - GHSA-cg3q-j54f-5p7p
+ - GHSA-wxc4-f4m6-wwqv
+
+ - fix(sec): fixing HIGH CVEs (https://github.com/hypermodeinc/dgraph/pull/8289)
+ - fix(sec): CVE High Vulnerability (https://github.com/hypermodeinc/dgraph/pull/8277)
+ - fix(sec): Fixing CVE-2021-31525 (https://github.com/hypermodeinc/dgraph/pull/8274)
+ - fix(sec): CVE-2019-11254 (https://github.com/hypermodeinc/dgraph/pull/8270)
+
+**Changed**
+
- **CI Test Infrastructure**
- - Configured to run with [Github Actions](https://github.com/hypermodeinc/dgraph/tree/main/.github/workflows)
+ - Configured to run with
+ [Github Actions](https://github.com/hypermodeinc/dgraph/tree/main/.github/workflows)
- Stability Improvements to test harness
- - Enabled [Unit/Integration Tests](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-tests.yml)
- - Enabled [Load Tests](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-load-tests.yml)
+ - Enabled
+ [Unit/Integration Tests](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-tests.yml)
+ - Enabled
+ [Load Tests](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-dgraph-load-tests.yml)
- Enabled [Linters](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-golang-lint.yml)
- Enabled [Code Coverage](https://coveralls.io/github/dgraph-io/dgraph?branch=main)
- **CI Security**
- - Configured to run with [Github Actions](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/ci-aqua-security-trivy-tests.yml)
- - Enabled [Trivy Scans](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-aqua-security-trivy-tests.yml)
+ - Configured to run with
+ [Github Actions](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/ci-aqua-security-trivy-tests.yml)
+ - Enabled
+ [Trivy Scans](https://github.com/hypermodeinc/dgraph/actions/workflows/ci-aqua-security-trivy-tests.yml)
- Enabled dependabot scans
- - Configured to run with [Github Actions](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/ci-aqua-security-trivy-tests.yml)
+ - Configured to run with
+ [Github Actions](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/ci-aqua-security-trivy-tests.yml)
- **CD Release Pipeline**
- - Automated [Release Pipeline](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/cd-dgraph.yml) to facilitate building of dgraph-binary & corresponding docker-images. The built artifacts are published to repositories through the same pipeline.
+ - Automated
+ [Release Pipeline](https://github.com/hypermodeinc/dgraph/blob/main/.github/workflows/cd-dgraph.yml)
+    to facilitate building the dgraph binary & corresponding Docker images. The built artifacts
+    are published to repositories through the same pipeline.
- [**Github Issues Enabled**](https://github.com/hypermodeinc/dgraph/issues/new/choose)
-
## [21.03.2] - 2021-08-26
+
[21.03.2]: https://github.com/hypermodeinc/dgraph/compare/v21.03.1...v21.03.2
-### Fixed
+- **Fixed**
- GraphQL
+
- Handle extend keyword for Queries and Mutations ([#7923][])
- Core Dgraph
@@ -1188,12 +1443,12 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- fix(live): quote the xid when doing upsert ([#7999][])
- fix(export): Write temporary files for export to the t directory. ([#7998][])
-### Changed
+**Changed**
- protobuf: upgrade golang/protobuf library v1.4.1 -> v1.5.2 ([#7949][])
- chore(raft): Log packets message less frequently. ([#7913][])
-### Added
+**Added**
- feat(acl): allow access to all the predicates using wildcard. ([#7993][])
- feat(Multi-tenancy): Add namespaces field to state. ([#7936][])
@@ -1214,14 +1469,17 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#7936]: https://github.com/hypermodeinc/dgraph/issues/7936
## [21.03.1] - 2021-06-16
+
[21.03.1]: https://github.com/hypermodeinc/dgraph/compare/v21.03.0...v21.03.1
-### Fixed
+- **Fixed**
+
- GraphQL
+
- fix(GraphQL): fix @cascade with Pagination for @auth queries ([#7695][])
- Fix(GraphQL): Fix GraphQL encoding in case of empty list ([#7726][]) ([#7730][])
- Fix(GraphQL): Add filter in DQL query in case of reverse predicate ([#7728][]) ([#7733][])
- - Fix(graphql): Fix error message of lambdaOnMutate directive ([#7751][]) ([#7754][])
+ - Fix(graphql): Fix error message of lambdaOnMutate directive ([#7751][]) ([#7754][])
- Core Dgraph
- fix(vault): Hide ACL flags when not required ([#7701][])
@@ -1233,7 +1491,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
  - bug fix to permit audit streaming to stdout writer ([#7803][]) ([#7804][])
- fix(drop): attach galaxy namespace to drop attr done on 20.11 backup ([#7827][])
- fix: Prevent proposal from being dropped accidentally ([#7741][]) ([#7811][])
- - fix(schema-update): Start opIndexing only when index creation is required. ([#7845][]) ([#7847][])
+ - fix(schema-update): Start opIndexing only when index creation is required. ([#7845][])
+ ([#7847][])
- fix(export): Fix facet export of reference type postings to JSON format ([#7744][]) ([#7756][])
  - fix(lease): don't do rate limiting when limit is not specified ([#7787][])
- fix(lease): prevent ID lease overflow ([#7802][])
@@ -1244,14 +1503,17 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- fix(DQL): revert changes related to cascade pagination with sort ([#7885][]) ([#7888][])
- fix(metrics): Expose dgraph_num_backups_failed_total metric view. ([#7900][]) ([#7904][])
-### Changed
- - opt(GraphQL): filter existence queries on GraphQL side instead of using @filter(type) ([#7757][]) ([#7760][])
+**Changed**
+
+- opt(GraphQL): filter existence queries on GraphQL side instead of using @filter(type) ([#7757][])
+ ([#7760][])
-### Added
- - feat(cdc): Add support for SCRAM SASL mechanism ([#7765][]) ([#7767][])
- - Add asynchronous task API ([#7781][])
- - make exports synchronous again ([#7877][])
- - feat(schema): do schema versioning and make backup non-blocking for i… ([#7856][]) ([#7873][])
+**Added**
+
+- feat(cdc): Add support for SCRAM SASL mechanism ([#7765][]) ([#7767][])
+- Add asynchronous task API ([#7781][])
+- make exports synchronous again ([#7877][])
+- feat(schema): do schema versioning and make backup non-blocking for i… ([#7856][]) ([#7873][])
[#7701]: https://github.com/hypermodeinc/dgraph/issues/7701
[#7737]: https://github.com/hypermodeinc/dgraph/issues/7737
@@ -1279,7 +1541,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#7802]: https://github.com/hypermodeinc/dgraph/issues/7802
[#7832]: https://github.com/hypermodeinc/dgraph/issues/7832
[#7834]: https://github.com/hypermodeinc/dgraph/issues/7834
-[#7796]: https://github.com/hypermodeinc/dgraph/issues/7796
[#7781]: https://github.com/hypermodeinc/dgraph/issues/7781
[#7713]: https://github.com/hypermodeinc/dgraph/issues/7713
[#7797]: https://github.com/hypermodeinc/dgraph/issues/7797
@@ -1303,52 +1564,62 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#7900]: https://github.com/hypermodeinc/dgraph/issues/7900
[#7904]: https://github.com/hypermodeinc/dgraph/issues/7904
-
## [21.03.0] - 2021-04-07
+
[21.03.0]: https://github.com/hypermodeinc/dgraph/compare/v20.11.0...v21.03.0
-### Changed
+- **Changed**
- [BREAKING] Feat(flags): expand badger to accept all valid options ([#7677][])
- [BREAKING] Feat(Dgraph): Read-Only replicas ([#7272][])
-- [BREAKING] Consolidate multiple flags into a few SuPerflags ([#7436][]) ([#7337][]) ([#7560][]) ([#7652][]) ([#7675][])
+- [BREAKING] Consolidate multiple flags into a few superflags ([#7436][]) ([#7337][]) ([#7560][])
+ ([#7652][]) ([#7675][])
- [BREAKING] Feat(zero): Make zero lease out namespace IDs ([#7341][])
- [BREAKING] Fix(commit): make txn context more robust ([#7659][])
- [BREAKING] Fix(Query): Return error for illegal math operations. ([#7631][])
- [BREAKING] Rename Badger metrics. ([#7507][])
- [BREAKING] Fix(Backups): new badger Superflag, NumGoroutines option solves OOM crashes ([#7387][])
- [BREAKING] Remove restore tracker as its not necessary ([#7148][])
-- [BREAKING] Chore(GraphQL): Remove `dgraph.graphql.p_sha256hash` predicate and merge it into `dgraph.graphql.p_query` ([#7451][])
-- [BREAKING] Introducing Multi-Tenancy in dgraph ([#7293][]) ([#7400][]) ([#7397][]) ([#7399][]) ([#7377][]) ([#7414][]) ([#7418][])
+- [BREAKING] Chore(GraphQL): Remove `dgraph.graphql.p_sha256hash` predicate and merge it into
+ `dgraph.graphql.p_query` ([#7451][])
+- [BREAKING] Introducing Multi-Tenancy in dgraph ([#7293][]) ([#7400][]) ([#7397][]) ([#7399][])
+ ([#7377][]) ([#7414][]) ([#7418][])
-### Added
+**Added**
- GraphQL
- - Feat(GraphQL): Zero HTTP endpoints are now available at GraphQL admin (GraphQL-1118) ([#6649][]) ([#7670][])
+
+ - Feat(GraphQL): Zero HTTP endpoints are now available at GraphQL admin (GraphQL-1118) ([#6649][])
+ ([#7670][])
- Feat(GraphQL): Webhooks on add/update/delete mutations (GraphQL-1045) ([#7494][]) ([#7616][])
- Feat(GraphQL): Allow Multiple JWKUrls for auth. ([#7528][]) ([#7581][])
- Feat(GraphQL): allow string --> Int64 hardcoded coercing ([#7584][])
- - Feat(Apollo): Add support for `@provides` and `@requires` directive. ([#7503][])
- - Feat(GraphQL): Handle upsert with multiple XIDs in case one of the XIDs does not exist ([#7472][])
+  - Feat(Apollo): Add support for the `@provides` and `@requires` directives. ([#7503][])
+ - Feat(GraphQL): Handle upsert with multiple XIDs in case one of the XIDs does not exist
+ ([#7472][])
- Feat(GraphQL): Delete redundant reference to inverse object ([#7469][])
  - Feat(GraphQL): upgrade GraphQL-transport-ws module ([#7441][])
  - Feat(GraphQL): This PR allows multiple `@id` fields in a type. ([#7235][])
- Feat(GraphQL): Add support for GraphQL Upsert Mutations ([#7433][])
- - Feat(GraphQL): This PR adds subscriptions to custom DQL. ([#7385][])
- - Feat(GraphQL): Make XID node referencing invariant of order in which XIDs are referenced in Mutation Rewriting ([#7448][])
- - Feat(GraphQL): Dgraph.Authorization should with irrespective of number of spaces after # ([#7410][])
- - Feat(GraphQL): adding auth token support for regexp, in and arrays ([#7039][])
+ - Feat(GraphQL): This PR adds subscriptions to custom DQL. ([#7385][])
+ - Feat(GraphQL): Make XID node referencing invariant of order in which XIDs are referenced in
+ Mutation Rewriting ([#7448][])
+  - Feat(GraphQL): Dgraph.Authorization should work irrespective of the number of spaces after #
+    ([#7410][])
+ - Feat(GraphQL): adding auth token support for regexp, in and arrays ([#7039][])
- Feat(GraphQL): Extend Support of IN filter to all the scalar data types ([#7340][])
- Feat(GraphQL): Add `@include` and `@skip` to the Directives ([#7314][])
- Feat(GraphQL): add support for has filter with list of arguments. ([#7406][])
- Feat(GraphQL): Add support for has filter on list of fields. ([#7363][])
- Feat(GraphQL): Allow standard claims into auth variables ([#7381][])
- - Perf(GraphQL): Generate GraphQL query response by optimized JSON encoding (GraphQL-730) ([#7371][])
+ - Perf(GraphQL): Generate GraphQL query response by optimized JSON encoding (GraphQL-730)
+ ([#7371][])
- Feat(GraphQL): Extend Support For Apollo Federation ([#7275][])
- Feat(GraphQL): Support using custom DQL with `@groupby` ([#7476][])
- Feat(GraphQL): Add support for passing OAuth Bearer token as authorization JWT ([#7490][])
- Core Dgraph
+
- Feat(query): Add mechanism to have a limit on number of pending queries ([#7603][])
- Perf(bulk): Reuse allocator ([#7360][])
- Perf(compression): Use gzip with BestSpeed in export and backup ([#7643][]) ([#7683][])
@@ -1374,8 +1645,10 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Perf(dgraph) - Use badger sinceTs in backups ([#7392][])
- Perf(backup): Reorganize the output of lsbackup command ([#7354][])
-### Fixed
+**Fixed**
+
- GraphQL
+
- Fix(GraphQL): Fix Execution Trace for Add and Update Mutations ([#7656][])
- Fix(GraphQL): Add error handling for unrecognized args to generate directive. ([#7612][])
- Fix(GraphQL): Fix panic when no schema exists for a new namespace ([#7630][])
@@ -1390,28 +1663,37 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Fix(GraphQL): Fix custom(dql: ...) with `__typename` (GraphQL-1098) ([#7569][])
- Fix(GraphQL): Change variable name generation for interface auth rules ([#7559][])
- Fix(GraphQL): Apollo federation now works with lambda (GraphQL-1084) ([#7558][])
- - Fix(GraphQL): Fix empty remove in update mutation patch, that remove all the data for nodes in filter. ([#7563][])
- - Fix(GraphQL): Fix order of entities query result ([#7542][])
+  - Fix(GraphQL): Fix empty remove in update mutation patch that removed all the data for nodes
+    in the filter. ([#7563][])
+ - Fix(GraphQL): Fix order of entities query result ([#7542][])
- Fix(GraphQL): Change variable name generation from `Type` to `Type_` ([#7556][])
- Fix(GraphQL): Fix duplicate xid error for multiple xid fields. ([#7546][])
- Fix(GraphQL): Fix query rewriting for multiple order on nested field. ([#7523][])
- - Fix(GraphQL) Fix empty `type Query` with single extended type definition in the schema. ([#7517][])
+  - Fix(GraphQL): Fix empty `type Query` with single extended type definition in the schema.
+ ([#7517][])
- Fix(GraphQL): Added support for parameterized cascade with variables. ([#7477][])
- Fix(GraphQL): Fix fragment expansion in auth queries (GraphQL-1030) ([#7467][])
- Fix(GraphQL): Refactor Mutation Rewriter for Add and Update Mutations ([#7409][])
- - Fix(GraphQL): Fix `@auth` rules evaluation in case of null variables in custom claims. ([#7380][])
+ - Fix(GraphQL): Fix `@auth` rules evaluation in case of null variables in custom claims.
+ ([#7380][])
- Fix(GraphQL): Fix interface query with auth rules. ([#7401][])
- - Fix(GraphQL): Added error for case when multiple filter functions are used in filter. ([#7368][])
+  - Fix(GraphQL): Added an error for the case when multiple filter functions are used in a
+    filter. ([#7368][])
- Fix(subscriptions): Fix subscription to use the kv with the max version ([#7349][])
- - Fix(GraphQL):This PR Fix a panic when we pass a single ID as a integer and expected type is `[ID]`.We now coerce that to type array of string. ([#7325][])
- - Fix(GraphQL): This PR Fix multi cors and multi schema nodes issue by selecting one of the latest added nodes, and add dgraph type to cors. ([#7270][])
+  - Fix(GraphQL): Fix a panic when we pass a single ID as an integer and the expected type is
+    `[ID]`. We now coerce that to an array of strings. ([#7325][])
+  - Fix(GraphQL): Fix the multi-CORS and multi-schema-node issue by selecting one of the latest
+    added nodes, and add the dgraph type to CORS. ([#7270][])
  - Fix(GraphQL): This PR allows using `__typename` in mutations. ([#7285][])
- - Fix(GraphQL): Fix auth-token propagation for HTTP endpoints resolved through GraphQL (GraphQL-946) ([#7245][])
- - Fix(GraphQL): This PR addd input coercion from single object to list and Fix panic when we pass single ID in filter as a string. ([#7133][])
+ - Fix(GraphQL): Fix auth-token propagation for HTTP endpoints resolved through GraphQL
+ (GraphQL-946) ([#7245][])
+  - Fix(GraphQL): This PR adds input coercion from a single object to a list and fixes a panic
+    when we pass a single ID in a filter as a string. ([#7133][])
  - Fix(GraphQL): adding support for `@id` with types other than string ([#7019][])
- Fix(GraphQL): Fix panic caused by incorrect input coercion of scalar to list ([#7405][])
- Core Dgraph
+
- Fix(flag): Fix bulk loader flag and remove flag parsing from critical path ([#7679][])
- Fix(query): Fix pagination with match functions ([#7668][])
- Fix(postingList): Acquire lock before reading the cached posting list ([#7632][])
@@ -1459,7 +1741,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Fix(export-backup): Fix memory leak in backup export ([#7452][])
- Fix(ACL): use acl for export, add GoG admin resolvers ([#7420][])
- Fix(restore): reset acl accounts once restore is done if necessary ([#7202][])
- - Fix(restore): multiple restore requests should be rejected and proposals should not be submitted ([#7118][])
+ - Fix(restore): multiple restore requests should be rejected and proposals should not be submitted
+ ([#7118][])
[#7677]: https://github.com/hypermodeinc/dgraph/issues/7677
[#7272]: https://github.com/hypermodeinc/dgraph/issues/7272
@@ -1474,7 +1757,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#7507]: https://github.com/hypermodeinc/dgraph/issues/7507
[#7387]: https://github.com/hypermodeinc/dgraph/issues/7387
[#7148]: https://github.com/hypermodeinc/dgraph/issues/7148
-[#7143]: https://github.com/hypermodeinc/dgraph/issues/7143
[#7451]: https://github.com/hypermodeinc/dgraph/issues/7451
[#6649]: https://github.com/hypermodeinc/dgraph/issues/6649
[#7670]: https://github.com/hypermodeinc/dgraph/issues/7670
@@ -1612,9 +1894,10 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#7118]: https://github.com/hypermodeinc/dgraph/issues/7118
## [20.07.1] - 2020-09-17
+
[20.07.1]: https://github.com/hypermodeinc/dgraph/compare/v20.07.0...v20.07.1
-### Changed
+- **Changed**
- GraphQL
- Remove github issues link from the error messages. ([#6183][])
@@ -1622,7 +1905,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Add retry for schema update ([#6098][])
- Queue keys for rollup during mutation. ([#6151][])
-### Added
+**Added**
- GraphQL
- Adds auth for subscriptions. ([#6165][])
@@ -1630,7 +1913,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Add flags to set table and vlog loading mode for zero. ([#6342][])
- Add flag to set up compression in zero. ([#6355][])
-### Fixed
+**Fixed**
- GraphQL
- Multiple queries in a single request should not share the same variables. ([#6158][])
@@ -1655,7 +1938,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Sort manifests by BackupNum in file handler. ([#6279][])
- Fixes queries which use variable at the top level. ([#6290][])
- Return error on closed DB. ([#6320][])
-- Optimize splits by doing binary search. Clear the pack from the main list. ([#6332][])
+- Optimize splits by doing binary search. Clear the pack from the main list. ([#6332][])
- Proto fix needed for PR [#6331][]. ([#6346][])
- Sentry nil pointer check. ([#6374][])
- Don't store start_ts in postings. ([#6213][])
@@ -1700,8 +1983,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#6180]: https://github.com/hypermodeinc/dgraph/issues/6180
[#6183]: https://github.com/hypermodeinc/dgraph/issues/6183
[#6179]: https://github.com/hypermodeinc/dgraph/issues/6179
-[#6009]: https://github.com/hypermodeinc/dgraph/issues/6009
-[#6095]: https://github.com/hypermodeinc/dgraph/issues/6095
[#6098]: https://github.com/hypermodeinc/dgraph/issues/6098
[#6151]: https://github.com/hypermodeinc/dgraph/issues/6151
[#6165]: https://github.com/hypermodeinc/dgraph/issues/6165
@@ -1717,20 +1998,21 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#6213]: https://github.com/hypermodeinc/dgraph/issues/6213
## [20.03.5] - 2020-09-17
+
[20.03.5]: https://github.com/hypermodeinc/dgraph/compare/v20.03.4...v20.03.5
-### Changed
+- **Changed**
- Add retry for schema update. ([#6097][])
- Queue keys for rollup during mutation. ([#6150][])
-### Added
+**Added**
- Add --cache_mb and --cache_percentage flags. ([#6287][])
- Add flag to set up compression in zero. ([#6356][])
- Add flags to set table and vlog loading mode for zero. ([#6343][])
-### Fixed
+**Fixed**
- GraphQL
- Prevent empty values in fields having `id` directive. ([#6197][])
@@ -1740,19 +2022,19 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Perform rollups more aggressively. ([#6147][])
- Don't allow idx flag to be set to 0 on dgraph zero. ([#6156][])
- Stop forcing RAM mode for the write-ahead log. ([#6260][])
-- Fix panicwrap parent check. ([#6300][])
+- Fix panicwrap parent check. ([#6300][])
- Sort manifests by backup number. ([#6280][])
- Don't store start_ts in postings. ([#6214][])
- Update reverse index when updating single UID predicates. ([#6006][])
-- Return error on closed DB. ([#6321][])
-- Optimize splits by doing binary search. Clear the pack from the main list. ([#6333][])
+- Return error on closed DB. ([#6321][])
+- Optimize splits by doing binary search. Clear the pack from the main list. ([#6333][])
- Sentry nil pointer check. ([#6375][])
- Use z.Closer instead of y.Closer. ([#6398][])
- Make Alpha Shutdown Again. ([#6403][])
- Force exit if CTRL-C is caught before initialization. ([#6409][])
- Batch list in bulk loader to avoid panic. ([#6445][])
- Enterprise features
- - Make backups cancel other tasks. ([#6244][])
+ - Make backups cancel other tasks. ([#6244][])
- Add a lock to backups to process one request at a time. ([#6340][])
[#6409]: https://github.com/hypermodeinc/dgraph/issues/6409
@@ -1773,9 +2055,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#6375]: https://github.com/hypermodeinc/dgraph/issues/6375
[#6287]: https://github.com/hypermodeinc/dgraph/issues/6287
[#6356]: https://github.com/hypermodeinc/dgraph/issues/6356
-[#5988]: https://github.com/hypermodeinc/dgraph/issues/5988
[#6097]: https://github.com/hypermodeinc/dgraph/issues/6097
-[#6094]: https://github.com/hypermodeinc/dgraph/issues/6094
[#6150]: https://github.com/hypermodeinc/dgraph/issues/6150
[#6125]: https://github.com/hypermodeinc/dgraph/issues/6125
[#6138]: https://github.com/hypermodeinc/dgraph/issues/6138
@@ -1784,21 +2064,22 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#6147]: https://github.com/hypermodeinc/dgraph/issues/6147
## [1.2.7] - 2020-09-21
+
[1.2.7]: https://github.com/hypermodeinc/dgraph/compare/v1.2.6...v1.2.7
-### Added
+- **Added**
- Add --cache_mb and --cache_percentage flags. ([#6288][])
- Add flag to set up compression in zero. ([#6357][])
- Add flags to set table and vlog loading mode for zero. ([#6344][])
-### Fixed
+- **Fixed**
- Don't allow idx flag to be set to 0 on dgraph zero. ([#6193][])
- Stop forcing RAM mode for the write-ahead log. ([#6261][])
- Return error on closed DB. ([#6319][])
- Don't store start_ts in postings. ([#6212][])
-- Optimize splits by doing binary search. Clear the pack from the main list. ([#6334][])
+- Optimize splits by doing binary search. Clear the pack from the main list. ([#6334][])
- Add a lock to backups to process one request at a time. ([#6341][])
- Use z.Closer instead of y.Closer. ([#6396][])
- Force exit if CTRL-C is caught before initialization. ([#6408][])
@@ -1820,13 +2101,13 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#6288]: https://github.com/hypermodeinc/dgraph/issues/6288
[#6357]: https://github.com/hypermodeinc/dgraph/issues/6357
[#6344]: https://github.com/hypermodeinc/dgraph/issues/6344
-[#5987]: https://github.com/hypermodeinc/dgraph/issues/5987
[#6193]: https://github.com/hypermodeinc/dgraph/issues/6193
## [20.07.0] - 2020-07-28
+
[20.07.0]: https://github.com/hypermodeinc/dgraph/compare/v20.03.4...v20.07.0
-### Changed
+- **Changed**
- GraphQL
- Make updateGQLSchema always return the new schema. ([#5540][])
@@ -1861,7 +2142,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- **Breaking changes**
- [BREAKING] GraphQL: Add camelCase for add/update mutation. Fixes [#5380][]. ([#5547][])
-### Added
+**Added**
- GraphQL
- Add Graphql-TouchedUids header in HTTP response. ([#5572][])
@@ -1885,7 +2166,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Add support for xidmap in bulkloader. Fixes [#4917][]. ([#5090][])
- Add GraphQL admin endpoint to list backups. ([#5307][])
- Enterprise features
- - GraphQL schema get/update, Dgraph schema query/alter and /login are now admin operations. ([#5833][])
+ - GraphQL schema get/update, Dgraph schema query/alter and /login are now admin operations.
+ ([#5833][])
- Backup can take S3 credentials from IAM. ([#5387][])
- Online restore. ([#5095][])
- Retry restore proposals. ([#5765][])
@@ -1893,7 +2175,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- **Breaking changes**
- [BREAKING] Vault Integration. ([#5402][])
-### Fixed
+**Fixed**
- GraphQL
- Validate JWT Claims and test JWT expiry. ([#6050][])
@@ -1901,7 +2183,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Nested auth queries no longer search through all possible records. ([#5950][])
- Apply auth rules on type having @dgraph directive. ([#5863][])
- Custom Claim will be parsed as JSON if it is encoded as a string. ([#5862][])
- - Dgraph directive with reverse edge should work smoothly with interfaces. Fixed [#5744][]. ([#5982][])
+ - Dgraph directive with reverse edge should work smoothly with interfaces. Fixed [#5744][].
+ ([#5982][])
- Fix case where Dgraph type was not generated for GraphQL interface. Fixes [#5311][]. ([#5828][])
- Fix panic error when there is no @withSubscription directive on any type. ([#5921][])
- Fix OOM issue in graphql mutation rewriting. ([#5854][])
@@ -1909,11 +2192,13 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Maintain Master's backward compatibility for `Dgraph.Authorization` in schema. ([#6014][])
- Remote schema introspection for single remote endpoint. ([#5824][])
  - Requesting only `__typename` now returns results. ([#5823][])
- - Typename for types should be filled in query for schema introspection queries. Fixes [#5792][]. ([#5891][])
+ - Typename for types should be filled in query for schema introspection queries. Fixes [#5792][].
+ ([#5891][])
- Update GraphQL schema only on Group-1 leader. ([#5829][])
- Add more validations for coercion of object/scalar and vice versa. ([#5534][])
- Apply type filter for get query at root level. ([#5497][])
- - Fix mutation on predicate with special characters having dgraph directive. Fixes [#5296][]. ([#5526][])
+ - Fix mutation on predicate with special characters having dgraph directive. Fixes [#5296][].
+ ([#5526][])
- Return better error message if a type only contains ID field. ([#5531][])
- Coerce value for scalar types correctly. ([#5487][])
- Minor delete mutation msg fix. ([#5316][])
@@ -1940,7 +2225,7 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Replace TxnWriter with WriteBatch. ([#5007][])
- Add a check to throw an error if a nil pointer is passed to unmarshalOrCopy. ([#5334][])
- Remove noisy logs in tablet move. ([#5333][])
-- Support bulk loader use-case to import unencrypted export and encrypt the result. ([#5209][])
+- Support bulk loader use-case to import unencrypted export and encrypt the result. ([#5209][])
- Handle Dgraph shutdown gracefully. Fixes [#3873][]. ([#5137][], [#5138][])
- If we don't have any schema updates, avoid running the indexing sequence. ([#5126][])
- Pass read timestamp to getNew. ([#5085][])
@@ -1950,15 +2235,17 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Fix alpha start in ludicrous mode. Fixes [#5601][]. ([#5912][])
- Handle schema updates correctly in ludicrous mode. ([#5970][])
- Fix Panic because of nil map in groups.go. ([#6008][])
-- update reverse index when updating single UID predicates. Fixes [#5732][]. ([#6005][]), ([#6015][])
+- Update reverse index when updating single UID predicates. Fixes [#5732][]. ([#6005][]),
+ ([#6015][])
- Fix expand(\_all\_) queries in ACL. Fixes [#5687][]. ([#5993][])
- Fix val queries when ACL is enabled. Fixes [#5687][]. ([#5995][])
- Return error if server is not ready. ([#6020][])
- Reduce memory consumption of the map. ([#5957][])
- Cancel the context when opening connection to leader for streaming snapshot. ([#6045][])
- **Breaking changes**
- - [BREAKING] Namespace dgraph internal types/predicates with `dgraph.` Fixes [#4878][]. ([#5185][])
- - [BREAKING] Remove shorthand for store_xids in bulk loader. ([#5148][])
+ - [BREAKING] Namespace dgraph internal types/predicates with `dgraph.` Fixes [#4878][].
+ ([#5185][])
+ - [BREAKING] Remove shorthand for store_xids in bulk loader. ([#5148][])
- [BREAKING] Introduce new facets format. Fixes [#4798][], [#4581][], [#4907][]. ([#5424][])
- Enterprise:
- Backup: Change groupId from int to uint32. ([#5605][])
@@ -1994,7 +2281,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5162]: https://github.com/hypermodeinc/dgraph/issues/5162
[#5457]: https://github.com/hypermodeinc/dgraph/issues/5457
[#5004]: https://github.com/hypermodeinc/dgraph/issues/5004
-[#5134]: https://github.com/hypermodeinc/dgraph/issues/5134
[#5157]: https://github.com/hypermodeinc/dgraph/issues/5157
[#5197]: https://github.com/hypermodeinc/dgraph/issues/5197
[#5387]: https://github.com/hypermodeinc/dgraph/issues/5387
@@ -2015,7 +2301,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5292]: https://github.com/hypermodeinc/dgraph/issues/5292
[#5234]: https://github.com/hypermodeinc/dgraph/issues/5234
[#5293]: https://github.com/hypermodeinc/dgraph/issues/5293
-[#5234]: https://github.com/hypermodeinc/dgraph/issues/5234
[#5308]: https://github.com/hypermodeinc/dgraph/issues/5308
[#4963]: https://github.com/hypermodeinc/dgraph/issues/4963
[#5380]: https://github.com/hypermodeinc/dgraph/issues/5380
@@ -2056,7 +2341,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5209]: https://github.com/hypermodeinc/dgraph/issues/5209
[#3873]: https://github.com/hypermodeinc/dgraph/issues/3873
[#5138]: https://github.com/hypermodeinc/dgraph/issues/5138
-[#3873]: https://github.com/hypermodeinc/dgraph/issues/3873
[#5137]: https://github.com/hypermodeinc/dgraph/issues/5137
[#5126]: https://github.com/hypermodeinc/dgraph/issues/5126
[#5085]: https://github.com/hypermodeinc/dgraph/issues/5085
@@ -2117,24 +2401,26 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5993]: https://github.com/hypermodeinc/dgraph/issues/5993
[#5687]: https://github.com/hypermodeinc/dgraph/issues/5687
[#5995]: https://github.com/hypermodeinc/dgraph/issues/5995
-[#5687]: https://github.com/hypermodeinc/dgraph/issues/5687
[#6020]: https://github.com/hypermodeinc/dgraph/issues/6020
[#5950]: https://github.com/hypermodeinc/dgraph/issues/5950
-[#5809]: https://github.com/hypermodeinc/dgraph/issues/5809
[#6034]: https://github.com/hypermodeinc/dgraph/issues/6034
[#6043]: https://github.com/hypermodeinc/dgraph/issues/6043
## [20.03.4] - 2020-07-23
+
[20.03.4]: https://github.com/hypermodeinc/dgraph/compare/v20.03.3...v20.03.4
-### Changed
+**Changed**
+
- Update Badger 07/13/2020. ([#5941][], [#5616][])
-### Added
+**Added**
+
- Sentry opt out banner. ([#5729][])
- Tag sentry events with additional version details. ([#5728][])
-### Fixed
+**Fixed**
+
- GraphQL
- Minor delete mutation msg fix. ([#5564][])
- Make updateGQLSchema always return the new schema. ([#5582][])
@@ -2153,7 +2439,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Send CID for sentry events. ([#5633][])
- Use rampMeter for Executor. ([#5503][])
- Fix snapshot calculation in ludicrous mode. ([#5636][])
-- Update badger: Avoid panic in fillTables(). Fix assert in background compression and encryption. ([#5680][])
+- Update badger: Avoid panic in fillTables(). Fix assert in background compression and encryption.
+ ([#5680][])
- Avoid panic in handleValuePostings. ([#5678][])
- Fix facets response with normalize. Fixes [#5241][]. ([#5691][])
- Badger iterator key copy in count index query. ([#5916][])
@@ -2175,7 +2462,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5907]: https://github.com/hypermodeinc/dgraph/issues/5907
[#5860]: https://github.com/hypermodeinc/dgraph/issues/5860
[#5971]: https://github.com/hypermodeinc/dgraph/issues/5971
-[#5311]: https://github.com/hypermodeinc/dgraph/issues/5311
[#5844]: https://github.com/hypermodeinc/dgraph/issues/5844
[#5857]: https://github.com/hypermodeinc/dgraph/issues/5857
[#5941]: https://github.com/hypermodeinc/dgraph/issues/5941
@@ -2189,7 +2475,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5557]: https://github.com/hypermodeinc/dgraph/issues/5557
[#5555]: https://github.com/hypermodeinc/dgraph/issues/5555
[#5584]: https://github.com/hypermodeinc/dgraph/issues/5584
-[#3873]: https://github.com/hypermodeinc/dgraph/issues/3873
[#5597]: https://github.com/hypermodeinc/dgraph/issues/5597
[#5548]: https://github.com/hypermodeinc/dgraph/issues/5548
[#5594]: https://github.com/hypermodeinc/dgraph/issues/5594
@@ -2197,20 +2482,19 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#4536]: https://github.com/hypermodeinc/dgraph/issues/4536
[#5604]: https://github.com/hypermodeinc/dgraph/issues/5604
[#5633]: https://github.com/hypermodeinc/dgraph/issues/5633
-[#5503]: https://github.com/hypermodeinc/dgraph/issues/5503
[#5636]: https://github.com/hypermodeinc/dgraph/issues/5636
[#5680]: https://github.com/hypermodeinc/dgraph/issues/5680
[#5614]: https://github.com/hypermodeinc/dgraph/issues/5614
-[#5579]: https://github.com/hypermodeinc/dgraph/issues/5579
[#5678]: https://github.com/hypermodeinc/dgraph/issues/5678
[#5969]: https://github.com/hypermodeinc/dgraph/issues/5969
[#6007]: https://github.com/hypermodeinc/dgraph/issues/6007
[#6021]: https://github.com/hypermodeinc/dgraph/issues/6021
## [1.2.6] - 2020-07-31
+
[1.2.6]: https://github.com/hypermodeinc/dgraph/compare/v1.2.5...v1.2.6
-### Changed
+**Changed**
- Update Badger. ([#5940][], [#5990][])
- Fix assert in background compression and encryption. (dgraph-io/badger#1366)
@@ -2228,14 +2512,16 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Add a contribution guide (dgraph-io/badger#1379)
- Add assert to check integer overflow for table size (dgraph-io/badger#1402)
  - Return error if the vlog writes exceed more than 4GB. (dgraph-io/badger#1400)
- - Revert "add assert to check integer overflow for table size (dgraph-io/badger#1402)" (dgraph-io/badger#1406)
+ - Revert "add assert to check integer overflow for table size (dgraph-io/badger#1402)"
+ (dgraph-io/badger#1406)
- Revert "fix: Fix race condition in block.incRef (dgraph-io/badger#1337)" (dgraph-io/badger#1407)
- Revert "Buffer pool for decompression (dgraph-io/badger#1308)" (dgraph-io/badger#1408)
- - Revert "Compress/Encrypt Blocks in the background (dgraph-io/badger#1227)" (dgraph-io/badger#1409)
+ - Revert "Compress/Encrypt Blocks in the background (dgraph-io/badger#1227)"
+ (dgraph-io/badger#1409)
- Add missing changelog for v2.0.3 (dgraph-io/badger#1410)
- Changelog for v20.07.0 (dgraph-io/badger#1411)
-### Fixed
+**Fixed**
- Alpha: Enable bloom filter caching. ([#5554][])
- K shortest paths queries fix. ([#5596][])
@@ -2243,7 +2529,8 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Change error message in case of successful license application. ([#5593][])
- Remove noisy logs in tablet move. ([#5592][])
- Avoid assigning duplicate RAFT IDs to new nodes. Fixes [#5436][]. ([#5603][])
-- Update badger: Set KeepL0InMemory to false (badger default), and Set DetectConflicts to false. ([#5615][])
+- Update badger: Set KeepL0InMemory to false (badger default), and Set DetectConflicts to false.
+ ([#5615][])
- Use /tmp dir to store temporary index. Fixes [#4600][]. ([#5730][])
- Split posting lists recursively. ([#4867][])
- Set version when rollup is called with no splits. ([#4945][])
@@ -2258,7 +2545,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#4600]: https://github.com/hypermodeinc/dgraph/issues/4600
[#4682]: https://github.com/hypermodeinc/dgraph/issues/4682
[#4867]: https://github.com/hypermodeinc/dgraph/issues/4867
-[#5579]: https://github.com/hypermodeinc/dgraph/issues/5579
[#4945]: https://github.com/hypermodeinc/dgraph/issues/4945
[#5908]: https://github.com/hypermodeinc/dgraph/issues/5908
[#5749]: https://github.com/hypermodeinc/dgraph/issues/5749
@@ -2268,7 +2554,6 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5595]: https://github.com/hypermodeinc/dgraph/issues/5595
[#5593]: https://github.com/hypermodeinc/dgraph/issues/5593
[#5592]: https://github.com/hypermodeinc/dgraph/issues/5592
-[#5436]: https://github.com/hypermodeinc/dgraph/issues/5436
[#5603]: https://github.com/hypermodeinc/dgraph/issues/5603
[#5615]: https://github.com/hypermodeinc/dgraph/issues/5615
[#5613]: https://github.com/hypermodeinc/dgraph/issues/5613
@@ -2276,20 +2561,24 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5990]: https://github.com/hypermodeinc/dgraph/issues/5990
## [20.03.3] - 2020-06-02
+
[20.03.3]: https://github.com/hypermodeinc/dgraph/compare/v20.03.1...v20.03.3
-### Changed
+**Changed**
-- Sentry Improvements: Segregate dev and prod events into their own Sentry projects. Remove Panic back-traces, Set the type of exception to the panic message. ([#5305][])
+- Sentry Improvements: Segregate dev and prod events into their own Sentry projects. Remove Panic
+ back-traces, Set the type of exception to the panic message. ([#5305][])
- /health endpoint now shows EE Features available and GraphQL changes. ([#5304][])
-- Return error response if encoded response is > 4GB in size. Replace idMap with idSlice in encoder. ([#5359][])
+- Return error response if encoded response is > 4GB in size. Replace idMap with idSlice in encoder.
+ ([#5359][])
- Initialize sentry at the beginning of alpha.Run(). ([#5429][])
-### Added
+**Added**
+
- Adds ludicrous mode to live loader. ([#5419][])
- GraphQL: adds transactions to graphql mutations ([#5485][])
-### Fixed
+**Fixed**
- Export: Ignore deleted predicates from schema. Fixes [#5053][]. ([#5326][])
- GraphQL: ensure upserts don't have accidental edge removal. Fixes [#5355][]. ([#5356][])
@@ -2313,13 +2602,15 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- GraphQL: Fix non-unique schema issue. ([#5481][])
- Enterprise features
- Print error when applying enterprise license fails. ([#5342][])
- - Apply the option enterprise_license only after the node's Raft is initialized and it is the leader. Don't apply the trial license if a license already exists. Disallow the enterprise_license option for OSS build and bail out. Apply the option even if there is a license from a previous life of the Zero. ([#5384][])
+ - Apply the option enterprise_license only after the node's Raft is initialized and it is the
+ leader. Don't apply the trial license if a license already exists. Disallow the
+ enterprise_license option for OSS build and bail out. Apply the option even if there is a
+ license from a previous life of the Zero. ([#5384][])
### Security
- Use SensitiveByteSlice type for hmac secret. ([#5450][])
-
[#5444]: https://github.com/hypermodeinc/dgraph/issues/5444
[#5305]: https://github.com/hypermodeinc/dgraph/issues/5305
[#5304]: https://github.com/hypermodeinc/dgraph/issues/5304
@@ -2347,23 +2638,24 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
[#5488]: https://github.com/hypermodeinc/dgraph/issues/5488
[#5483]: https://github.com/hypermodeinc/dgraph/issues/5483
[#5481]: https://github.com/hypermodeinc/dgraph/issues/5481
-[#5481]: https://github.com/hypermodeinc/dgraph/issues/5481
+[#5521]: https://github.com/hypermodeinc/dgraph/issues/5521
[#5235]: https://github.com/hypermodeinc/dgraph/issues/5235
[#5419]: https://github.com/hypermodeinc/dgraph/issues/5419
[#5485]: https://github.com/hypermodeinc/dgraph/issues/5485
[#5479]: https://github.com/hypermodeinc/dgraph/issues/5479
[#5361]: https://github.com/hypermodeinc/dgraph/issues/5361
-[#5537]: https://github.com/hypermodeinc/dgraph/issues/5537
## [1.2.5] - 2020-06-02
+
[1.2.5]: https://github.com/hypermodeinc/dgraph/compare/v1.2.3...v1.2.5
-### Changed
+**Changed**
-- Return error response if encoded response is > 4GB in size. Replace idMap with idSlice in encoder. ([#5359][])
+- Return error response if encoded response is > 4GB in size. Replace idMap with idSlice in encoder.
+ ([#5359][])
- Change the default ratio of traces from 1 to 0.01. ([#5405][])
-### Fixed
+**Fixed**
- Export: Ignore deleted predicates from schema. Fixes [#5053][]. ([#5327][])
- Fix segmentation fault in query.go. ([#5377][])
@@ -2379,42 +2671,29 @@ and this project will adhere to [Semantic Versioning](https://semver.org) starti
- Move runVlogGC to x and use it in zero as well. ([#5468][])
- Fix inconsistent bulk loader failures. Fixes [#5361][]. ([#5537][])
-### Security
+**Security**
- Use SensitiveByteSlice type for hmac secret. ([#5451][])
-[#5444]: https://github.com/hypermodeinc/dgraph/issues/5444
-[#5359]: https://github.com/hypermodeinc/dgraph/issues/5359
-[#5405]: https://github.com/hypermodeinc/dgraph/issues/5405
[#5327]: https://github.com/hypermodeinc/dgraph/issues/5327
-[#5377]: https://github.com/hypermodeinc/dgraph/issues/5377
-[#5394]: https://github.com/hypermodeinc/dgraph/issues/5394
[#5396]: https://github.com/hypermodeinc/dgraph/issues/5396
-[#5053]: https://github.com/hypermodeinc/dgraph/issues/5053
-[#5368]: https://github.com/hypermodeinc/dgraph/issues/5368
[#5451]: https://github.com/hypermodeinc/dgraph/issues/5451
-[#5381]: https://github.com/hypermodeinc/dgraph/issues/5381
-[#5327]: https://github.com/hypermodeinc/dgraph/issues/5327
-[#5377]: https://github.com/hypermodeinc/dgraph/issues/5377
[#5508]: https://github.com/hypermodeinc/dgraph/issues/5508
-[#5494]: https://github.com/hypermodeinc/dgraph/issues/5494
-[#5469]: https://github.com/hypermodeinc/dgraph/issues/5469
-[#5476]: https://github.com/hypermodeinc/dgraph/issues/5476
-[#5488]: https://github.com/hypermodeinc/dgraph/issues/5488
[#5468]: https://github.com/hypermodeinc/dgraph/issues/5468
-[#5361]: https://github.com/hypermodeinc/dgraph/issues/5361
-[#5537]: https://github.com/hypermodeinc/dgraph/issues/5537
## [20.03.2] - 2020-05-15
+
This release was removed
## [1.2.4] - 2020-05-15
+
This release was removed
## [20.03.1] - 2020-04-24
+
[20.03.1]: https://github.com/hypermodeinc/dgraph/compare/v20.03.0...v20.03.1
-### Changed
+**Changed**
- Support comma separated list of zero addresses in alpha. ([#5258][])
- Optimization: Optimize snapshot creation ([#4901][])
@@ -2428,7 +2707,7 @@ This release was removed
- Backup/restore: Force users to explicitly tell restore command to run without zero. ([#5206][])
- Alpha: Expose compression_level option. ([#5280][])
-### Fixed
+**Fixed**
- Implement json.Marshal just for strings. ([#4979][])
- Change error message in case of successful license application. Fixes [#4965][]. ([#5230][])
@@ -2447,9 +2726,10 @@ This release was removed
- Shutdown executor goroutines. ([#5150][])
- Update RAFT checkpoint when doing a clean shutdown. ([#5097][])
- Enterprise features
- - Backup schema keys in incremental backups. Before, the schema was only stored in the full backup. ([#5158][])
+ - Backup schema keys in incremental backups. Before, the schema was only stored in the full
+ backup. ([#5158][])
-### Added
+**Added**
- Return list of ongoing tasks in /health endpoint. ([#4961][])
- Propose snapshot once indexing is complete. ([#5005][])
@@ -2461,7 +2741,9 @@ This release was removed
- Support bulk loader use-case to import unencrypted export and encrypt. ([#5213][])
- Create encrypted restore directory from encrypted backups. ([#5144][])
- Add option "--encryption_key_file"/"-k" to debug tool for encryption support. ([#5146][])
- - Support for encrypted backups/restore. **Note**: Older backups without encryption will be incompatible with this Dgraph version. Solution is to force a full backup before creating further incremental backups. ([#5103][])
+ - Support for encrypted backups/restore. **Note**: Older backups without encryption will be
+ incompatible with this Dgraph version. Solution is to force a full backup before creating
+ further incremental backups. ([#5103][])
- Add encryption support for export and import (via bulk, live loaders). ([#5155][])
- Add Badger expvar metrics to Prometheus metrics. Fixes [#4772][]. ([#5094][])
- Add option to apply enterprise license at zero's startup. ([#5170][])
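
For the encrypted-backup note above, a hedged sketch of forcing a full backup through the `/admin`
GraphQL endpoint. The `forceFull` field, the destination URI, and the host are assumptions to
verify against the admin schema of your version, not details taken from these notes.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
)

func main() {
	// Force a full backup so that later incremental backups build on a fresh base.
	const mutation = `mutation {
  backup(input: { destination: "s3://s3.us-east-1.amazonaws.com/my-bucket", forceFull: true }) {
    response { code message }
  }
}`
	body, err := json.Marshal(map[string]string{"query": mutation})
	if err != nil {
		log.Fatal(err)
	}
	resp, err := http.Post("http://localhost:8080/admin", "application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	out, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(out))
}
```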
@@ -2494,16 +2776,16 @@ This release was removed
[#5158]: https://github.com/hypermodeinc/dgraph/issues/5158
[#5213]: https://github.com/hypermodeinc/dgraph/issues/5213
[#5144]: https://github.com/hypermodeinc/dgraph/issues/5144
-[#5146]: https://github.com/hypermodeinc/dgraph/issues/5146
[#5103]: https://github.com/hypermodeinc/dgraph/issues/5103
[#5155]: https://github.com/hypermodeinc/dgraph/issues/5155
[#5238]: https://github.com/hypermodeinc/dgraph/issues/5238
[#5272]: https://github.com/hypermodeinc/dgraph/issues/5272
## [1.2.3] - 2020-04-24
+
[1.2.3]: https://github.com/hypermodeinc/dgraph/compare/v1.2.2...v1.2.3
-### Changed
+**Changed**
- Support comma separated list of zero addresses in alpha. ([#5258][])
- Optimization: Optimize snapshot creation. ([#4901][])
@@ -2516,7 +2798,7 @@ This release was removed
- Enterprise features
- Backup/restore: Force users to explicitly tell restore command to run without zero. ([#5206][])
-### Fixed
+**Fixed**
- Check uid list is empty when filling shortest path vars. ([#5152][])
- Return error for invalid UID 0x0. Fixes [#5238][]. ([#5252][])
@@ -2526,41 +2808,30 @@ This release was removed
- Fix bug, aggregate value var works with blank node in upsert. Fixes [#4712][]. ([#4767][])
- Always set BlockSize in encoder. Fixes [#5102][]. ([#5255][])
- Enterprise features
- - Backup schema keys in incremental backups. Before, the schema was only stored in the full backup. ([#5158][])
+ - Backup schema keys in incremental backups. Before, the schema was only stored in the full
+ backup. ([#5158][])
-### Added
+**Added**
- Add Badger expvar metrics to Prometheus metrics. Fixes [#4772][]. ([#5094][])
- Enterprise features
- - Support bulk loader use-case to import unencrypted export and encrypt. ([#5213][])
+ - Support bulk loader use-case to import unencrypted export and encrypt. ([#5213][])
- Create encrypted restore directory from encrypted backups. ([#5144][])
- Add option "--encryption_key_file"/"-k" to debug tool for encryption support. ([#5146][])
- - Support for encrypted backups/restore. **Note**: Older backups without encryption will be incompatible with this Dgraph version. Solution is to force a full backup before creating further incremental backups. ([#5103][])
+ - Support for encrypted backups/restore. **Note**: Older backups without encryption will be
+ incompatible with this Dgraph version. Solution is to force a full backup before creating
+ further incremental backups. ([#5103][])
- Add encryption support for export and import (via bulk, live loaders). ([#5155][])
-[#5146]: https://github.com/hypermodeinc/dgraph/issues/5146
-[#5206]: https://github.com/hypermodeinc/dgraph/issues/5206
-[#5152]: https://github.com/hypermodeinc/dgraph/issues/5152
-[#5252]: https://github.com/hypermodeinc/dgraph/issues/5252
-[#5199]: https://github.com/hypermodeinc/dgraph/issues/5199
[#5163]: https://github.com/hypermodeinc/dgraph/issues/5163
-[#5158]: https://github.com/hypermodeinc/dgraph/issues/5158
-[#5213]: https://github.com/hypermodeinc/dgraph/issues/5213
-[#5144]: https://github.com/hypermodeinc/dgraph/issues/5144
-[#5146]: https://github.com/hypermodeinc/dgraph/issues/5146
-[#5103]: https://github.com/hypermodeinc/dgraph/issues/5103
-[#5155]: https://github.com/hypermodeinc/dgraph/issues/5155
-[#5238]: https://github.com/hypermodeinc/dgraph/issues/5238
[#5012]: https://github.com/hypermodeinc/dgraph/issues/5012
[#4674]: https://github.com/hypermodeinc/dgraph/issues/4674
[#4842]: https://github.com/hypermodeinc/dgraph/issues/4842
-[#5116]: https://github.com/hypermodeinc/dgraph/issues/5116
[#5258]: https://github.com/hypermodeinc/dgraph/issues/5258
[#4901]: https://github.com/hypermodeinc/dgraph/issues/4901
[#5184]: https://github.com/hypermodeinc/dgraph/issues/5184
[#5088]: https://github.com/hypermodeinc/dgraph/issues/5088
[#5273]: https://github.com/hypermodeinc/dgraph/issues/5273
-[#5216]: https://github.com/hypermodeinc/dgraph/issues/5216
[#5268]: https://github.com/hypermodeinc/dgraph/issues/5268
[#5102]: https://github.com/hypermodeinc/dgraph/issues/5102
[#5255]: https://github.com/hypermodeinc/dgraph/issues/5255
@@ -2568,10 +2839,13 @@ This release was removed
[#5094]: https://github.com/hypermodeinc/dgraph/issues/5094
## [20.03.0] - 2020-03-30
+
[20.03.0]: https://github.com/hypermodeinc/dgraph/compare/v1.2.2...v20.03.0
-** Note: This release requires you to export and re-import data prior to upgrading or rolling back. The underlying data format has been changed. **
-### Changed
+**Note**: This release requires you to export and re-import data prior to upgrading or rolling
+back. The underlying data format has been changed.
+
+**Changed**
- Report GraphQL stats from alpha. ([#4607][])
- During backup, collapse split posting lists into a single list. ([#4682][])
@@ -2587,11 +2861,13 @@ This release was removed
- Add support for tinyint in migrate tool. Fixes [#4674][]. ([#4842][])
- Enterprise features
- **Breaking changes**
- - [BREAKING] Underlying schema for ACL has changed. Use the upgrade tool to migrate to the new data format. ([#4725][])
+ - [BREAKING] Underlying schema for ACL has changed. Use the upgrade tool to migrate to the new
+ data format. ([#4725][])
-### Added
+**Added**
-- Add GraphQL API for Dgraph accessible via the `/graphql` and `/admin` HTTP endpoints on Dgraph Alpha. ([#933][])
+- Add GraphQL API for Dgraph accessible via the `/graphql` and `/admin` HTTP endpoints on Dgraph
+  Alpha (see the sketch after this list). ([#933][])
- Add support for sorting on multiple facets. Fixes [#3638][]. ([#4579][])
- Expose Badger Compression Level option in Bulk Loader. ([#4669][])
- GraphQL Admin API: Support Backup operation. ([#4706][])
@@ -2599,16 +2875,18 @@ This release was removed
- GraphQL Admin API: duplicate `/health` in GraphQL `/admin` ([#4768][])
- GraphQL Admin API: Add `/admin/schema` endpoint ([#4777][])
- Perform indexing in background. ([#4819][])
-- Basic Sentry Integration - Capture manual panics with Sentry exception and runtime panics with a wrapper on panic. ([#4756][])
+- Basic Sentry Integration - Capture manual panics with Sentry exception and runtime panics with a
+ wrapper on panic. ([#4756][])
- Ludicrous Mode. ([#4872][])
- Enterprise features
- ACL: Allow users to query data for their groups, username, and permissions. ([#4774][])
- ACL: Support ACL operations using the admin GraphQL API. ([#4760][])
- ACL: Add tool to upgrade ACLs. ([#5016][])
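
As a rough illustration of the new GraphQL API entry above, a client POSTs a standard GraphQL
request to Alpha's `/graphql` endpoint. The `Task` type (and hence the generated `queryTask` field)
and the host are assumptions for the sketch; any type previously pushed to `/admin/schema` would do.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"strings"
)

func main() {
	// A standard GraphQL-over-HTTP request; queryTask is generated from an
	// assumed "Task" type in the GraphQL schema.
	payload := strings.NewReader(`{"query": "{ queryTask { id title } }"}`)
	resp, err := http.Post("http://localhost:8080/graphql", "application/json", payload)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	out, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(out)) // GraphQL-shaped {"data": ...} response
}
```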
-### Fixed
+**Fixed**
-- Avoid running GC frequently. Only run for every 2GB of increase. Small optimizations in Bulk.reduce.
+- Avoid running GC frequently. Only run for every 2GB of increase. Small optimizations in
+ Bulk.reduce.
- Check response status when posting telemetry data. ([#4726][])
- Add support for $ in quoted string. Fixes [#4695][]. ([#4702][])
- Do not include empty nodes in the export output. Fixes [#3610][]. ([#4773][])
@@ -2620,13 +2898,14 @@ This release was removed
- Change split keys to have a different prefix. Fixes [#4905][]. ([#4908][])
- Various optimizations for facets filter queries. ([#4923][])
- Throw errors returned by retrieveValuesAndFacets. Fixes [#4958][]. ([#4970][])
-- Add "runInBackground" option to Alter to run indexing in background. When set to `true`, then the Alter call returns immediately. When set to `false`, the call blocks until indexing is complete. This is set to `false` by default. ([#4981][])
+- Add "runInBackground" option to Alter to run indexing in background. When set to `true`, then the
+ Alter call returns immediately. When set to `false`, the call blocks until indexing is complete.
+ This is set to `false` by default. ([#4981][])
- Set correct posting list type while creating it in the live loader. Fixes [#4889][]. ([#5012][])
- **Breaking changes**
- [BREAKING] Language sorting on Indexed data. Fixes [#4005][]. ([#4316][])
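
The `runInBackground` entry above surfaces client-side as the `RunInBackground` field on the Alter
operation. A minimal sketch with the dgo client, assuming a local Alpha and an illustrative schema
(the field name is from dgo's v200-era API; verify against your client version):

```go
package main

import (
	"context"
	"log"

	"github.com/dgraph-io/dgo/v200"
	"github.com/dgraph-io/dgo/v200/protos/api"
	"google.golang.org/grpc"
)

func main() {
	conn, err := grpc.Dial("localhost:9080", grpc.WithInsecure())
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
	dg := dgo.NewDgraphClient(api.NewDgraphClient(conn))

	// With RunInBackground set, Alter returns as soon as the schema change is
	// accepted; indexing continues in the background.
	err = dg.Alter(context.Background(), &api.Operation{
		Schema:          `name: string @index(term) .`,
		RunInBackground: true,
	})
	if err != nil {
		log.Fatal(err)
	}
}
```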
[#5016]: https://github.com/hypermodeinc/dgraph/issues/5016
-[#5012]: https://github.com/hypermodeinc/dgraph/issues/5012
[#4889]: https://github.com/hypermodeinc/dgraph/issues/4889
[#4958]: https://github.com/hypermodeinc/dgraph/issues/4958
[#4905]: https://github.com/hypermodeinc/dgraph/issues/4905
@@ -2644,14 +2923,11 @@ This release was removed
[#4935]: https://github.com/hypermodeinc/dgraph/issues/4935
[#4972]: https://github.com/hypermodeinc/dgraph/issues/4972
[#4916]: https://github.com/hypermodeinc/dgraph/issues/4916
-[#4945]: https://github.com/hypermodeinc/dgraph/issues/4945
[#4875]: https://github.com/hypermodeinc/dgraph/issues/4875
-[#4867]: https://github.com/hypermodeinc/dgraph/issues/4867
[#4872]: https://github.com/hypermodeinc/dgraph/issues/4872
[#4756]: https://github.com/hypermodeinc/dgraph/issues/4756
[#4819]: https://github.com/hypermodeinc/dgraph/issues/4819
[#4755]: https://github.com/hypermodeinc/dgraph/issues/4755
-[#4600]: https://github.com/hypermodeinc/dgraph/issues/4600
[#4766]: https://github.com/hypermodeinc/dgraph/issues/4766
[#4468]: https://github.com/hypermodeinc/dgraph/issues/4468
[#4793]: https://github.com/hypermodeinc/dgraph/issues/4793
@@ -2664,7 +2940,6 @@ This release was removed
[#933]: https://github.com/hypermodeinc/dgraph/issues/933
[#3638]: https://github.com/hypermodeinc/dgraph/issues/3638
[#4579]: https://github.com/hypermodeinc/dgraph/issues/4579
-[#4682]: https://github.com/hypermodeinc/dgraph/issues/4682
[#4725]: https://github.com/hypermodeinc/dgraph/issues/4725
[#4669]: https://github.com/hypermodeinc/dgraph/issues/4669
[#4774]: https://github.com/hypermodeinc/dgraph/issues/4774
@@ -2677,23 +2952,26 @@ This release was removed
[#4316]: https://github.com/hypermodeinc/dgraph/issues/4316
## [1.2.2] - 2020-03-19
+
[1.2.2]: https://github.com/hypermodeinc/dgraph/compare/v1.2.1...v1.2.2
-### Changed
+**Changed**
- Wrap errors thrown in posting/list.go for easier debugging. ([#4880][])
- Print keys using hex encoding in error messages in list.go. ([#4891][])
-### Fixed
+**Fixed**
- Do not include empty nodes in the export output. ([#4896][])
- Fix error when lexing language list. ([#4784][])
- Properly initialize posting package in debug tool. ([#4893][])
- Handle special characters in schema and type queries. Fixes [#4933][]. ([#4937][])
-- Overwrite values for uid predicates. Fixes [#4879][]. ([#4883][])
-- Disable @* language queries when the predicate does not support langs. ([#4881][])
+- Overwrite values for uid predicates. Fixes [#4879][]. ([#4883][])
+- Disable @\* language queries when the predicate does not support langs. ([#4881][])
- Fix bug in exporting types with reverse predicates. Fixes [#4856][]. ([#4857][])
-- Do not skip over split keys. (Trying to skip over the split keys sometimes skips over keys belonging to a different split key. This is a fix just for this release as the actual fix requires changes to the data format.) ([#4951][])
+- Do not skip over split keys. (Trying to skip over the split keys sometimes skips over keys
+ belonging to a different split key. This is a fix just for this release as the actual fix requires
+ changes to the data format.) ([#4951][])
- Fix point-in-time Prometheus metrics. Fixes [#4532][]. ([#4948][])
- Split lists in the bulk loader. ([#4967][])
- Allow remote MySQL server with dgraph migrate tool. Fixes [#4707][]. ([#4860][])
@@ -2703,12 +2981,10 @@ This release was removed
- Backups: Verify host when default and custom credentials are used. Fixes [#4855][]. ([#4858][])
- Backups: Split lists when restoring from backup. ([#4912][])
-
[#4967]: https://github.com/hypermodeinc/dgraph/issues/4967
[#4951]: https://github.com/hypermodeinc/dgraph/issues/4951
[#4532]: https://github.com/hypermodeinc/dgraph/issues/4532
[#4948]: https://github.com/hypermodeinc/dgraph/issues/4948
-[#4893]: https://github.com/hypermodeinc/dgraph/issues/4893
[#4784]: https://github.com/hypermodeinc/dgraph/issues/4784
[#4896]: https://github.com/hypermodeinc/dgraph/issues/4896
[#4856]: https://github.com/hypermodeinc/dgraph/issues/4856
@@ -2729,21 +3005,23 @@ This release was removed
[#4707]: https://github.com/hypermodeinc/dgraph/issues/4707
[#4860]: https://github.com/hypermodeinc/dgraph/issues/4860
-
## [1.2.1] - 2020-02-06
+
[1.2.1]: https://github.com/hypermodeinc/dgraph/compare/v1.2.0...v1.2.1
-### Fixed
+**Fixed**
-- Fix bug related to posting list split, and re-enable posting list splits. Fixes [#4733][]. ([#4742][])
+- Fix bug related to posting list split, and re-enable posting list splits. Fixes [#4733][].
+ ([#4742][])
[#4733]: https://github.com/hypermodeinc/dgraph/issues/4733
[#4742]: https://github.com/hypermodeinc/dgraph/issues/4742
## [1.2.0] - 2020-01-27
+
[1.2.0]: https://github.com/hypermodeinc/dgraph/compare/v1.1.1...v1.2.0
-### Changed
+**Changed**
- Allow overwriting values of predicates of type uid. Fixes [#4136][]. ([#4411][])
- Algorithms to handle UidPack. ([#4321][])
@@ -2757,29 +3035,34 @@ This release was removed
- Change default behavior to block operations with ACLs enabled. ([#4390][])
- Remove unauthorized predicates from query instead of rejecting the query entirely. ([#4479][])
-### Added
+**Added**
- Add `debuginfo` subcommand to dgraph. ([#4464][])
- Support filtering on non-indexed predicate. Fixes [#4305][]. ([#4531][])
- Add support for variables in recurse. Fixes [#3301][]. ([#4385][]).
-- Adds `@noconflict` schema directive to prevent conflict detection. This is an experimental feature. This is not a recommended directive, but exists to help avoid conflicts for predicates which don't have high correctness requirements. Fixes [#4079][]. ([#4454][])
+- Adds `@noconflict` schema directive to prevent conflict detection. This is an experimental
+  feature and not a recommended directive; it exists to help avoid conflicts for predicates which
+  don't have high correctness requirements (see the schema sketch after this list). Fixes
+  [#4079][]. ([#4454][])
- Implement the state HTTP endpoint on Alpha. Login is required if ACL is enabled. ([#4435][]).
- Implement `/health?all` endpoint on Alpha nodes. ([#4535][])
- Add `/health` endpoint to Zero. ([#4405][])
- **Breaking changes**
- - Support for fetching facets from value edge list. The query response format is backwards-incompatible. Fixes [#4081][]. ([#4267][])
+ - Support for fetching facets from value edge list. The query response format is
+ backwards-incompatible. Fixes [#4081][]. ([#4267][])
- Enterprise features
- Add guardians group with full authorization. ([#4447][])
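
A hedged schema sketch for the experimental `@noconflict` directive above; the predicates are
illustrative assumptions, chosen to suggest the intended use (high-churn predicates where skipped
conflict checks are acceptable):

```go
package main

// Dgraph schema text embedded as a Go constant for illustration only.
const schema = `
	views: int @noconflict .     # high-churn counter; conflict detection is skipped
	name:  string @index(term) .
`

func main() { println(schema) }
```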
- ### Fixed
+**Fixed**
-- Infer type of schema from JSON and RDF mutations. Fixes [#3788][]. ([#4328][])
-- Fix retrieval of facets with cascade. Fixes [#4310][]. ([#4530][])
-- Do not use type keys during tablet size calculation. Fixes [#4473][]. ([#4517][])
-- Fix Levenshtein distance calculation with match function. Fixes [#4494][]. ([#4545][])
+- Infer type of schema from JSON and RDF mutations. Fixes [#3788][]. ([#4328][])
+- Fix retrieval of facets with cascade. Fixes [#4310][]. ([#4530][])
+- Do not use type keys during tablet size calculation. Fixes [#4473][]. ([#4517][])
+- Fix Levenshtein distance calculation with match function. Fixes [#4494][]. ([#4545][])
- Add `` RDF type for int schema type. Fixes [#4460][]. ([#4465][])
- Allow `@filter` directive with expand queries. Fixes [#3904][]. ([#4404][]).
-- A multi-part posting list should only be accessed via the main key. Accessing the posting list via one of the other keys was causing issues during rollup and adding spurious keys to the database. Now fixed. ([#4574][])
+- A multi-part posting list should only be accessed via the main key. Accessing the posting list via
+ one of the other keys was causing issues during rollup and adding spurious keys to the database.
+ Now fixed. ([#4574][])
- Enterprise features
- Backup types. Fixes [#4507][]. ([#4514][])
@@ -2808,7 +3091,6 @@ This release was removed
[#4390]: https://github.com/hypermodeinc/dgraph/issues/4390
[#4479]: https://github.com/hypermodeinc/dgraph/issues/4479
[#4136]: https://github.com/hypermodeinc/dgraph/issues/4136
-[#4411]: https://github.com/hypermodeinc/dgraph/issues/4411
[#4464]: https://github.com/hypermodeinc/dgraph/issues/4464
[#4531]: https://github.com/hypermodeinc/dgraph/issues/4531
[#4305]: https://github.com/hypermodeinc/dgraph/issues/4305
@@ -2817,22 +3099,23 @@ This release was removed
[#4405]: https://github.com/hypermodeinc/dgraph/issues/4405
[#4267]: https://github.com/hypermodeinc/dgraph/issues/4267
[#4081]: https://github.com/hypermodeinc/dgraph/issues/4081
-[#4447]: https://github.com/hypermodeinc/dgraph/issues/4447
[#4535]: https://github.com/hypermodeinc/dgraph/issues/4535
[#4385]: https://github.com/hypermodeinc/dgraph/issues/4385
[#3301]: https://github.com/hypermodeinc/dgraph/issues/3301
[#4435]: https://github.com/hypermodeinc/dgraph/issues/4435
## [1.1.1] - 2019-12-16
+
[1.1.1]: https://github.com/hypermodeinc/dgraph/compare/v1.1.0...v1.1.1
-### Changed
+**Changed**
- **Breaking changes for expand() queries**
- Remove `expand(_forward_)` and `expand(_reverse_)`. ([#4119][])
- Change `expand(_all_)` functionality to only include the predicates in the type. ([#4171][])
- Add support for Go Modules. ([#4146][])
-- Simplify type definitions: type definitions no longer require the type (string, int, etc.) per field name. ([#4017][])
+- Simplify type definitions: type definitions no longer require the type (string, int, etc.) per
+ field name. ([#4017][])
- Adding log lines to help troubleshoot snapshot and rollup. ([#3889][])
- Add `--http` flag to configure pprof endpoint for live loader. ([#3846][])
- Use snappy compression for internal gRPC communication. ([#3368][])
@@ -2852,7 +3135,7 @@ Enterprise features:
- ACL: Disallow schema queries when an user has not logged in. ([#4107][])
- Block delete if predicate permission is zero. Fixes [#4265][]. ([#4349][])
-### Added
+**Added**
- Support `@cascade` directive at subqueries. ([#4006][])
- Support `@normalize` directive for subqueries. ([#4042][])
@@ -2869,28 +3152,30 @@ Enterprise features:
- Add encryption-at-rest. ([#4351][])
-### Removed
+**Removed**
-- **Breaking change**: Remove `@type` directive from query language. To filter
- an edge by a type, use `@filter(type(TypeName))` instead of `@type(TypeName)`.
- ([#4016][])
+- **Breaking change**: Remove `@type` directive from query language. To filter an edge by a type,
+ use `@filter(type(TypeName))` instead of `@type(TypeName)`. ([#4016][])
Enterprise features:
- Remove regexp ACL rules. ([#4360][])
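
A before/after sketch for the removed `@type` directive; the `Person` type and `name` predicate are
illustrative assumptions:

```go
package main

// The first form no longer parses after this release; the second is the
// equivalent filter form.
const (
	oldQuery = `{ q(func: has(name)) @type(Person) { name } }`
	newQuery = `{ q(func: has(name)) @filter(type(Person)) { name } }`
)

func main() { println(oldQuery, newQuery) }
```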
-### Fixed
+**Fixed**
- Avoid changing order if multiple versions of the same edge are found.
-- Consider reverse count index keys for conflict detection in transactions. Fixes [#3893][]. ([#3932][])
+- Consider reverse count index keys for conflict detection in transactions. Fixes [#3893][].
+ ([#3932][])
- Clear the unused variable tlsCfg. ([#3937][])
- Do not require the last type declaration to have a new line. ([#3926][])
- Verify type definitions do not have duplicate fields. Fixes [#3924][]. ([#3925][])
- Fix bug in bulk loader when store_xids is true. Fixes [#3922][]. ([#3950][])
- Call cancel function only if err is not nil. Fixes [#3966][]. ([#3990][])
-- Change the mapper output directory from $TMP/shards to $TMP/map_output. Fixes [#3959][]. ([#3960][])
+- Change the mapper output directory from $TMP/shards to $TMP/map_output. Fixes [#3959][].
+ ([#3960][])
- Return error if keywords used as alias in groupby. ([#3725][])
-- Fix bug where language strings are not filtered when using custom tokenizer. Fixes [#3991][]. ([#3992][])
+- Fix bug where language strings are not filtered when using custom tokenizer. Fixes [#3991][].
+ ([#3992][])
- Support named queries without query variables. Fixes [#3994][]. ([#4028][])
- Correctly set up client connection in x package. ([#4036][])
- Fix data race in regular expression processing. Fixes [#4030][]. ([#4065][])
@@ -2899,7 +3184,8 @@ Enterprise features:
- Ensure that clients can send OpenCensus spans over to the server. ([#4144][])
- Change lexer to allow unicode escape sequences. Fixes [#4157][]. ([#4175][])
- Handle the count(uid) subgraph correctly. Fixes [#4038][]. ([#4122][])
-- Don't traverse immutable layer while calling iterate if deleteBelowTs > 0. Fixes [#4182][]. ([#4204][])
+- Don't traverse immutable layer while calling iterate if deleteBelowTs > 0. Fixes [#4182][].
+ ([#4204][])
- Bulk loader allocates reserved predicates in first reduce shard. Fixes [#3968][]. ([#4202][])
- Only allow one alias per predicate. ([#4236][])
- Change member removal logic to remove members only once. ([#4254][])
@@ -2915,13 +3201,15 @@ Enterprise features:
- Limit UIDs per variable in upsert. Fixes [#4021][]. ([#4268][])
- Return error instead of panic when geo data is corrupted. Fixes [#3740][]. ([#4318][])
- Use txn writer to write schema postings. ([#4296][])
-- Fix connection log message in dgraph alpha from "CONNECTED" to "CONNECTING" when establishing a connection to a peer. Fixes [#4298][]. ([#4303][])
+- Fix connection log message in dgraph alpha from "CONNECTED" to "CONNECTING" when establishing a
+ connection to a peer. Fixes [#4298][]. ([#4303][])
- Fix segmentation fault in backup. ([#4314][])
- Close store after stopping worker. ([#4356][])
- Don't pre allocate mutation map. ([#4343][])
- Cmd: fix config file from env variable issue in subcommands. Fixes [#4311][]. ([#4344][])
- Fix segmentation fault in Alpha. Fixes [#4288][]. ([#4394][])
-- Fix handling of depth parameter for shortest path query for numpaths=1 case. Fixes [#4169][]. ([#4347][])
+- Fix handling of depth parameter for shortest path query for numpaths=1 case. Fixes [#4169][].
+ ([#4347][])
- Do not return dgo.ErrAborted when client calls txn.Discard(). ([#4389][])
- Fix `has` pagination when predicate is queried with `@lang`. Fixes [#4282][]. ([#4331][])
- Make uid function work with value variables in upsert blocks. Fixes [#4424][]. ([#4425][])
@@ -3019,12 +3307,10 @@ Enterprise features:
[#4351]: https://github.com/hypermodeinc/dgraph/issues/4351
[#3268]: https://github.com/hypermodeinc/dgraph/issues/3268
[#4132]: https://github.com/hypermodeinc/dgraph/issues/4132
-[#4005]: https://github.com/hypermodeinc/dgraph/issues/4005
[#4298]: https://github.com/hypermodeinc/dgraph/issues/4298
[#4021]: https://github.com/hypermodeinc/dgraph/issues/4021
[#3740]: https://github.com/hypermodeinc/dgraph/issues/3740
[#4311]: https://github.com/hypermodeinc/dgraph/issues/4311
-[#4047]: https://github.com/hypermodeinc/dgraph/issues/4047
[#4375]: https://github.com/hypermodeinc/dgraph/issues/4375
[#4394]: https://github.com/hypermodeinc/dgraph/issues/4394
[#4288]: https://github.com/hypermodeinc/dgraph/issues/4288
@@ -3043,50 +3329,53 @@ Enterprise features:
[#4425]: https://github.com/hypermodeinc/dgraph/issues/4425
## [1.1.0] - 2019-09-03
+
[1.1.0]: https://github.com/hypermodeinc/dgraph/compare/v1.0.17...v1.1.0
-### Changed
+**Changed**
- **Breaking changes**
- - **uid schema type**: The `uid` schema type now means a one-to-one relation,
- **not** a one-to-many relation as in Dgraph v1.1. To specify a one-to-many
- relation in Dgraph v1.0, use the `[uid]` schema type. ([#2895][], [#3173][], [#2921][])
+ - **uid schema type**: The `uid` schema type now means a one-to-one relation, **not** a
+ one-to-many relation as in Dgraph v1.1. To specify a one-to-many relation in Dgraph v1.0, use
+ the `[uid]` schema type. ([#2895][], [#3173][], [#2921][])
- **\_predicate\_** is removed from the query language.
- - **expand(\_all\_)** only works for nodes with attached type information via
- the type system. The type system is used to determine the predicates to expand
- out from a node. ([#3262][])
+ - **expand(\_all\_)** only works for nodes with attached type information via the type system. The
+ type system is used to determine the predicates to expand out from a node. ([#3262][])
+
+ - **S \* \* deletion** only works for nodes with attached type information via the type system.
+ The type system is used to determine the predicates to delete from a node. For `S * *`
+ deletions, only the predicates specified by the type are deleted.
- - **S \* \* deletion** only works for nodes with attached type information via
- the type system. The type system is used to determine the predicates to
- delete from a node. For `S * *` deletions, only the predicates specified by
- the type are deleted.
+ - **HTTP API**: The HTTP API has been updated to replace the custom HTTP headers with standard
+ headers.
- - **HTTP API**: The HTTP API has been updated to replace the custom HTTP headers
- with standard headers.
- Change `/commit` endpoint to accept a list of preds for conflict detection. ([#3020][])
- Remove custom HTTP Headers, cleanup API. ([#3365][])
- - The startTs path parameter is now a query parameter `startTs` for the
- `/query`, `/mutate`, and `/commit` endpoints.
- - Dgraph custom HTTP Headers `X-Dgraph-CommitNow`,
- `X-Dgraph-MutationType`, and `X-Dgraph-Vars` are now ignored.
+ - The startTs path parameter is now a query parameter `startTs` for the `/query`, `/mutate`,
+ and `/commit` endpoints.
+ - Dgraph custom HTTP Headers `X-Dgraph-CommitNow`, `X-Dgraph-MutationType`, and
+ `X-Dgraph-Vars` are now ignored.
- Update HTTP API Content-Type headers. ([#3550][]) ([#3532][])
- - Queries over HTTP must have the Content-Type header `application/graphql+-` or `application/json`.
- - Queries over HTTP with GraphQL Variables (e.g., `query queryName($a: string) { ... }`) must use the query format via `application/json` to pass query variables.
- - Mutations over HTTP must have the Content-Type header set to `application/rdf` for RDF format or `application/json` for JSON format.
- - Commits over HTTP must have the `startTs` query parameter along with the JSON map of conflict keys and predicates.
-
- - **Datetime index**: Use UTC Hour, Day, Month, Year for datetime
- comparison. This is a bug fix that may result in different query results for
- existing queries involving the datetime index. ([#3251][])
-
- - **Blank node name generation for JSON mutations.** For JSON mutations that
- do not explicitly set the `"uid"` field, the blank name format has changed
- to contain randomly generated identifiers. This fixes a bug where two JSON
- objects within a single mutation are assigned the same blank node.
- ([#3795][])
+ - Queries over HTTP must have the Content-Type header `application/graphql+-` or
+ `application/json`.
+ - Queries over HTTP with GraphQL Variables (e.g., `query queryName($a: string) { ... }`) must
+ use the query format via `application/json` to pass query variables.
+ - Mutations over HTTP must have the Content-Type header set to `application/rdf` for RDF
+ format or `application/json` for JSON format.
+ - Commits over HTTP must have the `startTs` query parameter along with the JSON map of
+ conflict keys and predicates.
+
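+
A sketch of the updated HTTP API just described: the query format goes in a standard `Content-Type`
header and `startTs` moves to a query parameter. The host, timestamp, and query body are
illustrative assumptions:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
	"strings"
)

func main() {
	// startTs is now a query parameter rather than a path segment.
	body := strings.NewReader(`{ q(func: has(name)) { uid name } }`)
	req, err := http.NewRequest("POST", "http://localhost:8080/query?startTs=100", body)
	if err != nil {
		log.Fatal(err)
	}
	// A standard Content-Type header replaces the removed X-Dgraph-* headers.
	req.Header.Set("Content-Type", "application/graphql+-")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	out, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(out))
}
```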
+ - **Datetime index**: Use UTC Hour, Day, Month, Year for datetime comparison. This is a bug fix
+ that may result in different query results for existing queries involving the datetime index.
+ ([#3251][])
+
+ - **Blank node name generation for JSON mutations.** For JSON mutations that do not explicitly set
+ the `"uid"` field, the blank name format has changed to contain randomly generated identifiers.
+ This fixes a bug where two JSON objects within a single mutation are assigned the same blank
+ node. ([#3795][])
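
For the blank-node change above, a hedged dgo sketch: two identical JSON objects without `"uid"`
fields now receive distinct auto-generated blank nodes instead of being merged. The predicate and
values are illustrative assumptions:

```go
package example

import (
	"context"

	"github.com/dgraph-io/dgo/v2"
	"github.com/dgraph-io/dgo/v2/protos/api"
)

// addPeople inserts two JSON objects with no "uid" field; after this change
// each object is assigned its own randomly generated blank node, even though
// their contents are identical.
func addPeople(ctx context.Context, dg *dgo.Dgraph) error {
	mu := &api.Mutation{
		SetJson:   []byte(`[{"name": "Ann"}, {"name": "Ann"}]`),
		CommitNow: true,
	}
	_, err := dg.NewTxn().Mutate(ctx, mu)
	return err
}
```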
- Improve hash index. ([#2887][])
- Use a stream connection for internal connection health checking. ([#2956][])
@@ -3099,7 +3388,8 @@ Enterprise features:
- Prevent alphas from asking zero to serve tablets during queries. ([#3091][])
- Put data before extensions in JSON response. ([#3194][])
- Always parse language tag. ([#3243][])
-- Populate the StartTs for the commit gRPC call so that clients can double check the startTs still matches. ([#3228][])
+- Populate the StartTs for the commit gRPC call so that clients can double check the startTs still
+ matches. ([#3228][])
- Replace MD5 with SHA-256 in `dgraph cert ls`. ([#3254][])
- Fix use of deprecated function `grpc.WithTimeout()`. ([#3253][])
- Introduce multi-part posting lists. ([#3105][])
@@ -3109,12 +3399,12 @@ Enterprise features:
- Remove list.SetForDeletion method, remnant of the global LRU cache. ([#3481][])
- Whitelist by hostname. ([#2953][])
- Use CIDR format for whitelists instead of the previous range format.
-- Introduce Badger's DropPrefix API into Dgraph to simplify how predicate deletions and drop all work internally. ([#3060][])
+- Introduce Badger's DropPrefix API into Dgraph to simplify how predicate deletions and drop all
+ work internally. ([#3060][])
- Replace integer compression in UID Pack with groupvarint algorithm. ([#3527][], [#3650][])
- Rebuild reverse index before count reverse. ([#3688][])
-- **Breaking change**: Use one atomic variable to generate blank node ids for
- json objects. This changes the format of automatically generated blank node
- names in JSON mutations. ([#3795][])
+- **Breaking change**: Use one atomic variable to generate blank node ids for json objects. This
+ changes the format of automatically generated blank node names in JSON mutations. ([#3795][])
- Print commit SHA256 when invoking "make install". ([#3786][])
- Print SHA-256 checksum of Dgraph binary in the version section logs. ([#3828][])
- Change anonymous telemetry endpoint. ([#3872][])
@@ -3123,6 +3413,7 @@ Enterprise features:
- Allow glog flags to be set via config file. ([#3062][], [#3077][])
- Logging
+
- Suppress logging before `flag.Parse` from glog. ([#2970][])
- Move glog of missing value warning to verbosity level 3. ([#3092][])
- Change time threshold for Raft.Ready warning logs. ([#3901][])
@@ -3130,6 +3421,7 @@ Enterprise features:
- Add additional logs to show progress of reindexing operation. ([#3746][])
- Error messages
+
- Output the line and column number in schema parsing error messages. ([#2986][])
- Improve error of empty block queries. ([#3015][])
- Update flag description and error messaging related to `--query_edge_limit` flag. ([#2979][])
@@ -3138,6 +3430,7 @@ Enterprise features:
- Return GraphQL compliant `"errors"` field for HTTP requests. ([#3728][])
- Optimizations
+
- Don't read posting lists from disk when mutating indices. ([#3695][], [#3713][])
- Avoid preallocating uid slice. It was slowing down unpackBlock.
- Reduce memory consumption in bulk loader. ([#3724][])
@@ -3170,12 +3463,16 @@ Dgraph Debug Tool
Dgraph Live Loader / Dgraph Bulk Loader
-- Add `--format` flag to Dgraph Live Loader and Dgraph Bulk Loader to specify input data format type. ([#2991][])
+- Add `--format` flag to Dgraph Live Loader and Dgraph Bulk Loader to specify input data format
+ type. ([#2991][])
- Update live loader flag help text. ([#3278][])
- Improve reporting of aborts and retries during live load. ([#3313][])
- Remove xidmap storage on disk from bulk loader.
- Optimize XidtoUID map used by live and bulk loader. ([#2998][])
-- Export data contains UID literals instead of blank nodes. Using Live Loader or Bulk Loader to load exported data will result in the same UIDs as the original database. ([#3004][], [#3045][]) To preserve the previous behavior, set the `--new_uids` flag in the live or bulk loader. ([18277872f][])
+- Export data contains UID literals instead of blank nodes. Using Live Loader or Bulk Loader to load
+ exported data will result in the same UIDs as the original database. ([#3004][], [#3045][]) To
+ preserve the previous behavior, set the `--new_uids` flag in the live or bulk loader.
+ ([18277872f][])
- Use StreamWriter in bulk loader. ([#3542][], [#3635][], [#3649][])
- Add timestamps during bulk/live load. ([#3287][])
- Use initial schema during bulk load. ([#3333][])
@@ -3191,16 +3488,17 @@ Dgraph Increment Tool
- Add `--retries` flag to specify number of retry requests to set up a gRPC connection. ([#3584][])
- Add TLS support to `dgraph increment` command. ([#3257][])
-### Added
+**Added**
-- Add bash and zsh shell completion. See `dgraph completion bash --help` or `dgraph completion zsh --help` for usage instructions. ([#3084][])
+- Add bash and zsh shell completion. See `dgraph completion bash --help` or
+ `dgraph completion zsh --help` for usage instructions. ([#3084][])
- Add support for ECDSA in dgraph cert. ([#3269][])
- Add support for JSON export via `/admin/export?format=json`. ([#3309][])
- Add the SQL-to-Dgraph migration tool `dgraph migrate`. ([#3295][])
-- Add `assign_timestamp_ns` latency field to fix encoding_ns calculation. Fixes [#3668][]. ([#3692][], [#3711][])
+- Add `assign_timestamp_ns` latency field to fix encoding_ns calculation. Fixes [#3668][].
+ ([#3692][], [#3711][])
- Adding draining mode to Alpha. ([#3880][])
-
- Enterprise features
- Support applying a license using /enterpriseLicense endpoint in Zero. ([#3824][])
- Don't apply license state for oss builds. ([#3847][])
@@ -3208,6 +3506,7 @@ Dgraph Increment Tool
Query
- Type system
+
- Add `type` function to query types. ([#2933][])
- Parser for type declaration. ([#2950][])
- Add `@type` directive to enforce type constraints. ([#3003][])
@@ -3218,9 +3517,10 @@ Query
- Include types in results of export operation. ([#3493][])
- Support types in the bulk loader. ([#3506][])
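
A hedged sketch tying the type-system entries above together: a type declaration plus a query using
the `type()` function. All names are illustrative, and the exact field syntax of this early release
is an assumption (v1.1.1 later simplified type definitions):

```go
package main

// Dgraph schema and query text embedded as Go constants for illustration.
const (
	schema = `
		name:    string @index(term) .
		friends: [uid] .

		type Person {
			name: string
			friends: [uid]
		}
	`
	byType = `{ people(func: type(Person)) { name } }`
)

func main() { println(schema, byType) }
```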
-- Add the `upsert` block to send "query-mutate-commit" updates as a single
- call to Dgraph. This is especially helpful to do upserts with the `@upsert`
- schema directive. Addresses [#3059][]. ([#3412][])
+- Add the `upsert` block to send "query-mutate-commit" updates as a single call to Dgraph. This is
+  especially helpful to do upserts with the `@upsert` schema directive (see the sketch after this
+  list). Addresses [#3059][]. ([#3412][])
+
- Add support for conditional mutation in Upsert Block. ([#3612][])
- Allow querying all lang values of a predicate. ([#2910][])
@@ -3234,11 +3534,12 @@ Query
- Support for GraphQL variables in arrays. ([#2981][])
- Show total weight of path in shortest path algorithm. ([#2954][])
- Rename dgraph `--dgraph` option to `--alpha`. ([#3273][])
-- Support uid variables in `from` and `to` arguments for shortest path query. Fixes [#1243][]. ([#3710][])
+- Support uid variables in `from` and `to` arguments for shortest path query. Fixes [#1243][].
+ ([#3710][])
-- Add support for `len()` function in query language. The `len()` function is
- only used in the `@if` directive for upsert blocks. `len(v)` It returns the
- length of a variable `v`. ([#3756][], [#3769][])
+- Add support for `len()` function in query language. The `len()` function is only used in the `@if`
+  directive for upsert blocks; `len(v)` returns the length of a variable `v`. ([#3756][],
+  [#3769][])
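
A dgo sketch of the upsert block referenced above, combined with the `len()` entry just before
this: one query-mutate-commit round trip whose mutation only runs when the query variable matched
nothing. The `email` predicate and client wiring are illustrative assumptions:

```go
package example

import (
	"context"

	"github.com/dgraph-io/dgo/v2"
	"github.com/dgraph-io/dgo/v2/protos/api"
)

// upsertAccount creates the account only if no node already has this email.
func upsertAccount(ctx context.Context, dg *dgo.Dgraph) error {
	req := &api.Request{
		Query: `query { v as var(func: eq(email, "ann@example.com")) }`,
		Mutations: []*api.Mutation{{
			Cond:      `@if(eq(len(v), 0))`, // run the mutation only when v is empty
			SetNquads: []byte(`_:acct <email> "ann@example.com" .`),
		}},
		CommitNow: true,
	}
	_, err := dg.NewTxn().Do(ctx, req)
	return err
}
```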
Mutation
@@ -3251,8 +3552,10 @@ Alter
Schema
-- **Breaking change**: Add ability to set schema to a single UID schema. Fixes [#2511][]. ([#2895][], [#3173][], [#2921][])
- - If you wish to create one-to-one edges, use the schema type `uid`. The `uid` schema type in v1.0.x must be changed to `[uid]` to denote a one-to-many uid edge.
+- **Breaking change**: Add ability to set schema to a single UID schema. Fixes [#2511][].
+ ([#2895][], [#3173][], [#2921][])
+ - If you wish to create one-to-one edges, use the schema type `uid`. The `uid` schema type in
+ v1.0.x must be changed to `[uid]` to denote a one-to-many uid edge.
- Prevent dropping or altering reserved predicates. ([#2967][]) ([#2997][])
- Reserved predicate names start with `dgraph.`.
- Support comments in schema. ([#3133][])
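
A two-line schema sketch of the single-UID change above; predicate names are illustrative
assumptions:

```go
package example

// Dgraph schema text embedded as a Go constant for illustration only.
const schema = `
	spouse:  uid   .  # one-to-one edge under the v1.1 semantics
	friends: [uid] .  # one-to-many edge (what a bare "uid" meant in v1.0.x)
`
```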
@@ -3263,8 +3566,8 @@ Schema
Enterprise feature: Access Control Lists (ACLs)
-Enterprise ACLs provide read/write/admin permissions to defined users and groups
-at the predicate-level.
+Enterprise ACLs provide read/write/admin permissions to defined users and groups at the predicate
+level.
- Enforcing ACLs for query, mutation and alter requests. ([#2862][])
- Don't create ACL predicates when the ACL feature is not turned on. ([#2924][])
@@ -3281,10 +3584,9 @@ at the predicate-level.
Enterprise feature: Backups
-Enterprise backups are Dgraph backups in a binary format designed to be restored
-to a cluster of the same version and configuration. Backups can be stored on
-local disk or stored directly to the cloud via AWS S3 or any Minio-compatible
-backend.
+Enterprise backups are Dgraph backups in a binary format designed to be restored to a cluster of the
+same version and configuration. Backups can be stored on local disk or stored directly to the cloud
+via AWS S3 or any Minio-compatible backend.
- Fixed bug with backup fan-out code. ([#2973][])
- Incremental backups / partial restore. ([#2963][])
@@ -3298,7 +3600,7 @@ backend.
- Store group to predicate mapping as part of the backup manifest. ([#3570][])
- Only backup the predicates belonging to a group. ([#3621][])
- Introduce backup data formats for cross-version compatibility. ([#3575][])
-- Add series and backup number information to manifest. ([#3559][])
+- Add series and backup number information to manifest. ([#3559][])
- Use backwards-compatible formats during backup ([#3629][])
- Use manifest to only restore preds assigned to each group. ([#3648][])
- Fixes the toBackupList function by removing the loop. ([#3869][])
@@ -3322,17 +3624,19 @@ Tracing
- Support exporting tracing data to oc_agent, then to datadog agent. ([#3398][])
- Measure latency of Alpha's Raft loop. (63f545568)
-### Removed
+**Removed**
- **Breaking change**: Remove `_predicate_` predicate within queries. ([#3262][])
- Remove `--debug_mode` option. ([#3441][])
-- Remove deprecated and unused IgnoreIndexConflict field in mutations. This functionality is superceded by the `@upsert` schema directive since v1.0.4. ([#3854][])
+- Remove deprecated and unused IgnoreIndexConflict field in mutations. This functionality is
+  superseded by the `@upsert` schema directive since v1.0.4. ([#3854][])
- Enterprise features
- - Remove `--enterprise_feature` flag. Enterprise license can be applied via /enterpriseLicense endpoint in Zero. ([#3824][])
+ - Remove `--enterprise_feature` flag. Enterprise license can be applied via /enterpriseLicense
+ endpoint in Zero. ([#3824][])
-### Fixed
+**Fixed**
- Fix `anyofterms()` query for facets from mutations in JSON format. Fixes [#2867][]. ([#2885][])
- Fixes error found by gofuzz. ([#2914][])
@@ -3350,9 +3654,11 @@ Tracing
- Fix race condition in numShutDownSig in Alpha. ([#3402][])
- Fix race condition in oracle.go. ([#3417][])
- Fix tautological condition in zero.go. ([#3516][])
-- Correctness fix: Block before proposing mutations and improve conflict key generation. Fixes [#3528][]. ([#3565][])
+- Correctness fix: Block before proposing mutations and improve conflict key generation. Fixes
+ [#3528][]. ([#3565][])
-- Reject requests with predicates larger than the max size allowed (longer than 65,535 characters). ([#3052][])
+- Reject requests with predicates larger than the max size allowed (longer than 65,535 characters).
+ ([#3052][])
- Upgrade raft lib and fix group checksum. ([#3085][])
- Check that uid is not used as function attribute. ([#3112][])
- Do not retrieve facets when max recurse depth has been reached. ([#3190][])
@@ -3379,7 +3685,6 @@ Tracing
[#3365]: https://github.com/hypermodeinc/dgraph/issues/3365
[#3550]: https://github.com/hypermodeinc/dgraph/issues/3550
[#3532]: https://github.com/hypermodeinc/dgraph/issues/3532
-[#3526]: https://github.com/hypermodeinc/dgraph/issues/3526
[#3528]: https://github.com/hypermodeinc/dgraph/issues/3528
[#3565]: https://github.com/hypermodeinc/dgraph/issues/3565
[#2914]: https://github.com/hypermodeinc/dgraph/issues/2914
@@ -3410,7 +3715,6 @@ Tracing
[#3243]: https://github.com/hypermodeinc/dgraph/issues/3243
[#3228]: https://github.com/hypermodeinc/dgraph/issues/3228
[#3254]: https://github.com/hypermodeinc/dgraph/issues/3254
-[#3274]: https://github.com/hypermodeinc/dgraph/issues/3274
[#3253]: https://github.com/hypermodeinc/dgraph/issues/3253
[#3105]: https://github.com/hypermodeinc/dgraph/issues/3105
[#3310]: https://github.com/hypermodeinc/dgraph/issues/3310
@@ -3460,8 +3764,6 @@ Tracing
[#3287]: https://github.com/hypermodeinc/dgraph/issues/3287
[#3333]: https://github.com/hypermodeinc/dgraph/issues/3333
[#3560]: https://github.com/hypermodeinc/dgraph/issues/3560
-[#3613]: https://github.com/hypermodeinc/dgraph/issues/3613
-[#3560]: https://github.com/hypermodeinc/dgraph/issues/3560
[#3628]: https://github.com/hypermodeinc/dgraph/issues/3628
[#3681]: https://github.com/hypermodeinc/dgraph/issues/3681
[#3659]: https://github.com/hypermodeinc/dgraph/issues/3659
@@ -3488,7 +3790,6 @@ Tracing
[#3493]: https://github.com/hypermodeinc/dgraph/issues/3493
[#3506]: https://github.com/hypermodeinc/dgraph/issues/3506
[#3059]: https://github.com/hypermodeinc/dgraph/issues/3059
-[#3412]: https://github.com/hypermodeinc/dgraph/issues/3412
[#3612]: https://github.com/hypermodeinc/dgraph/issues/3612
[#2910]: https://github.com/hypermodeinc/dgraph/issues/2910
[#2913]: https://github.com/hypermodeinc/dgraph/issues/2913
@@ -3519,7 +3820,6 @@ Tracing
[#2924]: https://github.com/hypermodeinc/dgraph/issues/2924
[#2951]: https://github.com/hypermodeinc/dgraph/issues/2951
[#3124]: https://github.com/hypermodeinc/dgraph/issues/3124
-[#3141]: https://github.com/hypermodeinc/dgraph/issues/3141
[#3164]: https://github.com/hypermodeinc/dgraph/issues/3164
[#3218]: https://github.com/hypermodeinc/dgraph/issues/3218
[#3207]: https://github.com/hypermodeinc/dgraph/issues/3207
@@ -3532,7 +3832,6 @@ Tracing
[#3172]: https://github.com/hypermodeinc/dgraph/issues/3172
[#3219]: https://github.com/hypermodeinc/dgraph/issues/3219
[#3227]: https://github.com/hypermodeinc/dgraph/issues/3227
-[#3387]: https://github.com/hypermodeinc/dgraph/issues/3387
[#3515]: https://github.com/hypermodeinc/dgraph/issues/3515
[#3536]: https://github.com/hypermodeinc/dgraph/issues/3536
[#3547]: https://github.com/hypermodeinc/dgraph/issues/3547
@@ -3551,10 +3850,8 @@ Tracing
[#3262]: https://github.com/hypermodeinc/dgraph/issues/3262
[#3441]: https://github.com/hypermodeinc/dgraph/issues/3441
[#3854]: https://github.com/hypermodeinc/dgraph/issues/3854
-[#3824]: https://github.com/hypermodeinc/dgraph/issues/3824
[#2867]: https://github.com/hypermodeinc/dgraph/issues/2867
[#2885]: https://github.com/hypermodeinc/dgraph/issues/2885
-[#2914]: https://github.com/hypermodeinc/dgraph/issues/2914
[#2893]: https://github.com/hypermodeinc/dgraph/issues/2893
[#2891]: https://github.com/hypermodeinc/dgraph/issues/2891
[#2925]: https://github.com/hypermodeinc/dgraph/issues/2925
@@ -3565,7 +3862,6 @@ Tracing
[#3027]: https://github.com/hypermodeinc/dgraph/issues/3027
[#3182]: https://github.com/hypermodeinc/dgraph/issues/3182
[#3505]: https://github.com/hypermodeinc/dgraph/issues/3505
-[#3402]: https://github.com/hypermodeinc/dgraph/issues/3402
[#3417]: https://github.com/hypermodeinc/dgraph/issues/3417
[#3516]: https://github.com/hypermodeinc/dgraph/issues/3516
[#3052]: https://github.com/hypermodeinc/dgraph/issues/3052
@@ -3574,7 +3870,6 @@ Tracing
[#3085]: https://github.com/hypermodeinc/dgraph/issues/3085
[#3112]: https://github.com/hypermodeinc/dgraph/issues/3112
[#3190]: https://github.com/hypermodeinc/dgraph/issues/3190
-[#3172]: https://github.com/hypermodeinc/dgraph/issues/3172
[#3216]: https://github.com/hypermodeinc/dgraph/issues/3216
[#3205]: https://github.com/hypermodeinc/dgraph/issues/3205
[#3169]: https://github.com/hypermodeinc/dgraph/issues/3169
@@ -3597,17 +3892,17 @@ Tracing
[#3874]: https://github.com/hypermodeinc/dgraph/issues/3874
[#3900]: https://github.com/hypermodeinc/dgraph/issues/3900
[3271f64e0]: https://github.com/hypermodeinc/dgraph/commit/3271f64e0
-[63f545568]: https://github.com/hypermodeinc/dgraph/commit/63f545568
[18277872f]: https://github.com/hypermodeinc/dgraph/commit/18277872f
[802ec4c39]: https://github.com/hypermodeinc/dgraph/commit/802ec4c39
## [1.0.18] - 2019-12-16
+
[1.0.18]: https://github.com/hypermodeinc/dgraph/compare/v1.0.17...v1.0.18
-### Fixed
+**Fixed**
-- Preserve the order of entries in a mutation if multiple versions of the same
- edge are found. This addresses the mutation re-ordering change ([#2987][]) from v1.0.15.
+- Preserve the order of entries in a mutation if multiple versions of the same edge are found. This
+ addresses the mutation re-ordering change ([#2987][]) from v1.0.15.
- Fixing the zero client in live loader to avoid using TLS. Fixes [#3919][]. ([#3936][])
- Remove query cache which is causing contention. ([#4071][]).
- Fix bug when querying with nested levels of `expand(_all_)`. Fixes [#3807][]. ([#4143][]).
@@ -3620,24 +3915,25 @@ Tracing
[#3807]: https://github.com/hypermodeinc/dgraph/issues/3807
[#4143]: https://github.com/hypermodeinc/dgraph/issues/4143
[#4212]: https://github.com/hypermodeinc/dgraph/issues/4212
-[#4157]: https://github.com/hypermodeinc/dgraph/issues/4157
[#4252]: https://github.com/hypermodeinc/dgraph/issues/4252
## [1.0.17] - 2019-08-30
+
[1.0.17]: https://github.com/hypermodeinc/dgraph/compare/v1.0.16...v1.0.17
-### Changed
+**Changed**
- Increase max trace logs per span in Alpha. ([#3886][])
- Include line and column numbers in lexer errors. Fixes [#2900][]. ([#3772][])
- Release binaries built with Go 1.12.7.
-### Fixed
+**Fixed**
- Decrease rate of Raft heartbeat messages. ([#3708][], [#3753][])
- Fix bug when exporting a predicate name to the schema. Fixes [#3699][]. ([#3701][])
- Return error instead of asserting in handleCompareFunction. ([#3665][])
-- Fix bug where aliases in a query incorrectly alias the response depending on alias order. Fixes [#3814][]. ([#3837][])
+- Fix bug where aliases in a query incorrectly alias the response depending on alias order. Fixes
+ [#3814][]. ([#3837][])
- Fix for panic in fillGroupedVars. Fixes [#3768][]. ([#3781][])
[#3886]: https://github.com/hypermodeinc/dgraph/issues/3886
@@ -3654,13 +3950,14 @@ Tracing
[#3781]: https://github.com/hypermodeinc/dgraph/issues/3781
## [1.0.16] - 2019-07-11
+
[1.0.16]: https://github.com/hypermodeinc/dgraph/compare/v1.0.15...v1.0.16
-### Changed
+**Changed**
- Vendor in prometheus/client_golang/prometheus v0.9.4. ([#3653][])
-### Fixed
+**Fixed**
- Fix panic with value variables in queries. Fixes [#3470][]. ([#3554][])
- Remove unused reserved predicates in the schema. Fixes [#3535][]. ([#3557][])
@@ -3674,46 +3971,62 @@ Tracing
[#3653]: https://github.com/hypermodeinc/dgraph/issues/3653
## [1.0.15] - 2019-05-30
+
[1.0.15]: https://github.com/hypermodeinc/dgraph/compare/v1.0.14...v1.0.15
-### Fixed
+**Fixed**
- Fix bug that can cause a Dgraph cluster to get stuck in infinite leader election. ([#3391][])
- Fix bug in bulk loader that prevented loading data from JSON files. ([#3464][])
- Fix bug with a potential deadlock by breaking circular lock acquisition. ([#3393][])
-- Properly escape strings containing Unicode control characters for data exports. Fixes [#3383]. ([#3429][])
+- Properly escape strings containing Unicode control characters for data exports. Fixes [#3383].
+ ([#3429][])
- Initialize tablets map when creating a group. ([#3360][])
-- Fix queries with `offset` not working with multiple `orderasc` or `orderdesc` statements. Fixes [#3366][]. ([#3455][])
+- Fix queries with `offset` not working with multiple `orderasc` or `orderdesc` statements. Fixes
+ [#3366][]. ([#3455][])
- Vendor in bug fixes from badger. ([#3348][], [#3371][], [#3460][])
-### Changed
+**Changed**
- Use Go v1.12.5 to build Dgraph release binaries.
- Truncate Raft logs even when no txn commits are happening. ([3be380b8a][])
-- Reduce memory usage by setting a limit on the size of committed entries that can be served per Ready. ([#3308][])
+- Reduce memory usage by setting a limit on the size of committed entries that can be served per
+ Ready. ([#3308][])
- Reduce memory usage of pending txns by only keeping deltas in memory. ([#3349][])
- Reduce memory usage by limiting the number of pending proposals in apply channel. ([#3340][])
- Reduce memory usage when calculating snapshots by retrieving entries in batches. ([#3409][])
- Allow snapshot calculations during snapshot streaming. ([ecb454754][])
-- Allow quick recovery from partitions by shortening the deadline of sending Raft messages to 10s. ([77b52aca1][])
-- Take snapshots less frequently so straggling Alpha followers can catch up to the leader. Snapshot frequency is configurable via a flag (see Added section). ([#3367][])
-- Allow partial snapshot streams to reduce the amount of data needed to be transferred between Alphas. ([#3454][])
-- Use Badger's StreamWriter to improve write speeds during snapshot streaming. ([#3457][]) ([#3442][])
+- Allow quick recovery from partitions by shortening the deadline of sending Raft messages to 10s.
+ ([77b52aca1][])
+- Take snapshots less frequently so straggling Alpha followers can catch up to the leader. Snapshot
+ frequency is configurable via a flag (see Added section). ([#3367][])
+- Allow partial snapshot streams to reduce the amount of data needed to be transferred between
+ Alphas. ([#3454][])
+- Use Badger's StreamWriter to improve write speeds during snapshot streaming. ([#3457][])
+ ([#3442][])
- Call file sync explicitly at the end of TxnWriter to improve performance. ([#3418][])
-- Optimize mutation and delta application. **Breaking: With these changes, the mutations within a single call are rearranged. So, no assumptions must be made about the order in which they get executed.**
- ([#2987][])
+- Optimize mutation and delta application. **Breaking: With these changes, the mutations within a
+ single call are rearranged. So, no assumptions must be made about the order in which they get
+ executed.** ([#2987][])
- Add logs to show Dgraph config options. ([#3337][])
-- Add `-v=3` logs for reporting Raft communication for debugging. These logs start with `RaftComm:`. ([9cd628f6f][])
-
-### Added
-
-- Add Alpha flag `--snapshot_after` (default: 10000) to configure the number of Raft entries to keep before taking a snapshot. ([#3367][])
-- Add Alpha flag `--abort_older_than` (default: 5m) to configure the amount of time since a pending txn's last mutation until it is aborted. ([#3367][])
-- Add Alpha flag `--normalize_node_limit` (default: 10000) to configure the limit for the maximum number of nodes that can be returned in a query that uses the `@normalize` directive. Fixes [#3335][]. ([#3467][])
-- Add Prometheus metrics for latest Raft applied index (`dgraph_raft_applied_index`) and the max assigned txn timestamp (`dgraph_max_assigned_ts`). These are useful to track cluster progress. ([#3338][])
+- Add `-v=3` logs for reporting Raft communication for debugging. These logs start with `RaftComm:`.
+ ([9cd628f6f][])
+
+**Added**
+
+- Add Alpha flag `--snapshot_after` (default: 10000) to configure the number of Raft entries to keep
+ before taking a snapshot. ([#3367][])
+- Add Alpha flag `--abort_older_than` (default: 5m) to configure the amount of time since a pending
+ txn's last mutation until it is aborted. ([#3367][])
+- Add Alpha flag `--normalize_node_limit` (default: 10000) to configure the limit for the maximum
+ number of nodes that can be returned in a query that uses the `@normalize` directive. Fixes
+ [#3335][]. ([#3467][])
+- Add Prometheus metrics for latest Raft applied index (`dgraph_raft_applied_index`) and the max
+ assigned txn timestamp (`dgraph_max_assigned_ts`). These are useful to track cluster progress.
+ ([#3338][])
- Add Raft checkpoint index to WAL for quicker recovery after restart. ([#3444][])
-### Removed
+**Removed**
- Remove size calculation in posting list. ([0716dc4e1][])
- Remove a `-v=2` log which can be too noisy during Raft replay. ([2377d9f56][]).
@@ -3721,7 +4034,6 @@ Tracing
[#3337]: https://github.com/hypermodeinc/dgraph/pull/3337
[#3391]: https://github.com/hypermodeinc/dgraph/pull/3391
-[#3400]: https://github.com/hypermodeinc/dgraph/pull/3400
[#3464]: https://github.com/hypermodeinc/dgraph/pull/3464
[#2987]: https://github.com/hypermodeinc/dgraph/pull/2987
[#3349]: https://github.com/hypermodeinc/dgraph/pull/3349
@@ -3742,10 +4054,8 @@ Tracing
[#3418]: https://github.com/hypermodeinc/dgraph/pull/3418
[#3454]: https://github.com/hypermodeinc/dgraph/pull/3454
[#3457]: https://github.com/hypermodeinc/dgraph/pull/3457
-[#3442]: https://github.com/hypermodeinc/dgraph/pull/3442
[#3467]: https://github.com/hypermodeinc/dgraph/pull/3467
[#3338]: https://github.com/hypermodeinc/dgraph/pull/3338
-[#3444]: https://github.com/hypermodeinc/dgraph/pull/3444
[3be380b8a]: https://github.com/hypermodeinc/dgraph/commit/3be380b8a
[ecb454754]: https://github.com/hypermodeinc/dgraph/commit/ecb454754
[77b52aca1]: https://github.com/hypermodeinc/dgraph/commit/77b52aca1
@@ -3754,19 +4064,22 @@ Tracing
[2377d9f56]: https://github.com/hypermodeinc/dgraph/commit/2377d9f56
## [1.0.14] - 2019-04-12
+
[1.0.14]: https://github.com/hypermodeinc/dgraph/compare/v1.0.13...v1.0.14
-### Fixed
+**Fixed**
- Fix bugs related to best-effort queries. ([#3125][])
- Stream Raft Messages and Fix Check Quorum. ([#3138][])
- Fix lin reads timeouts and AssignUid recursion in Zero. ([#3203][])
- Fix panic when running `@groupby(uid)` which is not allowed and other logic fixes. ([#3232][])
-- Fix a StartTs Mismatch bug which happens when running multiple best effort queries using the same txn. Reuse the same timestamp instead of allocating a new one. ([#3187][]) ([#3246][])
+- Fix a StartTs Mismatch bug which happens when running multiple best effort queries using the same
+ txn. Reuse the same timestamp instead of allocating a new one. ([#3187][]) ([#3246][])
- Shutdown extra connections. ([#3280][])
- Fix bug for queries with `@recurse` and `expand(_all_)`. ([#3179][])
- Fix assorted cases of goroutine leaks. ([#3074][])
-- Increment tool: Fix best-effort flag name so best-effort queries run as intended from the tool. ([d386fa5][])
+- Increment tool: Fix best-effort flag name so best-effort queries run as intended from the tool.
+ ([d386fa5][])
[#3125]: https://github.com/hypermodeinc/dgraph/pull/3125
[#3138]: https://github.com/hypermodeinc/dgraph/pull/3138
@@ -3779,569 +4092,708 @@ Tracing
[#3074]: https://github.com/hypermodeinc/dgraph/pull/3074
[d386fa5]: https://github.com/hypermodeinc/dgraph/commit/d386fa5
-### Added
+**Added**
-- Add timeout option while running queries over HTTP. Setting the `timeout` query parameter `/query?timeout=60s` will timeout queries after 1 minute. ([#3238][])
+- Add timeout option while running queries over HTTP. Setting the `timeout` query parameter
+ `/query?timeout=60s` will time out queries after 1 minute; see the sketch below. ([#3238][])
- Add `badger` tool to release binaries and Docker image.
[#3238]: https://github.com/hypermodeinc/dgraph/pull/3238
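+
+As a rough illustration of the new parameter, here is a minimal Go sketch; the Alpha address,
+query body, and content type are assumptions for the example, not part of this release:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+)
+
+func main() {
+	// The timeout query parameter caps server-side execution at 60s.
+	q := `{ q(func: has(name)) { uid name } }`
+	resp, err := http.Post("http://localhost:8080/query?timeout=60s",
+		"application/graphql+-", // assumed content type; adjust per version
+		strings.NewReader(q))
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	body, _ := io.ReadAll(resp.Body)
+	fmt.Println(string(body))
+}
+```
+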
## [1.0.13] - 2019-03-10
+
[1.0.13]: https://github.com/hypermodeinc/dgraph/compare/v1.0.12...v1.0.13
-**Note: This release supersedes v1.0.12 with bug fixes. If you're running v1.0.12, please upgrade to v1.0.13. It is safe to upgrade in-place without a data export and import.**
+**Note: This release supersedes v1.0.12 with bug fixes. If you're running v1.0.12, please upgrade to
+v1.0.13. It is safe to upgrade in-place without a data export and import.**
-### Fixed
+**Fixed**
- Fix Raft panic. ([8cb69ea](https://github.com/hypermodeinc/dgraph/commit/8cb69ea))
-- Log an error instead of an assertion check for SrcUIDs being nil. ([691b3b3](https://github.com/hypermodeinc/dgraph/commit/691b3b3))
+- Log an error instead of an assertion check for SrcUIDs being nil.
+ ([691b3b3](https://github.com/hypermodeinc/dgraph/commit/691b3b3))
## [1.0.12] - 2019-03-05
+
[1.0.12]: https://github.com/hypermodeinc/dgraph/compare/v1.0.11...v1.0.12
-**Note: This release requires you to export and re-import data prior to
-upgrading or rolling back. The underlying data format has been changed.**
+**Note: This release requires you to export and re-import data prior to upgrading or rolling back.
+The underlying data format has been changed.**
-### Added
+**Added**
- Support gzip compression for gRPC and HTTP requests.
([#2843](https://github.com/hypermodeinc/dgraph/issues/2843))
-- Restore is available from a full binary backup. This is an enterprise
- feature licensed under the Dgraph Community License.
-- Strict schema mode via `--mutations` flag. By default `--mutations=allow` is
- set to allow all mutations; `--mutations=disallow` disables all mutations;
- `--mutations=strict` allows mutations only for predicates which are defined in
- the schema. Fixes [#2277](https://github.com/hypermodeinc/dgraph/issues/2277).
-- Add `dgraph increment` tool for debugging and testing. The increment tool
- queries for the specified predicate (default: `counter.val`), increments its
- integer counter value, and mutates the result back to Dgraph. Useful for
- testing end-to-end txns to verify cluster health.
+- Restore is available from a full binary backup. This is an enterprise feature licensed under the
+ Dgraph Community License.
+- Strict schema mode via `--mutations` flag. By default `--mutations=allow` is set to allow all
+ mutations; `--mutations=disallow` disables all mutations; `--mutations=strict` allows mutations
+ only for predicates which are defined in the schema. Fixes
+ [#2277](https://github.com/hypermodeinc/dgraph/issues/2277).
+- Add `dgraph increment` tool for debugging and testing. The increment tool queries for the
+ specified predicate (default: `counter.val`), increments its integer counter value, and mutates
+ the result back to Dgraph. Useful for testing end-to-end txns to verify cluster health.
([#2955](https://github.com/hypermodeinc/dgraph/issues/2955))
-- Support best-effort queries. This would relax the requirement of linearizible
- reads. For best-effort queries, Alpha would request timestamps from memory
- instead of making an outbound request to Zero.
- ([#3071](https://github.com/hypermodeinc/dgraph/issues/3071))
-
-### Changed
-
-- Use the new Stream API from Badger instead of Dgraph's Stream framework. ([#2852](https://github.com/hypermodeinc/dgraph/issues/2852))
-- Discard earlier versions of posting lists. ([#2859](https://github.com/hypermodeinc/dgraph/issues/2859))
-- Make HTTP JSON response encoding more efficient by operating on a bytes buffer
- directly. ([ae1d9f3](https://github.com/hypermodeinc/dgraph/commit/ae1d9f3))
-- Optimize and refactor facet filtering. ([#2829](https://github.com/hypermodeinc/dgraph/issues/2829))
+- Support best-effort queries. This would relax the requirement of linearizable reads. For
+ best-effort queries, Alpha would request timestamps from memory instead of making an outbound
+ request to Zero. ([#3071](https://github.com/hypermodeinc/dgraph/issues/3071))
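+
+A hedged sketch of issuing a best-effort read from Go, assuming a dgo client release that exposes
+`NewReadOnlyTxn().BestEffort()`; the import paths and port below are illustrative:
+
+```go
+package main
+
+import (
+	"context"
+	"fmt"
+
+	"github.com/dgraph-io/dgo"
+	"github.com/dgraph-io/dgo/protos/api"
+	"google.golang.org/grpc"
+)
+
+func main() {
+	conn, err := grpc.Dial("localhost:9080", grpc.WithInsecure())
+	if err != nil {
+		panic(err)
+	}
+	defer conn.Close()
+	dg := dgo.NewDgraphClient(api.NewDgraphClient(conn))
+
+	// Best-effort: Alpha serves a timestamp from memory instead of
+	// asking Zero, trading strict linearizability for lower latency.
+	txn := dg.NewReadOnlyTxn().BestEffort()
+	resp, err := txn.Query(context.Background(), `{ q(func: has(name)) { count(uid) } }`)
+	if err != nil {
+		panic(err)
+	}
+	fmt.Println(string(resp.Json))
+}
+```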
+
+**Changed**
+
+- Use the new Stream API from Badger instead of Dgraph's Stream framework.
+ ([#2852](https://github.com/hypermodeinc/dgraph/issues/2852))
+- Discard earlier versions of posting lists.
+ ([#2859](https://github.com/hypermodeinc/dgraph/issues/2859))
+- Make HTTP JSON response encoding more efficient by operating on a bytes buffer directly.
+ ([ae1d9f3](https://github.com/hypermodeinc/dgraph/commit/ae1d9f3))
+- Optimize and refactor facet filtering.
+ ([#2829](https://github.com/hypermodeinc/dgraph/issues/2829))
- Show badger.Item meta information in `dgraph debug` output.
-- Add new option to `dgraph debug` tool to get a histogram of key and value sizes. ([#2844](https://github.com/hypermodeinc/dgraph/issues/2844))
+- Add new option to `dgraph debug` tool to get a histogram of key and value sizes.
+ ([#2844](https://github.com/hypermodeinc/dgraph/issues/2844))
- Add new option to `dgraph debug` tool to get info from a particular read timestamp.
-- Refactor rebuild index logic. ([#2851](https://github.com/hypermodeinc/dgraph/issues/2851), [#2866](https://github.com/hypermodeinc/dgraph/issues/2866))
-- For gRPC clients, schema queries are returned in the Json field. The Schema proto field is deprecated.
-- Simplify design and make tablet moves robust. ([#2800](https://github.com/hypermodeinc/dgraph/issues/2800))
-- Switch all node IDs to hex in logs (e.g., ID 0xa instead of ID 10), so they are consistent with Raft logs.
-- Refactor reindexing code to only reindex specific tokenizers. ([#2948](https://github.com/hypermodeinc/dgraph/issues/2948))
-- Introduce group checksums. ([#2964](https://github.com/hypermodeinc/dgraph/issues/2964), [#3085](https://github.com/hypermodeinc/dgraph/issues/3085))
+- Refactor rebuild index logic. ([#2851](https://github.com/hypermodeinc/dgraph/issues/2851),
+ [#2866](https://github.com/hypermodeinc/dgraph/issues/2866))
+- For gRPC clients, schema queries are returned in the Json field. The Schema proto field is
+ deprecated.
+- Simplify design and make tablet moves robust.
+ ([#2800](https://github.com/hypermodeinc/dgraph/issues/2800))
+- Switch all node IDs to hex in logs (e.g., ID 0xa instead of ID 10), so they are consistent with
+ Raft logs.
+- Refactor reindexing code to only reindex specific tokenizers.
+ ([#2948](https://github.com/hypermodeinc/dgraph/issues/2948))
+- Introduce group checksums. ([#2964](https://github.com/hypermodeinc/dgraph/issues/2964),
+ [#3085](https://github.com/hypermodeinc/dgraph/issues/3085))
- Return aborted error if commit ts is 0.
-- Reduce number of "ClusterInfoOnly" requests to Zero by making VerifyUid wait for membership information. ([#2974](https://github.com/hypermodeinc/dgraph/issues/2974))
+- Reduce number of "ClusterInfoOnly" requests to Zero by making VerifyUid wait for membership
+ information. ([#2974](https://github.com/hypermodeinc/dgraph/issues/2974))
- Simplify Raft WAL storage caching. ([#3102](https://github.com/hypermodeinc/dgraph/issues/3102))
- Build release binary with Go version 1.11.5.
-### Removed
+**Removed**
-- **Remove LRU cache from Alpha for big wins in query latency reduction (5-10x)
- and mutation throughput (live loading 1.7x faster).** Setting `--lru_mb` is
- still required but will not have any effect since the cache is removed. The
- flag will be used later version when LRU cache is introduced within Badger and
- configurable from Dgraph.
-- Remove `--nomutations` flag. Its functionality has moved into strict schema
- mode with the `--mutations` flag (see Added section).
+- **Remove LRU cache from Alpha for big wins in query latency reduction (5-10x) and mutation
+ throughput (live loading 1.7x faster).** Setting `--lru_mb` is still required but will not have
+ any effect since the cache is removed. The flag will be used in a later version when LRU cache is
+ introduced within Badger and configurable from Dgraph.
+- Remove `--nomutations` flag. Its functionality has moved into strict schema mode with the
+ `--mutations` flag (see Added section).
-### Fixed
+**Fixed**
-- Use json.Marshal for strings and blobs. Fixes [#2662](https://github.com/hypermodeinc/dgraph/issues/2662).
-- Let eq use string "uid" as value. Fixes [#2827](https://github.com/hypermodeinc/dgraph/issues/2827).
+- Use json.Marshal for strings and blobs. Fixes
+ [#2662](https://github.com/hypermodeinc/dgraph/issues/2662).
+- Let eq use string "uid" as value. Fixes
+ [#2827](https://github.com/hypermodeinc/dgraph/issues/2827).
- Skip empty posting lists in `has` function.
- Fix Rollup to pick max update commit ts.
-- Fix a race condition when processing concurrent queries. Fixes [#2849](https://github.com/hypermodeinc/dgraph/issues/2849).
-- Show an error when running multiple mutation blocks. Fixes [#2815](https://github.com/hypermodeinc/dgraph/issues/2815).
+- Fix a race condition when processing concurrent queries. Fixes
+ [#2849](https://github.com/hypermodeinc/dgraph/issues/2849).
+- Show an error when running multiple mutation blocks. Fixes
+ [#2815](https://github.com/hypermodeinc/dgraph/issues/2815).
- Bring in optimizations and bug fixes over from Badger.
-- Bulk Loader for multi-group (sharded data) clusters writes out per-group
- schema with only the predicates owned by the group instead of all predicates
- in the cluster. This fixes an issue where queries made to one group may not
- return data served by other groups.
+- Bulk Loader for multi-group (sharded data) clusters writes out per-group schema with only the
+ predicates owned by the group instead of all predicates in the cluster. This fixes an issue where
+ queries made to one group may not return data served by other groups.
([#3065](https://github.com/hypermodeinc/dgraph/issues/3065))
- Remove the assert failure in raftwal/storage.go.
## [1.0.11] - 2018-12-17
+
[1.0.11]: https://github.com/hypermodeinc/dgraph/compare/v1.0.10...v1.0.11
-### Added
+**Added**
- Integrate OpenCensus in Dgraph. ([#2739](https://github.com/hypermodeinc/dgraph/issues/2739))
- Add Dgraph Community License for proprietary features.
-- Feature: Full binary backups. This is an enterprise feature licensed under the Dgraph Community License. ([#2710](https://github.com/hypermodeinc/dgraph/issues/2710))
-- Add `--enterprise_features` flag to enable enterprise features. By enabling enterprise features, you accept the terms of the Dgraph Community License.
-- Add minio dep and its deps in govendor. ([94daeaf7](https://github.com/hypermodeinc/dgraph/commit/94daeaf7), [35a73e81](https://github.com/hypermodeinc/dgraph/commit/35a73e81))
-- Add network partitioning tests with blockade tool. ([./contrib/blockade](https://github.com/hypermodeinc/dgraph/tree/v1.0.11/contrib/blockade))
-- Add Zero endpoints `/assign?what=uids&num=10` and `/assign?what=timestamps&num=10` to assign UIDs or transaction timestamp leases.
-- Adding the acl subcommand to support acl features (still work-in-progress). ([#2795](https://github.com/hypermodeinc/dgraph/issues/2795))
-- Support custom tokenizer in bulk loader ([#2820](https://github.com/hypermodeinc/dgraph/issues/2820))
-- Support JSON data with Dgraph Bulk Loader. ([#2799](https://github.com/hypermodeinc/dgraph/issues/2799))
-
-### Changed
-
-- Make posting list memory rollup happen right after disk. ([#2731](https://github.com/hypermodeinc/dgraph/issues/2731))
-- Do not retry proposal if already found in CommittedEntries. ([#2740](https://github.com/hypermodeinc/dgraph/issues/2740))
-- Remove ExportPayload from protos. Export returns Status and ExportRequest. ([#2741](https://github.com/hypermodeinc/dgraph/issues/2741))
-- Allow more escape runes to be skipped over when parsing string literal. ([#2734](https://github.com/hypermodeinc/dgraph/issues/2734))
-- Clarify message of overloaded pending proposals for live loader. ([#2732](https://github.com/hypermodeinc/dgraph/issues/2732))
+- Feature: Full binary backups. This is an enterprise feature licensed under the Dgraph Community
+ License. ([#2710](https://github.com/hypermodeinc/dgraph/issues/2710))
+- Add `--enterprise_features` flag to enable enterprise features. By enabling enterprise features,
+ you accept the terms of the Dgraph Community License.
+- Add minio dep and its deps in govendor.
+ ([94daeaf7](https://github.com/hypermodeinc/dgraph/commit/94daeaf7),
+ [35a73e81](https://github.com/hypermodeinc/dgraph/commit/35a73e81))
+- Add network partitioning tests with blockade tool.
+ ([./contrib/blockade](https://github.com/hypermodeinc/dgraph/tree/v1.0.11/contrib/blockade))
+- Add Zero endpoints `/assign?what=uids&num=10` and `/assign?what=timestamps&num=10` to assign UIDs
+ or transaction timestamp leases; see the sketch after this list.
+- Add the acl subcommand to support ACL features (still work in progress).
+ ([#2795](https://github.com/hypermodeinc/dgraph/issues/2795))
+- Support custom tokenizer in bulk loader
+ ([#2820](https://github.com/hypermodeinc/dgraph/issues/2820))
+- Support JSON data with Dgraph Bulk Loader.
+ ([#2799](https://github.com/hypermodeinc/dgraph/issues/2799))
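+
+A minimal Go sketch of leasing UIDs from Zero over HTTP; the address assumes Zero's default HTTP
+port and is illustrative only:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+)
+
+func main() {
+	// Lease 10 UIDs; swap what=uids for what=timestamps to lease
+	// transaction timestamps instead.
+	resp, err := http.Get("http://localhost:6080/assign?what=uids&num=10")
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	body, _ := io.ReadAll(resp.Body)
+	fmt.Println(string(body)) // JSON describing the leased range
+}
+```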
+
+**Changed**
+
+- Make posting list memory rollup happen right after disk.
+ ([#2731](https://github.com/hypermodeinc/dgraph/issues/2731))
+- Do not retry proposal if already found in CommittedEntries.
+ ([#2740](https://github.com/hypermodeinc/dgraph/issues/2740))
+- Remove ExportPayload from protos. Export returns Status and ExportRequest.
+ ([#2741](https://github.com/hypermodeinc/dgraph/issues/2741))
+- Allow more escape runes to be skipped over when parsing string literal.
+ ([#2734](https://github.com/hypermodeinc/dgraph/issues/2734))
+- Clarify message of overloaded pending proposals for live loader.
+ ([#2732](https://github.com/hypermodeinc/dgraph/issues/2732))
- Posting List Evictions. (e2bcfdad)
- Log when removing a tablet. ([#2746](https://github.com/hypermodeinc/dgraph/issues/2746))
-- Deal better with network partitions in leaders. ([#2749](https://github.com/hypermodeinc/dgraph/issues/2749))
+- Deal better with network partitions in leaders.
+ ([#2749](https://github.com/hypermodeinc/dgraph/issues/2749))
- Keep maxDelay during timestamp req to 1s.
- Updates to the version output info.
- - Print the go version used to build Dgraph when running `dgraph version` and in the logs when Dgraph runs. ([#2768](https://github.com/hypermodeinc/dgraph/issues/2768))
- - Print the Dgraph version when running live or bulk loader. ([#2736](https://github.com/hypermodeinc/dgraph/issues/2736))
-- Checking nil values in the equal function ([#2769](https://github.com/hypermodeinc/dgraph/issues/2769))
+ - Print the go version used to build Dgraph when running `dgraph version` and in the logs when
+ Dgraph runs. ([#2768](https://github.com/hypermodeinc/dgraph/issues/2768))
+ - Print the Dgraph version when running live or bulk loader.
+ ([#2736](https://github.com/hypermodeinc/dgraph/issues/2736))
+- Check nil values in the equal function.
+ ([#2769](https://github.com/hypermodeinc/dgraph/issues/2769))
- Optimize query: UID expansion. ([#2772](https://github.com/hypermodeinc/dgraph/issues/2772))
-- Split membership sync endpoints and remove PurgeTs endpoint. ([#2773](https://github.com/hypermodeinc/dgraph/issues/2773))
-- Set the Prefix option during iteration. ([#2780](https://github.com/hypermodeinc/dgraph/issues/2780))
+- Split membership sync endpoints and remove PurgeTs endpoint.
+ ([#2773](https://github.com/hypermodeinc/dgraph/issues/2773))
+- Set the Prefix option during iteration.
+ ([#2780](https://github.com/hypermodeinc/dgraph/issues/2780))
- Replace Zero's `/assignIds?num=10` endpoint with `/assign?what=uids&num=10` (see Added section).
-### Removed
+**Removed**
-- Remove type hinting for JSON and RDF schema-less types. ([#2742](https://github.com/hypermodeinc/dgraph/issues/2742))
-- Remove deprecated logic that was found using vet. ([#2758](https://github.com/hypermodeinc/dgraph/issues/2758))
-- Remove assert for zero-length posting lists. ([#2763](https://github.com/hypermodeinc/dgraph/issues/2763))
+- Remove type hinting for JSON and RDF schema-less types.
+ ([#2742](https://github.com/hypermodeinc/dgraph/issues/2742))
+- Remove deprecated logic that was found using vet.
+ ([#2758](https://github.com/hypermodeinc/dgraph/issues/2758))
+- Remove assert for zero-length posting lists.
+ ([#2763](https://github.com/hypermodeinc/dgraph/issues/2763))
-### Fixed
+**Fixed**
- Restore schema states on error. ([#2730](https://github.com/hypermodeinc/dgraph/issues/2730))
-- Refactor bleve tokenizer usage ([#2738](https://github.com/hypermodeinc/dgraph/issues/2738)). Fixes [#2622](https://github.com/hypermodeinc/dgraph/issues/2622) and [#2601](https://github.com/hypermodeinc/dgraph/issues/2601).
+- Refactor bleve tokenizer usage ([#2738](https://github.com/hypermodeinc/dgraph/issues/2738)).
+ Fixes [#2622](https://github.com/hypermodeinc/dgraph/issues/2622) and
+ [#2601](https://github.com/hypermodeinc/dgraph/issues/2601).
- Switch to Badger's Watermark library, which has a memory leak fix. (0cd9d82e)
- Fix tiny typo. ([#2761](https://github.com/hypermodeinc/dgraph/issues/2761))
- Fix Test: TestMillion.
- Fix Jepsen bank test. ([#2764](https://github.com/hypermodeinc/dgraph/issues/2764))
- Fix link to help_wanted. ([#2774](https://github.com/hypermodeinc/dgraph/issues/2774))
-- Fix invalid division by zero error. Fixes [#2733](https://github.com/hypermodeinc/dgraph/issues/2733).
-- Fix missing predicates after export and bulk load. Fixes [#2616](https://github.com/hypermodeinc/dgraph/issues/2616).
-- Handle various edge cases around cluster memberships. ([#2791](https://github.com/hypermodeinc/dgraph/issues/2791))
-- Change Encrypt to not re-encrypt password values. Fixes [#2765](https://github.com/hypermodeinc/dgraph/issues/2765).
-- Correctly parse facet types for both JSON and RDF formats. Previously the
- parsing was handled differently depending on the input format. ([#2797](https://github.com/hypermodeinc/dgraph/issues/2797))
+- Fix invalid division by zero error. Fixes
+ [#2733](https://github.com/hypermodeinc/dgraph/issues/2733).
+- Fix missing predicates after export and bulk load. Fixes
+ [#2616](https://github.com/hypermodeinc/dgraph/issues/2616).
+- Handle various edge cases around cluster memberships.
+ ([#2791](https://github.com/hypermodeinc/dgraph/issues/2791))
+- Change Encrypt to not re-encrypt password values. Fixes
+ [#2765](https://github.com/hypermodeinc/dgraph/issues/2765).
+- Correctly parse facet types for both JSON and RDF formats. Previously the parsing was handled
+ differently depending on the input format.
+ ([#2797](https://github.com/hypermodeinc/dgraph/issues/2797))
## [1.0.10] - 2018-11-05
+
[1.0.10]: https://github.com/hypermodeinc/dgraph/compare/v1.0.9...v1.0.10
-**Note: This release requires you to export and re-import data. We have changed the underlying storage format.**
+**Note: This release requires you to export and re-import data. We have changed the underlying
+storage format.**
-### Added
+**Added**
-- The Alter endpoint can be protected by an auth token that is set on the Dgraph Alphas via the `--auth_token` option. This can help prevent accidental schema updates and drop all operations. ([#2692](https://github.com/hypermodeinc/dgraph/issues/2692))
+- The Alter endpoint can be protected by an auth token that is set on the Dgraph Alphas via the
+ `--auth_token` option. This can help prevent accidental schema updates and drop all operations;
+ see the sketch after this list. ([#2692](https://github.com/hypermodeinc/dgraph/issues/2692))
- Optimize has function ([#2724](https://github.com/hypermodeinc/dgraph/issues/2724))
-- Expose the health check API via gRPC. ([#2721](https://github.com/hypermodeinc/dgraph/issues/2721))
+- Expose the health check API via gRPC.
+ ([#2721](https://github.com/hypermodeinc/dgraph/issues/2721))
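+
+A sketch of calling the protected Alter endpoint from Go; the `X-Dgraph-AuthToken` header name and
+the schema payload are assumptions to verify against the docs for your version:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+)
+
+func main() {
+	// Alter request carrying the token configured via --auth_token.
+	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/alter",
+		strings.NewReader(`name: string @index(term) .`))
+	if err != nil {
+		panic(err)
+	}
+	req.Header.Set("X-Dgraph-AuthToken", "<token>") // assumed header name
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	body, _ := io.ReadAll(resp.Body)
+	fmt.Println(string(body))
+}
+```
+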
-### Changed
+**Changed**
- Dgraph is relicensed to Apache 2.0. ([#2652](https://github.com/hypermodeinc/dgraph/issues/2652))
-- **Breaking change**. Rename Dgraph Server to Dgraph Alpha to clarify discussions of the Dgraph cluster. The top-level command `dgraph server` is now `dgraph alpha`. ([#2667](https://github.com/hypermodeinc/dgraph/issues/2667))
-- Prometheus metrics have been renamed for consistency for alpha, memory, and lru cache metrics. ([#2636](https://github.com/hypermodeinc/dgraph/issues/2636), [#2670](https://github.com/hypermodeinc/dgraph/issues/2670), [#2714](https://github.com/hypermodeinc/dgraph/issues/2714))
-- The `dgraph-converter` command is available as the subcommand `dgraph conv`. ([#2635](https://github.com/hypermodeinc/dgraph/issues/2635))
+- **Breaking change**. Rename Dgraph Server to Dgraph Alpha to clarify discussions of the Dgraph
+ cluster. The top-level command `dgraph server` is now `dgraph alpha`.
+ ([#2667](https://github.com/hypermodeinc/dgraph/issues/2667))
+- Prometheus metrics have been renamed for consistency for alpha, memory, and lru cache metrics.
+ ([#2636](https://github.com/hypermodeinc/dgraph/issues/2636),
+ [#2670](https://github.com/hypermodeinc/dgraph/issues/2670),
+ [#2714](https://github.com/hypermodeinc/dgraph/issues/2714))
+- The `dgraph-converter` command is available as the subcommand `dgraph conv`.
+ ([#2635](https://github.com/hypermodeinc/dgraph/issues/2635))
- Updating protobuf version. ([#2639](https://github.com/hypermodeinc/dgraph/issues/2639))
- Allow checkpwd to be aliased ([#2641](https://github.com/hypermodeinc/dgraph/issues/2641))
-- Better control excessive traffic to Dgraph ([#2678](https://github.com/hypermodeinc/dgraph/issues/2678))
-- Export format now exports on the Alpha receiving the export request. The naming scheme of the export files has been simplified.
-- Improvements to the `dgraph debug` tool that can be used to inspect the contents of the posting lists directory.
+- Better control excessive traffic to Dgraph
+ ([#2678](https://github.com/hypermodeinc/dgraph/issues/2678))
+- Export format now exports on the Alpha receiving the export request. The naming scheme of the
+ export files has been simplified.
+- Improvements to the `dgraph debug` tool that can be used to inspect the contents of the posting
+ lists directory.
- Bring in Badger updates ([#2697](https://github.com/hypermodeinc/dgraph/issues/2697))
-### Fixed
+**Fixed**
-- Make raft leader resume probing after snapshot crash ([#2707](https://github.com/hypermodeinc/dgraph/issues/2707))
-- **Breaking change:** Create a lot simpler sorted uint64 codec ([#2716](https://github.com/hypermodeinc/dgraph/issues/2716))
-- Increase the size of applyCh, to give Raft some breathing space. Otherwise, it fails to maintain quorum health.
+- Make raft leader resume probing after snapshot crash
+ ([#2707](https://github.com/hypermodeinc/dgraph/issues/2707))
+- **Breaking change:** Create a lot simpler sorted uint64 codec
+ ([#2716](https://github.com/hypermodeinc/dgraph/issues/2716))
+- Increase the size of applyCh, to give Raft some breathing space. Otherwise, it fails to maintain
+ quorum health.
- Zero should stream last commit update
- Send commit timestamps in order ([#2687](https://github.com/hypermodeinc/dgraph/issues/2687))
- Query blocks with the same name are no longer allowed.
-- Fix out-of-range values in query parser. ([#2690](https://github.com/hypermodeinc/dgraph/issues/2690))
+- Fix out-of-range values in query parser.
+ ([#2690](https://github.com/hypermodeinc/dgraph/issues/2690))
## [1.0.9] - 2018-10-02
+
[1.0.9]: https://github.com/hypermodeinc/dgraph/compare/v1.0.8...v1.0.9
-### Added
+**Added**
-- This version switches Badger Options to reasonable settings for p and w directories. This removes the need to expose `--badger.options` option and removes the `none` option from `--badger.vlog`. ([#2605](https://github.com/hypermodeinc/dgraph/issues/2605))
-- Add support for ignoring parse errors in bulk loader with the option `--ignore_error`. ([#2599](https://github.com/hypermodeinc/dgraph/issues/2599))
-- Introduction of new command `dgraph cert` to simplify initial TLS setup. See [TLS configuration docs](https://dgraph.io/docs/deploy/#tls-configuration) for more info.
-- Add `expand(_forward_)` and `expand(_reverse_)` to GraphQL+- query language. If `_forward_` is passed as an argument to `expand()`, all predicates at that level (minus any reverse predicates) are retrieved.
-If `_reverse_` is passed as an argument to `expand()`, only the reverse predicates are retrieved.
+- This version switches Badger Options to reasonable settings for p and w directories. This removes
+ the need to expose `--badger.options` option and removes the `none` option from `--badger.vlog`.
+ ([#2605](https://github.com/hypermodeinc/dgraph/issues/2605))
+- Add support for ignoring parse errors in bulk loader with the option `--ignore_error`.
+ ([#2599](https://github.com/hypermodeinc/dgraph/issues/2599))
+- Introduction of new command `dgraph cert` to simplify initial TLS setup. See
+ [TLS configuration docs](https://dgraph.io/docs/deploy/#tls-configuration) for more info.
+- Add `expand(_forward_)` and `expand(_reverse_)` to GraphQL+- query language. If `_forward_` is
+ passed as an argument to `expand()`, all predicates at that level (minus any reverse predicates)
+ are retrieved. If `_reverse_` is passed as an argument to `expand()`, only the reverse predicates
+ are retrieved.
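+
+For concreteness, the two query shapes as Go string constants; the uid and predicates are made up
+for illustration:
+
+```go
+package main
+
+import "fmt"
+
+const forwardQuery = `{
+  node(func: uid(0x1)) {
+    expand(_forward_)  # every forward predicate at this level
+  }
+}`
+
+const reverseQuery = `{
+  node(func: uid(0x1)) {
+    expand(_reverse_)  # only the reverse predicates
+  }
+}`
+
+func main() {
+	fmt.Println(forwardQuery)
+	fmt.Println(reverseQuery)
+}
+```
+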
-### Changed
+**Changed**
- Rename intern pkg to pb ([#2608](https://github.com/hypermodeinc/dgraph/issues/2608))
-### Fixed
+**Fixed**
- Remove LinRead map logic from Dgraph ([#2570](https://github.com/hypermodeinc/dgraph/issues/2570))
- Sanity length check for facets mostly.
-- Make has function correct w.r.t. transactions ([#2585](https://github.com/hypermodeinc/dgraph/issues/2585))
-- Increase the snapshot calculation interval, while decreasing the min number of entries required; so we take snapshots even when there's little activity.
-- Convert an assert during DropAll to inf retry. ([#2578](https://github.com/hypermodeinc/dgraph/issues/2578))
-- Fix a bug which caused all transactions to abort if `--expand_edge` was set to false. Fixes [#2547](https://github.com/hypermodeinc/dgraph/issues/2547).
-- Set the Applied index in Raft directly, so it does not pick up an index older than the snapshot. Ensure that it is in sync with the Applied watermark. Fixes [#2581](https://github.com/hypermodeinc/dgraph/issues/2581).
+- Make has function correct w.r.t. transactions
+ ([#2585](https://github.com/hypermodeinc/dgraph/issues/2585))
+- Increase the snapshot calculation interval, while decreasing the min number of entries required,
+ so we take snapshots even when there's little activity.
+- Convert an assert during DropAll to inf retry.
+ ([#2578](https://github.com/hypermodeinc/dgraph/issues/2578))
+- Fix a bug which caused all transactions to abort if `--expand_edge` was set to false. Fixes
+ [#2547](https://github.com/hypermodeinc/dgraph/issues/2547).
+- Set the Applied index in Raft directly, so it does not pick up an index older than the snapshot.
+ Ensure that it is in sync with the Applied watermark. Fixes
+ [#2581](https://github.com/hypermodeinc/dgraph/issues/2581).
- Pull in Badger updates. This also fixes the Unable to find log file, retry error.
-- Improve efficiency of readonly transactions by reusing the same read ts ([#2604](https://github.com/hypermodeinc/dgraph/issues/2604))
+- Improve efficiency of readonly transactions by reusing the same read ts
+ ([#2604](https://github.com/hypermodeinc/dgraph/issues/2604))
- Fix a bug in Raft.Run loop. ([#2606](https://github.com/hypermodeinc/dgraph/issues/2606))
-- Fix a few issues regarding snapshot.Index for raft.Cfg.Applied. Do not overwrite any existing data when apply txn commits. Do not let CreateSnapshot fail.
-- Consider all future versions of the key as well, when deciding whether to write a key or not during txn commits. Otherwise, we'll end up in an endless loop of trying to write a stale key but failing to do so.
-- When testing inequality value vars with non-matching values, the response was sent as an error although it should return empty result if the query has correct syntax. ([#2611](https://github.com/hypermodeinc/dgraph/issues/2611))
-- Switch traces to glogs in worker/export.go ([#2614](https://github.com/hypermodeinc/dgraph/issues/2614))
-- Improve error handling for `dgraph live` for errors when processing RDF and schema files. ([#2596](https://github.com/hypermodeinc/dgraph/issues/2596))
-- Fix task conversion from bool to int that used uint32 ([#2621](https://github.com/hypermodeinc/dgraph/issues/2621))
-- Fix `expand(_all_)` in recurse queries ([#2600](https://github.com/hypermodeinc/dgraph/issues/2600)).
-- Add language aliases for broader support for full text indices. ([#2602](https://github.com/hypermodeinc/dgraph/issues/2602))
+- Fix a few issues regarding snapshot.Index for raft.Cfg.Applied. Do not overwrite any existing data
+ when applying txn commits. Do not let CreateSnapshot fail.
+- Consider all future versions of the key as well, when deciding whether to write a key or not
+ during txn commits. Otherwise, we'll end up in an endless loop of trying to write a stale key but
+ failing to do so.
+- When testing inequality value vars with non-matching values, the response was sent as an error
+ although it should return empty result if the query has correct syntax.
+ ([#2611](https://github.com/hypermodeinc/dgraph/issues/2611))
+- Switch traces to glogs in worker/export.go
+ ([#2614](https://github.com/hypermodeinc/dgraph/issues/2614))
+- Improve error handling for `dgraph live` for errors when processing RDF and schema files.
+ ([#2596](https://github.com/hypermodeinc/dgraph/issues/2596))
+- Fix task conversion from bool to int that used uint32
+ ([#2621](https://github.com/hypermodeinc/dgraph/issues/2621))
+- Fix `expand(_all_)` in recurse queries
+ ([#2600](https://github.com/hypermodeinc/dgraph/issues/2600)).
+- Add language aliases for broader support for full text indices.
+ ([#2602](https://github.com/hypermodeinc/dgraph/issues/2602))
## [1.0.8] - 2018-08-29
+
[1.0.8]: https://github.com/hypermodeinc/dgraph/compare/v1.0.7...v1.0.8
-### Added
+**Added**
- Introduce a new /assignIds HTTP endpoint in Zero, so users can allocate UIDs to nodes externally.
-- Add a new tool which retrieves and increments a counter by 1 transactionally. This can be used to test the sanity of Dgraph cluster.
+- Add a new tool which retrieves and increments a counter by 1 transactionally. This can be used to
+ test the sanity of Dgraph cluster.
-### Changed
+**Changed**
-- This version introduces tracking of a few anonymous metrics to measure Dgraph adoption ([#2554](https://github.com/hypermodeinc/dgraph/issues/2554)). These metrics do not contain any specifically identifying information about the user, so most users can leave it on. This can be turned off by setting `--telemetry=false` flag if needed in Dgraph Zero.
+- This version introduces tracking of a few anonymous metrics to measure Dgraph adoption
+ ([#2554](https://github.com/hypermodeinc/dgraph/issues/2554)). These metrics do not contain any
+ specifically identifying information about the user, so most users can leave it on. This can be
+ turned off by setting `--telemetry=false` flag if needed in Dgraph Zero.
-### Fixed
+**Fixed**
-- Correctly handle a list of type geo in json ([#2482](https://github.com/hypermodeinc/dgraph/issues/2482), [#2485](https://github.com/hypermodeinc/dgraph/issues/2485)).
+- Correctly handle a list of type geo in json
+ ([#2482](https://github.com/hypermodeinc/dgraph/issues/2482),
+ [#2485](https://github.com/hypermodeinc/dgraph/issues/2485)).
- Fix the graceful shutdown of Dgraph server, so a single Ctrl+C would now suffice to stop it.
-- Fix various deadlocks in Dgraph and set ConfState in Raft correctly ([#2548](https://github.com/hypermodeinc/dgraph/issues/2548)).
-- Significantly decrease the number of transaction aborts by using SPO as key for entity to entity connections. ([#2556](https://github.com/hypermodeinc/dgraph/issues/2556)).
-- Do not print error while sending Raft message by default. No action needs to be taken by the user, so it is set to V(3) level.
+- Fix various deadlocks in Dgraph and set ConfState in Raft correctly
+ ([#2548](https://github.com/hypermodeinc/dgraph/issues/2548)).
+- Significantly decrease the number of transaction aborts by using SPO as key for entity to entity
+ connections. ([#2556](https://github.com/hypermodeinc/dgraph/issues/2556)).
+- Do not print error while sending Raft message by default. No action needs to be taken by the user,
+ so it is set to V(3) level.
## [1.0.7] - 2018-08-10
+
[1.0.7]: https://github.com/hypermodeinc/dgraph/compare/v1.0.6...v1.0.7
-### Changed
+**Changed**
- Set the `--conc` flag in live loader default to 1, as a temporary fix to avoid tons of aborts.
-### Fixed
+**Fixed**
-- All Oracle delta streams are applied via Raft proposals. This deals better with network partition like edge-cases. [#2463](https://github.com/hypermodeinc/dgraph/issues/2463)
-- Fix deadlock in 10-node cluster convergence. Fixes [#2286](https://github.com/hypermodeinc/dgraph/issues/2286).
+- All Oracle delta streams are applied via Raft proposals. This deals better with
+ network-partition-like edge cases. [#2463](https://github.com/hypermodeinc/dgraph/issues/2463)
+- Fix deadlock in 10-node cluster convergence. Fixes
+ [#2286](https://github.com/hypermodeinc/dgraph/issues/2286).
- Make ReadIndex work safely. [#2469](https://github.com/hypermodeinc/dgraph/issues/2469)
-- Simplify snapshots, leader now calculates and proposes snapshots to the group. [#2475](https://github.com/hypermodeinc/dgraph/issues/2475).
+- Simplify snapshots, leader now calculates and proposes snapshots to the group.
+ [#2475](https://github.com/hypermodeinc/dgraph/issues/2475).
- Make snapshot streaming more robust. [#2487](https://github.com/hypermodeinc/dgraph/issues/2487)
-- Consolidate all txn tracking logic into Oracle, remove inSnapshot logic. [#2480](https://github.com/hypermodeinc/dgraph/issues/2480).
+- Consolidate all txn tracking logic into Oracle, remove inSnapshot logic.
+ [#2480](https://github.com/hypermodeinc/dgraph/issues/2480).
- Bug fix in Badger, to stop panics when exporting.
- Use PreVote to avoid leader change on a node join.
-- Fix a long-standing bug where `raft.Step` was being called via goroutines. It is now called serially.
-- Fix context deadline issues with proposals. [#2501](https://github.com/hypermodeinc/dgraph/issues/2501).
+- Fix a long-standing bug where `raft.Step` was being called via goroutines. It is now called
+ serially.
+- Fix context deadline issues with proposals.
+ [#2501](https://github.com/hypermodeinc/dgraph/issues/2501).
## [1.0.6] - 2018-06-20
-[1.0.6]: https://github.com/hypermodeinc/dgraph/compare/v1.0.5...v1.0.6
-### Added
-
-* Support GraphQL vars as args for Regexp function. [#2353](https://github.com/hypermodeinc/dgraph/issues/2353)
-* Support GraphQL vars with filters. [#2359](https://github.com/hypermodeinc/dgraph/issues/2359)
-* Add JSON mutations to raw HTTP. [#2396](https://github.com/hypermodeinc/dgraph/issues/2396)
-
-### Fixed
-
-* Fix math >= evaluation. [#2365](https://github.com/hypermodeinc/dgraph/issues/2365)
-* Avoid race condition between mutation commit and predicate move. [#2392](https://github.com/hypermodeinc/dgraph/issues/2392)
-* Ability to correctly distinguish float from int in JSON. [#2398](https://github.com/hypermodeinc/dgraph/issues/2398)
-* Remove _dummy_ data key. [#2401](https://github.com/hypermodeinc/dgraph/issues/2401)
-* Serialize applying of Raft proposals. Concurrent application was complex and
- cause of multiple bugs. [#2428](https://github.com/hypermodeinc/dgraph/issues/2428).
-* Improve Zero connections.
-* Fix bugs in snapshot move, refactor code and improve performance significantly. [#2440](https://github.com/hypermodeinc/dgraph/issues/2440), [#2442](https://github.com/hypermodeinc/dgraph/issues/2442)
-* Add error handling to GetNoStore. Fixes [#2373](https://github.com/hypermodeinc/dgraph/issues/2373).
-* Fix bugs in Bulk loader. [#2449](https://github.com/hypermodeinc/dgraph/issues/2449)
-* Posting List and Raft bug fixes. [#2457](https://github.com/hypermodeinc/dgraph/issues/2457)
-
-### Changed
-
-* Pull in Badger v1.5.2.
-* Raft storage is now done entirely via Badger. This reduces RAM
- consumption by previously used MemoryStorage. [#2433](https://github.com/hypermodeinc/dgraph/issues/2433)
-* Trace how node.Run loop performs.
-* Allow tweaking Badger options.
+[1.0.6]: https://github.com/hypermodeinc/dgraph/compare/v1.0.5...v1.0.6
-**Note:** This change modifies some flag names. In particular, Badger options
-are now exposed via flags named with `--badger.` prefix.
+**Added**
+
+- Support GraphQL vars as args for Regexp function.
+ [#2353](https://github.com/hypermodeinc/dgraph/issues/2353)
+- Support GraphQL vars with filters. [#2359](https://github.com/hypermodeinc/dgraph/issues/2359)
+- Add JSON mutations to raw HTTP. [#2396](https://github.com/hypermodeinc/dgraph/issues/2396)
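+
+A hedged Go sketch of a JSON mutation against the raw HTTP endpoint; the `X-Dgraph-*` header names
+are assumptions to verify against the docs for your version:
+
+```go
+package main
+
+import (
+	"fmt"
+	"io"
+	"net/http"
+	"strings"
+)
+
+func main() {
+	payload := `{"set": [{"name": "Alice"}]}`
+	req, err := http.NewRequest(http.MethodPost, "http://localhost:8080/mutate",
+		strings.NewReader(payload))
+	if err != nil {
+		panic(err)
+	}
+	req.Header.Set("X-Dgraph-MutationType", "json") // assumed header name
+	req.Header.Set("X-Dgraph-CommitNow", "true")    // assumed header name
+	resp, err := http.DefaultClient.Do(req)
+	if err != nil {
+		panic(err)
+	}
+	defer resp.Body.Close()
+	body, _ := io.ReadAll(resp.Body)
+	fmt.Println(string(body))
+}
+```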
+
+**Fixed**
+
+- Fix math >= evaluation. [#2365](https://github.com/hypermodeinc/dgraph/issues/2365)
+- Avoid race condition between mutation commit and predicate move.
+ [#2392](https://github.com/hypermodeinc/dgraph/issues/2392)
+- Ability to correctly distinguish float from int in JSON.
+ [#2398](https://github.com/hypermodeinc/dgraph/issues/2398)
+- Remove _dummy_ data key. [#2401](https://github.com/hypermodeinc/dgraph/issues/2401)
+- Serialize applying of Raft proposals. Concurrent application was complex and a cause of multiple
+ bugs. [#2428](https://github.com/hypermodeinc/dgraph/issues/2428).
+- Improve Zero connections.
+- Fix bugs in snapshot move, refactor code and improve performance significantly.
+ [#2440](https://github.com/hypermodeinc/dgraph/issues/2440),
+ [#2442](https://github.com/hypermodeinc/dgraph/issues/2442)
+- Add error handling to GetNoStore. Fixes
+ [#2373](https://github.com/hypermodeinc/dgraph/issues/2373).
+- Fix bugs in Bulk loader. [#2449](https://github.com/hypermodeinc/dgraph/issues/2449)
+- Posting List and Raft bug fixes. [#2457](https://github.com/hypermodeinc/dgraph/issues/2457)
+
+**Changed**
+
+- Pull in Badger v1.5.2.
+- Raft storage is now done entirely via Badger. This reduces RAM consumption by previously used
+ MemoryStorage. [#2433](https://github.com/hypermodeinc/dgraph/issues/2433)
+- Trace how node.Run loop performs.
+- Allow tweaking Badger options.
+
+**Note:** This change modifies some flag names. In particular, Badger options are now exposed via
+flags named with `--badger.` prefix.
## [1.0.5] - 2018-04-20
-[1.0.5]: https://github.com/hypermodeinc/dgraph/compare/v1.0.4...v1.0.5
-
-### Added
-* Option to have server side sequencing.
-* Ability to specify whitelisted IP addresses for admin actions.
-
-
-### Fixed
-
-* Fix bug where predicate with string type sometimes appeared as `_:uidffffffffffffffff` in exports.
-* Validate facet value should be according to the facet type supplied when mutating using N-Quads ([#2074](https://github.com/hypermodeinc/dgraph/issues/2074)).
-* Use `time.Equal` function for comparing predicates with `datetime`([#2219](https://github.com/hypermodeinc/dgraph/issues/2219)).
-* Skip `BitEmptyPosting` for `has` queries.
-* Return error from query if we don't serve the group for the attribute instead of crashing ([#2227](https://github.com/hypermodeinc/dgraph/issues/2227)).
-* Send `maxpending` in connection state to server ([#2236](https://github.com/hypermodeinc/dgraph/issues/2236)).
-* Fix bug in SP* transactions ([#2148](https://github.com/hypermodeinc/dgraph/issues/2148)).
-* Batch and send during snapshot to make snapshots faster.
-* Don't skip schema keys while calculating tablets served.
-* Fix the issue which could lead to snapshot getting blocked for a cluster with replicas ([#2266](https://github.com/hypermodeinc/dgraph/issues/2266)).
-* Dgraph server retries indefinitely to connect to Zero.
-* Allow filtering and regex queries for list types with lossy tokenizers.
-* Dgraph server segfault in worker package ([#2322](https://github.com/hypermodeinc/dgraph/issues/2322)).
-* Node crashes can lead to the loss of inserted triples ([#2290](https://github.com/hypermodeinc/dgraph/issues/2290)).
-
-
-### Changed
+[1.0.5]: https://github.com/hypermodeinc/dgraph/compare/v1.0.4...v1.0.5
-* Cancel pending transactions for a predicate when predicate move is initiated.
-* Move Go client to its own repo at `dgraph-io/dgo`.
-* Make `expand(_all_)` return value and uid facets.
-* Add an option to specify a `@lang` directive in schema for predicates with lang tags.
-* Flag `memory_mb` has been changed to `lru_mb`. The default recommended value for `lru_mb` is
+**Added**
+
+- Option to have server-side sequencing.
+- Ability to specify whitelisted IP addresses for admin actions.
+
+**Fixed**
+
+- Fix bug where predicate with string type sometimes appeared as `_:uidffffffffffffffff` in exports.
+- Validate that facet values conform to the facet type supplied when mutating using N-Quads
+ ([#2074](https://github.com/hypermodeinc/dgraph/issues/2074)).
+- Use `time.Equal` function for comparing predicates with
+ `datetime` ([#2219](https://github.com/hypermodeinc/dgraph/issues/2219)).
+- Skip `BitEmptyPosting` for `has` queries.
+- Return error from query if we don't serve the group for the attribute instead of crashing
+ ([#2227](https://github.com/hypermodeinc/dgraph/issues/2227)).
+- Send `maxpending` in connection state to server
+ ([#2236](https://github.com/hypermodeinc/dgraph/issues/2236)).
+- Fix bug in SP\* transactions ([#2148](https://github.com/hypermodeinc/dgraph/issues/2148)).
+- Batch and send during snapshot to make snapshots faster.
+- Don't skip schema keys while calculating tablets served.
+- Fix the issue which could lead to snapshot getting blocked for a cluster with replicas
+ ([#2266](https://github.com/hypermodeinc/dgraph/issues/2266)).
+- Dgraph server retries indefinitely to connect to Zero.
+- Allow filtering and regex queries for list types with lossy tokenizers.
+- Dgraph server segfault in worker package
+ ([#2322](https://github.com/hypermodeinc/dgraph/issues/2322)).
+- Node crashes can lead to the loss of inserted triples
+ ([#2290](https://github.com/hypermodeinc/dgraph/issues/2290)).
+
+**Changed**
+
+- Cancel pending transactions for a predicate when predicate move is initiated.
+- Move Go client to its own repo at `dgraph-io/dgo`.
+- Make `expand(_all_)` return value and uid facets.
+- Add an option to specify a `@lang` directive in schema for predicates with lang tags.
+- Flag `memory_mb` has been changed to `lru_mb`. The default recommended value for `lru_mb` is
one-third of the total RAM available on the server.
## [1.0.4] - 2018-03-09
+
[1.0.4]: https://github.com/hypermodeinc/dgraph/compare/v1.0.3...v1.0.4
-### Added
+**Added**
-* Support for empty strings in query attributes.
-* Support GraphQL vars in first, offset and after at root.
-* Add support for query_edge_limit flag which can be used to limit number of results for shortest
+- Support for empty strings in query attributes.
+- Support GraphQL vars in first, offset and after at root.
+- Add support for query_edge_limit flag which can be used to limit number of results for shortest
path, recurse queries.
-* Make rebalance interval a flag in Zero.
-* Return latency information for mutation operations.
-* Support @upsert directive in schema.
-
-### Fixed
-
-* Issues with predicate deletion in a cluster.
-* Handle errors from posting.Get.
-* Correctly update commitTs while committing and startTs == deleteTs.
-* Error handling in abort http handler.
-* Get latest membership state from Zero if uid in mutation > maxLeaseId.
-* Fix bug in Mutate where mutated keys were not filled.
-* Update membership state if we can't find a leader while doing snapshot retrieval.
-* Make snapshotting more frequent, also try aborting long pending transactions.
-* Trim null character from end of strings before exporting.
-* Sort facets after parsing RDF's using bulk loader.
-* Fig bug in SyncIfDirty.
-* Fix fatal error due to TxnTooBig error.
-* Fix bug in dgraph live where some batches could be skipped on conflict error.
-* Fix a bug related to expand(_all_) queries.
-* Run cleanPredicate and proposeKeyValues sequentially.
-* Serialize connect requests in Zero.
-
-### Changed
-
-* Retry snapshot retrieval and join cluster indefinitely.
-* Make client directory optional in dgraph live.
-* Do snapshot in Zero in a goroutine so that Run loop isn't blocked.
-
+- Make rebalance interval a flag in Zero.
+- Return latency information for mutation operations.
+- Support @upsert directive in schema.
+
+**Fixed**
+
+- Issues with predicate deletion in a cluster.
+- Handle errors from posting.Get.
+- Correctly update commitTs while committing when startTs == deleteTs.
+- Error handling in abort http handler.
+- Get latest membership state from Zero if uid in mutation > maxLeaseId.
+- Fix bug in Mutate where mutated keys were not filled.
+- Update membership state if we can't find a leader while doing snapshot retrieval.
+- Make snapshotting more frequent, also try aborting long pending transactions.
+- Trim null character from end of strings before exporting.
+- Sort facets after parsing RDFs using the bulk loader.
+- Fix bug in SyncIfDirty.
+- Fix fatal error due to TxnTooBig error.
+- Fix bug in dgraph live where some batches could be skipped on conflict error.
+- Fix a bug related to expand(_all_) queries.
+- Run cleanPredicate and proposeKeyValues sequentially.
+- Serialize connect requests in Zero.
+
+**Changed**
+
+- Retry snapshot retrieval and join cluster indefinitely.
+- Make client directory optional in dgraph live.
+- Do snapshot in Zero in a goroutine so that Run loop isn't blocked.
## [1.0.3] - 2018-02-08
+
[1.0.3]: https://github.com/hypermodeinc/dgraph/compare/v1.0.2...v1.0.3
-### Added
+**Added**
-* Support for specifying blank nodes as part of JSON mutation.
-* `dgraph version` command to check current version.
-* `curl` to Docker image.
-* `moveTablet` endpoint to Zero to allow initiating a predicate move.
+- Support for specifying blank nodes as part of JSON mutation.
+- `dgraph version` command to check current version.
+- `curl` to Docker image.
+- `moveTablet` endpoint to Zero to allow initiating a predicate move.
-### Fixed
+**Fixed**
-* Out of range error while doing `eq` query.
-* Reduce `maxBackOffDelay` to 10 sec so that leader election is faster after restart.
-* Fix bugs with predicate move where some data was not sent and schema not loaded properly on
+- Out of range error while doing `eq` query.
+- Reduce `maxBackOffDelay` to 10 sec so that leader election is faster after restart.
+- Fix bugs with predicate move where some data was not sent and schema not loaded properly on
replicas.
-* Fix the total number of RDF's processed when live loader ends.
-* Reindex data when schema is changed to list type to fix adding and deleting new data.
-* Correctly upate uidMatrix when facetOrder is supplied.
-* Inequality operator(`gt` and `lt`) result for non lossy tokenizers.
+- Fix the total number of RDFs processed when live loader ends.
+- Reindex data when schema is changed to list type to fix adding and deleting new data.
+- Correctly update uidMatrix when facetOrder is supplied.
+- Inequality operator (`gt` and `lt`) results for non-lossy tokenizers.
-### Changed
+**Changed**
-* `--zero_addr` flag changed to `--zero` for `dgraph bulk` command.
-* Default ports for Zero have been changed `7080` => `5080`(grpc) and `8080` => `6080`(http).
-* Update badger version and how purging is done to fix CPU spiking when Dgraph is idle.
-* Print predicate name as part of the warning about long term for exact index.
+- `--zero_addr` flag changed to `--zero` for `dgraph bulk` command.
+- Default ports for Zero have been changed `7080` => `5080`(grpc) and `8080` => `6080`(http).
+- Update badger version and how purging is done to fix CPU spiking when Dgraph is idle.
+- Print predicate name as part of the warning about long term for exact index.
## [1.0.2] - 2018-01-17
+
[1.0.2]: https://github.com/hypermodeinc/dgraph/compare/v1.0.1...v1.0.2
-### Fixed
-
-* Always return predicates of list type in an array.
-* Edges without facet values are also returned when performing sort on facet.
-* Don't derive schema while deleting edges.
-* Better error checking when accessing posting lists. Fixes bug where parts of
- queries are sometimes omitted when system is under heavy load.
-* Fix missing error check in mutation handling when using CommitNow (gave incorrect error).
-* Fix bug where eq didn't work correctly for the fulltext index.
-* Fix race because of which `replicas` flag was not respected.
-* Fix bug with key copy during predicate move.
-* Fix race in merging keys keys from btree and badger iterator.
-* Fix snapshot retrieval for new nodes by retrieving it before joining the cluster.
-* Write schema at timestamp 1 in bulk loader.
-* Fix unexpected meta fatal error.
-* Fix groupby result incase the child being grouped open has multiple parents.
-
-### Changed
-
-* Remove StartTs field from `api.Operation`.
-* Print error message in live loader if its not ErrAborted. Also, stop using membership state and
-instead use the address given by user.
-* Only send keys corresponding to data that was mutated.
+**Fixed**
+
+- Always return predicates of list type in an array.
+- Edges without facet values are also returned when performing sort on facet.
+- Don't derive schema while deleting edges.
+- Better error checking when accessing posting lists. Fixes a bug where parts of queries are
+  sometimes omitted when the system is under heavy load.
+- Fix missing error check in mutation handling when using CommitNow (gave incorrect error).
+- Fix bug where eq didn't work correctly for the fulltext index.
+- Fix a race because of which the `replicas` flag was not respected.
+- Fix bug with key copy during predicate move.
+- Fix race in merging keys from btree and badger iterator.
+- Fix snapshot retrieval for new nodes by retrieving it before joining the cluster.
+- Write schema at timestamp 1 in bulk loader.
+- Fix unexpected meta fatal error.
+- Fix groupby result in case the child being grouped on has multiple parents.
+
+**Changed**
+
+- Remove StartTs field from `api.Operation`.
+- Print error message in live loader if it's not ErrAborted. Also, stop using membership state and
+  instead use the address given by the user.
+- Only send keys corresponding to data that was mutated.
## [1.0.1] - 2017-12-20
+
[1.0.1]: https://github.com/hypermodeinc/dgraph/compare/v1.0.0...v1.0.1
-### Fixed
+**Fixed**
-* Wait for background goroutines to finish in posting package on shutdown.
-* Return error if we cant parse the uid given in json input for mutations.
-* Don't remove `_predicate_` schema from disk during drop all.
-* Fix panic in expand(_all_)
+- Wait for background goroutines to finish in posting package on shutdown.
+- Return error if we can't parse the uid given in JSON input for mutations.
+- Don't remove `_predicate_` schema from disk during drop all.
+- Fix panic in expand(_all_).
-### Changed
+**Changed**
-* Make sure at least one field is set while doing Alter.
+- Make sure at least one field is set while doing Alter.
## [1.0.0] - 2017-12-18
+
[1.0.0]: https://github.com/hypermodeinc/dgraph/compare/v0.9.3...v1.0.0
-### Added
+**Added**
-* Allow doing Mutate and Alter Operations using dgraph UI.
-* Provide option to user to ignore conflicts on index keys.
+- Allow doing Mutate and Alter Operations using dgraph UI.
+- Provide option to user to ignore conflicts on index keys.
-### Fixed
+**Fixed**
-* Language tag parsing in queries now accepts digits (in line with RDF parsing).
-* Ensure that GraphQL variables are declared before use.
-* Export now uses correct blank node syntax.
-* Membership stream doesn't get stuck if node steps down as leader.
-* Fix issue where sets were not being returned after doing a S P * deletion when part of same
+- Language tag parsing in queries now accepts digits (in line with RDF parsing).
+- Ensure that GraphQL variables are declared before use.
+- Export now uses correct blank node syntax.
+- Membership stream doesn't get stuck if node steps down as leader.
+- Fix issue where sets were not being returned after doing a S P \* deletion when part of same
transaction.
-* Empty string values are stored as it is and no strings have special meaning now.
-* Correctly update order of facetMatrix when orderdesc/orderasc is applied.
-* Allow live and bulk loaders to work with multiple zeros.
-* Fix sorting with for predicates with multiple language tags.
-* Fix alias edge cases in normalize directive.
-* Allow reading new index key mutated as part of same transaction.
-* Fix bug in value log GC in badger.
-* SIGINT now forces a shutdown after 5 seconds when there are pending RPCs.
-
-### Changed
-
-* `DropAttr` now also removes the schema for the attribute (previously it just removed the edges).
-* Tablet metadata is removed from zero after deletion of predicate.
-* LRU size is changed dynamically now based on `max_memory_mb`
-* Call RunValueLogGC for every GB increase in size of value logs. Upgrade vendored version of
- Badger.
-* Prohibit string to password schema change.
-* Make purging less aggressive.
-* Check if GraphQL Variable is defined before using.
+- Empty string values are stored as is and no strings have special meaning now.
+- Correctly update order of facetMatrix when orderdesc/orderasc is applied.
+- Allow live and bulk loaders to work with multiple zeros.
+- Fix sorting for predicates with multiple language tags.
+- Fix alias edge cases in normalize directive.
+- Allow reading new index key mutated as part of same transaction.
+- Fix bug in value log GC in badger.
+- SIGINT now forces a shutdown after 5 seconds when there are pending RPCs.
+
+**Changed**
+
+- `DropAttr` now also removes the schema for the attribute (previously it just removed the edges).
+- Tablet metadata is removed from zero after deletion of predicate.
+- LRU size is changed dynamically now based on `max_memory_mb`.
+- Call RunValueLogGC for every GB increase in size of value logs. Upgrade vendored version of
+ Badger.
+- Prohibit string to password schema change.
+- Make purging less aggressive.
+- Check if GraphQL Variable is defined before using.
## [0.9.3] - 2017-12-01
+
[0.9.3]: https://github.com/hypermodeinc/dgraph/compare/v0.9.2...v0.9.3
-### Added
+**Added**
-* Support for alias while asking for facets.
-* Support for general configuration via environment variables and configuration files.
-* `IgnoreIndexConflict` field in Txn which allows ignoring conflicts on index keys.
+- Support for alias while asking for facets.
+- Support for general configuration via environment variables and configuration files.
+- `IgnoreIndexConflict` field in Txn which allows ignoring conflicts on index keys.
-### Fixed
+**Fixed**
-* `expand(_all_)` now correctly gives all language variants of a string.
-* Indexes now correctly maintained when deleting via `S * *` and `S P *`.
-* `expand(_all_)` now follows reverse edges.
-* Don't return uid for nodes without any children when requested through debug flag.
-* GraphQL variables for HTTP endpoints. Variable map can be set as a JSON
- object using the `X-Dgraph-Vars` header.
-* Abort if CommitNow flag is set and the mutation fails.
-* Live loader treats subjects/predicates that look like UIDs as existing nodes
- rather than new nodes.
-* Fix bug in `@groupby` queries where predicate was converted to lower case in queries.
+- `expand(_all_)` now correctly gives all language variants of a string.
+- Indexes now correctly maintained when deleting via `S * *` and `S P *`.
+- `expand(_all_)` now follows reverse edges.
+- Don't return uid for nodes without any children when requested through debug flag.
+- GraphQL variables for HTTP endpoints. Variable map can be set as a JSON object using the
+  `X-Dgraph-Vars` header (see the sketch after this list).
+- Abort if CommitNow flag is set and the mutation fails.
+- Live loader treats subjects/predicates that look like UIDs as existing nodes rather than new
+ nodes.
+- Fix bug in `@groupby` queries where predicate was converted to lower case in queries.
- Fix race condition in IsPeer. (#3432)
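+
+As a rough illustration of the `X-Dgraph-Vars` header mentioned above (the host, port, and variable
+names are hypothetical, and the exact query endpoint may vary by version):
+
+```bash
+# Pass GraphQL variables for an HTTP query as a JSON object in the X-Dgraph-Vars header.
+curl -s localhost:8080/query \
+  -H 'X-Dgraph-Vars: {"$name": "Alice"}' \
+  -XPOST -d 'query test($name: string) { q(func: eq(name, $name)) { uid } }'
+```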
-### Changed
+**Changed**
-* When showing a predicate with list type, only values without a language tag are shown. To get the values of the predicate that are tagged with a language, query the predicate with that language explicitly.
-* Validate the address advertised by dgraph nodes.
-* Store/Restore peer map on snapshot.
-* Fix rdfs per second reporting in live loader.
-* Fix bug in lru eviction.
-* Proto definitions are split into intern and api.
+- When showing a predicate with list type, only values without a language tag are shown. To get the
+ values of the predicate that are tagged with a language, query the predicate with that language
+ explicitly.
+- Validate the address advertised by dgraph nodes.
+- Store/Restore peer map on snapshot.
+- Fix rdfs per second reporting in live loader.
+- Fix bug in lru eviction.
+- Proto definitions are split into intern and api.
## [0.9.2] - 2017-11-20
+
[0.9.2]: https://github.com/hypermodeinc/dgraph/compare/v0.9.1...v0.9.2
-### Added
+**Added**
-* Support for removing dead node from quorum.
-* Support for alias in groupby queries.
-* Add DeleteEdges helper function for Go client.
+- Support for removing dead node from quorum.
+- Support for alias in groupby queries.
+- Add DeleteEdges helper function for Go client.
-### Changed
+**Changed**
-* Dgraph tries to abort long running/abandoned transactions.
-* Fix TLS flag parsing for Dgraph server and live loader.
-* Reduce dependencies for Go client.
-* `depth` and `loop` arguments should be inside @recurse().
-* Base36 encode keys that are part of TxnContext and are sent to the client.
-* Fix race condition in expand(_all_) queries.
-* Fix (--ui) flag not being parsed properly.
+- Dgraph tries to abort long running/abandoned transactions.
+- Fix TLS flag parsing for Dgraph server and live loader.
+- Reduce dependencies for Go client.
+- `depth` and `loop` arguments should be inside @recurse().
+- Base36 encode keys that are part of TxnContext and are sent to the client.
+- Fix race condition in expand(_all_) queries.
+- Fix (--ui) flag not being parsed properly.
## [0.9.1] - 2017-11-15
+
[0.9.1]: https://github.com/hypermodeinc/dgraph/compare/v0.9.0...v0.9.1
-### Changed
+**Changed**
-* Transaction HTTP API has been modified slightly. `start_ts` is now a path parameter instead of a header.
-For `/commit` API, keys are passed in the body.
+- Transaction HTTP API has been modified slightly. `start_ts` is now a path parameter instead of a
+ header. For `/commit` API, keys are passed in the body.
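+
+A rough curl sketch of the reworked API described above (host, port, timestamp, and key values are
+hypothetical; consult the docs for the exact paths):
+
+```bash
+# start_ts is now part of the path rather than a header.
+curl -s -XPOST 'localhost:8080/commit/1234' \
+  -d '["key1", "key2"]' # keys touched by the transaction go in the body
+```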
## [0.9.0] - 2017-11-14
+
[0.9.0]: https://github.com/hypermodeinc/dgraph/compare/v0.8.3...v0.9.0
-**The latest release has a lot of breaking changes but also brings powerful features like Transactions, support for CJK and custom tokenization.**
+**The latest release has a lot of breaking changes but also brings powerful features like
+Transactions, support for CJK and custom tokenization.**
-### Added
+**Added**
-* Dgraph adds support for distributed ACID transactions (a blog post is in works). Transactions can be done via the Go, Java or HTTP clients (JS client coming). See [docs here](https://dgraph.io/docs/clients/).
-* Support for Indexing via [Custom tokenizers](https://dgraph.io/docs/query-language/#indexing-with-custom-tokenizers).
-* Support for CJK languages in the full-text index.
+- Dgraph adds support for distributed ACID transactions (a blog post is in the works). Transactions can
+ be done via the Go, Java or HTTP clients (JS client coming). See
+ [docs here](https://dgraph.io/docs/clients/).
+- Support for Indexing via
+ [Custom tokenizers](https://dgraph.io/docs/query-language/#indexing-with-custom-tokenizers).
+- Support for CJK languages in the full-text index.
-### Changed
+**Changed**
-#### Running Dgraph
+**Running Dgraph**
-* We have consolidated all the `server`, `zero`, `live/bulk-loader` binaries into a single `dgraph` binary for convenience. Instructions for running Dgraph can be found in the [docs](https://dgraph.io/docs/get-started/).
-* For Dgraph server, Raft ids can be assigned automatically. A user can optionally still specify an ID, via `--idx` flag.
-* `--peer` flag which was used to specify another Zero instance’s IP address is being replaced by `--zero` flag to indicate the address corresponds to Dgraph zero.
-* `port`, `grpc_port` and `worker_port` flags have been removed from Dgraph server and Zero. The ports are:
+- We have consolidated all the `server`, `zero`, `live/bulk-loader` binaries into a single `dgraph`
+ binary for convenience. Instructions for running Dgraph can be found in the
+ [docs](https://dgraph.io/docs/get-started/).
+- For Dgraph server, Raft ids can be assigned automatically. A user can optionally still specify an
+  ID via the `--idx` flag.
+- The `--peer` flag, which was used to specify another Zero instance’s IP address, is replaced by
+  the `--zero` flag to indicate that the address corresponds to Dgraph zero.
+- `port`, `grpc_port` and `worker_port` flags have been removed from Dgraph server and Zero. The
+ ports are:
-- Internal Grpc: 7080
-- HTTP: 8080
-- External Grpc: 9080 (Dgraph server only)
+  - Internal Grpc: 7080
+  - HTTP: 8080
+  - External Grpc: 9080 (Dgraph server only)
Users can set the `port_offset` flag to modify these fixed ports.
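+
+For example (a hedged sketch; the offset value is arbitrary and shifts every fixed port by the same
+amount):
+
+```bash
+# Run Dgraph server with the fixed ports shifted by +2 (7080 -> 7082, 8080 -> 8082, 9080 -> 9082).
+dgraph server --port_offset 2
+```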
-#### Queries
+**Queries**
-* Queries, mutations and schema updates are done through separate endpoints. **Queries can no longer have a mutation block.**
-* Queries can be done via `Query` Grpc endpoint (it was called `Run` before) or the `/query` HTTP handler.
-* `_uid_` is renamed to `uid`. So queries now need to request for `uid`. Example
-```
+- Queries, mutations and schema updates are done through separate endpoints. **Queries can no longer
+ have a mutation block.**
+- Queries can be done via `Query` Grpc endpoint (it was called `Run` before) or the `/query` HTTP
+ handler.
+- `_uid_` is renamed to `uid`. So queries now need to request `uid`. Example:
+
+```dql
{
bladerunner(func: eq(name@en, "Blade Runner")) {
uid
@@ -4349,18 +4801,26 @@ Users can set `port_offset` flag, to modify these fixed ports.
}
}
```
-* Facets response structure has been modified and is a lot flatter. Facet key is now `predicate|facet_name`.
-Examples for [Go client](https://godoc.org/github.com/dgraph-io/dgraph/client#example-Txn-Mutate-Facets) and [HTTP](https://dgraph.io/docs/query-language/#facets-edge-attributes).
-* Query latency is now returned as numeric (ns) instead of string.
-* [`Recurse`](https://dgraph.io/docs/query-language/#recurse-query) is now a directive. So queries with `recurse` keyword at root won't work anymore.
-* Syntax for [`count` at root](https://dgraph.io/docs/query-language/#count) has changed. You need to ask for `count(uid)`, instead of `count()`.
-
-#### Mutations
-* Mutations can only be done via `Mutate` Grpc endpoint or via [`/mutate` HTTP handler](https://dgraph.io/docs/clients/#transactions).
-* `Mutate` Grpc endpoint can be used to set/ delete JSON, or set/ delete a list of N-Quads and set/ delete raw RDF strings.
-* Mutation blocks don't require the mutation keyword anymore. Here is an example of the new syntax.
-```
+- Facets response structure has been modified and is a lot flatter. Facet key is now
+ `predicate|facet_name`. Examples for
+ [Go client](https://godoc.org/github.com/dgraph-io/dgraph/client#example-Txn-Mutate-Facets) and
+ [HTTP](https://dgraph.io/docs/query-language/#facets-edge-attributes).
+- Query latency is now returned as numeric (ns) instead of string.
+- [`Recurse`](https://dgraph.io/docs/query-language/#recurse-query) is now a directive. So queries
+ with `recurse` keyword at root won't work anymore.
+- Syntax for [`count` at root](https://dgraph.io/docs/query-language/#count) has changed. You need
+ to ask for `count(uid)`, instead of `count()`.
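+
+For instance, a count-at-root query under the new syntax might look like this (a sketch; the
+predicate name is hypothetical):
+
+```bash
+curl -s -XPOST localhost:8080/query \
+  -d '{ total(func: has(name)) { count(uid) } }'
+```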
+
+**Mutations**
+
+- Mutations can only be done via `Mutate` Grpc endpoint or via
+ [`/mutate` HTTP handler](https://dgraph.io/docs/clients/#transactions).
+- `Mutate` Grpc endpoint can be used to set/delete JSON, set/delete a list of N-Quads, or
+  set/delete raw RDF strings.
+- Mutation blocks don't require the mutation keyword anymore. Here is an example of the new syntax.
+
+```dql
{
set {
.
@@ -4368,21 +4828,33 @@ Examples for [Go client](https://godoc.org/github.com/dgraph-io/dgraph/client#ex
}
}
```
-* [`Upsert`](https://dgraph.io/docs/v0.8.3/query-language/#upsert) directive and [mutation variables](https://dgraph.io/docs/v0.8.3/query-language/#variables-in-mutations) go away. Both these functionalities can now easily be achieved via transactions.
-#### Schema
+- [`Upsert`](https://dgraph.io/docs/v0.8.3/query-language/#upsert) directive and
+ [mutation variables](https://dgraph.io/docs/v0.8.3/query-language/#variables-in-mutations) go
+ away. Both these functionalities can now easily be achieved via transactions.
+
+**Schema**
+
+- `<*> <*>` operations, that is, deleting a predicate, can't be done via mutations anymore. They
+  need to be done via the `Alter` Grpc endpoint or via the `/alter` HTTP handler.
+- Drop all is now done via `Alter`.
+- Schema updates are now done via `Alter` Grpc endpoint or via `/alter` HTTP handler.
-* `<*> <*>` operations, that is deleting a predicate can't be done via mutations anymore. They need to be done via `Alter` Grpc endpoint or via the `/alter` HTTP handler.
-* Drop all is now done via `Alter`.
-* Schema updates are now done via `Alter` Grpc endpoint or via `/alter` HTTP handler.
+**Go client**
-#### Go client
+- `Query` Grpc endpoint returns response in JSON under `Json` field instead of protocol buffer.
+ `client.Unmarshal` method also goes away from the Go client. Users can use `json.Unmarshal` for
+ unmarshalling the response.
+- Response for predicate of type `geo` can be unmarshalled into a struct. Example
+ [here](https://godoc.org/github.com/dgraph-io/dgraph/client#example-package--SetObject).
+- `Node` and `Edge` structs go away along with the `SetValue...` methods. We recommend using
+ [`SetJson`](https://godoc.org/github.com/dgraph-io/dgraph/client#example-package--SetObject) and
+ `DeleteJson` fields to do mutations.
+- Examples of how to use transactions using the client can be found at
+ https://dgraph.io/docs/clients/#go.
-* `Query` Grpc endpoint returns response in JSON under `Json` field instead of protocol buffer. `client.Unmarshal` method also goes away from the Go client. Users can use `json.Unmarshal` for unmarshalling the response.
-* Response for predicate of type `geo` can be unmarshalled into a struct. Example [here](https://godoc.org/github.com/dgraph-io/dgraph/client#example-package--SetObject).
-* `Node` and `Edge` structs go away along with the `SetValue...` methods. We recommend using [`SetJson`](https://godoc.org/github.com/dgraph-io/dgraph/client#example-package--SetObject) and `DeleteJson` fields to do mutations.
-* Examples of how to use transactions using the client can be found at https://dgraph.io/docs/clients/#go.
+**Removed**
-### Removed
-- Embedded dgraph goes away. We haven’t seen much usage of this feature. And it adds unnecessary maintenance overhead to the code.
+- Embedded dgraph goes away. We haven’t seen much usage of this feature, and it adds unnecessary
+  maintenance overhead to the code.
- Dgraph live no longer stores external ids. And hence the `xid` flag is gone.
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index efb35f2961b..749fda70d92 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,20 +1,19 @@
# Contributing to Dgraph
-* [Getting Started](#getting-started)
-* [Setting Up the Development Environment](#setting-up-the-development-environment)
- * [Prerequisites](#prerequisites)
- * [Setup Dgraph from source repo](#setup-dgraph-from-source-repo)
- * [Setup Badger from source repo](#setup-badger-from-source-repo)
- * [Protocol buffers](#protocol-buffers)
- * [Build Dgraph](#build-dgraph)
- * [Build Docker Image](#build-docker-image)
- * [Testing](#testing)
-* [Doing a release](#doing-a-release)
-* [Contributing](#contributing)
- * [Guidelines](#guidelines)
- * [Code style](#code-style)
- * [License Header](#license-header)
- * [Signed Commits](#signed-commits)
+- [Getting Started](#getting-started)
+- [Setting Up the Development Environment](#setting-up-the-development-environment)
+ - [Prerequisites](#prerequisites)
+ - [Setup Dgraph from source repo](#setup-dgraph-from-source-repo)
+ - [Setup Badger from source repo](#setup-badger-from-source-repo)
+ - [Protocol buffers](#protocol-buffers)
+ - [Build Dgraph](#build-dgraph)
+ - [Build Docker Image](#build-docker-image)
+ - [Testing](#testing)
+- [Contributing](#contributing)
+ - [Guidelines](#guidelines)
+ - [Code style](#code-style)
+ - [License Header](#license-header)
+ - [Signed Commits](#signed-commits)
## Getting Started
@@ -25,59 +24,78 @@
### Prerequisites
-- Install [Git](https://git-scm.com/) (may be already installed on your system, or available through your OS package manager)
-- Install [Make](https://www.gnu.org/software/make/) (may be already installed on your system, or available through your OS package manager)
-- Install [Docker](https://docs.docker.com/install/) and [Docker Compose](https://docs.docker.com/compose/install/).
+- Install [Git](https://git-scm.com/) (may be already installed on your system, or available through
+ your OS package manager)
+- Install [Make](https://www.gnu.org/software/make/) (may be already installed on your system, or
+ available through your OS package manager)
+- Install [Docker](https://docs.docker.com/install/) and
+ [Docker Compose](https://docs.docker.com/compose/install/).
- [Install Go 1.13 or above](https://golang.org/doc/install).
### Setup Dgraph from source repo
It's best to put the Dgraph repo somewhere in `$GOPATH`.
- $ mkdir -p "$(go env GOPATH)/src/github.com/dgraph-io"
- $ cd "$(go env GOPATH)/src/github.com/dgraph-io"
- $ git clone https://github.com/hypermodeinc/dgraph.git
- $ cd ./dgraph
- $ make install
+```bash
+mkdir -p "$(go env GOPATH)/src/github.com/dgraph-io"
+cd "$(go env GOPATH)/src/github.com/dgraph-io"
+git clone https://github.com/hypermodeinc/dgraph.git
+cd ./dgraph
+make install
+```
-This will put the source code in a Git repo under `$GOPATH/src/github.com/dgraph-io/dgraph` and compile the binaries to `$GOPATH/bin`.
+This will put the source code in a Git repo under `$GOPATH/src/github.com/dgraph-io/dgraph` and
+compile the binaries to `$GOPATH/bin`.
### Setup Badger from source repo
-Dgraph source repo vendors its own version of Badger. If you are just working on Dgraph, you do not necessarily need to check out Badger from its own repo. However, if you want to contribute to Badger as well, you will need to check it out from its own repo.
-
+Dgraph source repo vendors its own version of Badger. If you are just working on Dgraph, you do not
+necessarily need to check out Badger from its own repo. However, if you want to contribute to Badger
+as well, you will need to check it out from its own repo.
- $ go get -t -v github.com/dgraph-io/badger
+```bash
+go get -t -v github.com/dgraph-io/badger
+```
This will put the source code in a Git repo under `$GOPATH/src/github.com/dgraph-io/badger`.
### Protocol buffers
-We use [protocol buffers](https://developers.google.com/protocol-buffers/) to serialize data between our server and the Go client and also for inter-worker communication. If you make any changes to the `.proto` files, you would have to recompile them.
+We use [protocol buffers](https://developers.google.com/protocol-buffers/) to serialize data between
+our server and the Go client and also for inter-worker communication. If you make any changes to the
+`.proto` files, you will have to recompile them.
-Install the `protoc` compiler which is required for compiling proto files used for gRPC communication. Get `protoc` version 3.0.0 or above from [GitHub releases page](https://github.com/google/protobuf/releases/latest) (look for the binary releases at the bottom, or compile from sources [following the instructions](https://github.com/google/protobuf/tree/main/src)).
+Install the `protoc` compiler which is required for compiling proto files used for gRPC
+communication. Get `protoc` version 3.0.0 or above from
+[GitHub releases page](https://github.com/google/protobuf/releases/latest) (look for the binary
+releases at the bottom, or compile from sources
+[following the instructions](https://github.com/google/protobuf/tree/main/src)).
-We use [gogo protobuf](https://github.com/gogo/protobuf) in Dgraph. To get the protocol buffer compiler plugin from gogo run
+We use [gogo protobuf](https://github.com/gogo/protobuf) in Dgraph. To get the protocol buffer
+compiler plugin from gogo, run
+```bash
+go get -u github.com/gogo/protobuf/protoc-gen-gofast
+```
- $ go get -u github.com/gogo/protobuf/protoc-gen-gofast
-
-To compile the proto file using the `protoc` plugin and the gogo compiler plugin run the command `make regenerate` from within the directory containing the `.proto` files.
-
+To compile the proto file using the `protoc` plugin and the gogo compiler plugin, run the command
+`make regenerate` from within the directory containing the `.proto` files.
- $ cd protos
- $ make regenerate
+```bash
+cd protos
+make regenerate
+```
This should generate the required `.pb.go` file.
### Build Dgraph
-You can build Dgraph using `make dgraph` or `make install`
-which add the version information to the binary.
+You can build Dgraph using `make dgraph` or `make install`, either of which adds the version
+information to the binary.
- `make dgraph`: Creates a `dgraph` binary at `./dgraph/dgraph`
-- `make install`: Creates a `dgraph` binary at `$GOPATH/bin/dgraph`. You can add
- `$GOPATH/bin` to your `$PATH`.
+- `make install`: Creates a `dgraph` binary at `$GOPATH/bin/dgraph`. You can add `$GOPATH/bin` to
+ your `$PATH`.
```text
$ make install
@@ -104,12 +122,13 @@ Copyright 2015-2025 Hypermode Inc.
make image
```
-To build a test Docker image from source, use `make image`. This builds a Dgraph
-binary using `make dgraph` and creates a Docker image named `dgraph/dgraph`
-tagged as the current branch name. The image only contains the `dgraph` binary.
+To build a test Docker image from source, use `make image`. This builds a Dgraph binary using
+`make dgraph` and creates a Docker image named `dgraph/dgraph` tagged as the current branch name.
+The image only contains the `dgraph` binary.
Example:
-```
+
+```bash
$ git rev-parse --abbrev-ref HEAD # current branch
main
$ make image
@@ -132,66 +151,72 @@ Licensed variously under the Apache Public License 2.0 and Dgraph Community Lice
Copyright 2015-2025 Hypermode Inc.
```
-For release images, follow [Doing a release](#doing-a-release). It creates
-Docker images that contains `dgraph` and `badger` commands.
-
### Testing
#### Dgraph
-1. Change directory to t directory.
-2. If all packages need to be tested, run
- make test
- If only a specific package needs to be tested, run
- make test args="--pkg=desired_package_name"
-
- example 1: make test args="--pkg=tok"
- example 2: make test args="--pkg=tlstest/acl"
-
- The first example will run all the tests in the 'tok' directory (if there are any)
- The second one will run all the test in the acl subfolder of the tlstest directory.
- Note: running make test args="--pkg=tlstest" will return an error saying no packages
- found because all the tests in the tlstest package are in subdirectories of the package.
- So the subdirectories must be specified as shown in example 2.
+
+1. Change directory to the `t` directory.
+2. To test all packages, run `make test`. To test only a specific package, run
+   `make test args="--pkg=desired_package_name"`; see the consolidated commands below.
+
+   Example 1: `make test args="--pkg=tok"`. Example 2: `make test args="--pkg=tlstest/acl"`.
+
+   The first example runs all the tests in the `tok` directory (if there are any); the second runs
+   all the tests in the `acl` subfolder of the `tlstest` directory. Note: running
+   `make test args="--pkg=tlstest"` returns an error saying no packages were found, because all the
+   tests in the `tlstest` package are in subdirectories of the package, so the subdirectories must
+   be specified as shown in example 2.
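+
+Putting the commands above together (a sketch, run from the repository root; the package names are
+the examples from the text):
+
+```bash
+cd t                                # the test harness lives in the t directory
+make test                           # run tests for all packages
+make test args="--pkg=tok"          # run only the tok package's tests
+make test args="--pkg=tlstest/acl"  # run a package nested under tlstest
+```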
Tests should be written in Go and use the Dgraph cluster set up in `dgraph/docker-compose.yml`
whenever possible. If the functionality being tested requires a different cluster setup (e.g.
different commandline options), the `*_test.go` files should be put in a separate directory that
also contains a `docker-compose.yml` to set up the cluster as needed.
- **IMPORTANT:** All containers should be labeled with `cluster: test` so they may be correctly
- restarted and cleaned up by the test script.
+**IMPORTANT:** All containers should be labeled with `cluster: test` so they may be correctly
+restarted and cleaned up by the test script.
#### Badger
-Run `go test` in the root folder.
+Run `go test` in the root folder.
- $ go test ./...
- ok github.com/dgraph-io/badger 24.853s
- ok github.com/dgraph-io/badger/skl 0.027s
- ok github.com/dgraph-io/badger/table 0.478s
- ok github.com/dgraph-io/badger/y 0.004s
+```bash
+$ go test ./...
+ok github.com/dgraph-io/badger 24.853s
+ok github.com/dgraph-io/badger/skl 0.027s
+ok github.com/dgraph-io/badger/table 0.478s
+ok github.com/dgraph-io/badger/y 0.004s
+```
## Contributing
### Guidelines
-Over years of writing big scalable systems, we are convinced that striving for simplicity wherever possible is the only way to build robust systems. This simplicity could be in design, could be in coding, or could be achieved by rewriting an entire module, that you may have painstakingly finished yesterday.
+Over years of writing big scalable systems, we are convinced that striving for simplicity wherever
+possible is the only way to build robust systems. This simplicity could be in design, could be in
+coding, or could be achieved by rewriting an entire module that you may have painstakingly finished
+yesterday.
-- **Pull requests are welcome**, as long as you're willing to put in the effort to meet the guidelines. After you fork dgraph, create your pull request against our `main` branch
-- Contributors are required to execute our [Individual Contributor License Agreement](https://cla-assistant.io/dgraph-io/dgraph)
+- **Pull requests are welcome**, as long as you're willing to put in the effort to meet the
+ guidelines. After you fork dgraph, create your pull request against our `main` branch
+- Contributors are required to execute our
+ [Individual Contributor License Agreement](https://cla-assistant.io/dgraph-io/dgraph)
- Aim for clear, well written, maintainable code
- Simple and minimal approach to features, like Go
- New features must include passing unit tests, and integration tests when appropriate
-- Refactoring existing code now for better performance, better readability or better testability wins over adding a new feature
-- Don't add a function to a module that you don't use right now, or doesn't clearly enable a planned functionality
+- Refactoring existing code now for better performance, better readability or better testability
+ wins over adding a new feature
+- Don't add a function to a module that you don't use right now, or doesn't clearly enable a planned
+ functionality
- Don't ship a half done feature, which would require significant alterations to work fully
- Avoid [Technical debt](https://en.wikipedia.org/wiki/Technical_debt) like cancer
- Leave the code cleaner than when you began
### Code style
+
- We're following [Go Code Review](https://github.com/golang/go/wiki/CodeReviewComments)
- Use `go fmt` to format your code before committing
-- If you see *any code* which clearly violates the style guide, please fix it and send a pull request. No need to ask for permission
+- If you see _any code_ which clearly violates the style guide, please fix it and send a pull
+ request. No need to ask for permission
- Avoid unnecessary vertical spaces. Use your judgment or follow the code review comments
- Wrap your code and comments to 120 characters, unless doing so makes the code less legible
@@ -199,27 +224,32 @@ Over years of writing big scalable systems, we are convinced that striving for s
Every new source file must begin with a license header.
-Most of Dgraph, Badger, and the Dgraph clients (dgo, dgraph-js, pydgraph and dgraph4j) are licensed under the Apache 2.0 license:
-
- /*
- * Copyright 2016-2025 Hypermode Inc. and Contributors
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
+Most of Dgraph, Badger, and the Dgraph clients (dgo, dgraph-js, pydgraph and dgraph4j) are licensed
+under the Apache 2.0 license:
-### Signed Commits
+```go
+/*
+ * Copyright 2016-2025 Hypermode Inc. and Contributors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+```
-Signed commits help in verifying the authenticity of the contributor. We use signed commits in Dgraph, and we prefer it, though it's not compulsory to have signed commits. This is a recommended step for people who intend to contribute to Dgraph on a regular basis.
+### Signed Commits
-Follow instructions to generate and setup GPG keys for signing code commits on this [Github Help page](https://help.github.com/articles/signing-commits-with-gpg/).
+Signed commits help in verifying the authenticity of the contributor. We use and prefer signed
+commits in Dgraph, though they're not compulsory. This is a recommended step for people who intend
+to contribute to Dgraph on a regular basis.
+Follow the instructions to generate and set up GPG keys for signing code commits on this
+[Github Help page](https://help.github.com/articles/signing-commits-with-gpg/).
diff --git a/LICENSE.md b/LICENSE.md
index 05a216ab4a7..b45af6becc8 100644
--- a/LICENSE.md
+++ b/LICENSE.md
@@ -2,14 +2,14 @@
Copyright 2016-2025 Hypermode Inc.
-Source code in this repository is variously licensed under the Apache Public
-License 2.0 (APL) and the Dgraph Community License (DCL). A copy of each license
-can be found in the [licenses](./licenses/) directory.
+Source code in this repository is variously licensed under the Apache Public License 2.0 (APL) and
+the Dgraph Community License (DCL). A copy of each license can be found in the
+[licenses](./licenses/) directory.
-Unless required by applicable law or agreed to in writing, software distributed
-under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
-CONDITIONS OF ANY KIND, either express or implied. See the License for the
-specific language governing permissions and limitations under the License.
+Unless required by applicable law or agreed to in writing, software distributed under the License is
+distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied. See the License for the specific language governing permissions and limitations under the
+License.
## Trademark
diff --git a/README.md b/README.md
index feb8e5affbd..ff61991b1ba 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-
@@ -21,43 +21,54 @@
[![Go Report Card](https://goreportcard.com/badge/github.com/hypermodeinc/dgraph)](https://goreportcard.com/report/github.com/hypermodeinc/dgraph)
[![TODOs](https://badgen.net/https/api.tickgit.com/badgen/github.com/hypermodeinc/dgraph/main)](https://www.tickgit.com/browse?repo=github.com/hypermodeinc/dgraph&branch=main)
-Dgraph is a horizontally scalable and distributed GraphQL database with a graph backend. It provides ACID transactions, consistent replication, and linearizable reads. It's built from the ground up to perform
-a rich set of queries. Being a native GraphQL database, it tightly controls how the
-data is arranged on disk to optimize for query performance and throughput,
-reducing disk seeks and network calls in a cluster.
-
+Dgraph is a horizontally scalable and distributed GraphQL database with a graph backend. It provides
+ACID transactions, consistent replication, and linearizable reads. It's built from the ground up to
+perform a rich set of queries. Being a native GraphQL database, it tightly controls how the data is
+arranged on disk to optimize for query performance and throughput, reducing disk seeks and network
+calls in a cluster.
-Dgraph's goal is to provide Google production-level scale and throughput,
-with low enough latency to serve real-time user queries over terabytes of structured data.
-Dgraph supports [GraphQL query syntax](https://dgraph.io/docs/main/query-language/), and responds in [JSON](http://www.json.org/) and [Protocol Buffers](https://developers.google.com/protocol-buffers/) over [GRPC](http://www.grpc.io/) and HTTP. Dgraph is written using the Go Programming Language.
+Dgraph's goal is to provide Google production-level scale and throughput, with low enough latency to
+serve real-time user queries over terabytes of structured data. Dgraph supports
+[GraphQL query syntax](https://dgraph.io/docs/main/query-language/), and responds in
+[JSON](http://www.json.org/) and [Protocol Buffers](https://developers.google.com/protocol-buffers/)
+over [GRPC](http://www.grpc.io/) and HTTP. Dgraph is written using the Go Programming Language.
## Status
-Dgraph is at [version v24.0.5][rel] and is production-ready. Apart from the vast open source community, it is being used in
-production at multiple Fortune 500 companies, and by
-[Intuit Katlas](https://github.com/intuit/katlas) and [VMware Purser](https://github.com/vmware/purser). A hosted version of Dgraph is available at [https://cloud.dgraph.io](https://cloud.dgraph.io).
+Dgraph is at [version v24.0.5][rel] and is production-ready. Apart from the vast open source
+community, it is being used in production at multiple Fortune 500 companies, and by
+[Intuit Katlas](https://github.com/intuit/katlas) and
+[VMware Purser](https://github.com/vmware/purser). A hosted version of Dgraph is available at
+[https://cloud.dgraph.io](https://cloud.dgraph.io).
[rel]: https://github.com/hypermodeinc/dgraph/releases/tag/v24.0.5
## Supported Platforms
-Dgraph officially supports the Linux/amd64 architecture. Support for Linux/arm64 is in development. In order to take advantage of memory performance gains and other architecture-specific advancements in Linux, we dropped official support Mac and Windows in 2021, see [this blog post](https://discuss.dgraph.io/t/dropping-support-for-windows-and-mac/12913) for more information. You can still build and use Dgraph on other platforms (for live or bulk loading for instance), but support for platforms other than Linux/amd64 is not available.
+Dgraph officially supports the Linux/amd64 architecture. Support for Linux/arm64 is in development.
+In order to take advantage of memory performance gains and other architecture-specific advancements
+in Linux, we dropped official support for Mac and Windows in 2021; see
+[this blog post](https://discuss.dgraph.io/t/dropping-support-for-windows-and-mac/12913) for more
+information. You can still build and use Dgraph on other platforms (for live or bulk loading for
+instance), but support for platforms other than Linux/amd64 is not available.
Running Dgraph in a Docker environment is the recommended testing and deployment method.
## Install with Docker
-If you're using Docker, you can use the [official Dgraph image](https://hub.docker.com/r/dgraph/dgraph/).
+If you're using Docker, you can use the
+[official Dgraph image](https://hub.docker.com/r/dgraph/dgraph/).
```bash
docker pull dgraph/dgraph:latest
```
-For more information on a variety Docker deployment methods including Docker Compose and Kubernetes, see the [docs](https://dgraph.io/docs/installation/single-host-setup/#docker).
+For more information on a variety of Docker deployment methods, including Docker Compose and
+Kubernetes, see the [docs](https://dgraph.io/docs/installation/single-host-setup/#docker).
## Run a Quick Standalone Cluster
-```
+```bash
docker run -it -p 8080:8080 -p 9080:9080 -v ~/dgraph:/dgraph dgraph/standalone:latest
```
@@ -65,7 +76,7 @@ docker run -it -p 8080:8080 -p 9080:9080 -v ~/dgraph:/dgraph dgraph/standalone:l
If you want to install from source, install Go 1.19+ or later and the following dependencies:
-#### Ubuntu
+### Ubuntu
```bash
sudo apt-get update
@@ -74,7 +85,9 @@ sudo apt-get install build-essential
### Build and Install
-Then clone the Dgraph repository and use `make install` to install the Dgraph binary in the directory named by the GOBIN environment variable, which defaults to $GOPATH/bin or $HOME/go/bin if the GOPATH environment variable is not set.
+Then clone the Dgraph repository and use `make install` to install the Dgraph binary in the
+directory named by the GOBIN environment variable, which defaults to $GOPATH/bin or $HOME/go/bin if
+the GOPATH environment variable is not set.
```bash
git clone https://github.com/hypermodeinc/dgraph.git
@@ -83,67 +96,76 @@ make install
```
## Get Started
+
**To get started with Dgraph, follow:**
- Installation to queries in 3 steps via [dgraph.io/docs/](https://dgraph.io/docs/get-started/).
- A longer interactive tutorial via [dgraph.io/tour/](https://dgraph.io/tour/).
-- Tutorial and
-presentation videos on [YouTube channel](https://www.youtube.com/channel/UCghE41LR8nkKFlR3IFTRO4w/featured).
+- Tutorial and presentation videos on
+ [YouTube channel](https://www.youtube.com/channel/UCghE41LR8nkKFlR3IFTRO4w/featured).
## Is Dgraph the right choice for me?
- Do you have more than 10 SQL tables connected via foreign keys?
- Do you have sparse data, which doesn't elegantly fit into SQL tables?
-- Do you want a simple and flexible schema, which is readable and maintainable
- over time?
+- Do you want a simple and flexible schema, which is readable and maintainable over time?
- Do you care about speed and performance at scale?
-If the answers to the above are YES, then Dgraph would be a great fit for your
-application. Dgraph provides NoSQL like scalability while providing SQL like
-transactions and the ability to select, filter, and aggregate data points. It
-combines that with distributed joins, traversals, and graph operations, which
-makes it easy to build applications with it.
+If the answers to the above are YES, then Dgraph would be a great fit for your application. Dgraph
+provides NoSQL-like scalability while providing SQL-like transactions and the ability to select,
+filter, and aggregate data points. It combines that with distributed joins, traversals, and graph
+operations, which makes it easy to build applications with it.
## Dgraph compared to other graph DBs
-| Features | Dgraph | Neo4j | Janus Graph |
-| -------- | ------ | ----- | ----------- |
-| Architecture | Sharded and Distributed | Single server (+ replicas in enterprise) | Layer on top of other distributed DBs |
-| Replication | Consistent | None in community edition (only available in enterprise) | Via underlying DB |
-| Data movement for shard rebalancing | Automatic | Not applicable (all data lies on each server) | Via underlying DB |
-| Language | GraphQL inspired | Cypher, Gremlin | Gremlin |
-| Protocols | Grpc / HTTP + JSON / RDF | Bolt + Cypher | Websocket / HTTP |
-| Transactions | Distributed ACID transactions | Single server ACID transactions | Not typically ACID
-| Full-Text Search | Native support | Native support | Via External Indexing System |
-| Regular Expressions | Native support | Native support | Via External Indexing System |
-| Geo Search | Native support | External support only | Via External Indexing System |
-| License | Apache 2.0 | GPL v3 | Apache 2.0 |
+| Features | Dgraph | Neo4j | Janus Graph |
+| ----------------------------------- | ----------------------------- | -------------------------------------------------------- | ------------------------------------- |
+| Architecture | Sharded and Distributed | Single server (+ replicas in enterprise) | Layer on top of other distributed DBs |
+| Replication | Consistent | None in community edition (only available in enterprise) | Via underlying DB |
+| Data movement for shard rebalancing | Automatic | Not applicable (all data lies on each server) | Via underlying DB |
+| Language | GraphQL inspired | Cypher, Gremlin | Gremlin |
+| Protocols | Grpc / HTTP + JSON / RDF | Bolt + Cypher | Websocket / HTTP |
+| Transactions | Distributed ACID transactions | Single server ACID transactions | Not typically ACID |
+| Full-Text Search | Native support | Native support | Via External Indexing System |
+| Regular Expressions | Native support | Native support | Via External Indexing System |
+| Geo Search | Native support | External support only | Via External Indexing System |
+| License | Apache 2.0 | GPL v3 | Apache 2.0 |
## Users
+
- **Dgraph official documentation is present at [dgraph.io/docs/](https://dgraph.io/docs/).**
-- For feature requests or questions, visit
- [https://discuss.dgraph.io](https://discuss.dgraph.io).
-- Check out [the demo at dgraph.io](http://dgraph.io) and [the visualization at
- play.dgraph.io](http://play.dgraph.io/).
-- Please see [releases tab](https://github.com/hypermodeinc/dgraph/releases) to
- find the latest release and corresponding release notes.
-- [See the Roadmap](https://discuss.dgraph.io/t/product-roadmap-2020/8479) for a list of
- working and planned features.
-- Read about the latest updates from the Dgraph team [on our
- blog](https://open.dgraph.io/).
-- Watch tech talks on our [YouTube
- channel](https://www.youtube.com/channel/UCghE41LR8nkKFlR3IFTRO4w/featured).
+- For feature requests or questions, visit [https://discuss.dgraph.io](https://discuss.dgraph.io).
+- Check out [the demo at dgraph.io](http://dgraph.io) and
+ [the visualization at play.dgraph.io](http://play.dgraph.io/).
+- Please see [releases tab](https://github.com/hypermodeinc/dgraph/releases) to find the latest
+ release and corresponding release notes.
+- [See the Roadmap](https://discuss.dgraph.io/t/product-roadmap-2020/8479) for a list of working and
+ planned features.
+- Read about the latest updates from the Dgraph team [on our blog](https://open.dgraph.io/).
+- Watch tech talks on our
+ [YouTube channel](https://www.youtube.com/channel/UCghE41LR8nkKFlR3IFTRO4w/featured).
## Developers
-- See a list of issues [that we need help with](https://github.com/hypermodeinc/dgraph/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22).
-- Please see [Contributing to Dgraph](https://github.com/hypermodeinc/dgraph/blob/main/CONTRIBUTING.md) for guidelines on contributions.
+
+- See a list of issues
+ [that we need help with](https://github.com/hypermodeinc/dgraph/issues?q=is%3Aissue+is%3Aopen+label%3A%22help+wanted%22).
+- Please see
+ [Contributing to Dgraph](https://github.com/hypermodeinc/dgraph/blob/main/CONTRIBUTING.md) for
+ guidelines on contributions.
## Client Libraries
-The Dgraph team maintains several [officially supported client libraries](https://dgraph.io/docs/clients/). There are also libraries contributed by the community [unofficial client libraries](https://dgraph.io/docs/clients#unofficial-dgraph-clients).
+
+The Dgraph team maintains several
+[officially supported client libraries](https://dgraph.io/docs/clients/). There are also
+[unofficial client libraries](https://dgraph.io/docs/clients#unofficial-dgraph-clients) contributed
+by the community.
##
## Contact
-- Please use [discuss.dgraph.io](https://discuss.dgraph.io) for documentation, questions, feature requests and discussions.
-- Please use [discuss.dgraph.io](https://discuss.dgraph.io/c/issues/dgraph/38) for filing bugs or feature requests.
+
+- Please use [discuss.dgraph.io](https://discuss.dgraph.io) for documentation, questions, feature
+ requests and discussions.
+- Please use [discuss.dgraph.io](https://discuss.dgraph.io/c/issues/dgraph/38) for filing bugs or
+ feature requests.
- Follow us on Twitter [@dgraphlabs](https://twitter.com/dgraphlabs).
diff --git a/compose/run.sh b/compose/run.sh
index 2d116050274..de25e1cff57 100755
--- a/compose/run.sh
+++ b/compose/run.sh
@@ -1,30 +1,30 @@
#!/bin/bash
main() {
- setup $@
+ setup $@
- set -e
- build_compose_tool $@
- build_dgraph_docker_image
- launch_environment
+ set -e
+ build_compose_tool $@
+ build_dgraph_docker_image
+ launch_environment
}
setup() {
- readonly ME=${0##*/}
- DGRAPH_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
- readonly COMPOSE_FILE="./docker-compose.yml"
+ readonly ME=${0##*/}
+ DGRAPH_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
+ readonly COMPOSE_FILE="./docker-compose.yml"
- if [[ $1 == "-h" || $1 == "--help" ]]; then usage; fi
+ if [[ $1 == "-h" || $1 == "--help" ]]; then usage; fi
- check_environment
+ check_environment
}
Info() {
- echo -e "INFO: $*"
+ echo -e "INFO: $*"
}
usage() {
- cat < /dev/null || \
- { echo "ERROR: 'make' command not not found" 1>&2; exit 1; }
- command -v go > /dev/null || \
- { echo "ERROR: 'go' command not not found" 1>&2; exit 1; }
- command -v docker-compose > /dev/null || \
- { echo "ERROR: 'docker-compose' command not not found" 1>&2; exit 1; }
- ## GOPATH required for locally built docker images
- [[ -z "${GOPATH}" ]] && \
- { echo "ERROR: The env var of 'GOPATH' was not defined. Exiting" 1>&2; exit 1; }
+ command -v make >/dev/null ||
+ {
+			echo "ERROR: 'make' command not found" 1>&2
+ exit 1
+ }
+ command -v go >/dev/null ||
+ {
+			echo "ERROR: 'go' command not found" 1>&2
+ exit 1
+ }
+ command -v docker-compose >/dev/null ||
+ {
+			echo "ERROR: 'docker-compose' command not found" 1>&2
+ exit 1
+ }
+ ## GOPATH required for locally built docker images
+ [[ -z ${GOPATH} ]] &&
+ {
+ echo "ERROR: The env var of 'GOPATH' was not defined. Exiting" 1>&2
+ exit 1
+ }
}
build_compose_tool() {
- ## Always make compose if it doesn't exist
- make compose
-
- ## Create compose file if it does not exist or compose parameters passed
- if [[ $# -gt 0 ]] || ! [[ -f $COMPOSE_FILE ]]; then
- Info "creating compose file ..."
- ./compose "$@"
- fi
-
- if [[ ! -e $COMPOSE_FILE ]]; then
- echo >&2 "$ME: no '$COMPOSE_FILE' found"
- exit 1
- fi
+ ## Always make compose if it doesn't exist
+ make compose
+
+ ## Create compose file if it does not exist or compose parameters passed
+ if [[ $# -gt 0 ]] || ! [[ -f ${COMPOSE_FILE} ]]; then
+ Info "creating compose file ..."
+ ./compose "$@"
+ fi
+
+ if [[ ! -e ${COMPOSE_FILE} ]]; then
+ echo >&2 "${ME}: no '${COMPOSE_FILE}' found"
+ exit 1
+ fi
}
build_dgraph_docker_image() {
- ## linux binary required for docker image
- export GOOS=linux
- Info "rebuilding dgraph ..."
- ( cd $DGRAPH_ROOT/dgraph && make install )
+ ## linux binary required for docker image
+ export GOOS=linux
+ Info "rebuilding dgraph ..."
+ (cd "${DGRAPH_ROOT}"/dgraph && make install)
}
launch_environment() {
- # Detect if $GOPATH/bin/$GOOS_$GOARCH path
- if [[ -f $GOPATH/bin/linux_amd64/dgraph ]]; then
- Info "Found '$GOPATH/bin/linux_amd64/dgraph'. Updating $COMPOSE_FILE."
- sed -i 's/\$GOPATH\/bin$/\$GOPATH\/bin\/linux_amd64/' $COMPOSE_FILE
- # if no dgraph binary found, abort
- elif ! [[ -f $GOPATH/bin/dgraph ]]; then
- echo "ERROR: '$GOPATH/bin/dgraph' not found. Exiting" 1>&2
- exit 1
- else
- Info "Found '$GOPATH/bin/dgraph'"
- fi
-
- # No need to down existing containers, if any.
- # The up command handles that automatically
-
- Info "Bringing up containers"
- docker-compose -p dgraph down
- docker-compose --compatibility -p dgraph up --force-recreate --remove-orphans
+ # Detect if $GOPATH/bin/$GOOS_$GOARCH path
+ if [[ -f ${GOPATH}/bin/linux_amd64/dgraph ]]; then
+ Info "Found '${GOPATH}/bin/linux_amd64/dgraph'. Updating ${COMPOSE_FILE}."
+ sed -i 's/\$GOPATH\/bin$/\$GOPATH\/bin\/linux_amd64/' "${COMPOSE_FILE}"
+ # if no dgraph binary found, abort
+ elif ! [[ -f ${GOPATH}/bin/dgraph ]]; then
+ echo "ERROR: '${GOPATH}/bin/dgraph' not found. Exiting" 1>&2
+ exit 1
+ else
+ Info "Found '${GOPATH}/bin/dgraph'"
+ fi
+
+ # No need to down existing containers, if any.
+ # The up command handles that automatically
+
+ Info "Bringing up containers"
+ docker-compose -p dgraph down
+ docker-compose --compatibility -p dgraph up --force-recreate --remove-orphans
}
main $@
diff --git a/contrib/README.md b/contrib/README.md
index a75357901ae..4c7967e6d5f 100644
--- a/contrib/README.md
+++ b/contrib/README.md
@@ -1,4 +1,3 @@
-The `contrib` directory contains scripts, images, and other helpful things
-which are not part of the core dgraph distribution. Please note that they
-could be out of date, since they do not receive the same attention as the
-rest of the repository.
+The `contrib` directory contains scripts, images, and other helpful things which are not part of the
+core dgraph distribution. Please note that they could be out of date, since they do not receive the
+same attention as the rest of the repository.
diff --git a/contrib/config/backups/README.md b/contrib/config/backups/README.md
index cd23f625fc2..14917d629d2 100644
--- a/contrib/config/backups/README.md
+++ b/contrib/config/backups/README.md
@@ -1,12 +1,17 @@
# Binary Backups
-These will be a collection of scripts to assist backup process for Binary Backups (Enterprise feature).
+This is a collection of scripts to assist with the backup process for Binary Backups (an Enterprise
+feature).
-* Client
- * [Client](client/README.md) - a client `dgraph-backup.sh` that can used to automate backups.
-* Cloud Object Storage
- * [Azure Blob Storage](azure/README.md) - use `minio` destination scheme with MinIO Azure Gateway to backup to Azure Blob Storage.
- * [GCS (Google Cloud Storage)](gcp/README.md) - use `minio` destination scheme with MinIO GCS Gateway to a GCS bucket.
- * [AWS S3 (Simple Storage Service)](s3/README.md) - use `s3` destination scheme to backup to an S3 bucket.
-* File Storage
- * [NFS (Network File System)](nfs/README.md) - use file destination to backup to remote file storage
+- Client
+  - [Client](client/README.md) - a client `dgraph-backup.sh` that can be used to automate backups.
+- Cloud Object Storage
+  - [Azure Blob Storage](azure/README.md) - use the `minio` destination scheme with MinIO Azure
+    Gateway to back up to Azure Blob Storage.
+  - [GCS (Google Cloud Storage)](gcp/README.md) - use the `minio` destination scheme with MinIO GCS
+    Gateway to back up to a GCS bucket.
+  - [AWS S3 (Simple Storage Service)](s3/README.md) - use the `s3` destination scheme to back up to
+    an S3 bucket.
+- File Storage
+  - [NFS (Network File System)](nfs/README.md) - use the file destination to back up to remote file
+    storage
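As a quick reference for the destination schemes listed above, backup locations take roughly these forms (hosts, buckets, and paths are hypothetical):

```bash
## file destination: a path reachable at the same mount point on every alpha
BACKUP_PATH=/dgraph/backups

## s3 destination scheme for an AWS S3 bucket
BACKUP_PATH=s3://s3.us-east-1.amazonaws.com/my-backup-bucket

## minio destination scheme for Azure Blob or GCS behind a MinIO gateway
BACKUP_PATH=minio://gateway:9000/my-container
```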
diff --git a/contrib/config/backups/azure/README.md b/contrib/config/backups/azure/README.md
index 51d0821079e..eca58310113 100644
--- a/contrib/config/backups/azure/README.md
+++ b/contrib/config/backups/azure/README.md
@@ -1,13 +1,14 @@
# Binary Backups to Azure Blob
-Binary backups can use Azure Blob Storage for object storage using [MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html).
+Binary backups can use Azure Blob Storage for object storage using
+[MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html).
## Provisioning Azure Blob
Some example scripts have been provided to illustrate how to create Azure Blob.
-* [azure_cli](azure_cli/README.md) - shell scripts to provision Azure Blob
-* [terraform](terraform/README.md) - terraform scripts to provision Azure Blob
+- [azure_cli](azure_cli/README.md) - shell scripts to provision Azure Blob
+- [terraform](terraform/README.md) - terraform scripts to provision Azure Blob
## Setting up the Environment
@@ -15,18 +16,23 @@ Some example scripts have been provided to illustrate how to create Azure Blob.
You will need these tools:
-* Docker Environment
- * [Docker](https://docs.docker.com/get-docker/) - container engine platform
- * [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running dokcer containers
-* Kubernetes Environment
- * [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting with Kubenetes platform
- * [helm](https://helm.sh/docs/intro/install/) - deploys Kuberetes packages called helm charts
- * [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that will be applied to Kubernetes cluster
- * [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm chart deployments
+- Docker Environment
+  - [Docker](https://docs.docker.com/get-docker/) - container engine platform
+  - [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running docker
+    containers
+- Kubernetes Environment
+  - [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting
+    with the Kubernetes platform
+  - [helm](https://helm.sh/docs/intro/install/) - deploys Kubernetes packages called helm charts
+  - [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that
+    will be applied to the Kubernetes cluster
+  - [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm
+    chart deployments
### Using Docker Compose
-A `docker-compose.yml` configuration is provided that will run the [MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) and Dgraph cluster.
+A `docker-compose.yml` configuration is provided that will run the
+[MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) and Dgraph cluster.
#### Configuring Docker Compose
@@ -37,9 +43,11 @@ MINIO_ACCESS_KEY=
MINIO_SECRET_KEY=
```
-These values are used to both access the [MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) using the same credentials used to access Azure Storage Account. As a convenience, both example [Terraform](terraform/README.md) and [azure_cli](azure_cli/README.md) scripts will auto-generate the `minio.env`.
-
-#### Using Docker Compose
+These values are used to access the
+[MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) with the same
+credentials used to access the Azure Storage Account. As a convenience, both the example
+[Terraform](terraform/README.md) and [azure_cli](azure_cli/README.md) scripts will auto-generate
+the `minio.env` file.
```bash
## Run Minio Azure Gateway and Dgraph Cluster
@@ -48,8 +56,8 @@ docker-compose up --detach
#### Access Minio and Ratel UI
-* MinIO UI: http://localhost:9000
-* Ratel UI: http://localhost:8000
+- MinIO UI: http://localhost:9000
+- Ratel UI: http://localhost:8000
#### Clean Up Docker Environment
@@ -60,11 +68,15 @@ docker-compose rm
### Using Kubernetes with Helm Charts
-For Kubernetes, you can deploy [MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html), Dgraph cluster, and a Kubernetes Cronjob that triggers backups using [helm](https://helm.sh/docs/intro/install/).
+For Kubernetes, you can deploy the
+[MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html), a Dgraph cluster, and
+a Kubernetes CronJob that triggers backups using [helm](https://helm.sh/docs/intro/install/).
#### Configuring Secrets Values
-These values are auto-generated if you used either [terraform](terraform/README.md) and [azure_cli](azure_cli/README.md) scripts. If you already an existing Azure Blob you would like to use, you will need to create `charts/dgraph_secrets.yaml` and `charts/minio_secrets.yaml` files.
+These values are auto-generated if you used either the [terraform](terraform/README.md) or
+[azure_cli](azure_cli/README.md) scripts. If you already have an existing Azure Blob you would like
+to use, you will need to create the `charts/dgraph_secrets.yaml` and `charts/minio_secrets.yaml`
+files.
For the `charts/dgraph_secrets.yaml`, you would create a file like this:
@@ -85,12 +97,16 @@ secretKey:
#### Deploy Using Helmfile
-If you have [helmfile](https://github.com/roboll/helmfile#installation) and [helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy [MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) and Dgraph cluster with the following:
+If you have [helmfile](https://github.com/roboll/helmfile#installation) and
+[helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy
+[MinIO Azure Gateway](https://docs.min.io/docs/minio-gateway-for-azure.html) and Dgraph cluster with
+the following:
```bash
export BACKUP_BUCKET_NAME= # corresponds to Azure Container Name
helmfile apply
```
+
#### Deploy Using Helm
```bash
@@ -114,7 +130,7 @@ helm install "my-release" \
#### Access Resources
-For MinIO UI, you can use this to access it at http://localhost:9000:
+For the MinIO UI, you can use the following to access it at http://localhost:9000:
```bash
export MINIO_POD_NAME=$(
@@ -170,8 +186,11 @@ helm delete azuregw --namespace minio
## Triggering a Backup
-This is run from the host with the alpha node accessible on localhost at port `8080`. Can be done by running the docker-compose environment, or running `kubectl port-forward pod/dgraph-dgraph-alpha-0 8080:8080`.
-In the docker-compose environment, the host for `MINIO_HOST` is `gateway`. In the Kubernetes environment, using the scripts above, the `MINIO_HOST` is `azuregw-minio.minio.svc`.
+This is run from the host with the alpha node accessible on localhost at port `8080`. This can be
+done by running the docker-compose environment, or by running
+`kubectl port-forward pod/dgraph-dgraph-alpha-0 8080:8080`. In the docker-compose environment, the
+host for `MINIO_HOST` is `gateway`. In the Kubernetes environment, using the scripts above, the
+`MINIO_HOST` is `azuregw-minio.minio.svc`.
### Using GraphQL
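As a sketch of what such a GraphQL-triggered backup looks like against the docker-compose setup above (the `dgraph-backups` container name is a hypothetical example):

```bash
## trigger a binary backup through the alpha's /admin GraphQL endpoint;
## in the docker-compose environment the MinIO host is "gateway"
curl --silent \
  --header "Content-Type: application/json" \
  --request POST \
  http://localhost:8080/admin \
  --data '{"query": "mutation { backup(input: {destination: \"minio://gateway:9000/dgraph-backups?secure=false\"}) { response { message code } } }"}'
```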
diff --git a/contrib/config/backups/azure/azure_cli/README.md b/contrib/config/backups/azure/azure_cli/README.md
index 0f6b6d0e5ed..0ce1d2fe974 100644
--- a/contrib/config/backups/azure/azure_cli/README.md
+++ b/contrib/config/backups/azure/azure_cli/README.md
@@ -2,26 +2,32 @@
## About
-This script will create the required resources needed to create Azure Blob Storage using (`simple-azure-blob`)[https://github.com/darkn3rd/simple-azure-blob] module.
+This script will create the required resources needed for Azure Blob Storage using the
+[`simple-azure-blob`](https://github.com/darkn3rd/simple-azure-blob) module.
## Prerequisites
You need the following installed to use this automation:
-* [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest) with an active Azure subscription configured.
-* [jq](https://stedolan.github.io/jq/) - command-line JSON process that makes it easy to parse JSON output from Azure CLI.
-* [bash](https://www.gnu.org/software/bash/) - shell environment
+- [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest)
+ with an active Azure subscription configured.
+- [jq](https://stedolan.github.io/jq/) - command-line JSON processor that makes it easy to parse
+  JSON output from the Azure CLI.
+- [bash](https://www.gnu.org/software/bash/) - shell environment
## Configuration
You will need to define these environment variables:
-* Required Variables:
- * `MY_RESOURCE_GROUP` (required) - Azure resource group that contains the resources. If the resource group does not exist, this script will create it.
- * `MY_STORAGE_ACCT` (required) - Azure storage account (unique global name) to contain storage. If the storage account does not exist, this script will create it.
- * `MY_CONTAINER_NAME` (required) - Azure container to host the blob storage.
-* Optional Variables:
- * `MY_LOCATION` (default = `eastus2`)- the location where to create the resource group if it doesn't exist
+- Required Variables:
+ - `MY_RESOURCE_GROUP` (required) - Azure resource group that contains the resources. If the
+ resource group does not exist, this script will create it.
+ - `MY_STORAGE_ACCT` (required) - Azure storage account (unique global name) to contain storage. If
+ the storage account does not exist, this script will create it.
+ - `MY_CONTAINER_NAME` (required) - Azure container to host the blob storage.
+- Optional Variables:
+  - `MY_LOCATION` (default = `eastus2`) - the location in which to create the resource group if it
+    doesn't exist
## Steps
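A minimal sketch of running the script under the configuration above; all resource names here are hypothetical (storage account names must be globally unique):

```bash
## hypothetical resource names
export MY_RESOURCE_GROUP="my-dgraph-rg"
export MY_STORAGE_ACCT="mydgraphbackups"
export MY_CONTAINER_NAME="dgraph-backups"

## idempotently creates the resource group, storage account, and container,
## then writes minio.env and the Helm chart secret files
./create_blob.sh
```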
diff --git a/contrib/config/backups/azure/azure_cli/create_blob.sh b/contrib/config/backups/azure/azure_cli/create_blob.sh
index 0de16227fb4..4f7909d9cc0 100755
--- a/contrib/config/backups/azure/azure_cli/create_blob.sh
+++ b/contrib/config/backups/azure/azure_cli/create_blob.sh
@@ -4,127 +4,132 @@
# main
##################
main() {
- check_environment $@
- create_resource_group
- create_storage_acct
- authorize_ad_user
- create_storage_container
- create_config_files
+ check_environment "$@"
+ create_resource_group
+ create_storage_acct
+ authorize_ad_user
+ create_storage_container
+ create_config_files
}
#####
# check_environment
##################
check_environment() {
- ## Check for Azure CLI command
- command -v az > /dev/null || \
- { echo "[ERROR]: 'az' command not not found" 1>&2; exit 1; }
- command -v jq > /dev/null || \
- { echo "[ERROR]: 'jq' command not not found" 1>&2; exit 1; }
+ ## Check for Azure CLI command
+ command -v az >/dev/null ||
+ {
+ echo "[ERROR]: 'az' command not not found" 1>&2
+ exit 1
+ }
+ command -v jq >/dev/null ||
+ {
+ echo "[ERROR]: 'jq' command not not found" 1>&2
+ exit 1
+ }
- ## Defaults
- MY_CONTAINER_NAME=${MY_CONTAINER_NAME:-$1}
- MY_STORAGE_ACCT=${MY_STORAGE_ACCT:-""}
- MY_RESOURCE_GROUP=${MY_RESOURCE_GROUP:=""}
- MY_LOCATION=${MY_LOCATION:-"eastus2"}
- MY_ACCOUNT_ID="$(az account show | jq '.id' -r)"
- CREATE_MINIO_ENV=${CREATE_MINIO_ENV:-"true"}
- CREATE_MINIO_CHART_SECRETS=${CREATE_MINIO_CHART_SECRETS:-"true"}
- CREATE_DGRAPH_CHART_SECRETS=${CREATE_DGRAPH_CHART_SECRETS:-"true"}
+ ## Defaults
+ MY_CONTAINER_NAME=${MY_CONTAINER_NAME:-$1}
+ MY_STORAGE_ACCT=${MY_STORAGE_ACCT:-""}
+ MY_RESOURCE_GROUP=${MY_RESOURCE_GROUP:=""}
+ MY_LOCATION=${MY_LOCATION:-"eastus2"}
+ MY_ACCOUNT_ID="$(az account show | jq '.id' -r)"
+ CREATE_MINIO_ENV=${CREATE_MINIO_ENV:-"true"}
+ CREATE_MINIO_CHART_SECRETS=${CREATE_MINIO_CHART_SECRETS:-"true"}
+ CREATE_DGRAPH_CHART_SECRETS=${CREATE_DGRAPH_CHART_SECRETS:-"true"}
- if [[ -z "${MY_CONTAINER_NAME}" ]]; then
- if (( $# < 1 )); then
- printf "[ERROR]: Need at least one parameter or define 'MY_CONTAINER_NAME'\n\n" 1>&2
- printf "Usage:\n\t$0 \n\tMY_CONTAINER_NAME= $0\n" 1>&2
- exit 1
- fi
- fi
+ if [[ -z ${MY_CONTAINER_NAME} ]]; then
+ if (($# < 1)); then
+ printf "[ERROR]: Need at least one parameter or define 'MY_CONTAINER_NAME'\n\n" 1>&2
+ printf "Usage:\n\t$0 \n\tMY_CONTAINER_NAME= $0\n" 1>&2
+ exit 1
+ fi
+ fi
- if [[ -z "${MY_STORAGE_ACCT}" ]]; then
- printf "[ERROR]: The env var of 'MY_STORAGE_ACCT' was not defined. Exiting\n" 1>&2
- exit 1
- fi
+ if [[ -z ${MY_STORAGE_ACCT} ]]; then
+ printf "[ERROR]: The env var of 'MY_STORAGE_ACCT' was not defined. Exiting\n" 1>&2
+ exit 1
+ fi
- if [[ -z "${MY_RESOURCE_GROUP}" ]]; then
- printf "[ERROR]: The env var of 'MY_RESOURCE_GROUP' was not defined. Exiting\n" 1>&2
- exit 1
- fi
+ if [[ -z ${MY_RESOURCE_GROUP} ]]; then
+ printf "[ERROR]: The env var of 'MY_RESOURCE_GROUP' was not defined. Exiting\n" 1>&2
+ exit 1
+ fi
}
#####
# create_resource_group
##################
create_resource_group() {
- ## create resource (idempotently)
- if ! az group list | jq '.[].name' -r | grep -q ${MY_RESOURCE_GROUP}; then
- echo "[INFO]: Creating Resource Group '${MY_RESOURCE_GROUP}' at Location '${MY_LOCATION}'"
- az group create --name=${MY_RESOURCE_GROUP} --location=${MY_LOCATION} > /dev/null
- fi
+ ## create resource (idempotently)
+ if ! az group list | jq '.[].name' -r | grep -q "${MY_RESOURCE_GROUP}"; then
+ echo "[INFO]: Creating Resource Group '${MY_RESOURCE_GROUP}' at Location '${MY_LOCATION}'"
+ az group create --name="${MY_RESOURCE_GROUP}" --location="${MY_LOCATION}" >/dev/null
+ fi
}
#####
# create_storage_acct
##################
create_storage_acct() {
- ## create globally unique storage account (idempotently)
- if ! az storage account list | jq '.[].name' -r | grep -q ${MY_STORAGE_ACCT}; then
- echo "[INFO]: Creating Storage Account '${MY_STORAGE_ACCT}'"
- az storage account create \
- --name ${MY_STORAGE_ACCT} \
- --resource-group ${MY_RESOURCE_GROUP} \
- --location ${MY_LOCATION} \
- --sku Standard_ZRS \
- --encryption-services blob > /dev/null
- fi
+ ## create globally unique storage account (idempotently)
+ if ! az storage account list | jq '.[].name' -r | grep -q "${MY_STORAGE_ACCT}"; then
+ echo "[INFO]: Creating Storage Account '${MY_STORAGE_ACCT}'"
+ az storage account create \
+ --name "${MY_STORAGE_ACCT}" \
+ --resource-group "${MY_RESOURCE_GROUP}" \
+ --location "${MY_LOCATION}" \
+ --sku Standard_ZRS \
+ --encryption-services blob >/dev/null
+ fi
}
#####
# authorize_ad_user
##################
authorize_ad_user() {
- ## Use Azure AD Account to Authorize Operation
- az ad signed-in-user show --query objectId -o tsv | az role assignment create \
- --role "Storage Blob Data Contributor" \
- --assignee @- \
- --scope "/subscriptions/${MY_ACCOUNT_ID}/resourceGroups/${MY_RESOURCE_GROUP}/providers/Microsoft.Storage/storageAccounts/${MY_STORAGE_ACCT}" > /dev/null
+ ## Use Azure AD Account to Authorize Operation
+ az ad signed-in-user show --query objectId -o tsv | az role assignment create \
+ --role "Storage Blob Data Contributor" \
+ --assignee @- \
+ --scope "/subscriptions/${MY_ACCOUNT_ID}/resourceGroups/${MY_RESOURCE_GROUP}/providers/Microsoft.Storage/storageAccounts/${MY_STORAGE_ACCT}" >/dev/null
}
#####
# create_storage_container
##################
create_storage_container() {
- ## Create Container Using Credentials
- if ! az storage container list \
- --account-name ${MY_STORAGE_ACCT} \
- --auth-mode login | jq '.[].name' -r | grep -q ${MY_CONTAINER_NAME}
- then
- echo "[INFO]: Creating Storage Container '${MY_CONTAINER_NAME}'"
- az storage container create \
- --account-name ${MY_STORAGE_ACCT} \
- --name ${MY_CONTAINER_NAME} \
- --auth-mode login > /dev/null
- fi
+ ## Create Container Using Credentials
+ if ! az storage container list \
+ --account-name "${MY_STORAGE_ACCT}" \
+ --auth-mode login | jq '.[].name' -r | grep -q "${MY_CONTAINER_NAME}"; then
+ echo "[INFO]: Creating Storage Container '${MY_CONTAINER_NAME}'"
+ az storage container create \
+ --account-name "${MY_STORAGE_ACCT}" \
+ --name "${MY_CONTAINER_NAME}" \
+ --auth-mode login >/dev/null
+ fi
}
#####
# create_config_files
##################
create_config_files() {
- ## Create Minio env file and Helm Chart secret files
- if [[ "${CREATE_MINIO_ENV}" =~ true|(y)es ]]; then
- echo "[INFO]: Creating Docker Compose 'minio.env' file"
- ./create_secrets.sh minio_env
- fi
+ ## Create Minio env file and Helm Chart secret files
+ if [[ ${CREATE_MINIO_ENV} =~ true|(y)es ]]; then
+ echo "[INFO]: Creating Docker Compose 'minio.env' file"
+ ./create_secrets.sh minio_env
+ fi
- if [[ "${CREATE_MINIO_CHART_SECRETS}" =~ true|(y)es ]]; then
- echo "[INFO]: Creating Helm Chart 'minio_secrets.yaml' file"
- ./create_secrets.sh minio_chart
- fi
+ if [[ ${CREATE_MINIO_CHART_SECRETS} =~ true|(y)es ]]; then
+ echo "[INFO]: Creating Helm Chart 'minio_secrets.yaml' file"
+ ./create_secrets.sh minio_chart
+ fi
- if [[ "${CREATE_DGRAPH_CHART_SECRETS}" =~ true|(y)es ]]; then
- echo "[INFO]: Creating Helm Chart 'dgraph_secrets.yaml' file"
- ./create_secrets.sh dgraph_chart
- fi
+ if [[ ${CREATE_DGRAPH_CHART_SECRETS} =~ true|(y)es ]]; then
+ echo "[INFO]: Creating Helm Chart 'dgraph_secrets.yaml' file"
+ ./create_secrets.sh dgraph_chart
+ fi
}
main $@
diff --git a/contrib/config/backups/azure/azure_cli/create_secrets.sh b/contrib/config/backups/azure/azure_cli/create_secrets.sh
index b3d3aed8444..ba126e6dafe 100755
--- a/contrib/config/backups/azure/azure_cli/create_secrets.sh
+++ b/contrib/config/backups/azure/azure_cli/create_secrets.sh
@@ -4,94 +4,101 @@
# main
##################
main() {
- check_environment $@
+ check_environment "$@"
- ## Fetch Secrets from Azure
- get_secrets
+ ## Fetch Secrets from Azure
+ get_secrets
- ## Create Configuration with Secrets
- case $1 in
- minio_env)
- create_minio_env
- ;;
- minio_chart)
- create_minio_secrets
- ;;
- dgraph_chart)
- create_dgraph_secrets
- ;;
- esac
+ ## Create Configuration with Secrets
+ case $1 in
+ minio_env)
+ create_minio_env
+ ;;
+ minio_chart)
+ create_minio_secrets
+ ;;
+ dgraph_chart)
+ create_dgraph_secrets
+ ;;
+ esac
}
#####
# check_environment
##################
check_environment() {
- ## Check for Azure CLI command
- command -v az > /dev/null || \
- { echo "[ERROR]: 'az' command not not found" 1>&2; exit 1; }
- command -v jq > /dev/null || \
- { echo "[ERROR]: 'jq' command not not found" 1>&2; exit 1; }
+ ## Check for Azure CLI command
+ command -v az >/dev/null ||
+ {
+ echo "[ERROR]: 'az' command not not found" 1>&2
+ exit 1
+ }
+ command -v jq >/dev/null ||
+ {
+ echo "[ERROR]: 'jq' command not not found" 1>&2
+ exit 1
+ }
- MY_STORAGE_ACCT=${MY_STORAGE_ACCT:-""}
- MY_RESOURCE_GROUP=${MY_RESOURCE_GROUP:=""}
+ MY_STORAGE_ACCT=${MY_STORAGE_ACCT:-""}
+ MY_RESOURCE_GROUP=${MY_RESOURCE_GROUP:=""}
- if [[ -z "${MY_STORAGE_ACCT}" ]]; then
- printf "[ERROR]: The env var of 'MY_STORAGE_ACCT' was not defined. Exiting\n" 1>&2
- exit 1
- fi
+ if [[ -z ${MY_STORAGE_ACCT} ]]; then
+ printf "[ERROR]: The env var of 'MY_STORAGE_ACCT' was not defined. Exiting\n" 1>&2
+ exit 1
+ fi
- if [[ -z "${MY_RESOURCE_GROUP}" ]]; then
- printf "[ERROR]: The env var of 'MY_RESOURCE_GROUP' was not defined. Exiting\n" 1>&2
- exit 1
- fi
+ if [[ -z ${MY_RESOURCE_GROUP} ]]; then
+ printf "[ERROR]: The env var of 'MY_RESOURCE_GROUP' was not defined. Exiting\n" 1>&2
+ exit 1
+ fi
}
#####
# get_secrets
##################
get_secrets() {
- CONN_STR=$(az storage account show-connection-string \
- --name "${MY_STORAGE_ACCT}" \
- --resource-group "${MY_RESOURCE_GROUP}" \
- | jq .connectionString -r
- )
+ CONN_STR=$(
+ az storage account show-connection-string \
+ --name "${MY_STORAGE_ACCT}" \
+ --resource-group "${MY_RESOURCE_GROUP}" |
+ jq .connectionString -r
+ )
- export MINIO_SECRET_KEY=$(grep -oP '(?<=AccountKey=).*' <<< $CONN_STR)
- export MINIO_ACCESS_KEY=$(grep -oP '(?<=AccountName=)[^;]*' <<< $CONN_STR)
+ export MINIO_SECRET_KEY=$(grep -oP '(?<=AccountKey=).*' <<<"${CONN_STR}")
+ export MINIO_ACCESS_KEY=$(grep -oP '(?<=AccountName=)[^;]*' <<<"${CONN_STR}")
}
#####
# create_minio_env
##################
create_minio_env() {
- cat <<-EOF > ../minio.env
-MINIO_SECRET_KEY=$(grep -oP '(?<=AccountKey=).*' <<< $CONN_STR)
-MINIO_ACCESS_KEY=$(grep -oP '(?<=AccountName=)[^;]*' <<< $CONN_STR)
-EOF
+ cat <<-EOF >../minio.env
+ MINIO_SECRET_KEY=$(grep -oP '(?<=AccountKey=).*' <<<"${CONN_STR}")
+ MINIO_ACCESS_KEY=$(grep -oP '(?<=AccountName=)[^;]*' <<<"${CONN_STR}")
+ EOF
}
#####
# create_minio_secrets
##################
create_minio_secrets() {
- cat <<-EOF > ../charts/minio_secrets.yaml
-accessKey: ${MINIO_ACCESS_KEY}
-secretKey: ${MINIO_SECRET_KEY}
-EOF
+ cat <<-EOF >../charts/minio_secrets.yaml
+ accessKey: ${MINIO_ACCESS_KEY}
+ secretKey: ${MINIO_SECRET_KEY}
+ EOF
}
#####
# create_dgraph_secrets
##################
create_dgraph_secrets() {
- cat <<-EOF > ../charts/dgraph_secrets.yaml
-backups:
- keys:
- minio:
- access: ${MINIO_ACCESS_KEY}
- secret: ${MINIO_SECRET_KEY}
-EOF
+ cat <<-EOF >../charts/dgraph_secrets.yaml
+ backups:
+ keys:
+ minio:
+ access: ${MINIO_ACCESS_KEY}
+ secret: ${MINIO_SECRET_KEY}
+ EOF
}
main $@
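For clarity, a small standalone sketch of the lookbehind extraction used above, with a made-up connection string (GNU grep is required for `-P`):

```bash
## shape of the string returned by 'az storage account show-connection-string'
CONN_STR='DefaultEndpointsProtocol=https;EndpointSuffix=core.windows.net;AccountName=mystorageacct;AccountKey=abc123=='

grep -oP '(?<=AccountName=)[^;]*' <<<"${CONN_STR}" # prints: mystorageacct
grep -oP '(?<=AccountKey=).*' <<<"${CONN_STR}"     # prints: abc123==
```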
diff --git a/contrib/config/backups/azure/terraform/README.md b/contrib/config/backups/azure/terraform/README.md
index fe8d5a6c471..c3384ec224d 100644
--- a/contrib/config/backups/azure/terraform/README.md
+++ b/contrib/config/backups/azure/terraform/README.md
@@ -2,29 +2,34 @@
## About
-This script will create the required resources needed to create Azure Blob Storage using [`simple-azure-blob`](https://github.com/darkn3rd/simple-azure-blob) module.
+This script will create the required resources needed for Azure Blob Storage using the
+[`simple-azure-blob`](https://github.com/darkn3rd/simple-azure-blob) module.
## Prerequisites
You need the following installed to use this automation:
-* [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest) with an active Azure subscription configured.
-* [Terraform](https://www.terraform.io/downloads.html) tool that is used to provision resources and create configuration files from templates
+- [Azure CLI](https://docs.microsoft.com/en-us/cli/azure/install-azure-cli?view=azure-cli-latest)
+ with an active Azure subscription configured.
+- [Terraform](https://www.terraform.io/downloads.html) tool that is used to provision resources and
+ create configuration files from templates
## Configuration
You will need to define the following variables:
-* Required Variables:
- * `resource_group_name` (required) - Azure resource group that contains the resources
- * `storage_account_name` (required) - Azure storage account (unique global name) to contain storage
- * `storage_container_name` (default = `dgraph-backups`) - Azure container to host the blob storage
+- Required Variables:
+ - `resource_group_name` (required) - Azure resource group that contains the resources
+ - `storage_account_name` (required) - Azure storage account (unique global name) to contain
+ storage
+ - `storage_container_name` (default = `dgraph-backups`) - Azure container to host the blob storage
## Steps
### Define Variables
-You can define these when prompted, or in `terrafrom.tfvars` file, or through command line variables, e.g. `TF_VAR_resource_group_name`, `TF_VAR_storage_account_name`.
+You can define these when prompted, in the `terraform.tfvars` file, or through command line
+variables, e.g. `TF_VAR_resource_group_name`, `TF_VAR_storage_account_name`.
```terraform
# terraform.tfvars
diff --git a/contrib/config/backups/client/README.md b/contrib/config/backups/client/README.md
index dd63420811e..1582d628a26 100644
--- a/contrib/config/backups/client/README.md
+++ b/contrib/config/backups/client/README.md
@@ -1,41 +1,59 @@
# Backup Script
-This backup script that supports many of the features in Dgraph, such as ACLs, Mutual TLS, REST or GraphQL API. See `./dgraph-backup.sh --help` for all of the options.
+This backup script supports many of the features in Dgraph, such as ACLs, Mutual TLS, and the REST
+or GraphQL API. See `./dgraph-backup.sh --help` for all of the options.
## Requirements
-* The scripts (`dgraph-backup.sh` and `compose-setup.sh`) require the following tools to run properly:
- * GNU `bash`
- * GNU `getopt`
- * GNU `grep`†
-* These scripts were tested on the following environments:
- * macOS with Homebrew [gnu-getopt](https://formulae.brew.sh/formula/gnu-getopt) bottle and [grep](https://formulae.brew.sh/formula/grep) bottle,
- * [Ubuntu 20.04.1 (Focal Fossa)](https://releases.ubuntu.com/20.04/) (any modern Linux distro should work, such as the [dgraph/dgraph](https://hub.docker.com/r/dgraph/dgraph/) docker container), and
- * Windows with [MSYS2](https://www.msys2.org/).
-* For the test demo environment, both [docker](https://docs.docker.com/engine/) and [docker-compose](https://docs.docker.com/compose/) are required.
+- The scripts (`dgraph-backup.sh` and `compose-setup.sh`) require the following tools to run
+ properly:
+ - GNU `bash`
+ - GNU `getopt`
+ - GNU `grep`†
+- These scripts were tested on the following environments:
+ - macOS with Homebrew [gnu-getopt](https://formulae.brew.sh/formula/gnu-getopt) bottle and
+ [grep](https://formulae.brew.sh/formula/grep) bottle,
+ - [Ubuntu 20.04.1 (Focal Fossa)](https://releases.ubuntu.com/20.04/) (any modern Linux distro
+ should work, such as the [dgraph/dgraph](https://hub.docker.com/r/dgraph/dgraph/) docker
+ container), and
+ - Windows with [MSYS2](https://www.msys2.org/).
+- For the test demo environment, both [docker](https://docs.docker.com/engine/) and
+ [docker-compose](https://docs.docker.com/compose/) are required.
-† Some versions of macOS 10.x do not include have a compatible version of `grep`. You need to have GNU grep in the path for this script to work.
+† Some versions of macOS 10.x do not include a compatible version of `grep`. You need to have GNU
+grep in the path for this script to work.
## Important Notes
-If you are using this script on a system other than alpha, we'll call this *backup workstation*, you should be aware of the following:
+If you are using this script on a system other than the alpha server (we'll call this the _backup
+workstation_), you should be aware of the following:
-* **General**
- * the *backup workstation* will need to have access to the alpha server, e.g. `localhost:8080`
-* **TLS**
- * when accessing alpha server secured by TLS, the *backup workstation* will need access to `ca.crt` created with `dgraph cert` in the path.
- * if Mutual TLS is used, the *backup workstation* will also need access to the client cert and key in the path.
-* **`subpath` option**
- * when specifying sub-path that uses a datestamp, the *backup workstation* needs to have the same timestamp as the alpha server.
- * when backing up to a file path, such as NFS, the *backup workstation* will need access to the same file path at the same mount point, e.g. if `/dgraph/backups` is used on alpha, the same path `/dgraph/backups` has to be accessible on the *backup workstation*
+- **General**
+  - the _backup workstation_ will need to have access to the alpha server, e.g. `localhost:8080`
+- **TLS**
+  - when accessing an alpha server secured by TLS, the _backup workstation_ will need access to the
+    `ca.crt` created with `dgraph cert` in the path.
+  - if Mutual TLS is used, the _backup workstation_ will also need access to the client cert and
+    key in the path.
+- **`subpath` option**
+  - when specifying a sub-path that uses a datestamp, the _backup workstation_ needs to have the
+    same timestamp as the alpha server.
+  - when backing up to a file path, such as NFS, the _backup workstation_ will need access to the
+    same file path at the same mount point, e.g. if `/dgraph/backups` is used on alpha, the same
+    path `/dgraph/backups` has to be accessible on the _backup workstation_
## Demo (Test) with local file path
-You can try out these features using [Docker Compose](https://docs.docker.com/compose/). There's a `./compose-setup.sh` script that can configure the environment with the desired features. As you need to have a common shared directory for file paths, you can use `ratel` container as the *backup workstation* to run the backup script.
+You can try out these features using [Docker Compose](https://docs.docker.com/compose/). There's a
+`./compose-setup.sh` script that can configure the environment with the desired features. As you
+need to have a common shared directory for file paths, you can use the `ratel` container as the
+_backup workstation_ to run the backup script.
-As an example of performing backups with a local mounted file path using ACLs, Encryption, and TLS, you can follow these steps:
+As an example of performing backups with a local mounted file path using ACLs, Encryption, and TLS,
+you can follow these steps:
+
+1. Setup the environment and log into the _backup workstation_ (ratel container):
-1. Setup Environment and log into *backup workstation* (ratel container):
```bash
## configure docker-compose environment
./compose-setup.sh --acl --enc --tls --make_tls_cert
@@ -44,7 +62,9 @@ As an example of performing backups with a local mounted file path using ACLs, E
## login into Ratel to use for backups
docker exec --tty --interactive ratel bash
```
+
2. Trigger a full backup:
+
```bash
## trigger a backup on alpha1:8080
./dgraph-backup.sh \
@@ -55,25 +75,35 @@ As an example of performing backups with a local mounted file path using ACLs, E
--user groot \
--password password
```
+
3. Verify Results
+
```bash
## check for backup files
ls /dgraph/backups
```
+
4. Logout of the Ratel container
+
```bash
exit
```
-4. Cleanup when finished
+
+5. Cleanup when finished
+
```bash
docker-compose stop && docker-compose rm
```
### Demo (Test) with S3 Buckets
-This will have requirements for [Terraform](https://www.terraform.io/) and [AWS CLI](https://aws.amazon.com/cli/). See [s3/README.md](../s3/README.md) for further information. Because we do not need to share the same file path, we can use the host as the *backup workstation*:
+This requires [Terraform](https://www.terraform.io/) and the
+[AWS CLI](https://aws.amazon.com/cli/). See [s3/README.md](../s3/README.md) for further information.
+Because we do not need to share the same file path, we can use the host as the _backup workstation_:
+
+1. Setup the S3 Bucket environment. Make sure to replace the bucket name placeholder with an
+   appropriate name.
-1. Setup the S3 Bucket environment. Make sure to replace `` to an appropriate name.
```bash
## create the S3 Bucket + Credentials
pushd ../s3/terraform
@@ -89,18 +119,24 @@ This will have requirements for [Terraform](https://www.terraform.io/) and [AWS
source env.sh
popd
```
+
2. Trigger a backup
+
```bash
./dgraph-backup.sh \
--alpha localhost:8080 \
--force_full \
--location $BACKUP_PATH
```
+
3. Verify backups were finished
+
```bash
aws s3 ls s3://${BACKUP_PATH##*/}
```
+
4. Clean up when completed:
+
```bash
## remove the local Dgraph cluster
pushd ../s3
@@ -118,9 +154,14 @@ This will have requirements for [Terraform](https://www.terraform.io/) and [AWS
### Demo (Test) with GCP via Minio Gateway
-This will have requirements for [Terraform](https://www.terraform.io/) and [Google Cloud SDK](https://cloud.google.com/sdk). See [gcp/README.md](../gcp/README.md) for further information. Because we do not need to share the same file path, we can use the host as the *backup workstation*:
+This requires [Terraform](https://www.terraform.io/) and the
+[Google Cloud SDK](https://cloud.google.com/sdk). See [gcp/README.md](../gcp/README.md) for further
+information. Because we do not need to share the same file path, we can use the host as the _backup
+workstation_:
+
+1. Setup the GCS Bucket environment. Make sure to replace the placeholder values with something
+   appropriate.
-1. Setup Azure Storage Blob environment. Replace ``, ``, and `` to something appropriate.
```bash
## create Resource Group, Storage Account, authorize Storage Account, Create Storage Container
pushd ../azure/terraform
@@ -187,21 +240,27 @@ This will have requirements for [Terraform](https://www.terraform.io/) and [Azur
popd
```
+
2. Trigger a backup
+
```bash
./dgraph-backup.sh \
--alpha localhost:8080 \
--force_full \
--location minio://gateway:9000/${CONTAINER_NAME}
```
+
3. Verify backups were created
+
```bash
az storage blob list \
--account-name ${STORAGE_ACCOUNT_NAME} \
--container-name ${CONTAINER_NAME} \
--output table
```
+
4. Clean up when finished:
+
```bash
## remove the local Dgraph cluster
pushd ../azure
diff --git a/contrib/config/backups/client/backup_helper.sh b/contrib/config/backups/client/backup_helper.sh
index 5bac16ebc76..1cb1d422c38 100644
--- a/contrib/config/backups/client/backup_helper.sh
+++ b/contrib/config/backups/client/backup_helper.sh
@@ -7,23 +7,23 @@
# get_token_rest - get accessJWT token with REST command for Dgraph 1.x
##########################
get_token_rest() {
- JSON="{\"userid\": \"${USER}\", \"password\": \"${PASSWORD}\" }"
- RESULT=$(
- /usr/bin/curl --silent \
- "${HEADERS[@]}" \
- "${CERTOPTS[@]}" \
- --request POST \
- ${ALPHA_HOST}/login \
- --data "${JSON}"
- )
-
- if grep -q errors <<< "$RESULT"; then
- ERROR=$(grep -oP '(?<=message":")[^"]*' <<< $RESULT)
- echo "ERROR: $ERROR"
- return 1
- fi
-
- grep -oP '(?<=accessJWT":")[^"]*' <<< "$RESULT"
+ JSON="{\"userid\": \"${USER}\", \"password\": \"${PASSWORD}\" }"
+ RESULT=$(
+ /usr/bin/curl --silent \
+ "${HEADERS[@]}" \
+ "${CERTOPTS[@]}" \
+ --request POST \
+ "${ALPHA_HOST}"/login \
+ --data "${JSON}"
+ )
+
+ if grep -q errors <<<"${RESULT}"; then
+ ERROR=$(grep -oP '(?<=message":")[^"]*' <<<"${RESULT}")
+ echo "ERROR: ${ERROR}"
+ return 1
+ fi
+
+ grep -oP '(?<=accessJWT":")[^"]*' <<<"${RESULT}"
}
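Stripped of the header and certificate arrays, the REST login above reduces to a call like this (host and credentials are the defaults assumed elsewhere in these docs):

```bash
## Dgraph 1.x REST login; the JSON response contains the accessJWT field
curl --silent --request POST http://localhost:8080/login \
  --data '{"userid": "groot", "password": "password"}'
```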
@@ -31,23 +31,23 @@ get_token_rest() {
# get_token_graphql - get accessJWT token using GraphQL for Dgraph 20.03.1+
##########################
get_token_graphql() {
- GQL="{\"query\": \"mutation { login(userId: \\\"${USER}\\\" password: \\\"${PASSWORD}\\\") { response { accessJWT } } }\"}"
- RESULT=$(
- /usr/bin/curl --silent \
- "${HEADERS[@]}" \
- "${CERTOPTS[@]}" \
- --request POST \
- ${ALPHA_HOST}/admin \
- --data "${GQL}"
- )
-
- if grep -q errors <<< "$RESULT"; then
- ERROR=$(grep -oP '(?<=message":")[^"]*' <<< $RESULT)
- echo "ERROR: $ERROR"
- return 1
- fi
-
- grep -oP '(?<=accessJWT":")[^"]*' <<< "$RESULT"
+ GQL="{\"query\": \"mutation { login(userId: \\\"${USER}\\\" password: \\\"${PASSWORD}\\\") { response { accessJWT } } }\"}"
+ RESULT=$(
+ /usr/bin/curl --silent \
+ "${HEADERS[@]}" \
+ "${CERTOPTS[@]}" \
+ --request POST \
+ "${ALPHA_HOST}"/admin \
+ --data "${GQL}"
+ )
+
+ if grep -q errors <<<"${RESULT}"; then
+ ERROR=$(grep -oP '(?<=message":")[^"]*' <<<"${RESULT}")
+ echo "ERROR: ${ERROR}"
+ return 1
+ fi
+
+ grep -oP '(?<=accessJWT":")[^"]*' <<<"${RESULT}"
}
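Likewise, the GraphQL login reduces to a single mutation against `/admin` (same assumed host and credentials):

```bash
## Dgraph 20.03.1+ GraphQL login; the response contains response.accessJWT
curl --silent --request POST http://localhost:8080/admin \
  --header "Content-Type: application/json" \
  --data '{"query": "mutation { login(userId: \"groot\" password: \"password\") { response { accessJWT } } }"}'
```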
@@ -63,42 +63,42 @@ get_token_graphql() {
# CLIENT_KEY_PATH - path to client cert (e.g. client.dgraphuser.key) for client TLS
##########################
get_token() {
- USER=${1}
- PASSWORD=${2}
- AUTH_TOKEN=${3:-""}
- CACERT_PATH=${CACERT_PATH:-""}
- CLIENT_CERT_PATH=${CLIENT_CERT_PATH:-""}
- CLIENT_KEY_PATH=${CLIENT_KEY_PATH:-""}
-
- ## user/password required for login
- if [[ -z "$USER" || -z "$PASSWORD" ]]; then
- return 1
- fi
-
- if [[ ! -z "$AUTH_TOKEN" ]]; then
- HEADERS+=('--header' "X-Dgraph-AuthToken: $AUTH_TOKEN")
- fi
-
- if [[ ! -z "$CACERT_PATH" ]]; then
- CERTOPTS+=('--cacert' "$CACERT_PATH")
- if [[ ! -z "$CLIENT_CERT_PATH" || ! -z "$CLIENT_KEY_PATH" ]]; then
- CERTOPTS+=(
- '--cert' "$CLIENT_CERT_PATH"
- '--key' "$CLIENT_KEY_PATH"
- )
- fi
- ALPHA_HOST=https://${ALPHA_HOST:-"localhost:8080"}
- else
- ALPHA_HOST=${ALPHA_HOST:-"localhost:8080"}
- fi
-
- API_TYPE=${API_TYPE:-"graphql"}
- if [[ "$API_TYPE" == "graphql" ]]; then
- HEADERS+=('--header' "Content-Type: application/json")
- get_token_graphql
- else
- get_token_rest
- fi
+ USER=${1}
+ PASSWORD=${2}
+ AUTH_TOKEN=${3:-""}
+ CACERT_PATH=${CACERT_PATH:-""}
+ CLIENT_CERT_PATH=${CLIENT_CERT_PATH:-""}
+ CLIENT_KEY_PATH=${CLIENT_KEY_PATH:-""}
+
+ ## user/password required for login
+ if [[ -z ${USER} || -z ${PASSWORD} ]]; then
+ return 1
+ fi
+
+ if [[ -n ${AUTH_TOKEN} ]]; then
+ HEADERS+=('--header' "X-Dgraph-AuthToken: ${AUTH_TOKEN}")
+ fi
+
+ if [[ -n ${CACERT_PATH} ]]; then
+ CERTOPTS+=('--cacert' "${CACERT_PATH}")
+ if [[ -n ${CLIENT_CERT_PATH} || -n ${CLIENT_KEY_PATH} ]]; then
+ CERTOPTS+=(
+ '--cert' "${CLIENT_CERT_PATH}"
+ '--key' "${CLIENT_KEY_PATH}"
+ )
+ fi
+ ALPHA_HOST=https://${ALPHA_HOST:-"localhost:8080"}
+ else
+ ALPHA_HOST=${ALPHA_HOST:-"localhost:8080"}
+ fi
+
+ API_TYPE=${API_TYPE:-"graphql"}
+ if [[ ${API_TYPE} == "graphql" ]]; then
+ HEADERS+=('--header' "Content-Type: application/json")
+ get_token_graphql
+ else
+ get_token_rest
+ fi
}
######
@@ -115,65 +115,68 @@ get_token() {
# CLIENT_KEY_PATH - path to client cert (e.g. client.dgraphuser.key) for client TLS
##########################
backup() {
- ACCESS_TOKEN=${1:-""}
- AUTH_TOKEN=${2:-""}
- CACERT_PATH=${CACERT_PATH:-""}
- CLIENT_CERT_PATH=${CLIENT_CERT_PATH:-""}
- CLIENT_KEY_PATH=${CLIENT_KEY_PATH:-""}
-
- API_TYPE=${API_TYPE:-"graphql"}
-
- MINIO_SECURE=${MINIO_SECURE:-"false"}
- FORCE_FULL=${FORCE_FULL:-"false"}
-
- [[ -z "$BACKUP_DESTINATION" ]] && \
- { echo "'BACKUP_DESTINATION' is not set. Exiting" >&2; return 1; }
-
- if [[ ! -z "$ACCESS_TOKEN" ]]; then
- HEADERS+=('--header' "X-Dgraph-AccessToken: $ACCESS_TOKEN")
- fi
-
- if [[ ! -z "$AUTH_TOKEN" ]]; then
- HEADERS+=('--header' "X-Dgraph-AuthToken: $AUTH_TOKEN")
- fi
-
- if [[ ! -z "$CACERT_PATH" ]]; then
- CERTOPTS+=('--cacert' "$CACERT_PATH")
- if [[ ! -z "$CLIENT_CERT_PATH" || ! -z "$CLIENT_KEY_PATH" ]]; then
- CERTOPTS+=(
- '--cert' "$CLIENT_CERT_PATH"
- '--key' "$CLIENT_KEY_PATH"
- )
- fi
- ALPHA_HOST=https://${ALPHA_HOST:-"localhost:8080"}
- else
- ALPHA_HOST=${ALPHA_HOST:-"localhost:8080"}
- fi
-
- ## Configure destination with date stamp folder
- BACKUP_DESTINATION="${BACKUP_DESTINATION}/${SUBPATH}"
- ## Configure Minio Configuration
- if [[ "$MINIO_SECURE" == "false" && "$BACKUP_DESTINATION" =~ ^minio ]]; then
- BACKUP_DESTINATION="${BACKUP_DESTINATION}?secure=false"
- fi
-
- ## Create date-stamped directory for file system
- if [[ ! "$BACKUP_DESTINATION" =~ ^minio|^s3 ]]; then
- ## Check destination directory exist
- if [[ -d ${BACKUP_DESTINATION%/*} ]]; then
- mkdir -p $BACKUP_DESTINATION
- else
- echo "Designated Backup Destination '${BACKUP_DESTINATION%/*}' does not exist. Aborting."
- return 1
- fi
- fi
-
- if [[ "$API_TYPE" == "graphql" ]]; then
- HEADERS+=('--header' "Content-Type: application/json")
- backup_graphql
- else
- backup_rest
- fi
+ ACCESS_TOKEN=${1:-""}
+ AUTH_TOKEN=${2:-""}
+ CACERT_PATH=${CACERT_PATH:-""}
+ CLIENT_CERT_PATH=${CLIENT_CERT_PATH:-""}
+ CLIENT_KEY_PATH=${CLIENT_KEY_PATH:-""}
+
+ API_TYPE=${API_TYPE:-"graphql"}
+
+ MINIO_SECURE=${MINIO_SECURE:-"false"}
+ FORCE_FULL=${FORCE_FULL:-"false"}
+
+ [[ -z ${BACKUP_DESTINATION} ]] &&
+ {
+ echo "'BACKUP_DESTINATION' is not set. Exiting" >&2
+ return 1
+ }
+
+ if [[ -n ${ACCESS_TOKEN} ]]; then
+ HEADERS+=('--header' "X-Dgraph-AccessToken: ${ACCESS_TOKEN}")
+ fi
+
+ if [[ -n ${AUTH_TOKEN} ]]; then
+ HEADERS+=('--header' "X-Dgraph-AuthToken: ${AUTH_TOKEN}")
+ fi
+
+ if [[ -n ${CACERT_PATH} ]]; then
+ CERTOPTS+=('--cacert' "${CACERT_PATH}")
+ if [[ -n ${CLIENT_CERT_PATH} || -n ${CLIENT_KEY_PATH} ]]; then
+ CERTOPTS+=(
+ '--cert' "${CLIENT_CERT_PATH}"
+ '--key' "${CLIENT_KEY_PATH}"
+ )
+ fi
+ ALPHA_HOST=https://${ALPHA_HOST:-"localhost:8080"}
+ else
+ ALPHA_HOST=${ALPHA_HOST:-"localhost:8080"}
+ fi
+
+ ## Configure destination with date stamp folder
+ BACKUP_DESTINATION="${BACKUP_DESTINATION}/${SUBPATH}"
+ ## Configure Minio Configuration
+ if [[ ${MINIO_SECURE} == "false" && ${BACKUP_DESTINATION} =~ ^minio ]]; then
+ BACKUP_DESTINATION="${BACKUP_DESTINATION}?secure=false"
+ fi
+
+ ## Create date-stamped directory for file system
+ if [[ ! ${BACKUP_DESTINATION} =~ ^minio|^s3 ]]; then
+ ## Check destination directory exist
+ if [[ -d ${BACKUP_DESTINATION%/*} ]]; then
+ mkdir -p "${BACKUP_DESTINATION}"
+ else
+ echo "Designated Backup Destination '${BACKUP_DESTINATION%/*}' does not exist. Aborting."
+ return 1
+ fi
+ fi
+
+ if [[ ${API_TYPE} == "graphql" ]]; then
+ HEADERS+=('--header' "Content-Type: application/json")
+ backup_graphql
+ else
+ backup_rest
+ fi
}
@@ -181,27 +184,28 @@ backup() {
# backup_rest - trigger backup using REST command for Dgraph 1.x
##########################
backup_rest() {
- URL_PATH="admin/backup?force_full=$FORCE_FULL"
-
- RESULT=$(/usr/bin/curl --silent \
- "${HEADERS[@]}" \
- "${CERTOPTS[@]}" \
- --request POST \
- ${ALPHA_HOST}/$URL_PATH \
- --data "destination=$BACKUP_DESTINATION"
- )
-
- if grep -q errors <<< "$RESULT"; then
- ERROR=$(grep -oP '(?<=message":")[^"]*' <<< $RESULT)
- MESSAGE="ERROR: $ERROR"
- if grep -q code <<< "$RESULT"; then
- CODE=$(grep -oP '(?<=code":")[^"]*' <<< $RESULT)
- echo "$MESSAGE REASON='$CODE'"
- fi
- return 1
- fi
-
- echo $RESULT
+ URL_PATH="admin/backup?force_full=${FORCE_FULL}"
+
+ RESULT=$(
+ /usr/bin/curl --silent \
+ "${HEADERS[@]}" \
+ "${CERTOPTS[@]}" \
+ --request POST \
+ "${ALPHA_HOST}"/"${URL_PATH}" \
+ --data "destination=${BACKUP_DESTINATION}"
+ )
+
+ if grep -q errors <<<"${RESULT}"; then
+ ERROR=$(grep -oP '(?<=message":")[^"]*' <<<"${RESULT}")
+ MESSAGE="ERROR: ${ERROR}"
+ if grep -q code <<<"${RESULT}"; then
+ CODE=$(grep -oP '(?<=code":")[^"]*' <<<"${RESULT}")
+ echo "${MESSAGE} REASON='${CODE}'"
+ fi
+ return 1
+ fi
+
+ echo "${RESULT}"
}
@@ -209,21 +213,22 @@ backup_rest() {
# backup_graphql - trigger backup using GraphQL for Dgraph 20.03.1+
##########################
backup_graphql() {
- GQL="{\"query\": \"mutation { backup(input: {destination: \\\"${BACKUP_DESTINATION}\\\" forceFull: $FORCE_FULL }) { response { message code } } }\"}"
-
- RESULT=$(/usr/bin/curl --silent \
- "${HEADERS[@]}" \
- "${CERTOPTS[@]}" \
- --request POST \
- ${ALPHA_HOST}/admin \
- --data "$GQL"
- )
-
- if grep -q errors <<< "$RESULT"; then
- ERROR=$(grep -oP '(?<=message":")[^"]*' <<< $RESULT)
- echo "ERROR: $ERROR"
- return 1
- fi
-
- echo $RESULT
+ GQL="{\"query\": \"mutation { backup(input: {destination: \\\"${BACKUP_DESTINATION}\\\" forceFull: ${FORCE_FULL} }) { response { message code } } }\"}"
+
+ RESULT=$(
+ /usr/bin/curl --silent \
+ "${HEADERS[@]}" \
+ "${CERTOPTS[@]}" \
+ --request POST \
+ "${ALPHA_HOST}"/admin \
+ --data "${GQL}"
+ )
+
+ if grep -q errors <<<"${RESULT}"; then
+ ERROR=$(grep -oP '(?<=message":")[^"]*' <<<"${RESULT}")
+ echo "ERROR: ${ERROR}"
+ return 1
+ fi
+
+ echo "${RESULT}"
}
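Taken together, the helpers above can be driven like this; this is only a sketch (`dgraph-backup.sh` below is the supported entry point), and the paths are hypothetical:

```bash
## source the helpers and set the environment variables they read
source ./backup_helper.sh
export ALPHA_HOST="localhost:8080"
export BACKUP_DESTINATION="/dgraph/backups"
export SUBPATH="dgraph_$(date +%Y%m%d)"

## log in to an ACL-enabled cluster and capture the accessJWT
ACCESS_TOKEN=$(get_token groot password) || exit 1

## trigger the backup with the token
backup "${ACCESS_TOKEN}"
```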
diff --git a/contrib/config/backups/client/compose-setup.sh b/contrib/config/backups/client/compose-setup.sh
index acc1ece431c..ce94bc84b62 100755
--- a/contrib/config/backups/client/compose-setup.sh
+++ b/contrib/config/backups/client/compose-setup.sh
@@ -14,221 +14,263 @@
# main - runs the script
##########################
main() {
- parse_command $@
- config_compose
- create_certs
+ parse_command "$@"
+ config_compose
+ create_certs
}
######
# usage - print friendly usage statement
##########################
usage() {
- cat <<-USAGE 1>&2
-Setup Docker Compose Environment
-
-Usage:
- $0 [FLAGS] --location [LOCATION]
-
-Flags:
- -j, --acl Enable Access Control List
- -t, --auth_token Enable auth token
- -e, --enc Enable Encryption
- -k, --tls Enable TLS
- -c, --tls_client_auth string Set TLS Auth String (default VERIFYIFGIVEN)
- -m, --make_tls_cert Create TLS Certificates and Key
- -v, --dgraph_version Set Dgraph Version
- -d, --debug Enable debug in output
- -h, --help Help for $0
-
-USAGE
+ cat <<-USAGE 1>&2
+ Setup Docker Compose Environment
+
+ Usage:
+ $0 [FLAGS] --location [LOCATION]
+
+ Flags:
+ -j, --acl Enable Access Control List
+ -t, --auth_token Enable auth token
+ -e, --enc Enable Encryption
+ -k, --tls Enable TLS
+ -c, --tls_client_auth string Set TLS Auth String (default VERIFYIFGIVEN)
+ -m, --make_tls_cert Create TLS Certificates and Key
+ -v, --dgraph_version Set Dgraph Version
+ -d, --debug Enable debug in output
+ -h, --help Help for $0
+
+ USAGE
}
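For instance, the flags above combine into an invocation like this (the pinned version is only an example):

```bash
## enable ACLs, encryption, and TLS, generate certificates, and pin a version
./compose-setup.sh --acl --enc --tls --make_tls_cert --dgraph_version v20.11.0
```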
######
# get_grep - find grep that supports look-ahead/behind regex
##########################
get_grep() {
- unset GREP_CMD
-
- ## Check for GNU grep compatibility
- if ! grep --version | head -1 | fgrep -q GNU; then
- local SYSTEM="$(uname -s)"
- if [[ "${SYSTEM,,}" == "freebsd" ]]; then
- ## Check FreeBSD install location
- if [[ -f "/usr/local/bin/grep" ]]; then
- GREP_CMD="/usr/local/bin/grep"
- else
- ## Save FreeBSD Instructions
- local MESSAGE="On FreeBSD, compatible grep can be installed with 'sudo pkg install gnugrep'"
- fi
- elif [[ "${SYSTEM,,}" == "darwin" ]]; then
- ## Check HomeBrew install location
- if [[ -f "/usr/local/opt/grep/libexec/gnubin/grep" ]]; then
- GREP_CMD="/usr/local/opt/grep/libexec/gnubin/grep"
- ## Check MacPorts install location
- elif [[ -f "/opt/local/bin/grep" ]]; then
- GREP_CMD="/opt/local/bin/grep"
- else
- ## Save MacPorts or HomeBrew Instructions
- if command -v brew > /dev/null; then
- local MESSAGE="On macOS, gnu-grep can be installed with 'brew install grep'\n"
- elif command -v port > /dev/null; then
- local MESSAGE="On macOS, grep can be installed with 'sudo port install grep'\n"
- fi
- fi
- fi
- else
- GREP_CMD="$(command -v grep)"
- fi
-
- ## Error if no suitable grep command found
- if [[ -z $GREP_CMD ]]; then
- printf "ERROR: GNU grep not found. Please install GNU compatible 'grep'\n\n%s" "$MESSAGE" 1>&2
- exit 1
- fi
+ unset GREP_CMD
+
+ ## Check for GNU grep compatibility
+ if ! grep --version | head -1 | grep -qF GNU; then
+ local SYSTEM="$(uname -s)"
+ if [[ ${SYSTEM,,} == "freebsd" ]]; then
+ ## Check FreeBSD install location
+ if [[ -f "/usr/local/bin/grep" ]]; then
+ GREP_CMD="/usr/local/bin/grep"
+ else
+ ## Save FreeBSD Instructions
+ local MESSAGE="On FreeBSD, compatible grep can be installed with 'sudo pkg install gnugrep'"
+ fi
+ elif [[ ${SYSTEM,,} == "darwin" ]]; then
+ ## Check HomeBrew install location
+ if [[ -f "/usr/local/opt/grep/libexec/gnubin/grep" ]]; then
+ GREP_CMD="/usr/local/opt/grep/libexec/gnubin/grep"
+ ## Check MacPorts install location
+ elif [[ -f "/opt/local/bin/grep" ]]; then
+ GREP_CMD="/opt/local/bin/grep"
+ else
+ ## Save MacPorts or HomeBrew Instructions
+ if command -v brew >/dev/null; then
+ local MESSAGE="On macOS, gnu-grep can be installed with 'brew install grep'\n"
+ elif command -v port >/dev/null; then
+ local MESSAGE="On macOS, grep can be installed with 'sudo port install grep'\n"
+ fi
+ fi
+ fi
+ else
+ GREP_CMD="$(command -v grep)"
+ fi
+
+ ## Error if no suitable grep command found
+ if [[ -z ${GREP_CMD} ]]; then
+ printf "ERROR: GNU grep not found. Please install GNU compatible 'grep'\n\n%s" "${MESSAGE}" 1>&2
+ exit 1
+ fi
}
######
# get_getopt - find GNU getopt or print error message
##########################
get_getopt() {
- unset GETOPT_CMD
-
- ## Check for GNU getopt compatibility
- if [[ "$(getopt --version)" =~ "--" ]]; then
- local SYSTEM="$(uname -s)"
- if [[ "${SYSTEM,,}" == "freebsd" ]]; then
- ## Check FreeBSD install location
- if [[ -f "/usr/local/bin/getopt" ]]; then
- GETOPT_CMD="/usr/local/bin/getopt"
- else
- ## Save FreeBSD Instructions
- local MESSAGE="On FreeBSD, compatible getopt can be installed with 'sudo pkg install getopt'"
- fi
- elif [[ "${SYSTEM,,}" == "darwin" ]]; then
- ## Check HomeBrew install location
- if [[ -f "/usr/local/opt/gnu-getopt/bin/getopt" ]]; then
- GETOPT_CMD="/usr/local/opt/gnu-getopt/bin/getopt"
- ## Check MacPorts install location
- elif [[ -f "/opt/local/bin/getopt" ]]; then
- GETOPT_CMD="/opt/local/bin/getopt"
- else
- ## Save MacPorts or HomeBrew Instructions
- if command -v brew > /dev/null; then
- local MESSAGE="On macOS, gnu-getopt can be installed with 'brew install gnu-getopt'\n"
- elif command -v port > /dev/null; then
- local MESSAGE="On macOS, getopt can be installed with 'sudo port install getopt'\n"
- fi
- fi
- fi
- else
- GETOPT_CMD="$(command -v getopt)"
- fi
-
- ## Error if no suitable getopt command found
- if [[ -z $GETOPT_CMD ]]; then
- printf "ERROR: GNU getopt not found. Please install GNU compatible 'getopt'\n\n%s" "$MESSAGE" 1>&2
- exit 1
- fi
+ unset GETOPT_CMD
+
+ ## Check for GNU getopt compatibility
+ if [[ "$(getopt --version)" =~ "--" ]]; then
+ local SYSTEM="$(uname -s)"
+ if [[ ${SYSTEM,,} == "freebsd" ]]; then
+ ## Check FreeBSD install location
+ if [[ -f "/usr/local/bin/getopt" ]]; then
+ GETOPT_CMD="/usr/local/bin/getopt"
+ else
+ ## Save FreeBSD Instructions
+ local MESSAGE="On FreeBSD, compatible getopt can be installed with 'sudo pkg install getopt'"
+ fi
+ elif [[ ${SYSTEM,,} == "darwin" ]]; then
+ ## Check HomeBrew install location
+ if [[ -f "/usr/local/opt/gnu-getopt/bin/getopt" ]]; then
+ GETOPT_CMD="/usr/local/opt/gnu-getopt/bin/getopt"
+ ## Check MacPorts install location
+ elif [[ -f "/opt/local/bin/getopt" ]]; then
+ GETOPT_CMD="/opt/local/bin/getopt"
+ else
+ ## Save MacPorts or HomeBrew Instructions
+ if command -v brew >/dev/null; then
+ local MESSAGE="On macOS, gnu-getopt can be installed with 'brew install gnu-getopt'\n"
+ elif command -v port >/dev/null; then
+ local MESSAGE="On macOS, getopt can be installed with 'sudo port install getopt'\n"
+ fi
+ fi
+ fi
+ else
+ GETOPT_CMD="$(command -v getopt)"
+ fi
+
+ ## Error if no suitable getopt command found
+ if [[ -z ${GETOPT_CMD} ]]; then
+ printf "ERROR: GNU getopt not found. Please install GNU compatible 'getopt'\n\n%s" "${MESSAGE}" 1>&2
+ exit 1
+ fi
}
######
# parse_command - parse command line options using GNU getopt
##########################
parse_command() {
- get_getopt
-
- ## Parse Arguments with GNU getopt
- PARSED_ARGUMENTS=$(
- $GETOPT_CMD -o jtdhekmc:v: \
- --long acl,auth_token,enc,tls,make_tls_cert,tls_client_auth:,dgraph_version:,debug,help \
- -n 'compose-setup.sh' -- "$@"
- )
- if [ $? != 0 ] ; then usage; exit 1 ; fi
- eval set -- "$PARSED_ARGUMENTS"
-
- ## Defaults
- DEBUG="false"
- ACL_ENABLED="false"
- TOKEN_ENABLED="false"
- ENC_ENABLED="false"
- TLS_ENABLED="false"
- TLS_CLIENT_AUTH="VERIFYIFGIVEN"
- TLS_MAKE_CERTS="false"
-
- ## Process Agurments
- while true; do
- case "$1" in
- -j | --acl) ACL_ENABLED="true"; shift ;;
- -t | --auth_token) TOKEN_ENABLED=true; shift ;;
- -d | --debug) DEBUG="true"; shift ;;
- -h | --help) usage; exit;;
- -e | --enc) ENC_ENABLED="true"; shift ;;
- -k | --tls) TLS_ENABLED="true"; shift ;;
- -m | --make_tls_cert) TLS_MAKE_CERTS="true"; shift;;
- -c | --tls_client_auth) TLS_CLIENT_AUTH="$2"; shift 2;;
- -v | --dgraph_version) DGRAPH_VERSION="$2"; shift 2;;
- --) shift; break ;;
- *) break ;;
- esac
- done
-
- ## Set DGRAPH_VERSION to latest if it is not set yet
- [[ -z $DGRAPH_VERSION ]] && get_grep && DGRAPH_VERSION=$(curl -s https://get.dgraph.io/latest | $GREP_CMD -oP '(?<=tag_name":")[^"]*')
+ get_getopt
+
+ ## Parse Arguments with GNU getopt
+ PARSED_ARGUMENTS=$(
+ ${GETOPT_CMD} -o jtdhekmc:v: \
+ --long acl,auth_token,enc,tls,make_tls_cert,tls_client_auth:,dgraph_version:,debug,help \
+ -n 'compose-setup.sh' -- "$@"
+ )
+ if [[ $? != 0 ]]; then
+ usage
+ exit 1
+ fi
+ eval set -- "${PARSED_ARGUMENTS}"
+
+ ## Defaults
+ DEBUG="false"
+ ACL_ENABLED="false"
+ TOKEN_ENABLED="false"
+ ENC_ENABLED="false"
+ TLS_ENABLED="false"
+ TLS_CLIENT_AUTH="VERIFYIFGIVEN"
+ TLS_MAKE_CERTS="false"
+
+ ## Process Arguments
+ while true; do
+ case "$1" in
+ -j | --acl)
+ ACL_ENABLED="true"
+ shift
+ ;;
+ -t | --auth_token)
+ TOKEN_ENABLED=true
+ shift
+ ;;
+ -d | --debug)
+ DEBUG="true"
+ shift
+ ;;
+ -h | --help)
+ usage
+ exit
+ ;;
+ -e | --enc)
+ ENC_ENABLED="true"
+ shift
+ ;;
+ -k | --tls)
+ TLS_ENABLED="true"
+ shift
+ ;;
+ -m | --make_tls_cert)
+ TLS_MAKE_CERTS="true"
+ shift
+ ;;
+ -c | --tls_client_auth)
+ TLS_CLIENT_AUTH="$2"
+ shift 2
+ ;;
+ -v | --dgraph_version)
+ DGRAPH_VERSION="$2"
+ shift 2
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *) break ;;
+ esac
+ done
+
+ ## Set DGRAPH_VERSION to latest if it is not set yet
+ [[ -z ${DGRAPH_VERSION} ]] && get_grep && DGRAPH_VERSION=$(curl -s https://get.dgraph.io/latest | ${GREP_CMD} -oP '(?<=tag_name":")[^"]*')
}
######
# create_certs - creates cert and keys
##########################
create_certs() {
- command -v docker > /dev/null || \
- { echo "[ERROR]: 'docker' command not not found" 1>&2; exit 1; }
- docker version > /dev/null || \
- { echo "[ERROR]: docker not accessible for '$USER'" 1>&2; exit 1; }
-
- if [[ "$TLS_MAKE_CERTS" == "true" ]]; then
- [[ -z $DGRAPH_VERSION ]] && { echo "[ERROR]: 'DGRAPH_VERSION' not set. Aborting." 1>&2; exit 1; }
- rm --force $PWD/data/tls/*.{crt,key}
- docker run \
- --tty \
- --volume $PWD/data/tls:/tls dgraph/dgraph:$DGRAPH_VERSION \
- dgraph cert --dir /tls --client backupuser --nodes "localhost,alpha1,zero1,ratel" --duration 365
- fi
+ command -v docker >/dev/null ||
+ {
+ echo "[ERROR]: 'docker' command not not found" 1>&2
+ exit 1
+ }
+ docker version >/dev/null ||
+ {
+ echo "[ERROR]: docker not accessible for '${USER}'" 1>&2
+ exit 1
+ }
+
+ if [[ ${TLS_MAKE_CERTS} == "true" ]]; then
+ [[ -z ${DGRAPH_VERSION} ]] && {
+ echo "[ERROR]: 'DGRAPH_VERSION' not set. Aborting." 1>&2
+ exit 1
+ }
+ rm --force "${PWD}"/data/tls/*.{crt,key}
+ docker run \
+ --tty \
+ --volume "${PWD}"/data/tls:/tls dgraph/dgraph:"${DGRAPH_VERSION}" \
+ dgraph cert --dir /tls --client backupuser --nodes "localhost,alpha1,zero1,ratel" --duration 365
+ fi
}
######
# config_compose - configures .env and data/config/config.tml
##########################
config_compose() {
- if [[ $DEBUG == "true" ]]; then
- set -ex
- else
- set -e
- fi
-
- CFGPATH="./data/config"
- mkdir -p ./data/config
- [[ -f $CFGPATH/config.toml ]] && rm $CFGPATH/config.toml
- touch $CFGPATH/config.toml
-
- ## configure defaults
- echo "whitelist = '10.0.0.0/8,172.16.0.0/12,192.168.0.0/16'" >> "$CFGPATH/config.toml"
- echo "lru_mb = 1024" >> "$CFGPATH/config.toml"
-
- ## configure if user specifies
- [[ $ACL_ENABLED == "true" ]] && \
- echo "--acl \"secret-file=/dgraph/acl/hmac_secret_file;\"" >> "$CFGPATH/config.toml"
- [[ $TOKEN_ENABLED == "true" ]] && \
- echo "auth_token = '$(cat ./data/token/auth_token_file)'" >> "$CFGPATH/config.toml"
- [[ $ENC_ENABLED == "true" ]] && \
- echo "--encryption \"key-file=/dgraph/enc/enc_key_file;\"" >> "$CFGPATH/config.toml"
- [[ $TLS_ENABLED == "true" ]] &&
- cat <<-TLS_CONFIG >> $CFGPATH/config.toml
-tls_client_auth = '$TLS_CLIENT_AUTH'
-TLS_CONFIG
-
- ## configure dgraph version
- echo "DGRAPH_VERSION=$DGRAPH_VERSION" > .env
- cp *backup*.sh data
+ if [[ ${DEBUG} == "true" ]]; then
+ set -ex
+ else
+ set -e
+ fi
+
+ CFGPATH="./data/config"
+ mkdir -p ./data/config
+ [[ -f ${CFGPATH}/config.toml ]] && rm "${CFGPATH}"/config.toml
+ touch "${CFGPATH}"/config.toml
+
+ ## configure defaults
+ echo "whitelist = '10.0.0.0/8,172.16.0.0/12,192.168.0.0/16'" >>"${CFGPATH}/config.toml"
+ echo "lru_mb = 1024" >>"${CFGPATH}/config.toml"
+
+ ## configure if user specifies
+ [[ ${ACL_ENABLED} == "true" ]] &&
+ echo '--acl "secret-file=/dgraph/acl/hmac_secret_file;"' >>"${CFGPATH}/config.toml"
+ [[ ${TOKEN_ENABLED} == "true" ]] &&
+ echo "auth_token = '$(cat ./data/token/auth_token_file)'" >>"${CFGPATH}/config.toml"
+ [[ ${ENC_ENABLED} == "true" ]] &&
+ echo '--encryption "key-file=/dgraph/enc/enc_key_file;"' >>"${CFGPATH}/config.toml"
+ [[ ${TLS_ENABLED} == "true" ]] &&
+ cat <<-TLS_CONFIG >>"${CFGPATH}"/config.toml
+ tls_client_auth = '${TLS_CLIENT_AUTH}'
+ TLS_CONFIG
+
+ ## configure dgraph version
+ echo "DGRAPH_VERSION=${DGRAPH_VERSION}" >.env
+ cp *backup*.sh data
}
main $@
diff --git a/contrib/config/backups/client/dgraph-backup.sh b/contrib/config/backups/client/dgraph-backup.sh
index 0cc182d0823..5d1a4e95e02 100755
--- a/contrib/config/backups/client/dgraph-backup.sh
+++ b/contrib/config/backups/client/dgraph-backup.sh
@@ -10,158 +10,209 @@
# main - runs the script
##########################
main() {
- parse_command $@
- run_backup
+ parse_command "$@"
+ run_backup
}
######
# usage - print friendly usage statement
##########################
usage() {
- cat <<-USAGE 1>&2
-Run Binary Backup
-
-Usage:
- $0 [FLAGS] --location [LOCATION]
-
-Flags:
- -a, --alpha string Dgraph alpha HTTP/S server (default "localhost:8080")
- -i, --api_type API Type of REST or GraphQL (default "GraphQL")
- -t, --auth_token string The auth token passed to the server
- -d, --debug Enable debug in output
- -f, --force_full Force a full backup instead of an incremental backup.
- -h, --help Help for $0
- -l, --location Sets the source location URI (required).
- --minio_secure Backups to MinIO will use https instead of http
- -p, --password Password of the user if login is required.
- --subpath Directory Path To Use to store backups, (default "dgraph_\$(date +%Y%m%d)")
- --tls_cacert filepath The CA Cert file used to verify server certificates. Required for enabling TLS.
- --tls_cert string (optional) The Cert file provided by the client to the server.
- --tls_key string (optional) The private key file provided by the client to the server.
- -u, --user Username if login is required.
-
-USAGE
+ cat <<-USAGE 1>&2
+ Run Binary Backup
+
+ Usage:
+ $0 [FLAGS] --location [LOCATION]
+
+ Flags:
+ -a, --alpha string Dgraph alpha HTTP/S server (default "localhost:8080")
+ -i, --api_type API type, either REST or GraphQL (default "GraphQL")
+ -t, --auth_token string The auth token passed to the server
+ -d, --debug Enable debug in output
+ -f, --force_full Force a full backup instead of an incremental backup.
+ -h, --help Help for $0
+ -l, --location Sets the source location URI (required).
+ --minio_secure Backups to MinIO will use https instead of http
+ -p, --password Password of the user if login is required.
+ --subpath Directory path used to store backups (default "dgraph_\$(date +%Y%m%d)")
+ --tls_cacert filepath The CA Cert file used to verify server certificates. Required for enabling TLS.
+ --tls_cert string (optional) The Cert file provided by the client to the server.
+ --tls_key string (optional) The private key file provided by the client to the server.
+ -u, --user Username if login is required.
+
+ USAGE
}
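+######
+# Example invocations (illustrative only; hosts, paths, and bucket names are placeholders):
+#   ./dgraph-backup.sh --location minio://gateway:9000/dgraph_backups
+#   ./dgraph-backup.sh --alpha localhost:8080 --force_full --location /dgraph/backups
+##########################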
######
# get_getopt - find GNU getopt or print error message
##########################
get_getopt() {
- unset GETOPT_CMD
-
- ## Check for GNU getopt compatibility
- if [[ "$(getopt --version)" =~ "--" ]]; then
- local SYSTEM="$(uname -s)"
- if [[ "${SYSTEM,,}" == "freebsd" ]]; then
- ## Check FreeBSD install location
- if [[ -f "/usr/local/bin/getopt" ]]; then
- GETOPT_CMD="/usr/local/bin/getopt"
- else
- ## Save FreeBSD Instructions
- local MESSAGE="On FreeBSD, compatible getopt can be installed with 'sudo pkg install getopt'"
- fi
- elif [[ "${SYSTEM,,}" == "darwin" ]]; then
- ## Check HomeBrew install location
- if [[ -f "/usr/local/opt/gnu-getopt/bin/getopt" ]]; then
- GETOPT_CMD="/usr/local/opt/gnu-getopt/bin/getopt"
- ## Check MacPorts install location
- elif [[ -f "/opt/local/bin/getopt" ]]; then
- GETOPT_CMD="/opt/local/bin/getopt"
- else
- ## Save MacPorts or HomeBrew Instructions
- if command -v brew > /dev/null; then
- local MESSAGE="On macOS, gnu-getopt can be installed with 'brew install gnu-getopt'\n"
- elif command -v port > /dev/null; then
- local MESSAGE="On macOS, getopt can be installed with 'sudo port install getopt'\n"
- fi
- fi
- fi
- else
- GETOPT_CMD="$(command -v getopt)"
- fi
-
- ## Error if no suitable getopt command found
- if [[ -z $GETOPT_CMD ]]; then
- printf "ERROR: GNU getopt not found. Please install GNU compatible 'getopt'\n\n%s" "$MESSAGE" 1>&2
- exit 1
- fi
+ unset GETOPT_CMD
+
+ ## Check for GNU getopt compatibility
+ if [[ "$(getopt --version)" =~ "--" ]]; then
+ local SYSTEM="$(uname -s)"
+ if [[ ${SYSTEM,,} == "freebsd" ]]; then
+ ## Check FreeBSD install location
+ if [[ -f "/usr/local/bin/getopt" ]]; then
+ GETOPT_CMD="/usr/local/bin/getopt"
+ else
+ ## Save FreeBSD Instructions
+ local MESSAGE="On FreeBSD, compatible getopt can be installed with 'sudo pkg install getopt'"
+ fi
+ elif [[ ${SYSTEM,,} == "darwin" ]]; then
+ ## Check HomeBrew install location
+ if [[ -f "/usr/local/opt/gnu-getopt/bin/getopt" ]]; then
+ GETOPT_CMD="/usr/local/opt/gnu-getopt/bin/getopt"
+ ## Check MacPorts install location
+ elif [[ -f "/opt/local/bin/getopt" ]]; then
+ GETOPT_CMD="/opt/local/bin/getopt"
+ else
+ ## Save MacPorts or HomeBrew Instructions
+ if command -v brew >/dev/null; then
+ local MESSAGE="On macOS, gnu-getopt can be installed with 'brew install gnu-getopt'\n"
+ elif command -v port >/dev/null; then
+ local MESSAGE="On macOS, getopt can be installed with 'sudo port install getopt'\n"
+ fi
+ fi
+ fi
+ else
+ GETOPT_CMD="$(command -v getopt)"
+ fi
+
+ ## Error if no suitable getopt command found
+ if [[ -z ${GETOPT_CMD} ]]; then
+ printf "ERROR: GNU getopt not found. Please install GNU compatible 'getopt'\n\n%s" "${MESSAGE}" 1>&2
+ exit 1
+ fi
}
######
# parse_command - parse command line options using GNU getopt
##########################
parse_command() {
- get_getopt
-
- ## Parse Arguments with GNU getopt
- PARSED_ARGUMENTS=$(
- $GETOPT_CMD -o a:i:t:dfhl:p:u: \
- --long alpha:,api_type:,auth_token:,debug,force_full,help,location:,minio_secure,password:,subpath:,tls_cacert:,tls_cert:,tls_key:,user: \
- -n 'dgraph-backup.sh' -- "$@"
- )
- if [ $? != 0 ] ; then usage; exit 1 ; fi
- eval set -- "$PARSED_ARGUMENTS"
-
- ## Defaults
- DEBUG="false"
- ALPHA_HOST="localhost:8080"
- BACKUP_DESTINATION=""
- SUBPATH=dgraph_$(date +%Y%m%d)
- API_TYPE="graphql"
- MINIO_SECURE=false
- AUTH_TOKEN=""
- FORCE_FULL="false"
-
- ## Process Agurments
- while true; do
- case "$1" in
- -a | --alpha) ALPHA_HOST="$2"; shift 2 ;;
- -i | --api_type) API_TYPE=${2,,}; shift 2;;
- -t | --auth_token) AUTH_TOKEN="$2"; shift 2 ;;
- -d | --debug) DEBUG=true; shift ;;
- -f | --force_full) FORCE_FULL=true; shift ;;
- -h | --help) usage; exit;;
- -m | --minio_secure) MINIO_SECURE=true; shift ;;
- -l | --location) BACKUP_DESTINATION="$2"; shift 2 ;;
- -p | --password) ACL_PASSWORD="$2"; shift 2;;
- --subpath) SUBPATH="$2"; shift 2 ;;
- --tls_cacert) CACERT_PATH="$2"; shift 2 ;;
- --tls_cert) CLIENT_CERT_PATH="$2"; shift 2;;
- --tls_key) CLIENT_KEY_PATH="$2"; shift 2;;
- -u | --user) ACL_USER="$2"; shift 2;;
- --) shift; break ;;
- *) break ;;
- esac
- done
-
- ## Check required variable was set
- if [[ -z "$BACKUP_DESTINATION" ]]; then
- printf "ERROR: location was not specified!!\n\n"
- usage
- exit 1
- fi
+ get_getopt
+
+ ## Parse Arguments with GNU getopt
+ PARSED_ARGUMENTS=$(
+ ${GETOPT_CMD} -o a:i:t:dfhl:p:u: \
+ --long alpha:,api_type:,auth_token:,debug,force_full,help,location:,minio_secure,password:,subpath:,tls_cacert:,tls_cert:,tls_key:,user: \
+ -n 'dgraph-backup.sh' -- "$@"
+ )
+ if [[ $? != 0 ]]; then
+ usage
+ exit 1
+ fi
+ eval set -- "${PARSED_ARGUMENTS}"
+
+ ## Defaults
+ DEBUG="false"
+ ALPHA_HOST="localhost:8080"
+ BACKUP_DESTINATION=""
+ SUBPATH=dgraph_$(date +%Y%m%d)
+ API_TYPE="graphql"
+ MINIO_SECURE=false
+ AUTH_TOKEN=""
+ FORCE_FULL="false"
+
+ ## Process Arguments
+ while true; do
+ case "$1" in
+ -a | --alpha)
+ ALPHA_HOST="$2"
+ shift 2
+ ;;
+ -i | --api_type)
+ API_TYPE=${2,,}
+ shift 2
+ ;;
+ -t | --auth_token)
+ AUTH_TOKEN="$2"
+ shift 2
+ ;;
+ -d | --debug)
+ DEBUG=true
+ shift
+ ;;
+ -f | --force_full)
+ FORCE_FULL=true
+ shift
+ ;;
+ -h | --help)
+ usage
+ exit
+ ;;
+ -m | --minio_secure)
+ MINIO_SECURE=true
+ shift
+ ;;
+ -l | --location)
+ BACKUP_DESTINATION="$2"
+ shift 2
+ ;;
+ -p | --password)
+ ACL_PASSWORD="$2"
+ shift 2
+ ;;
+ --subpath)
+ SUBPATH="$2"
+ shift 2
+ ;;
+ --tls_cacert)
+ CACERT_PATH="$2"
+ shift 2
+ ;;
+ --tls_cert)
+ CLIENT_CERT_PATH="$2"
+ shift 2
+ ;;
+ --tls_key)
+ CLIENT_KEY_PATH="$2"
+ shift 2
+ ;;
+ -u | --user)
+ ACL_USER="$2"
+ shift 2
+ ;;
+ --)
+ shift
+ break
+ ;;
+ *) break ;;
+ esac
+ done
+
+ ## Check required variable was set
+ if [[ -z ${BACKUP_DESTINATION} ]]; then
+ printf "ERROR: location was not specified!!\n\n"
+ usage
+ exit 1
+ fi
}
######
# run_backup - using user specified options, execute backup
##########################
run_backup() {
- if [[ $DEBUG == "true" ]]; then
- set -ex
- else
- set -e
- fi
-
- [[ -f ./backup_helper.sh ]] || { echo "ERROR: Backup Script library (./backup_helper.sh) missing" 1>&2; exit 1; }
- source ./backup_helper.sh
-
- ## login if user was specified
- if ! [[ -z $ACL_USER ]]; then
- ACCESS_TOKEN=$(get_token $ACL_USER $ACL_PASSWORD $AUTH_TOKEN)
- fi
-
- ## perform backup with valid options set
- backup "$ACCESS_TOKEN" "$AUTH_TOKEN"
+ if [[ ${DEBUG} == "true" ]]; then
+ set -ex
+ else
+ set -e
+ fi
+
+ [[ -f ./backup_helper.sh ]] || {
+ echo "ERROR: Backup Script library (./backup_helper.sh) missing" 1>&2
+ exit 1
+ }
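+ ## backup_helper.sh provides the get_token() and backup() functions used below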
+ source ./backup_helper.sh
+
+ ## login if user was specified
+ if [[ -n ${ACL_USER} ]]; then
+ ACCESS_TOKEN=$(get_token "${ACL_USER}" "${ACL_PASSWORD}" "${AUTH_TOKEN}")
+ fi
+
+ ## perform backup with valid options set
+ backup "${ACCESS_TOKEN}" "${AUTH_TOKEN}"
}
main $@
diff --git a/contrib/config/backups/gcp/README.md b/contrib/config/backups/gcp/README.md
index a91f4b47aef..c521f8ba261 100644
--- a/contrib/config/backups/gcp/README.md
+++ b/contrib/config/backups/gcp/README.md
@@ -1,12 +1,13 @@
# Binary Backups to Google Cloud Storage
-Binary backups can use [Google Cloud Storage](https://cloud.google.com/storage) for object storage using [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html).
+Binary backups can use [Google Cloud Storage](https://cloud.google.com/storage) for object storage
+using [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html).
## Provisioning GCS
Some example scripts have been provided to illustrate how to create a bucket in GCS.
-* [terraform](terraform/README.md) - terraform scripts to provision GCS bucket
+- [terraform](terraform/README.md) - terraform scripts to provision GCS bucket
## Setting up the Environment
@@ -14,30 +15,37 @@ Some example scripts have been provided to illustrate how to create a bucket in
You will need these tools:
-* Docker Environment
- * [Docker](https://docs.docker.com/get-docker/) - container engine platform
- * [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running dokcer containers
-* Kubernetes Environment
- * [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting with Kubenetes platform
- * [helm](https://helm.sh/docs/intro/install/) - deploys Kuberetes packages called helm charts
- * [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that will be applied to Kubernetes cluster
- * [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm chart deployments
+- Docker Environment
+ - [Docker](https://docs.docker.com/get-docker/) - container engine platform
+  - [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running Docker
+    containers
+- Kubernetes Environment
+ - [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting
+    with the Kubernetes platform
+  - [helm](https://helm.sh/docs/intro/install/) - deploys Kubernetes packages called helm charts
+ - [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that
+ will be applied to Kubernetes cluster
+ - [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm chart
+ deployments
### Using Docker Compose
-A `docker-compose.yml` configuration is provided that will run the MinIO GCS gateway and Dgraph cluster.
+A `docker-compose.yml` configuration is provided that will run the MinIO GCS gateway and Dgraph
+cluster.
#### Configuring Docker Compose
The Docker Compose configuration `docker-compose.yml` will require the following files:
- * `credentials.json` - credentials that grant access to the GCS bucket
- * `minio.env` - that holds `MINIO_ACCESS_KEY` and `MINIO_SECRET_KEY` values.
- * `env.sh` - tha stores `PROJECT_ID` and `BACKUP_BUCKET_NAME`.
+- `credentials.json` - credentials that grant access to the GCS bucket
+- `minio.env` - holds `MINIO_ACCESS_KEY` and `MINIO_SECRET_KEY` values.
+- `env.sh` - stores `PROJECT_ID` and `BACKUP_BUCKET_NAME`.
For convenience, the [terraform](terraform/README.md) scripts can generate a random password.
-The `minio.env` will be used by both Dgraph alpha node(s) and the [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) server. You will need to create a file like this:
+The `minio.env` will be used by both Dgraph alpha node(s) and the
+[MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) server. You will need to
+create a file like this:
```bash
# minio.env
@@ -53,7 +61,7 @@ export PROJECT_ID=
export BACKUP_BUCKET_NAME=
```
-#### Using Docker Compose
+#### Running with Docker Compose
```bash
## source script for envvars: PROJECT_ID and BACKUP_BUCKET_NAME
@@ -64,8 +72,8 @@ docker-compose up --detach
#### Access Minio and Ratel UI
-* MinIO UI: http://localhost:9000
-* Ratel UI: http://localhost:8000
+- MinIO UI: http://localhost:9000
+- Ratel UI: http://localhost:8000
#### Clean Up Docker Environment
@@ -76,11 +84,15 @@ docker-compose rm
### Using Kubernetes with Helm Charts
-For Kubernetes, you can deploy [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html), Dgraph cluster, and a Kubernetes Cronjob that triggers backups using [helm](https://helm.sh/docs/intro/install/).
+For Kubernetes, you can deploy
+[MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html), Dgraph cluster, and a
+Kubernetes Cronjob that triggers backups using [helm](https://helm.sh/docs/intro/install/).
#### Configuring Secrets Values
-These values are generated if you used either [terraform](terraform/README.md) scripts. If you already have an existing GCS bucket that you would like to use, you will need to create `charts/dgraph_secrets.yaml` and `charts/minio_secrets.yaml` files.
+These values are generated if you used the [terraform](terraform/README.md) scripts. If you
+already have an existing GCS bucket that you would like to use, you will need to create
+`charts/dgraph_secrets.yaml` and `charts/minio_secrets.yaml` files.
For the `charts/dgraph_secrets.yaml`, you would create a file like this:
@@ -104,7 +116,9 @@ gcsgateway:
#### Configuring Environments
-Create an `env.sh` file to store `BACKUP_BUCKET_NAME` and `PROJECT_ID`. If [terraform](terraform/README.md) scripts were used to create the GCS bucket, then these scripts will have already generated this file.
+Create an `env.sh` file to store `BACKUP_BUCKET_NAME` and `PROJECT_ID`. If
+[terraform](terraform/README.md) scripts were used to create the GCS bucket, then these scripts will
+have already generated this file.
This is the same file used for the Docker Compose environment and will look like this:
@@ -116,7 +130,10 @@ export BACKUP_BUCKET_NAME=
#### Deploy Using Helmfile
-If you have [helmfile](https://github.com/roboll/helmfile#installation) and [helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) and Dgraph cluster with the following:
+If you have [helmfile](https://github.com/roboll/helmfile#installation) and
+[helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy
+[MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) and Dgraph cluster with the
+following:
```bash
## source script for envvars: PROJECT_ID and BACKUP_BUCKET_NAME
@@ -152,7 +169,7 @@ helm install "my-release" \
#### Access Resources
-For MinIO UI, you can use this to access it at http://localhost:9000:
+For MinIO UI, you can use this to access it at http://localhost:9000:
```bash
export MINIO_POD_NAME=$(
@@ -211,8 +228,11 @@ helm delete gcsgw --namespace minio
## Triggering a Backup
-This is run from the host with the alpha node accessible on localhost at port `8080`. Can be done by running the docker-compose environment, or running `kubectl port-forward pod/dgraph-dgraph-alpha-0 8080:8080`.
-In the docker-compose environment, the host for `MINIO_HOST` is `gateway`. In the Kubernetes environment, using the scripts above, the `MINIO_HOST` is `gcsgw-minio.minio.svc`.
+This is run from the host with the alpha node accessible on localhost at port `8080`. This can be
+done by running the docker-compose environment, or by running
+`kubectl port-forward pod/dgraph-dgraph-alpha-0 8080:8080`. In the docker-compose environment, the
+host for `MINIO_HOST` is `gateway`. In the Kubernetes environment, using the scripts above, the
+`MINIO_HOST` is `gcsgw-minio.minio.svc`.
### Using GraphQL
diff --git a/contrib/config/backups/gcp/terraform/README.md b/contrib/config/backups/gcp/terraform/README.md
index 8ffa49f7415..677eb35c67b 100644
--- a/contrib/config/backups/gcp/terraform/README.md
+++ b/contrib/config/backups/gcp/terraform/README.md
@@ -2,37 +2,48 @@
## About
-This script will create the required resources needed to create a bucket in Google Storage Bucket using the [`simple-bucket`](https://github.com/terraform-google-modules/terraform-google-cloud-storage/tree/master/modules/simple_bucket) Terraform module. These scripts will also create a `credentials.json` that will have access to the storage bucket, which is needed for the [MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) and optionally generate random MinIO access key and secret key.
+This script will create the required resources needed to create a bucket in Google Storage Bucket
+using the
+[`simple-bucket`](https://github.com/terraform-google-modules/terraform-google-cloud-storage/tree/master/modules/simple_bucket)
+Terraform module. These scripts will also create a `credentials.json` that will have access to the
+storage bucket, which is needed for the
+[MinIO GCS Gateway](https://docs.min.io/docs/minio-gateway-for-gcs.html) and optionally generate
+random MinIO access key and secret key.
## Prerequisites
You need the following installed to use this automation:
-* [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and required to access Google Cloud.
- * Google Project with billing enabled
- * `gcloud` logged into IAM account with roles added:
- * `serviceusage.apiKeys.create`
- * `clientauthconfig.clients.create`
- * `iam.serviceAccountKeys.create`
-* [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create templates
+- [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and
+ required to access Google Cloud.
+ - Google Project with billing enabled
+ - `gcloud` logged into IAM account with roles added:
+ - `serviceusage.apiKeys.create`
+ - `clientauthconfig.clients.create`
+ - `iam.serviceAccountKeys.create`
+- [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create
+ templates
## Configuration
You will need to define the following variables:
-* Required Variables:
- * `region` (required) - the region where the GCS bucket will be created
- * `project_id` (required) - a globally unique name for the Google project that will contain the GCS bucket
- * `name` (default = `my-dgraph-backups`) - globally unique name of the GCS bucket
-* Optional Variables:
- * `minio_access_key` - specify an access key or have terraform generate a random access key
- * `minio_secret_key` - specify a secret key or have terraform generate a random secret key
+- Required Variables:
+ - `region` (required) - the region where the GCS bucket will be created
+ - `project_id` (required) - a globally unique name for the Google project that will contain the
+ GCS bucket
+ - `name` (default = `my-dgraph-backups`) - globally unique name of the GCS bucket
+- Optional Variables:
+ - `minio_access_key` - specify an access key or have terraform generate a random access key
+ - `minio_secret_key` - specify a secret key or have terraform generate a random secret key
## Steps
### Define Variables
-You can define these when prompted, or in `terrafrom.tfvars` file, or through command line variables, e.g. `TF_VAR_project_id`, `TF_VAR_project_id`, and `TF_VAR_name`. Below is an example `terraform.tfvars` file:
+You can define these when prompted, in a `terraform.tfvars` file, or through command line
+variables, e.g. `TF_VAR_region`, `TF_VAR_project_id`, and `TF_VAR_name`. Below is an example
+`terraform.tfvars` file:
```terraform
# terraform.tfvars
diff --git a/contrib/config/backups/nfs/README.md b/contrib/config/backups/nfs/README.md
index 2b36112c808..cebeb0931a5 100644
--- a/contrib/config/backups/nfs/README.md
+++ b/contrib/config/backups/nfs/README.md
@@ -1,62 +1,78 @@
# Binary Backups to Network File System
-When using a file system for binary backups, NFS is recommended. NFS will allow *"backups work seamlessly across multiple machines and/or containers"*.
-
-* [Overview of NFS Servers](#overview-of-nfs-servers)
-* [Provision NFS Server Instructions](#provision-nfs-server-instructions)
- * [Using Remote Cloud Solutions](#using-remote-cloud-solutions)
- * [Using the Rook Solution](#using-the-rook-solution)
- * [Using a Local Vagrant Solution](#using-a-local-vagrant-solution)
- * [Vagrant Server](#vagrant-server)
- * [Vagrant Client (Optional)](#vagrant-client-optional)
- * [Vagrant Cleanup](#vagrant-cleanup)
-* [Testing NFS with Docker Compose](#testing-nfs-with-docker-compose)
- * [Setup Env Vars for Docker Compose](#setup-env-vars-for-docker-compose)
- * [Start Docker Compose with NFS Volume](#start-docker-compose-with-nfs-volume)
- * [Docker Cleanup](#docker-cleanup)
-* [Testing NFS with Kubernetes](#testing-nfs-with-kubernetes)
- * [Setup Env Vars for Kubernetes](#setup-env-vars-for-kubernetes)
- * [Deploy Using Helmfile](#deploy-using-helmfile)
- * [Cleanup Using Helmfile](#cleanup-using-helmfile)
- * [Minikube Notes](#minikube-notes)
- * [Minikube with Virtualbox](#minikube-with-virtualbox)
- * [Minikube with KVM](#minikube-with-kvm)
- * [Verify NFS between Minikube and Vagrant](#verify-nfs-between-minikube-and-vagrant)
-* [Accessing Dgraph Services](#accessing-dgraph-services)
-* [Trigger a Backup](#trigger-a-backup)
+When using a file system for binary backups, NFS is recommended: with NFS, _"backups work
+seamlessly across multiple machines and/or containers"_.
+
+- [Overview of NFS Servers](#overview-of-nfs-servers)
+- [Provision NFS Server Instructions](#provision-nfs-server-instructions)
+ - [Using Remote Cloud Solutions](#using-remote-cloud-solutions)
+ - [Using the Rook Solution](#using-the-rook-solution)
+ - [Using a Local Vagrant Solution](#using-a-local-vagrant-solution)
+ - [Vagrant Server](#vagrant-server)
+ - [Vagrant Client (Optional)](#vagrant-client-optional)
+ - [Vagrant Cleanup](#vagrant-cleanup)
+- [Testing NFS with Docker Compose](#testing-nfs-with-docker-compose)
+ - [Setup Env Vars for Docker Compose](#setup-env-vars-for-docker-compose)
+ - [Start Docker Compose with NFS Volume](#start-docker-compose-with-nfs-volume)
+ - [Docker Cleanup](#docker-cleanup)
+- [Testing NFS with Kubernetes](#testing-nfs-with-kubernetes)
+ - [Setup Env Vars for Kubernetes](#setup-env-vars-for-kubernetes)
+ - [Deploy Using Helmfile](#deploy-using-helmfile)
+ - [Cleanup Using Helmfile](#cleanup-using-helmfile)
+ - [Minikube Notes](#minikube-notes)
+ - [Minikube with Virtualbox](#minikube-with-virtualbox)
+ - [Minikube with KVM](#minikube-with-kvm)
+ - [Verify NFS between Minikube and Vagrant](#verify-nfs-between-minikube-and-vagrant)
+- [Accessing Dgraph Services](#accessing-dgraph-services)
+- [Trigger a Backup](#trigger-a-backup)
## Overview of NFS Servers
-You can use external NFS outside of the [Docker](https://www.docker.com/) or [Kubernetes](https://kubernetes.io/) cluster, or deploy a container offering NFS services.
-
-For production environments, using an NFS server external to the cluster can increase availability in an event where [Kubernetes](https://kubernetes.io/) services get interrupted. In more advanced scenarios, deploying a container offering NFS services where the storage is backed by high-speed storage such as [Ceph](https://ceph.io/) is beneficial for large datasets. In this latter scenario, secondary storage such as an object store by the cloud provider could be used for greater availability in event of where Kubernetes services or the [Kubernetes](https://kubernetes.io/) cluster itself has a failure event.
-
-This guide provides tips on how to back up Dgraph using NFS. For this scope, automation here covers the following:
-
-* External NFS
- * Cloud Providers
- * AWS [EFS](https://aws.amazon.com/efs/) ([Elastic File System](https://aws.amazon.com/efs/))
- * [Google Cloud Filestore](https://cloud.google.com/filestore)
- * Local NFS Server
- * [Vagrant](https://www.vagrantup.com/) managed virtual server that implements Linux kernel-based NFS Server
-* Internal NFS (deployed as a container)
- * [Rook](https://rook.io/) NFS operator to deploy a container offering NFS Server with [Genesha NFS Server](https://github.com/nfs-ganesha/nfs-ganesha/wiki)
+You can use external NFS outside of the [Docker](https://www.docker.com/) or
+[Kubernetes](https://kubernetes.io/) cluster, or deploy a container offering NFS services.
+
+For production environments, using an NFS server external to the cluster can increase availability
+in the event that [Kubernetes](https://kubernetes.io/) services get interrupted. In more advanced
+scenarios, deploying a container offering NFS services where the storage is backed by high-speed
+storage such as [Ceph](https://ceph.io/) is beneficial for large datasets. In this latter scenario,
+secondary storage such as an object store from the cloud provider could be used for greater
+availability in the event that Kubernetes services or the [Kubernetes](https://kubernetes.io/)
+cluster itself fail.
+
+This guide provides tips on how to back up Dgraph using NFS. The automation here covers the
+following:
+
+- External NFS
+ - Cloud Providers
+ - AWS [EFS](https://aws.amazon.com/efs/) ([Elastic File System](https://aws.amazon.com/efs/))
+ - [Google Cloud Filestore](https://cloud.google.com/filestore)
+ - Local NFS Server
+ - [Vagrant](https://www.vagrantup.com/) managed virtual server that implements Linux
+ kernel-based NFS Server
+- Internal NFS (deployed as a container)
+ - [Rook](https://rook.io/) NFS operator to deploy a container offering NFS Server with
+    [Ganesha NFS Server](https://github.com/nfs-ganesha/nfs-ganesha/wiki)
## Provision NFS Server Instructions
### Using Remote Cloud Solutions
-You can provision external NFS to use with your Dgraph cluster running on Kubernetes using these scripts. Unlike object storage, such as S3 or GCS, this storage will not be accessible from the public Internet and so can only be accessed from within a private subnet.
+You can provision external NFS to use with your Dgraph cluster running on Kubernetes using these
+scripts. Unlike object storage, such as S3 or GCS, this storage will not be accessible from the
+public Internet and so can only be accessed from within a private subnet.
-* Shell Scripts
- * [Google Cloud Filestore](gcfs-cli/README.md) - provision FileStore using `gcloud`
-* Terraform
- * [Google Cloud Filestore](gcfs-terraform/README.md) - use Filestore as NFS share on GKE.
- * [Amazon Elastic File System](efs-terraform/README.md) - use EFS as NFS share on EKS.
+- Shell Scripts
+ - [Google Cloud Filestore](gcfs-cli/README.md) - provision FileStore using `gcloud`
+- Terraform
+ - [Google Cloud Filestore](gcfs-terraform/README.md) - use Filestore as NFS share on GKE.
+ - [Amazon Elastic File System](efs-terraform/README.md) - use EFS as NFS share on EKS.
### Using the Rook Solution
-You can use an internal NFS server running on Kubernetes with [Rook](https://rook.io/) NFS Operator. To enable this, run the following before running the [Kubernetes Environment](#testing-nfs-with-kubernetes). Both of these steps are required for this feature:
+You can use an internal NFS server running on Kubernetes with [Rook](https://rook.io/) NFS Operator.
+To enable this, run the following before running the
+[Kubernetes Environment](#testing-nfs-with-kubernetes). Both of these steps are required for this
+feature:
```bash
## Download Rook NFS Operator Manifests
@@ -67,9 +83,19 @@ cp charts/rook/env.sh env.sh
### Using a Local Vagrant Solution
-The steps to configure NFS for your local operating system or distro can vary greatly†, so a [Vagrant](https://www.vagrantup.com/) example is provided. This should work [Virtualbox](https://www.virtualbox.org/) provider on Windows, Mac, and Linux, as [Virtualbox](https://www.virtualbox.org/) creates routable IP addresses available to the host. Therefore, this NFS server can be accessed from either [Docker](https://docs.docker.com/engine/) or [Minikube](https://github.com/kubernetes/minikube) environments.
+The steps to configure NFS for your local operating system or distro can vary greatly†,
+so a [Vagrant](https://www.vagrantup.com/) example is provided. This should work with the
+[Virtualbox](https://www.virtualbox.org/) provider on Windows, Mac, and Linux, as
+[Virtualbox](https://www.virtualbox.org/) creates routable IP addresses available to the host.
+Therefore, this NFS server can be accessed from either [Docker](https://docs.docker.com/engine/) or
+[Minikube](https://github.com/kubernetes/minikube) environments.
-† Linux and macOS have native NFS implementations with macOS NFS configuration varying between macOS versions. Windows Server has different [NFS Server implementations](https://docs.microsoft.com/en-us/windows-server/storage/nfs/nfs-overview) between Windows Server versions. For Windows 10, there are open source options such as [Cygwin](https://www.cygwin.com/) or you can use Linux through [WSL](https://docs.microsoft.com/en-us/windows/wsl/install-win10)
+† Linux and macOS have native NFS implementations with macOS NFS configuration varying
+between macOS versions. Windows Server has different
+[NFS Server implementations](https://docs.microsoft.com/en-us/windows-server/storage/nfs/nfs-overview)
+between Windows Server versions. For Windows 10, there are open source options such as
+[Cygwin](https://www.cygwin.com/) or you can use Linux through
+[WSL](https://docs.microsoft.com/en-us/windows/wsl/install-win10).
#### Vagrant Server
@@ -106,9 +132,11 @@ vagrant destroy
### Setup Env Vars for Docker Compose
-If you used automation from [Vagrant Solution](#using-local-vagrant-solution), you can skip this step.
+If you used automation from the [Vagrant Solution](#using-a-local-vagrant-solution), you can skip this
+step.
-Otherwise, you will need to create a file named `env.sh` and configure the IP address (or DNS name) and exported NFS shared file path:
+Otherwise, you will need to create a file named `env.sh` and configure the IP address (or DNS name)
+and exported NFS shared file path:
```bash
export NFS_PATH=""
@@ -137,9 +165,12 @@ docker volume ls | grep -q nfs_mount || docker volume rm nfs_nfsmount > /dev/nul
### Setup Env Vars for Kubernetes
-If you used automation from local [Vagrant Solution](#using-local-vagrant-solution), [Rook Solution](#using-rook-solution) cloud solution with [EFS](./efs-terraform/README.md) or [Google Cloud Filestore](./gcfs-terraform/README.md), you can skip this step.
+If you used automation from the local [Vagrant Solution](#using-a-local-vagrant-solution), the
+[Rook Solution](#using-the-rook-solution), or a cloud solution with [EFS](./efs-terraform/README.md)
+or [Google Cloud Filestore](./gcfs-terraform/README.md), you can skip this step.
-Otherwise, you will need to create a file named `env.sh` and configure the IP address (or DNS name) and exported NFS shared file path:
+Otherwise, you will need to create a file named `env.sh` and configure the IP address (or DNS name)
+and exported NFS shared file path:
```bash
export NFS_PATH=""
@@ -148,7 +179,9 @@ export NFS_SERVER=""
#### Deploy Using Helmfile
-If you have [helmfile](https://github.com/roboll/helmfile#installation) and [helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy Dgraph with NFS support for backups with this:
+If you have [helmfile](https://github.com/roboll/helmfile#installation) and
+[helm-diff](https://github.com/databus23/helm-diff) installed, you can deploy Dgraph with NFS
+support for backups with this:
```bash
## Source required environments variables
@@ -165,11 +198,14 @@ helmfile delete
### Minikube Notes
-If you are using NFS with [Vagrant Solution](#using-local-vagrant-solution), you will need to park [minikube](https://github.com/kubernetes/minikube) on the same private network as Vagrant.
+If you are using NFS with the [Vagrant Solution](#using-a-local-vagrant-solution), you will need to park
+[minikube](https://github.com/kubernetes/minikube) on the same private network as Vagrant.
#### Minikube with Virtualbox
-For [VirtualBox](https://www.virtualbox.org) environments, where both [Vagrant](https://www.vagrantup.com/) and [minikube](https://github.com/kubernetes/minikube) will use [Virtualbox](https://www.virtualbox.org), you can do the following:
+For [VirtualBox](https://www.virtualbox.org) environments, where both
+[Vagrant](https://www.vagrantup.com/) and [minikube](https://github.com/kubernetes/minikube) will
+use [Virtualbox](https://www.virtualbox.org), you can do the following:
```bash
## Vagrant should have been started with Virtualbox by default
@@ -184,7 +220,9 @@ minikube start --host-only-cidr='192.168.123.1/24'
#### Minikube with KVM
-When using vagrant with `libvirt` (see [vagrant-libvirt](https://github.com/vagrant-libvirt/vagrant-libvirt)), you can have [minikube](https://github.com/kubernetes/minikube) target the same network.
+When using vagrant with `libvirt` (see
+[vagrant-libvirt](https://github.com/vagrant-libvirt/vagrant-libvirt)), you can have
+[minikube](https://github.com/kubernetes/minikube) target the same network.
```bash
## Vagrant should have been started with KVM
@@ -201,7 +239,8 @@ minikube start --kvm-network nfs0
#### Verify NFS between Minikube and Vagrant
-Next, verify that NFS share works between the Vagrant NFS server and client Dgraph Alpha pod running in [minikube](https://github.com/kubernetes/minikube).
+Next, verify that NFS share works between the Vagrant NFS server and client Dgraph Alpha pod running
+in [minikube](https://github.com/kubernetes/minikube).
Create a file from the client:
@@ -226,9 +265,11 @@ logout
## Accessing Dgraph Services
-In the [Docker Compose Environment](#testing-nfs-with-docker-compose), Ratel UI will be accessible from http://localhost:8000 and Alpha from http://localhost:8080.
+In the [Docker Compose Environment](#testing-nfs-with-docker-compose), Ratel UI will be accessible
+from http://localhost:8000 and Alpha from http://localhost:8080.
-In a [Kubernetes Environment](#testing-nfs-with-kubernetes), you will need to use port-forward to access these from `localhost`.
+In a [Kubernetes Environment](#testing-nfs-with-kubernetes), you will need to use port-forward to
+access these from `localhost`.
For Dgraph Alpha, you can use this to access it at http://localhost:8080:
@@ -260,7 +301,11 @@ kubectl --namespace default port-forward $RATEL_POD_NAME 8000:8000
## Trigger a Backup
-In the [Kubernetes Environment](#testing-nfs-with-kubernetes), backups are scheduled automatically using the [Kubernetes CronJob](https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs/). As long as the services are available locally (see [Accessing Dgraph Services](#accessing-dgraph-services)), we can trigger a backup using a `curl` command.
+In the [Kubernetes Environment](#testing-nfs-with-kubernetes), backups are scheduled automatically
+using the
+[Kubernetes CronJob](https://kubernetes.io/docs/concepts/workloads/controllers/cron-jobs/). As long
+as the services are available locally (see [Accessing Dgraph Services](#accessing-dgraph-services)),
+we can trigger a backup using a `curl` command.
For the [Docker Compose Environment](#testing-nfs-with-docker-compose) you can do the following:
@@ -274,7 +319,8 @@ HEADER="Content-Type: application/json"
curl --silent --header "$HEADER" --request POST $ALPHA_HOST:8080/admin --data "$GRAPHQL"
```
-For [Kubernetes Environment](#testing-nfs-with-kubernetes), after running port-forward, you can do the following:
+For [Kubernetes Environment](#testing-nfs-with-kubernetes), after running port-forward, you can do
+the following:
```bash
ALPHA_HOST="localhost"
diff --git a/contrib/config/backups/nfs/charts/rook/fetch-operator.sh b/contrib/config/backups/nfs/charts/rook/fetch-operator.sh
index a9aaa28f956..5a80494f588 100755
--- a/contrib/config/backups/nfs/charts/rook/fetch-operator.sh
+++ b/contrib/config/backups/nfs/charts/rook/fetch-operator.sh
@@ -1,15 +1,18 @@
#!/usr/bin/env bash
-command -v git > /dev/null || \
- { echo "[ERROR]: 'git' command not not found" 1>&2; exit 1; }
+command -v git >/dev/null ||
+ {
+ echo "[ERROR]: 'git' command not found" 1>&2
+ exit 1
+ }
ROOK_VERSION="v1.4.7"
DEST_PATH="${PWD}/$(dirname "${BASH_SOURCE[0]}")/rook-nfs-operator-kustomize/base"
TEMP_PATH=$(mktemp -d)
-cd $TEMP_PATH
-git clone --single-branch --branch $ROOK_VERSION https://github.com/rook/rook.git 2> /dev/null
+cd "${TEMP_PATH}" || exit
+git clone --single-branch --branch "${ROOK_VERSION}" https://github.com/rook/rook.git 2>/dev/null
for MANIFEST in common.yaml provisioner.yaml operator.yaml; do
- cp $TEMP_PATH/rook/cluster/examples/kubernetes/nfs/$MANIFEST $DEST_PATH
+ cp "${TEMP_PATH}"/rook/cluster/examples/kubernetes/nfs/"${MANIFEST}" "${DEST_PATH}"
done
diff --git a/contrib/config/backups/nfs/charts/rook/helmify.sh b/contrib/config/backups/nfs/charts/rook/helmify.sh
index ea04840ae4b..72c3c76e271 100755
--- a/contrib/config/backups/nfs/charts/rook/helmify.sh
+++ b/contrib/config/backups/nfs/charts/rook/helmify.sh
@@ -8,42 +8,48 @@ dir=${chart}-kustomize
chart=${chart/.\//}
build() {
- if [ ! -d "$dir" ]; then
- echo "directory \"$dir\" does not exist. make a kustomize project there in order to generate a local helm chart at $chart/ from it!" 1>&2
- exit 1
- fi
+ if [[ ! -d ${dir} ]]; then
+ echo "directory \"${dir}\" does not exist. Make a kustomize project there in order to generate a local helm chart at ${chart}/ from it!" 1>&2
+ exit 1
+ fi
- mkdir -p $chart/templates
- echo "generating $chart/Chart.yaml" 1>&2
- cat <<EOF > $chart/Chart.yaml
+ mkdir -p "${chart}"/templates
+ echo "generating ${chart}/Chart.yaml" 1>&2
+ cat <<EOF >"${chart}"/Chart.yaml
apiVersion: v1
appVersion: "1.0"
description: A Helm chart for Kubernetes
-name: $chart
+name: ${chart}
version: 0.1.0
EOF
- echo "generating $chart/templates/NOTES.txt" 1>&2
- cat <<EOF > $chart/templates/NOTES.txt
-$chart has been installed as release {{ .Release.Name }}.
+ echo "generating ${chart}/templates/NOTES.txt" 1>&2
+ cat <<EOF >"${chart}"/templates/NOTES.txt
+${chart} has been installed as release {{ .Release.Name }}.
Run \`helm status {{ .Release.Name }}\` for more information.
Run \`helm delete --purge {{.Release.Name}}\` to uninstall.
EOF
- echo "running kustomize" 1>&2
- (cd $dir; kubectl kustomize overlays/$env) > $chart/templates/all.yaml
- echo "running helm lint" 1>&2
- helm lint $chart
- echo "generated following files:"
- tree $chart
+ echo "running kustomize" 1>&2
+ (
+ cd "${dir}" || exit
+ kubectl kustomize overlays/"${env}"
+ ) >"${chart}"/templates/all.yaml
+ echo "running helm lint" 1>&2
+ helm lint "${chart}"
+ echo "generated following files:"
+ tree "${chart}"
}
clean() {
- rm $chart/Chart.yaml
- rm $chart/templates/*.{yaml,txt}
+ rm "${chart}"/Chart.yaml
+ rm "${chart}"/templates/*.{yaml,txt}
}
-case "$cmd" in
- "build" ) build ;;
- "clean" ) clean ;;
- * ) echo "unsupported command: $cmd" 1>&2; exit 1 ;;
+case "${cmd}" in
+"build") build ;;
+"clean") clean ;;
+*)
+ echo "unsupported command: ${cmd}" 1>&2
+ exit 1
+ ;;
esac
diff --git a/contrib/config/backups/nfs/efs-terraform/README.md b/contrib/config/backups/nfs/efs-terraform/README.md
index 1ce507c50f8..b256e277e4f 100644
--- a/contrib/config/backups/nfs/efs-terraform/README.md
+++ b/contrib/config/backups/nfs/efs-terraform/README.md
@@ -1,54 +1,75 @@
# Amazon Elastic File Services with Terraform
-These [Terraform](https://www.terraform.io/) scripts and modules will create the resources required to support an NFS server instance using [Amazon Elastic File Services](https://aws.amazon.com/efs/).
+These [Terraform](https://www.terraform.io/) scripts and modules will create the resources required
+to support an NFS server instance using [Amazon Elastic File Services](https://aws.amazon.com/efs/).
This automation script will create the following resources:
-* [EFS](https://aws.amazon.com/efs/) Server
-* SG to allow EKS worker nodes to access the [EFS](https://aws.amazon.com/efs/) Server (if discovery used)
-* Configuration file (`../env.sh`) that specifies NFS Server and Path
+- [EFS](https://aws.amazon.com/efs/) Server
+- Security group (SG) to allow EKS worker nodes to access the [EFS](https://aws.amazon.com/efs/)
+  Server (if discovery is used)
+- Configuration file (`../env.sh`) that specifies NFS Server and Path
## Prerequisites
To use this automation, you must install the following:
-* [AWS CLI](https://aws.amazon.com/cli/) - AWS CLI installed and configured with local profile
-* [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create templates
+- [AWS CLI](https://aws.amazon.com/cli/) - AWS CLI installed and configured with local profile
+- [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create
+ templates
## Configuration
You can use the following input variables to configure this automation:
-* **Required**
- * `vpc_name` or `vpc_id` - specify either explicit `vpc_id` or a name of Tag `Name` used
- * `subnets` or use [discovery](#discovery) - specify Subnet IDs for subnets that will have access to EFS, or have this discovered automatically
-* **Optional**
- * `security_groups` or use [discovery](#discovery) - specify SG IDs of security groups to add that will allow access to EFS server, or have this discovered automatically.
- * `dns_name` with `dns_domain` or `zone_id` - this is used to create a friendly alternative name such as `myfileserver.devest.mycompany.com`
- * `encrypted` (default: false) - whether EFS storage is encrypted or not
+- **Required**
+  - `vpc_name` or `vpc_id` - specify either explicit `vpc_id` or a name of Tag `Name` used
+  - `subnets` or use [discovery](#discovery) - specify Subnet IDs for subnets that will have
+    access to EFS, or have this discovered automatically
+- **Optional**
+  - `security_groups` or use [discovery](#discovery) - specify SG IDs of security groups to add
+    that will allow access to the EFS server, or have this discovered automatically.
+  - `dns_name` with `dns_domain` or `zone_id` - this is used to create a friendly alternative
+    name such as `myfileserver.devest.mycompany.com`
+  - `encrypted` (default: false) - whether EFS storage is encrypted or not
## Discovery
-Configuring the following values allows this automation to discover the resources used to configure EFS. These can be overridden by specifying explicit values as input variables.
+Configuring the following values allows this automation to discover the resources used to configure
+EFS. These can be overridden by specifying explicit values as input variables.
These are values affected by discovery:
- * **VPC Name** - you can supply either explicit `vpc_id` or `vpc_name` if VPC has a tag key of `Name`.
- * **EKS Cluster Name** - if `eks_cluster_name` is not specified, then the VPC tag `Name` will be used as the EKS Cluster Name. This is default configuration if both VPC and EKS cluster that was provisioned by `eksctl`.
- * **Private Subnets** - if `subnets` is not specified, private subnets used by an EKS cluster can be discovered provided that the tags are set up appropriately (see [Requirements for Discovery](#requirements-for-discovery))
- * **Security Group** (optional for access)- if `security_groups` is not specified this security group can be discovered provided that the tags are set up appropriately (see [Requirements for Discovery](#requirements-for-discovery))
- * **DNS Domain** (optional for DNS name)- a domain name, e.g. `devtest.mycompany.com.`, managed by Route53 can be specified to fetch a Zone ID, otherwise a `zone_id` must be specified to use this feature. When using this, you need to supply the CNAME you want to use, e.g. `myfileserver` with `dns_name`
+- **VPC Name** - you can supply either explicit `vpc_id` or `vpc_name` if VPC has a tag key of
+ `Name`.
+- **EKS Cluster Name** - if `eks_cluster_name` is not specified, then the VPC tag `Name` will be
+  used as the EKS Cluster Name. This is the default configuration if both the VPC and EKS cluster
+  were provisioned by `eksctl`.
+- **Private Subnets** - if `subnets` is not specified, private subnets used by an EKS cluster can be
+ discovered provided that the tags are set up appropriately (see
+ [Requirements for Discovery](#requirements-for-discovery))
+- **Security Group** (optional for access) - if `security_groups` is not specified, this security
+  group can be discovered provided that the tags are set up appropriately (see
+  [Requirements for Discovery](#requirements-for-discovery))
+- **DNS Domain** (optional for DNS name) - a domain name, e.g. `devtest.mycompany.com.`, managed by
+  Route53 can be specified to fetch a Zone ID; otherwise a `zone_id` must be specified to use this
+  feature. When using this, you need to supply the CNAME you want to use, e.g. `myfileserver` with
+  `dns_name`.
### Requirements for Discovery
-You will need to have the appropriate tags per subnets and security groups configured to support the discovery feature. This feature will allow these [Terraform](https://www.terraform.io/) scripts to find the resources required to allow EFS configuration alongside an Amazon EKS cluster and SG configuration to allow EKS worker nodes to access EFS. If you used `eksctl` to provision your cluster, these tags and keys will be set up automatically.
+You will need the appropriate tags configured on your subnets and security groups to support the
+discovery feature. This feature will allow these [Terraform](https://www.terraform.io/) scripts to
+find the resources required to allow EFS configuration alongside an Amazon EKS cluster and SG
+configuration to allow EKS worker nodes to access EFS. If you used `eksctl` to provision your
+cluster, these tags and keys will be set up automatically.
#### Subnets
Your private subnets where EKS is installed should have the following tags:
| Tag Key | Tag Value |
-|---------------------------------------------|-----------|
+| ------------------------------------------- | --------- |
| `kubernetes.io/cluster/${EKS_CLUSTER_NAME}` | `shared` |
| `kubernetes.io/role/internal-elb` | `1` |
@@ -57,7 +78,7 @@ Your private subnets where EKS is installed should have the following tags:
A security group used to allow access to EKS Nodes needs to have the following tags:
| Tag Key | Tag Value |
-|---------------------------------------------|----------------------|
+| ------------------------------------------- | -------------------- |
| `kubernetes.io/cluster/${EKS_CLUSTER_NAME}` | `owned` |
| `aws:eks:cluster-name` | `{EKS_CLUSTER_NAME}` |
@@ -65,7 +86,8 @@ A security group used to allow access to EKS Nodes needs to have the following t
### Define Variables
-If discovery was configured (see [Requirements for Discovery](#requirements-for-discovery)), you can specify this for `terraform.tfvars` files:
+If discovery was configured (see [Requirements for Discovery](#requirements-for-discovery)), you can
+specify this for `terraform.tfvars` files:
```hcl
vpc_name = "dgraph-eks-test-cluster"
@@ -116,7 +138,8 @@ terraform apply
## Cleanup
-When finished, you can destroy resources created with [Terraform](https://www.terraform.io/) using this:
+When finished, you can destroy resources created with [Terraform](https://www.terraform.io/) using
+this:
```bash
terraform destroy
diff --git a/contrib/config/backups/nfs/gcfs-cli/README.md b/contrib/config/backups/nfs/gcfs-cli/README.md
index 15ddcd177fb..68ff068cedd 100644
--- a/contrib/config/backups/nfs/gcfs-cli/README.md
+++ b/contrib/config/backups/nfs/gcfs-cli/README.md
@@ -1,34 +1,38 @@
# Google Cloud Filestore using Google Cloud SDK (Shell)
-This shell script creates the resources needed to create an NFS server instance using Google Cloud Filestore.
+This shell script creates the resources needed for an NFS server instance using Google Cloud
+Filestore.
This automation will create the following resources:
- * [Google Cloud Filestore Server](https://cloud.google.com/filestore)
- * Configuration file (`../env.sh`) that specifies NFS Server and Path
+- [Google Cloud Filestore Server](https://cloud.google.com/filestore)
+- Configuration file (`../env.sh`) that specifies NFS Server and Path
## Prerequisites
You need the following installed to use this automation:
-* [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and required to access Google Cloud.
-* [bash](https://www.gnu.org/software/bash/) - shell environment
+- [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and
+ required to access Google Cloud.
+- [bash](https://www.gnu.org/software/bash/) - shell environment
## Configuration
You will need to define these environment variables:
-* Required Variables:
- * `MY_FS_NAME` (required) - Name of Filestore instance.
-* Optional Variables:
- * `MY_PROJECT` (default to current configured project) - Project with billing enabled to create Filestore instance.
- * `MY_ZONE` (default `us-central1-b`) - zone where Filestore instance will be created
- * `MY_FS_CAPACITY` (default `1TB`) - size of the storage used for Filestore
- * `MY_FS_SHARE_NAME` (default `volumes`) - NFS path
+- Required Variables:
+ - `MY_FS_NAME` (required) - Name of Filestore instance.
+- Optional Variables:
+  - `MY_PROJECT` (defaults to the currently configured project) - Project with billing enabled to
+    create the Filestore instance.
+ - `MY_ZONE` (default `us-central1-b`) - zone where Filestore instance will be created
+ - `MY_FS_CAPACITY` (default `1TB`) - size of the storage used for Filestore
+ - `MY_FS_SHARE_NAME` (default `volumes`) - NFS path
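+
+For example, a minimal setup could export these in the shell before running the script (values
+shown are illustrative placeholders):
+
+```bash
+export MY_FS_NAME="my-dgraph-nfs" # required: name of the Filestore instance
+export MY_ZONE="us-central1-b"    # optional: zone for the instance
+export MY_FS_CAPACITY="1TB"       # optional: file share size
+```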
## Create Filestore
-Run these steps to create [filestore](https://cloud.google.com/filestore) and populate the configuration (`../env.sh`)
+Run these steps to create [filestore](https://cloud.google.com/filestore) and populate the
+configuration (`../env.sh`).
### Define Variables
diff --git a/contrib/config/backups/nfs/gcfs-cli/create_gcfs.sh b/contrib/config/backups/nfs/gcfs-cli/create_gcfs.sh
index 3b64b06480e..60535db0dac 100755
--- a/contrib/config/backups/nfs/gcfs-cli/create_gcfs.sh
+++ b/contrib/config/backups/nfs/gcfs-cli/create_gcfs.sh
@@ -6,35 +6,38 @@ set -e
# main
##################
main() {
- check_environment $@
- create_filestore
- create_config_values
+ check_environment "$@"
+ create_filestore
+ create_config_values
}
#####
# check_environment
##################
check_environment() {
- ## Check for Azure CLI command
- command -v gcloud > /dev/null || \
- { echo "[ERROR]: 'az' command not not found" 1>&2; exit 1; }
+ ## Check for Google Cloud SDK command
+ command -v gcloud >/dev/null ||
+ {
+ echo "[ERROR]: 'gcloud' command not found" 1>&2
+ exit 1
+ }
- if [[ -z "${MY_FS_NAME}" ]]; then
- if (( $# < 1 )); then
- printf "[ERROR]: Need at least one parameter or define 'MY_FS_NAME'\n\n" 1>&2
- printf "Usage:\n\t$0 <fs_name>\n\tMY_FS_NAME=<fs_name> $0\n" 1>&2
- exit 1
- fi
- fi
+ if [[ -z ${MY_FS_NAME} ]]; then
+ if (($# < 1)); then
+ printf "[ERROR]: Need at least one parameter or define 'MY_FS_NAME'\n\n" 1>&2
+ printf "Usage:\n\t$0 <fs_name>\n\tMY_FS_NAME=<fs_name> $0\n" 1>&2
+ exit 1
+ fi
+ fi
- MY_PROJECT=${MY_PROJECT:-$(gcloud config get-value project)}
- MY_ZONE=${MY_ZONE:-"us-central1-b"}
- MY_FS_TIER=${MY_FS_TIER:-"STANDARD"}
- MY_FS_CAPACITY=${MY_FS_CAPACITY:-"1TB"}
- MY_FS_SHARE_NAME=${MY_FS_SHARE_NAME:-"volumes"}
- MY_NETWORK_NAME=${MY_NETWORK_NAME:-"default"}
- MY_FS_NAME=${MY_FS_NAME:-$1}
- CREATE_ENV_VALUES=${CREATE_ENV_VALUES:-"true"}
+ MY_PROJECT=${MY_PROJECT:-$(gcloud config get-value project)}
+ MY_ZONE=${MY_ZONE:-"us-central1-b"}
+ MY_FS_TIER=${MY_FS_TIER:-"STANDARD"}
+ MY_FS_CAPACITY=${MY_FS_CAPACITY:-"1TB"}
+ MY_FS_SHARE_NAME=${MY_FS_SHARE_NAME:-"volumes"}
+ MY_NETWORK_NAME=${MY_NETWORK_NAME:-"default"}
+ MY_FS_NAME=${MY_FS_NAME:-$1}
+ CREATE_ENV_VALUES=${CREATE_ENV_VALUES:-"true"}
}
@@ -42,42 +45,44 @@ check_environment() {
# create_filestore
##################
create_filestore() {
- if ! gcloud filestore instances list | grep -q ${MY_FS_NAME}; then
- gcloud filestore instances create ${MY_FS_NAME} \
- --project=${MY_PROJECT} \
- --zone=${MY_ZONE} \
- --tier=${MY_FS_TIER} \
- --file-share=name="${MY_FS_SHARE_NAME}",capacity=${MY_FS_CAPACITY} \
- --network=name="${MY_NETWORK_NAME}"
- fi
+ if ! gcloud filestore instances list | grep -q "${MY_FS_NAME}"; then
+ gcloud filestore instances create "${MY_FS_NAME}" \
+ --project="${MY_PROJECT}" \
+ --zone="${MY_ZONE}" \
+ --tier="${MY_FS_TIER}" \
+ --file-share=name="${MY_FS_SHARE_NAME}",capacity="${MY_FS_CAPACITY}" \
+ --network=name="${MY_NETWORK_NAME}"
+ fi
}
#####
# create_config_values
##################
create_config_values() {
- ## TODO: Verify Server Exists
+ ## TODO: Verify Server Exists
- ## Create Minio env file and Helm Chart secret files
- if [[ "${CREATE_ENV_VALUES}" =~ true|(y)es ]]; then
- echo "[INFO]: Creating 'env.sh' file"
- SERVER_ADDRESS=$(gcloud filestore instances describe ${MY_FS_NAME} \
- --project=${MY_PROJECT} \
- --zone=${MY_ZONE} \
- --format="value(networks.ipAddresses[0])"
- )
- SERVER_SHARE=$(gcloud filestore instances describe ${MY_FS_NAME} \
- --project=${MY_PROJECT} \
- --zone=${MY_ZONE} \
- --format="value(fileShares[0].name)"
- )
+ ## Create 'env.sh' configuration file with the NFS server address and share path
+ if [[ ${CREATE_ENV_VALUES} =~ true|(y)es ]]; then
+ echo "[INFO]: Creating 'env.sh' file"
+ SERVER_ADDRESS=$(
+ gcloud filestore instances describe "${MY_FS_NAME}" \
+ --project="${MY_PROJECT}" \
+ --zone="${MY_ZONE}" \
+ --format="value(networks.ipAddresses[0])"
+ )
+ SERVER_SHARE=$(
+ gcloud filestore instances describe "${MY_FS_NAME}" \
+ --project="${MY_PROJECT}" \
+ --zone="${MY_ZONE}" \
+ --format="value(fileShares[0].name)"
+ )
- cat <<-EOF > ../env.sh
-## Configuration generated by 'create_gcfs.sh' script
-export NFS_PATH="${SERVER_SHARE}"
-export NFS_SERVER="${SERVER_ADDRESS}"
-EOF
- fi
+ cat <<-EOF >../env.sh
+ ## Configuration generated by 'create_gcfs.sh' script
+ export NFS_PATH="${SERVER_SHARE}"
+ export NFS_SERVER="${SERVER_ADDRESS}"
+ EOF
+ fi
}
main $@
diff --git a/contrib/config/backups/nfs/gcfs-terraform/README.md b/contrib/config/backups/nfs/gcfs-terraform/README.md
index 80833c78152..b29f4d06a2b 100644
--- a/contrib/config/backups/nfs/gcfs-terraform/README.md
+++ b/contrib/config/backups/nfs/gcfs-terraform/README.md
@@ -1,39 +1,46 @@
# Google Cloud Filestore with Terraform
-These [Terraform](https://www.terraform.io/) scripts and modules will create the resources required to create an NFS server instance using Google Cloud Filestore.
+These [Terraform](https://www.terraform.io/) scripts and modules will create the resources required
+for an NFS server instance using Google Cloud Filestore.
This automation will create the following resources:
- * [Google Cloud Filestore Server](https://cloud.google.com/filestore)
- * Configuration file (`../env.sh`) that specifies NFS Server and Path
+- [Google Cloud Filestore Server](https://cloud.google.com/filestore)
+- Configuration file (`../env.sh`) that specifies NFS Server and Path
## Prerequisites
You need the following installed to use this automation:
-* [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and required to access Google Cloud.
- * Google Project with billing enabled
-* [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create templates
+- [Google Cloud SDK](https://cloud.google.com/sdk/docs/install) - for the `gcloud` command and
+ required to access Google Cloud.
+ - Google Project with billing enabled
+- [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create
+ templates
## Configuration
You will need to define the following variables:
-* Required Variables:
- * `project_id` (required) - a globally unique name for the Google project that will contain the GCS bucket
- * `name` (required) - name of GCFS server instance
-* Optional Variables:
- * `zone` (default = `us-central1-b`) - specify zone where instances will be located
- * `tier` (default = `STANDARD`) - service tier of the instance, e.g. `TIER_UNSPECIFIED`, `STANDARD`, `PREMIUM`, `BASIC_HDD`, `BASIC_SSD`, and `HIGH_SCALE_SSD`.
- * `network` (default = `default`) - specify a GCE VPC network to which the instance is connected.
- * `capacity_gb` (default = `1024`) - specify file share capacity in GiB (minimum of `1024`)
- * `share_name` (default = `volumes`)- specify a name of the file share
+- Required Variables:
+ - `project_id` (required) - a globally unique name for the Google project that will contain the
+ GCS bucket
+ - `name` (required) - name of GCFS server instance
+- Optional Variables:
+ - `zone` (default = `us-central1-b`) - specify zone where instances will be located
+ - `tier` (default = `STANDARD`) - service tier of the instance, e.g. `TIER_UNSPECIFIED`,
+ `STANDARD`, `PREMIUM`, `BASIC_HDD`, `BASIC_SSD`, and `HIGH_SCALE_SSD`.
+ - `network` (default = `default`) - specify a GCE VPC network to which the instance is connected.
+ - `capacity_gb` (default = `1024`) - specify file share capacity in GiB (minimum of `1024`)
+  - `share_name` (default = `volumes`) - specify the name of the file share
## Steps
### Define Variables
-You can define these when prompted, in `terrafrom.tfvars` file, or through command line variables, e.g. `TF_VAR_project_id`, `TF_VAR_project_id`, and `TF_VAR_name`. Below is an example `terraform.tfvars` file:
+You can define these when prompted, in the `terraform.tfvars` file, or through command line
+variables, e.g. `TF_VAR_project_id` and `TF_VAR_name`. Below is an example `terraform.tfvars`
+file:
```terraform
## terraform.tfvars
@@ -58,7 +65,8 @@ terraform apply
## Cleanup
-When finished, you can destroy resources created with [Terraform](https://www.terraform.io/) using this:
+When finished, you can destroy resources created with [Terraform](https://www.terraform.io/) using
+this:
```bash
terraform destroy
diff --git a/contrib/config/backups/nfs/vagrant/provision.sh b/contrib/config/backups/nfs/vagrant/provision.sh
index 6dbb64cdf65..53acdc474e9 100644
--- a/contrib/config/backups/nfs/vagrant/provision.sh
+++ b/contrib/config/backups/nfs/vagrant/provision.sh
@@ -4,26 +4,26 @@
## main
#################################
main() {
- export DEV_USER=${1:-'vagrant'}
- export PYTHON_VERSION=${PYTHON_VERSION:-'3.8.2'}
- INSTALL_DOCKER=${INSTALL_DOCKER:-'true'}
- INSTALL_COMPOSE=${INSTALL_COMPOSE:-'true'}
-
- setup_hosts
-
- case $(hostname) in
- *nfs-server*)
- install_nfs_server
- ;;
- *nfs-client*)
- install_nfs_client
- [[ $INSTALL_DOCKER =~ "true" ]] && install_docker
- [[ $INSTALL_COMPOSE =~ "true" ]] && \
- export -f install_compose && \
- install_common && \
- su $DEV_USER -c "install_compose"
- ;;
- esac
+ export DEV_USER=${1:-'vagrant'}
+ export PYTHON_VERSION=${PYTHON_VERSION:-'3.8.2'}
+ INSTALL_DOCKER=${INSTALL_DOCKER:-'true'}
+ INSTALL_COMPOSE=${INSTALL_COMPOSE:-'true'}
+
+ setup_hosts
+
+ case $(hostname) in
+ *nfs-server*)
+ install_nfs_server
+ ;;
+ *nfs-client*)
+ install_nfs_client
+ [[ ${INSTALL_DOCKER} =~ "true" ]] && install_docker
+ [[ ${INSTALL_COMPOSE} =~ "true" ]] &&
+ export -f install_compose &&
+ install_common &&
+ su "${DEV_USER}" -c "install_compose"
+ ;;
+ esac
}
@@ -31,128 +31,131 @@ main() {
## setup_hosts - configure /etc/hosts in absence of DNS
#################################
setup_hosts() {
- CONFIG_FILE=/vagrant/hosts
- if [[ ! -f /vagrant/hosts ]]; then
- echo "INFO: '$CONFIG_FILE' does not exist. Skipping configuring /etc/hosts"
- return 1
- fi
-
- while read -a LINE; do
- ## append to hosts entry if it doesn't exist
- if ! grep -q "${LINE[1]}" /etc/hosts; then
- printf "%s %s \n" ${LINE[*]} >> /etc/hosts
- fi
- done < $CONFIG_FILE
+ CONFIG_FILE=/vagrant/hosts
+ if [[ ! -f /vagrant/hosts ]]; then
+ echo "INFO: '${CONFIG_FILE}' does not exist. Skipping configuring /etc/hosts"
+ return 1
+ fi
+
+ while read -r -a LINE; do
+ ## append to hosts entry if it doesn't exist
+ if ! grep -q "${LINE[1]}" /etc/hosts; then
+ printf "%s %s \n" "${LINE[@]}" >>/etc/hosts
+ fi
+ done <"${CONFIG_FILE}"
}
######
## install_nfs_server
#################################
install_nfs_server() {
- SHAREPATH=${1:-"/srv/share"}
- ACCESSLIST=${2:-'*'}
- apt-get -qq update && apt-get install -y nfs-kernel-server
- mkdir -p $SHAREPATH
- chown -R nobody:nogroup $SHAREPATH
- chmod -R 777 $SHAREPATH
- sed -i "\:$SHAREPATH:d" /etc/exports
- echo "$SHAREPATH $ACCESSLIST(rw,sync,no_root_squash,no_subtree_check)" >> /etc/exports
- exportfs -rav
+ SHAREPATH=${1:-"/srv/share"}
+ ACCESSLIST=${2:-'*'}
+ apt-get -qq update && apt-get install -y nfs-kernel-server
+ mkdir -p "${SHAREPATH}"
+ chown -R nobody:nogroup "${SHAREPATH}"
+ chmod -R 777 "${SHAREPATH}"
+ sed -i "\:${SHAREPATH}:d" /etc/exports
+ echo "${SHAREPATH} ${ACCESSLIST}(rw,sync,no_root_squash,no_subtree_check)" >>/etc/exports
+ exportfs -rav
}
######
## install_nfs_client
#################################
install_nfs_client() {
- MOUNTPATH=${1:-"/mnt/share"}
- NFS_PATH=${2:-"/srv/share"}
- NFS_SERVER=$(grep nfs-server /vagrant/vagrant/hosts | cut -d' ' -f1)
- apt-get -qq update && apt-get install -y nfs-common
+ MOUNTPATH=${1:-"/mnt/share"}
+ NFS_PATH=${2:-"/srv/share"}
+ NFS_SERVER=$(grep nfs-server /vagrant/vagrant/hosts | cut -d' ' -f1)
+ apt-get -qq update && apt-get install -y nfs-common
- mkdir -p $MOUNTPATH
- mount -t nfs $NFS_SERVER:$NFS_PATH $MOUNTPATH
+ mkdir -p "${MOUNTPATH}"
+ mount -t nfs "${NFS_SERVER}":"${NFS_PATH}" "${MOUNTPATH}"
}
######
## install_common
#################################
install_common() {
- apt-get update -qq -y
-
- ## tools and libs needed by pyenv
- ## ref. https://github.com/pyenv/pyenv/wiki/Common-build-problems
- apt-get install -y \
- build-essential \
- curl \
- git \
- libbz2-dev \
- libffi-dev \
- liblzma-dev \
- libncurses5-dev \
- libncursesw5-dev \
- libreadline-dev \
- libsqlite3-dev \
- libssl-dev \
- llvm \
- make \
- python-openssl \
- software-properties-common \
- sqlite \
- tk-dev \
- wget \
- xz-utils \
- zlib1g-dev
+ apt-get update -qq -y
+
+ ## tools and libs needed by pyenv
+ ## ref. https://github.com/pyenv/pyenv/wiki/Common-build-problems
+ apt-get install -y \
+ build-essential \
+ curl \
+ git \
+ libbz2-dev \
+ libffi-dev \
+ liblzma-dev \
+ libncurses5-dev \
+ libncursesw5-dev \
+ libreadline-dev \
+ libsqlite3-dev \
+ libssl-dev \
+ llvm \
+ make \
+ python-openssl \
+ software-properties-common \
+ sqlite \
+ tk-dev \
+ wget \
+ xz-utils \
+ zlib1g-dev
}
######
## install_docker
#################################
install_docker() {
- [[ -z "$DEV_USER" ]] && { echo '$DEV_USER not specified. Aborting' 2>&1 ; return 1; }
-
- apt update -qq -y && apt-get install -y \
- apt-transport-https \
- ca-certificates \
- gnupg-agent
-
- curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
- add-apt-repository \
- "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
+ [[ -z ${DEV_USER} ]] && {
+ echo '$DEV_USER not specified. Aborting' >&2
+ return 1
+ }
+
+ apt update -qq -y && apt-get install -y \
+ apt-transport-https \
+ ca-certificates \
+ gnupg-agent
+
+ curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
+ add-apt-repository \
+ "deb [arch=amd64] https://download.docker.com/linux/ubuntu \
$(lsb_release -cs) \
stable"
- apt update -qq -y
- apt-get -y install docker-ce docker-ce-cli containerd.io
+ apt update -qq -y
+ apt-get -y install docker-ce docker-ce-cli containerd.io
- usermod -aG docker $DEV_USER
+ usermod -aG docker "${DEV_USER}"
}
######
## install_compose - installs pyenv, python, docker-compose
#################################
install_compose() {
- PROJ=pyenv-installer
- SCRIPT_URL=https://github.com/pyenv/$PROJ/raw/master/bin/$PROJ
- curl -sL $SCRIPT_URL | bash
-
- ## setup current environment
- export PATH="$HOME/.pyenv/bin:$PATH"
- eval "$(pyenv init -)"
- eval "$(pyenv virtualenv-)"
-
- ## append to shell environment
- cat <<-'BASHRC' >> ~/.bashrc
-
-export PATH="$HOME/.pyenv/bin:$PATH"
-eval "$(pyenv init -)"
-eval "$(pyenv virtualenv-init -)"
-BASHRC
-
- ## install recent version of python 3
- pyenv install $PYTHON_VERSION
- pyenv global $PYTHON_VERSION
- pip install --upgrade pip
- pip install docker-compose
- pyenv rehash
+ PROJ=pyenv-installer
+ SCRIPT_URL=https://github.com/pyenv/${PROJ}/raw/master/bin/${PROJ}
+ curl -sL "${SCRIPT_URL}" | bash
+
+ ## setup current environment
+ export PATH="${HOME}/.pyenv/bin:${PATH}"
+ eval "$(pyenv init -)"
+ eval "$(pyenv virtualenv-init -)"
+
+ ## append to shell environment
+ cat <<-'BASHRC' >>~/.bashrc
+
+ export PATH="$HOME/.pyenv/bin:$PATH"
+ eval "$(pyenv init -)"
+ eval "$(pyenv virtualenv-init -)"
+ BASHRC
+
+ ## install recent version of python 3
+ pyenv install "${PYTHON_VERSION}"
+ pyenv global "${PYTHON_VERSION}"
+ pip install --upgrade pip
+ pip install docker-compose
+ pyenv rehash
}
main $@
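
The `setup_hosts` function above reads `/vagrant/hosts`, one `<ip> <hostname>` pair per line, and appends any missing entries to `/etc/hosts`. A hypothetical hosts file (the addresses are placeholders; the hostnames must match the `*nfs-server*` / `*nfs-client*` patterns in `main`):

```bash
## hypothetical /vagrant/hosts consumed by setup_hosts
cat <<EOF >/vagrant/hosts
192.168.61.10 nfs-server
192.168.61.11 nfs-client
EOF
```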
diff --git a/contrib/config/backups/s3/README.md b/contrib/config/backups/s3/README.md
index 82d71a07c04..beb615912a0 100644
--- a/contrib/config/backups/s3/README.md
+++ b/contrib/config/backups/s3/README.md
@@ -6,7 +6,8 @@ Binary backups can use AWS S3 (Simple Storage Service) for an object storage.
Some example scripts have been provided to illustrate how to create an S3 bucket.
-* [Terraform](terraform/README.md) - terraform scripts to provision S3 bucket and an IAM user with access to the S3 bucket.
+- [Terraform](terraform/README.md) - terraform scripts to provision S3 bucket and an IAM user with
+ access to the S3 bucket.
## Setting up the environment
@@ -14,14 +15,18 @@ Some example scripts have been provided to illustrate how to create S3.
You will need these tools:
-* Docker Environment
- * [Docker](https://docs.docker.com/get-docker/) - container engine platform
- * [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running dokcer containers
-* Kubernetes Environment
- * [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting with Kubenetes platform
- * [helm](https://helm.sh/docs/intro/install/) - deploys Kuberetes packages called helm charts
- * [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that will be applied to Kubernetes cluster
- * [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm chart deployments
+- Docker Environment
+ - [Docker](https://docs.docker.com/get-docker/) - container engine platform
+  - [Docker Compose](https://docs.docker.com/compose/install/) - orchestrates running docker
+    containers
+- Kubernetes Environment
+  - [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required for interacting
+    with the Kubernetes platform
+  - [helm](https://helm.sh/docs/intro/install/) - deploys Kubernetes packages called helm charts
+ - [helm-diff](https://github.com/databus23/helm-diff) [optional] - displays differences that
+ will be applied to Kubernetes cluster
+ - [helmfile](https://github.com/roboll/helmfile#installation) [optional] - orchestrates helm chart
+ deployments
### Using Docker Compose
@@ -29,7 +34,8 @@ A `docker-compose.yml` configuration is provided that will run the Dgraph cluste
#### Configuring Docker Compose
-You will need to create an `s3.env` file first like the example below. If you created the S3 bucket using the [Terraform](terraform/README.md) scripts, this will have been created automatically.
+You will need to create an `s3.env` file first like the example below. If you created the S3 bucket
+using the [Terraform](terraform/README.md) scripts, this will have been created automatically.
```bash
## s3.env
@@ -37,7 +43,7 @@ AWS_ACCESS_KEY_ID=
AWS_SECRET_ACCESS_KEY=
```
-#### Using Docker Compose
+#### Running with Docker Compose
```bash
## Run a Dgraph Cluster
@@ -46,7 +52,7 @@ docker-compose up --detach
#### Access Ratel UI
-* Ratel UI: http://localhost:8000
+- Ratel UI: http://localhost:8000
#### Clean up the Docker Environment
@@ -57,13 +63,16 @@ docker-compose rm
### Using Kubernetes with Helm Charts
-For Kubernetes, you can deploy a Dgraph cluster and a Kubernetes Cronjob that triggers backups using [Helm](https://helm.sh/docs/intro/install/).
+For Kubernetes, you can deploy a Dgraph cluster and a Kubernetes Cronjob that triggers backups using
+[Helm](https://helm.sh/docs/intro/install/).
#### Configuring secrets values
-These values are automatically created if you used the [Terraform](terraform/README.md) scripts.
+These values are automatically created if you used the [Terraform](terraform/README.md) scripts.
-If you already an existing S3 bucket you would like to use, you will need to create `charts/dgraph_secrets.yaml` files as shown below. Otherwise, if you created the bucket using the [Terraform](terraform/README.md) scripts, then this would be created automatically.
+If you already have an existing S3 bucket you would like to use, you will need to create
+`charts/dgraph_secrets.yaml` files as shown below. Otherwise, if you created the bucket using the
+[Terraform](terraform/README.md) scripts, then this would be created automatically.
For the `charts/dgraph_secrets.yaml`, you would create a file like this:
@@ -79,7 +88,8 @@ backups:
#### Configuring Environments
-We need to define one environment variable `BACKUP_PATH`. If [Terraform](terraform/README.md) scripts were used to create the S3 bucket, we can source the `env.sh` or otherwise create it here:
+We need to define one environment variable `BACKUP_PATH`. If [Terraform](terraform/README.md)
+scripts were used to create the S3 bucket, we can source the `env.sh` or otherwise create it here:
```bash
## env.sh
@@ -88,14 +98,17 @@ export BACKUP_PATH=s3://s3..amazonaws.com/
#### Deploy using Helmfile
-If you have [helmfile](https://github.com/roboll/helmfile#installation) and the [helm-diff](https://github.com/databus23/helm-diff) plugin installed, you can deploy a Dgraph cluster with the following:
+If you have [helmfile](https://github.com/roboll/helmfile#installation) and the
+[helm-diff](https://github.com/databus23/helm-diff) plugin installed, you can deploy a Dgraph
+cluster with the following:
```bash
-## source script for BACKUP_PATH env var
+## source script for BACKUP_PATH env var
. env.sh
## deploy Dgraph cluster and configure K8S CronJob with BACKUP_PATH
helmfile apply
```
+
#### Deploy using Helm
```bash
@@ -157,7 +170,9 @@ kubectl delete pvc --selector release=my-release # dgraph release name used earl
## Triggering a backup
-This is run from the host with the alpha node accessible on localhost at port `8080`. This can can be done by running the `docker-compose` environment, or in the Kubernetes environment, after running `kubectl --namespace default port-forward pod/dgraph-dgraph-alpha-0 8080:8080`.
+This is run from the host with the alpha node accessible on localhost at port `8080`. This can be
+done by running the `docker-compose` environment, or in the Kubernetes environment, after running
+`kubectl --namespace default port-forward pod/dgraph-dgraph-alpha-0 8080:8080`.
### Using GraphQL
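
A minimal sketch of such a trigger against the GraphQL admin endpoint, assuming the alpha is reachable on `localhost:8080` and `BACKUP_PATH` has been sourced from `env.sh` (the exact destination value depends on your bucket):

```bash
## trigger a binary backup via /admin; BACKUP_PATH comes from env.sh
curl --silent http://localhost:8080/admin \
  --header "Content-Type: application/json" \
  --data '{"query": "mutation { backup(input: {destination: \"'"${BACKUP_PATH}"'\"}) { response { message code } } }"}'
```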
diff --git a/contrib/config/backups/s3/terraform/README.md b/contrib/config/backups/s3/terraform/README.md
index 73a974cd573..a5f850f1197 100644
--- a/contrib/config/backups/s3/terraform/README.md
+++ b/contrib/config/backups/s3/terraform/README.md
@@ -2,28 +2,31 @@
## About
-This script will create the required resources needed to create S3 (Simple Storage Service) bucket using [`s3-bucket`](github.com/darkn3rd/s3-bucket) module.
+This script will create the resources needed for an S3 (Simple Storage Service) bucket using the
+[`s3-bucket`](github.com/darkn3rd/s3-bucket) module.
## Prerequisites
You need the following installed to use this automation:
-* [AWS CLI](https://aws.amazon.com/cli/) - AWS CLI installed and configured with local profile
-* [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create templates
+- [AWS CLI](https://aws.amazon.com/cli/) - AWS CLI installed and configured with local profile
+- [Terraform](https://www.terraform.io/downloads.html) - tool used to provision resources and create
+ templates
## Configuration
You will need to define the following variables:
-* Required Variables:
- * `region` (required) - region where bucket will be created
- * `name` (required) - unique name of s3 bucket
+- Required Variables:
+ - `region` (required) - region where bucket will be created
+ - `name` (required) - unique name of s3 bucket
## Steps
### Define Variables
-You can define these when prompted, or in `terrafrom.tfvars` file, or through command line variables, e.g. `TF_VAR_name`, `TF_VAR_region`.
+You can define these when prompted, in the `terraform.tfvars` file, or through command line
+variables, e.g. `TF_VAR_name` and `TF_VAR_region`.
```terraform
# terraform.tfvars
@@ -39,11 +42,14 @@ terraform init
### Prepare and Provision Resources
-This will create an S3 bucket and an IAM user that has access to that bucket. For convenience, will also generate the following files:
+This will create an S3 bucket and an IAM user that has access to that bucket. For convenience, it
+will also generate the following files:
-* `../s3.env` - used to demonstrate or test dgraph backups with s3 bucket in local docker environment
-* `../env.sh`- destination string to use trigger backups from the command line or to configure Kubernetes cron jobs to schedule backups
-* `../charts/dgraph_secrets.yaml` - used to deploy Dgraph with support for backups
+- `../s3.env` - used to demonstrate or test dgraph backups with s3 bucket in local docker
+ environment
+- `../env.sh` - destination string used to trigger backups from the command line or to configure
+  Kubernetes cron jobs to schedule backups
+- `../charts/dgraph_secrets.yaml` - used to deploy Dgraph with support for backups
```bash
## get a list of changes that will be made
diff --git a/contrib/config/monitoring/jaeger/README.md b/contrib/config/monitoring/jaeger/README.md
index 403ee2bd38e..8b1acb71cfd 100644
--- a/contrib/config/monitoring/jaeger/README.md
+++ b/contrib/config/monitoring/jaeger/README.md
@@ -1,6 +1,10 @@
# Jaeger
-Jaeger is a distributed tracing system that can be integrated with Dgraph. Included in this section automation to help install Jaeger into your Kubernetes environment.
+Jaeger is a distributed tracing system that can be integrated with Dgraph. Included in this section
+is automation to help install Jaeger into your Kubernetes environment.
-* [operator](operator/README.md) - use jaeger operator to install `all-in-one` jaeger pod with [badger](https://github.com/dgraph-io/badger) for storage.
-* [chart](chart/README.md) - use jaeger helm chart to install distributed jaeger cluster with [ElasticSearch](https://www.elastic.co/) or [Cassandra](https://cassandra.apache.org/) for storage.
+- [operator](operator/README.md) - use jaeger operator to install `all-in-one` jaeger pod with
+ [badger](https://github.com/dgraph-io/badger) for storage.
+- [chart](chart/README.md) - use jaeger helm chart to install distributed jaeger cluster with
+ [ElasticSearch](https://www.elastic.co/) or [Cassandra](https://cassandra.apache.org/) for
+ storage.
diff --git a/contrib/config/monitoring/jaeger/chart/README.md b/contrib/config/monitoring/jaeger/chart/README.md
index 615579d695a..06a385a670b 100644
--- a/contrib/config/monitoring/jaeger/chart/README.md
+++ b/contrib/config/monitoring/jaeger/chart/README.md
@@ -1,20 +1,25 @@
# Jaeger Helm Chart
-The [Jaeger Helm Chart](https://github.com/jaegertracing/helm-charts/tree/master/charts/jaeger) adds all components required to run Jaeger in Kubernetes for a production-like deployment.
+The [Jaeger Helm Chart](https://github.com/jaegertracing/helm-charts/tree/master/charts/jaeger) adds
+all components required to run Jaeger in Kubernetes for a production-like deployment.
## Tool Requirements
### Required
-* [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required to interact with kubernetes
-* [helm](https://helm.sh/docs/intro/install/) - required to install jaeger, cassandra, and elasticsearch using helm chart
+- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required to interact with
+ kubernetes
+- [helm](https://helm.sh/docs/intro/install/) - required to install jaeger, cassandra, and
+ elasticsearch using helm chart
### Optional
-These tools are optional if you would like to use a single command to install all the jaeger components and dgraph configured to use jaeger.
+These tools are optional if you would like to use a single command to install all the jaeger
+components and dgraph configured to use jaeger.
-* [helmfile](https://github.com/roboll/helmfile#installation) (optional)
-* [helm-diff](https://github.com/databus23/helm-diff) helm plugin: `helm plugin install https://github.com/databus23/helm-diff`
+- [helmfile](https://github.com/roboll/helmfile#installation) (optional)
+- [helm-diff](https://github.com/databus23/helm-diff) helm plugin:
+ `helm plugin install https://github.com/databus23/helm-diff`
## Deploy
@@ -54,7 +59,6 @@ helm install "my-release" \
dgraph/dgraph
```
-
## Cleanup
### Cleanup Using Helmfile
@@ -94,4 +98,4 @@ kubectl port-forward --namespace observability $POD_NAME 16686:16686
Afterward, you can visit:
-* http://localhost:16686
+- http://localhost:16686
diff --git a/contrib/config/monitoring/jaeger/operator/README.md b/contrib/config/monitoring/jaeger/operator/README.md
index efde2141bde..cedd62b296f 100644
--- a/contrib/config/monitoring/jaeger/operator/README.md
+++ b/contrib/config/monitoring/jaeger/operator/README.md
@@ -1,20 +1,25 @@
# Jaeger Operator
-The [Jaeger operator](https://github.com/jaegertracing/jaeger-operator) is an implementation of a [Kubernetes operator](https://coreos.com/operators/) that aims to ease the operational complexity of deploying and managing Jaeger.
+The [Jaeger operator](https://github.com/jaegertracing/jaeger-operator) is an implementation of a
+[Kubernetes operator](https://coreos.com/operators/) that aims to ease the operational complexity of
+deploying and managing Jaeger.
## Tool Requirements
### Required
-* [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required to interact with kubernetes
-* [helm](https://helm.sh/docs/intro/install/) - required to install jaeger-operator using helm chart
+- [kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - required to interact with
+ kubernetes
+- [helm](https://helm.sh/docs/intro/install/) - required to install jaeger-operator using helm chart
### Optional
-These tools are optional if you would like to use a single command to install all the jaeger components and dgraph configured to use jaeger.
+These tools are optional if you would like to use a single command to install all the jaeger
+components and dgraph configured to use jaeger.
-* [helmfile](https://github.com/roboll/helmfile#installation)
-* [helm-diff](https://github.com/databus23/helm-diff) helm plugin: `helm plugin install https://github.com/databus23/helm-diff`
+- [helmfile](https://github.com/roboll/helmfile#installation)
+- [helm-diff](https://github.com/databus23/helm-diff) helm plugin:
+ `helm plugin install https://github.com/databus23/helm-diff`
## Deploy
@@ -92,4 +97,4 @@ kubectl port-forward --namespace observability $POD_NAME 16686:16686
Afterward, visit:
-* http://localhost:16686
+- http://localhost:16686
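
One way to resolve `$POD_NAME` for the port-forward above, sketched with an assumed label selector (adjust it to however your jaeger pod is actually labeled):

```bash
## look up the jaeger pod and forward the query UI port
POD_NAME=$(kubectl get pods --namespace observability \
  --selector app.kubernetes.io/instance=jaeger \
  --output jsonpath='{.items[0].metadata.name}')
kubectl port-forward --namespace observability "${POD_NAME}" 16686:16686
```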
diff --git a/contrib/config/monitoring/jaeger/operator/helmify.sh b/contrib/config/monitoring/jaeger/operator/helmify.sh
index 84369c55bb1..c42206f1920 100755
--- a/contrib/config/monitoring/jaeger/operator/helmify.sh
+++ b/contrib/config/monitoring/jaeger/operator/helmify.sh
@@ -8,42 +8,48 @@ dir=${chart}-kustomize
chart=${chart/.\//}
build() {
- if [ ! -d "$dir" ]; then
- echo "directory \"$dir\" does not exist. make a kustomize project there in order to generate a local helm chart at $chart/ from it!" 1>&2
- exit 1
- fi
+ if [[ ! -d ${dir} ]]; then
+ echo "directory \"${dir}\" does not exist. make a kustomize project there in order to generate a local helm chart at ${chart}/ from it!" 1>&2
+ exit 1
+ fi
- mkdir -p $chart/templates
- echo "generating $chart/Chart.yaml" 1>&2
- cat <<EOF > $chart/Chart.yaml
+ mkdir -p "${chart}"/templates
+ echo "generating ${chart}/Chart.yaml" 1>&2
+ cat <<EOF >"${chart}"/Chart.yaml
apiVersion: v1
appVersion: "1.0"
description: A Helm chart for Kubernetes
-name: $chart
+name: ${chart}
version: 0.1.0
EOF
- echo "generating $chart/templates/NOTES.txt" 1>&2
- cat <<EOF > $chart/templates/NOTES.txt
-$chart has been installed as release {{ .Release.Name }}.
+ echo "generating ${chart}/templates/NOTES.txt" 1>&2
+ cat <<EOF >"${chart}"/templates/NOTES.txt
+${chart} has been installed as release {{ .Release.Name }}.
Run \`helm status {{ .Release.Name }}\` for more information.
Run \`helm delete --purge {{.Release.Name}}\` to uninstall.
EOF
- echo "running kustomize" 1>&2
- (cd $dir; kubectl kustomize overlays/$env) > $chart/templates/all.yaml
- echo "running helm lint" 1>&2
- helm lint $chart
- echo "generated following files:"
- tree $chart
+ echo "running kustomize" 1>&2
+ (
+ cd "${dir}" || exit
+ kubectl kustomize overlays/"${env}"
+ ) >"${chart}"/templates/all.yaml
+ echo "running helm lint" 1>&2
+ helm lint "${chart}"
+ echo "generated following files:"
+ tree "${chart}"
}
clean() {
- rm $chart/Chart.yaml
- rm $chart/templates/*.yaml
+ rm "${chart}"/Chart.yaml
+ rm "${chart}"/templates/*.yaml
}
-case "$cmd" in
- "build" ) build ;;
- "clean" ) clean ;;
- * ) echo "unsupported command: $cmd" 1>&2; exit 1 ;;
+case "${cmd}" in
+"build") build ;;
+"clean") clean ;;
+*)
+ echo "unsupported command: ${cmd}" 1>&2
+ exit 1
+ ;;
esac
diff --git a/contrib/config/monitoring/prometheus/README.md b/contrib/config/monitoring/prometheus/README.md
index 85e08865bf4..2e1c18f7b1e 100644
--- a/contrib/config/monitoring/prometheus/README.md
+++ b/contrib/config/monitoring/prometheus/README.md
@@ -1,21 +1,34 @@
## Prometheus Metrics
-[Prometheus](https://prometheus.io/) platform for gathering metrics and triggering alerts. This can be used to monitor Dgraph deployed on the Kubernetes platform.
+[Prometheus](https://prometheus.io/) is a platform for gathering metrics and triggering alerts. It
+can be used to monitor Dgraph deployed on the Kubernetes platform.
You can install [Prometheus](https://prometheus.io/) using either of these options:
-* Kubernetes manifests (this directory)
- * Instructions: [Deploy: Monitoring in Kubernetes](https://dgraph.io/docs/deploy/#monitoring-in-kubernetes)
-* Helm Chart Values - This will install [Prometheus](https://prometheus.io/), [AlertManager](https://prometheus.io/docs/alerting/latest/alertmanager/), and [Grafana](https://grafana.com/).
- * Instructions: [README.md](chart-values/README.md)
+- Kubernetes manifests (this directory)
+ - Instructions:
+ [Deploy: Monitoring in Kubernetes](https://dgraph.io/docs/deploy/#monitoring-in-kubernetes)
+- Helm Chart Values - This will install [Prometheus](https://prometheus.io/),
+ [AlertManager](https://prometheus.io/docs/alerting/latest/alertmanager/), and
+ [Grafana](https://grafana.com/).
+ - Instructions: [README.md](chart-values/README.md)
## Kubernetes Manifests Details
-These manifests require the [prometheus-operator](https://coreos.com/blog/the-prometheus-operator.html) to be installed before using these (see [instructions](https://dgraph.io/docs/deploy/#monitoring-in-kubernetes)).
+These manifests require the
+[prometheus-operator](https://coreos.com/blog/the-prometheus-operator.html) to be installed before
+using these (see [instructions](https://dgraph.io/docs/deploy/#monitoring-in-kubernetes)).
This will contain the following files:
-* `prometheus.yaml` - Prometheus service and Dgraph service monitors to keep the configuration synchronized Dgraph configuration changes. The service monitor use service discovery, such as Kubernetes labels and namespaces, to discover Dgraph. Should you have multiple Dgraph installations installed, such as a dev-test and production, you can tailor these narrow the scope of which Dgraph version you would want to track.
-* `alertmanager-config.yaml` - This is a secret you can create when installing `alertmanager.yaml`. Here you can specify where to direct alerts, such as Slack or PagerDuty.
-* `alertmanager.yaml` - AlertManager service to trigger alerts if metrics fall over a threshold specified in alert rules.
-* `alert-rules.yaml` - These are rules that can trigger alerts. Adjust these as they make sense for your Dgraph deployment.
+- `prometheus.yaml` - Prometheus service and Dgraph service monitors to keep the configuration
+  synchronized with Dgraph configuration changes. The service monitors use service discovery, such
+  as Kubernetes labels and namespaces, to discover Dgraph. Should you have multiple Dgraph
+  installations, such as dev-test and production, you can tailor these to narrow the scope of which
+  Dgraph version you would want to track.
+- `alertmanager-config.yaml` - This is a secret you can create when installing `alertmanager.yaml`.
+ Here you can specify where to direct alerts, such as Slack or PagerDuty.
+- `alertmanager.yaml` - AlertManager service to trigger alerts if metrics fall over a threshold
+ specified in alert rules.
+- `alert-rules.yaml` - These are rules that can trigger alerts. Adjust these as they make sense for
+ your Dgraph deployment.
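
A short sketch of applying these manifests once the prometheus-operator is installed; the file names follow the list above, and `alertmanager-config.yaml` should have its receivers filled in first:

```bash
## apply the Dgraph monitoring manifests (assumes prometheus-operator CRDs exist)
kubectl apply --filename prometheus.yaml
kubectl apply --filename alert-rules.yaml
kubectl apply --filename alertmanager-config.yaml
kubectl apply --filename alertmanager.yaml
```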
diff --git a/contrib/config/monitoring/prometheus/chart-values/README.md b/contrib/config/monitoring/prometheus/chart-values/README.md
index a3fb235474a..a045800a445 100644
--- a/contrib/config/monitoring/prometheus/chart-values/README.md
+++ b/contrib/config/monitoring/prometheus/chart-values/README.md
@@ -1,19 +1,31 @@
# Helm Chart Values
-You can install [Prometheus](https://prometheus.io/) and [Grafana](https://grafana.com/) using this helm chart and supplied helm chart values.
+You can install [Prometheus](https://prometheus.io/) and [Grafana](https://grafana.com/) using this
+helm chart and supplied helm chart values.
## Usage
### Tool Requirements
-* [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - Kubernetes client tool to interact with a Kubernetes cluster
-* [Helm](https://helm.sh/) - package manager for Kubernetes
-* [Helmfile](https://github.com/roboll/helmfile#installation) (optional) - declarative spec that allows you to compose several helm charts
- * [helm-diff](https://github.com/databus23/helm-diff) - helm plugin used by `helmfile` to show differences when applying helm files.
+- [Kubectl](https://kubernetes.io/docs/tasks/tools/install-kubectl/) - Kubernetes client tool to
+ interact with a Kubernetes cluster
+- [Helm](https://helm.sh/) - package manager for Kubernetes
+- [Helmfile](https://github.com/roboll/helmfile#installation) (optional) - declarative spec that
+ allows you to compose several helm charts
+ - [helm-diff](https://github.com/databus23/helm-diff) - helm plugin used by `helmfile` to show
+ differences when applying helm files.
### Using Helm
-You can use helm to install [kube-prometheus-stack](https://github.com/prometheus-operator/kube-prometheus) helm chart. This helm chart is a collection of Kubernetes manifests, [Grafana](http://grafana.com/) dashboards, , [Prometheus rules](https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/) combined with scripts to provide monitoring with [Prometheus](https://prometheus.io/) using the [Prometheus Operator](https://github.com/prometheus-operator/prometheus-operator). This helm chart will also install [Grafana](http://grafana.com/), [node_exporter](https://github.com/prometheus/node_exporter), [kube-state-metrics](https://github.com/kubernetes/kube-state-metrics).
+You can use helm to install
+[kube-prometheus-stack](https://github.com/prometheus-operator/kube-prometheus) helm chart. This
+helm chart is a collection of Kubernetes manifests, [Grafana](http://grafana.com/) dashboards, and
+[Prometheus rules](https://prometheus.io/docs/prometheus/latest/configuration/recording_rules/)
+combined with scripts to provide monitoring with [Prometheus](https://prometheus.io/) using the
+[Prometheus Operator](https://github.com/prometheus-operator/prometheus-operator). This helm chart
+will also install [Grafana](http://grafana.com/),
+[node_exporter](https://github.com/prometheus/node_exporter),
+[kube-state-metrics](https://github.com/kubernetes/kube-state-metrics).
To use this, run the following:
@@ -36,7 +48,9 @@ helm install my-prometheus \
### Using Helmfile
-You can use helmfile to manage multiple helm charts and corresponding helmcharts values from a single configuration file: `helmfile.yaml`. The provided example `helmfile.yaml` will show how to use this to install the helm chart.
+You can use helmfile to manage multiple helm charts and corresponding helm chart values from a
+single configuration file: `helmfile.yaml`. The provided example `helmfile.yaml` will show how to
+use this to install the helm chart.
To use this, run the following:
@@ -51,28 +65,33 @@ helmfile apply
## Grafana Dashboards
-You can import [Grafana](https://grafana.com/) Dashboards from within the web consoles.
+You can import [Grafana](https://grafana.com/) dashboards from within the web console.
There's an example dash board for some metrics that you can use to monitor Dgraph on Kubernetes:
-* [dgraph-kubernetes-grafana-dashboard.json](../../grafana/dgraph-kubernetes-grafana-dashboard.json)
+- [dgraph-kubernetes-grafana-dashboard.json](../../grafana/dgraph-kubernetes-grafana-dashboard.json)
-## Helm Chart Values
+## Helm Chart Configuration
Here are some Helm chart values you may want to configure depending on your environment.
### General
-* `grafana.service.type` - set to `LoadBalancer` if you would like to expose this port.
-* `grafana.service.annotations` - add annotations to configure a `LoadBalancer` such as if it is internal or external facing, DNS name with external-dns, etc.
-* `prometheus.service.type` - set to `LoadBalancer` if you would like to expose this port.
-* `prometheus.service.annotations` - add annotations to configure a `LoadBalancer` such as if it is internal or external facing, DNS name with external-dns, etc.
+- `grafana.service.type` - set to `LoadBalancer` if you would like to expose this port.
+- `grafana.service.annotations` - add annotations to configure a `LoadBalancer`, such as whether it
+  is internal or external facing, DNS name with external-dns, etc.
+- `prometheus.service.type` - set to `LoadBalancer` if you would like to expose this port.
+- `prometheus.service.annotations` - add annotations to configure a `LoadBalancer`, such as whether
+  it is internal or external facing, DNS name with external-dns, etc.
### Dgraph Service Monitors
-* `prometheus.additionalServiceMonitors.namespaceSelector.matchNames` - if you want to match a dgraph installed into a specific namespace.
-* `prometheus.additionalServiceMonitors.selector.matchLabels` - if you want to match through a specific labels in your dgraph deployment. Currently matches `monitor: zero.dgraph-io` and `monitor: alpha.dgraph-io`, which si the default for [Dgraph helm chart](https://github.com/dgraph-io/charts).
-
+- `prometheus.additionalServiceMonitors.namespaceSelector.matchNames` - if you want to match a
+ dgraph installed into a specific namespace.
+- `prometheus.additionalServiceMonitors.selector.matchLabels` - if you want to match specific
+  labels in your dgraph deployment. Currently this matches `monitor: zero.dgraph-io` and
+  `monitor: alpha.dgraph-io`, which is the default for the
+  [Dgraph helm chart](https://github.com/dgraph-io/charts).
## Alerting for Dgraph
@@ -93,11 +112,14 @@ export PAGERDUTY_INTEGRATION_KEY=''
helmfile apply
```
-For PagerDuty integration, you will need to add a service with integration type of `Prometheus` and later copy the integration key that is created.
+For PagerDuty integration, you will need to add a service with integration type of `Prometheus` and
+later copy the integration key that is created.
### Alerting for Dgraph binary backups with Kubernetes CronJobs
-In addition to adding alerts for Dgraph, if you you enabled binary backups through Kubernetes CronJob enabled with the Dgraph helm chart (see [backups/README.md](../backups/README.md)), you can use the examples here add alerting for backup cron jobs.
+In addition to adding alerts for Dgraph, if you enabled binary backups through a Kubernetes
+CronJob with the Dgraph helm chart (see [backups/README.md](../backups/README.md)), you can
+use the examples here to add alerting for backup cron jobs.
With `helmfile`, you can deploy this using the following:
@@ -117,8 +139,10 @@ helmfile apply
## Upgrading from previous versions
-Previously, this chart was called `stable/prometheus-operator`, which has been deprecated and now called `prometheus-community/kube-prometheus-stack`. If you are using the old chart, you will have to do a migration to use the new chart.
+Previously, this chart was called `stable/prometheus-operator`; it has been deprecated and is now
+called `prometheus-community/kube-prometheus-stack`. If you are using the old chart, you will have
+to do a migration to use the new chart.
The prometheus community has created a migration guide for this process:
-* [Migrating from stable/prometheus-operator chart](https://github.com/prometheus-community/helm-charts/blob/main/charts/kube-prometheus-stack/README.md#migrating-from-stableprometheus-operator-chart)
+- [Migrating from stable/prometheus-operator chart](https://github.com/prometheus-community/helm-charts/blob/main/charts/kube-prometheus-stack/README.md#migrating-from-stableprometheus-operator-chart)
diff --git a/contrib/config/terraform/aws/ha/README.md b/contrib/config/terraform/aws/ha/README.md
index 151baa53466..613aba994f7 100644
--- a/contrib/config/terraform/aws/ha/README.md
+++ b/contrib/config/terraform/aws/ha/README.md
@@ -1,7 +1,7 @@
# Highly Available Dgraph on AWS using terraform
-[Terraform](https://terraform.io/) automates the process of spinning up the EC2 instance, setting up, and running Dgraph in it.
-This setup deploys terraform in HA mode in AWS.
+[Terraform](https://terraform.io/) automates the process of spinning up the EC2 instance, setting
+up, and running Dgraph in it. This setup deploys Dgraph in HA mode in AWS.
Here are the steps to follow:
@@ -9,24 +9,32 @@ Here are the steps to follow:
2. [Download](https://terraform.io/downloads.html) and install terraform.
-3. Create a `terraform.tfvars` file similar to that of [terraform.tfvars.example](./terraform.tfvars.example) and edit the variables inside accordingly.
-You can override any variable present in [variables.tf](./variables.tf) by providing an explicit value in `terraform.tfvars` file.
+3. Create a `terraform.tfvars` file similar to that of
+ [terraform.tfvars.example](./terraform.tfvars.example) and edit the variables inside accordingly.
+ You can override any variable present in [variables.tf](./variables.tf) by providing an explicit
+ value in `terraform.tfvars` file.
4. Execute the following commands:
```sh
-$ terraform init
-$ terraform plan
-$ terraform apply
+terraform init
+terraform plan
+terraform apply
```
-The output of `terraform apply` will contain the Load Balancer DNS name configured with the setup. Dgraph-ratel will be available on `:8000`.
-Change the server URL in the dashboard to `:8080` and start playing with dgraph.
+The output of `terraform apply` will contain the Load Balancer DNS name configured with the setup.
+Dgraph-ratel will be available on `:8000`. Change the server URL in the dashboard to
+`:8080` and start playing with dgraph.
5. Use `terraform destroy` to delete the setup and restore the previous state.
-### Note
+**Note**
-* The terraform setup has been tested to work well with AWS [m5](https://aws.amazon.com/ec2/instance-types/m5/) instances.
+- The terraform setup has been tested to work well with AWS
+ [m5](https://aws.amazon.com/ec2/instance-types/m5/) instances.
-* AWS ALBs (Application Load Balancers) configured with this template do not support gRPC load balancing. To get the best performance out of the Dgraph cluster, you can use an externally configured load balancer with gRPC capabilities like [HA Proxy](https://www.haproxy.com/blog/haproxy-1-9-2-adds-grpc-support/) or [Nginx](https://www.nginx.com/blog/nginx-1-13-10-grpc/).
+- AWS ALBs (Application Load Balancers) configured with this template do not support gRPC load
+ balancing. To get the best performance out of the Dgraph cluster, you can use an externally
+ configured load balancer with gRPC capabilities like
+ [HA Proxy](https://www.haproxy.com/blog/haproxy-1-9-2-adds-grpc-support/) or
+ [Nginx](https://www.nginx.com/blog/nginx-1-13-10-grpc/).
diff --git a/contrib/config/terraform/aws/ha/dgraph/ratel/variables.tf b/contrib/config/terraform/aws/ha/dgraph/ratel/variables.tf
index 403cf603048..74d46516644 100644
--- a/contrib/config/terraform/aws/ha/dgraph/ratel/variables.tf
+++ b/contrib/config/terraform/aws/ha/dgraph/ratel/variables.tf
@@ -59,12 +59,7 @@ variable "key_pair_name" {
description = "Key Pair name to associate with the instances."
}
-variable "alpha_completed" {
- type = bool
- description = "Temporary variable to define dependency between ratel and alpha."
-}
-
variable "dgraph_version" {
type = string
description = "Dgraph version for installation."
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/aws/ha/dgraph/zero/variables.tf b/contrib/config/terraform/aws/ha/dgraph/zero/variables.tf
index cd2b0abb8bd..5e9ee19fc9a 100644
--- a/contrib/config/terraform/aws/ha/dgraph/zero/variables.tf
+++ b/contrib/config/terraform/aws/ha/dgraph/zero/variables.tf
@@ -23,16 +23,6 @@ variable "disk_iops" {
description = "IOPS limit for the disk associated with the instance."
}
-variable "vpc_id" {
- type = string
- description = "VPC ID of the dgraph cluster we created."
-}
-
-variable "lb_arn" {
- type = string
- description = "Resource ARN of the dgraph load balancer."
-}
-
variable "sg_id" {
type = string
description = "Security group ID for the created dgraph VPC."
@@ -61,4 +51,4 @@ variable "key_pair_name" {
variable "dgraph_version" {
type = string
description = "Dgraph version for installation."
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/aws/standalone/README.md b/contrib/config/terraform/aws/standalone/README.md
index 60cf2bd07a1..96b3a1d5723 100644
--- a/contrib/config/terraform/aws/standalone/README.md
+++ b/contrib/config/terraform/aws/standalone/README.md
@@ -1,10 +1,12 @@
# Deploy Dgraph on AWS using Terraform
-> **NOTE: This Terraform template creates a Dgraph database cluster with a public IP accessible to anyone. You can set the `assign_public_ip` variable
-to false to skip creating a public IP address and you can configure access to Dgraph yourself.**
+> **NOTE: This Terraform template creates a Dgraph database cluster with a public IP accessible to
+> anyone. You can set the `assign_public_ip` variable to false to skip creating a public IP address
+> and you can configure access to Dgraph yourself.**
-[Terraform](https://terraform.io/) automates the process spinning up the EC2 instance, setting up and running Dgraph in it.
-This setup deploys terraform in standalone mode inside a single EC2 instance.
+[Terraform](https://terraform.io/) automates the process of spinning up the EC2 instance, setting
+up and running Dgraph in it. This setup deploys Dgraph in standalone mode inside a single EC2
+instance.
Here are the steps to follow:
@@ -12,18 +14,21 @@ Here are the steps to follow:
2. [Download](https://terraform.io/downloads.html) and install terraform.
-3. Create a `terraform.tfvars` file similar to that of [terraform.tfvars.example](./terraform.tfvars.example) and edit the variables inside accordingly.
-You can override any variable present in [variables.tf](./variables.tf) by providing an explicit value in `terraform.tfvars` file.
-
+3. Create a `terraform.tfvars` file similar to that of
+ [terraform.tfvars.example](./terraform.tfvars.example) and edit the variables inside accordingly.
+ You can override any variable present in [variables.tf](./variables.tf) by providing an explicit
+ value in `terraform.tfvars` file.
+
4. Execute the following commands:
```sh
-$ terraform init
-$ terraform plan
-$ terraform apply
+terraform init
+terraform plan
+terraform apply
```
-The output of `terraform apply` will contain the IP address assigned to your EC2 instance. Dgraph-ratel will be available on `:8000`.
-Change the server URL in the dashboard to `:8080` and start playing with dgraph.
+The output of `terraform apply` will contain the IP address assigned to your EC2 instance.
+Dgraph-ratel will be available on `:8000`. Change the server URL in the dashboard to
+`:8080` and start playing with dgraph.
5. Use `terraform destroy` to delete the setup and restore the previous state.
diff --git a/contrib/config/terraform/gcp/standalone/README.md b/contrib/config/terraform/gcp/standalone/README.md
index 5aef02ae6e2..cc120c7e847 100644
--- a/contrib/config/terraform/gcp/standalone/README.md
+++ b/contrib/config/terraform/gcp/standalone/README.md
@@ -1,10 +1,12 @@
# Deploy Dgraph on GCP using Terraform
-> **NOTE: This Terraform template creates a Dgraph database cluster with a public IP accessible to anyone. You can set the `assign_public_ip` variable
-to false to skip creating a public IP address and you can configure access to Dgraph yourself.**
+> **NOTE: This Terraform template creates a Dgraph database cluster with a public IP accessible to
+> anyone. You can set the `assign_public_ip` variable to false to skip creating a public IP address
+> and you can configure access to Dgraph yourself.**
-[Terraform](https://terraform.io/) automates the process spinning up GCP compute instance, setting up and running Dgraph in it.
-This setup deploys terraform in standalone mode inside a single GCP compute instance.
+[Terraform](https://terraform.io/) automates the process of spinning up a GCP compute instance,
+setting up and running Dgraph in it. This setup deploys Dgraph in standalone mode inside a single
+GCP compute instance.
Here are the steps to be followed:
@@ -12,7 +14,8 @@ Here are the steps to be followed:
2. [Download](https://terraform.io/downloads.html) and install terraform.
-3. Generate service account keys for your GCP account either using the dashboard or `gcloud` CLI as shown below:
+3. Generate service account keys for your GCP account either using the dashboard or `gcloud` CLI as
+ shown below:
```sh
gcloud iam service-accounts keys create ./account.json \
@@ -35,7 +38,8 @@ Outputs:
dgraph_ip =
```
-The output of `terraform apply` will contain the IP address assigned to your instance. Dgraph-ratel will be available on `:8000`.
-Change the server URL in the dashboard to `:8080` and start playing with dgraph.
+The output of `terraform apply` will contain the IP address assigned to your instance. Dgraph-ratel
+will be available on `:8000`. Change the server URL in the dashboard to
+`:8080` and start playing with dgraph.
5. Use `terraform destroy` to delete the setup and restore the state.
diff --git a/contrib/config/terraform/gcp/standalone/outputs.tf b/contrib/config/terraform/gcp/standalone/outputs.tf
index 9b841f2cafc..2dc1a17c628 100644
--- a/contrib/config/terraform/gcp/standalone/outputs.tf
+++ b/contrib/config/terraform/gcp/standalone/outputs.tf
@@ -3,5 +3,5 @@
# UI is then accessible using :8000
# ----------------------------------------------------------------------------------
output dgraph_ip {
-  value = length(google_compute_instance.dgraph_standalone.network_interface.0.access_config) == 0 ? "" : google_compute_instance.dgraph_standalone.network_interface.0.access_config.0.nat_ip
+  value = length(google_compute_instance.dgraph_standalone.network_interface[0].access_config) == 0 ? "" : google_compute_instance.dgraph_standalone.network_interface[0].access_config[0].nat_ip
}
\ No newline at end of file
diff --git a/contrib/config/terraform/gcp/standalone/variables.tf b/contrib/config/terraform/gcp/standalone/variables.tf
index 6be6d19acf9..d075799fd10 100644
--- a/contrib/config/terraform/gcp/standalone/variables.tf
+++ b/contrib/config/terraform/gcp/standalone/variables.tf
@@ -51,14 +51,8 @@ variable "dgraph_version" {
default = "1.1.0"
}
-variable "dgraph_ui_port" {
- type = string
- description = "Port number of ratel interface"
- default = "8000"
-}
-
variable "assign_public_ip" {
type = string
default = "true"
description = "Should a public IP address be assigned to the compute instance running dgraph in standalone mode."
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/kubernetes/README.md b/contrib/config/terraform/kubernetes/README.md
index 046ad3ae8b3..0835ac911c7 100644
--- a/contrib/config/terraform/kubernetes/README.md
+++ b/contrib/config/terraform/kubernetes/README.md
@@ -5,27 +5,35 @@ consistent replication and linearizable reads. It's built from ground up to perf
of queries. Being a native graph database, it tightly controls how the data is arranged on disk to
optimize for query performance and throughput, reducing disk seeks and network calls in a cluster.
-### Introduction
+## Introduction
-The Terraform template creates the following resources towards setting up a Dgraph cluster on AWS EKS.
+The Terraform template creates the following resources towards setting up a Dgraph cluster on AWS
+EKS.
-- AWS VPC with 2 private subnets for hosting the EKS cluster, 2 public subnets to host the load balancers to expose the services and one NAT subnet to provision the NAT gateway required for the nodes/pods in the private subnet to communicate with the internet. Also sets up the NACL rules for secure inter subnet communication.
+- AWS VPC with 2 private subnets for hosting the EKS cluster, 2 public subnets to host the load
+  balancers to expose the services, and one NAT subnet to provision the NAT gateway required for the
+  nodes/pods in the private subnet to communicate with the internet. Also sets up the NACL rules for
+  secure inter-subnet communication.
- AWS EKS in the private subnets to host the Dgraph cluster.
-- The Dgraph cluster Kubernetes resources in either a standalone mode or a HA mode(refer to the variables available to tweak the provisioning of the Dgraph cluster below) on the EKS cluster.
+- The Dgraph cluster Kubernetes resources in either a standalone mode or a HA mode (refer to the
+  variables available to tweak the provisioning of the Dgraph cluster below) on the EKS cluster.
-### Prerequisites
+## Prerequisites
- Terraform > 0.12.0
- awscli >= 1.18.32
-## Steps to follow to get the Dgraph cluster on AWS EKS up and running:
+## Steps to follow to get the Dgraph cluster on AWS EKS up and running
-1. You must have an AWS account with privileges to create VPC, EKS and associated resources. Ensure awscli setup with the right credentials (One can also use AWS_PROFILE=\ terraform \ alternatively).
+1. You must have an AWS account with privileges to create VPC, EKS and associated resources. Ensure
+   awscli is set up with the right credentials (one can also use AWS_PROFILE=\ terraform
+   \ alternatively).
2. [Download](https://terraform.io/downloads.html) and install Terraform.
-3. Create a `terraform.tfvars` file similar to that of `terraform.tfvars.example` and edit the variables inside accordingly.
- You can override any variable present in [variables.tf](./variables.tf) by providing an explicit value in `terraform.tfvars` file.
+3. Create a `terraform.tfvars` file similar to that of `terraform.tfvars.example` and edit the
+ variables inside accordingly. You can override any variable present in
+ [variables.tf](./variables.tf) by providing an explicit value in `terraform.tfvars` file.
4. Execute the following commands:
@@ -33,60 +41,60 @@ The Terraform template creates the following resources towards setting up a Dgra
$ terraform init
$ terraform plan -target=module.aws
$ terraform apply -target=module.aws
-# One can choose to not run the following commands if they intend to use [Helm charts](https://github.com/dgraph-io/charts) to provision their resources on the Kubernetes cluster.
+# One can choose to not run the following commands if they intend to use [Helm charts](https://github.com/dgraph-io/charts)
+# to provision their resources on the Kubernetes cluster.
# If you want to manage the state of the Kubernetes resources using Terraform, run the following commands as well:
$ terraform plan -target=module.dgraph
$ terraform apply -target=module.dgraph
```
-> Note: Both the modules cannot be applied in the same run owing to the way Terraform [evaluates](https://www.terraform.io/docs/providers/kubernetes/index.html#stacking-with-managed-kubernetes-cluster-resources) the provider blocks.
+> Note: Both the modules cannot be applied in the same run owing to the way Terraform
+> [evaluates](https://www.terraform.io/docs/providers/kubernetes/index.html#stacking-with-managed-kubernetes-cluster-resources)
+> the provider blocks.
-The command `terraform apply -target=module.dgraph` would output the hostnames of the Load Balancers exposing the Alpha, Zero and Ratel services.
+The command `terraform apply -target=module.dgraph` would output the hostnames of the Load Balancers
+exposing the Alpha, Zero and Ratel services.
5. Use `terraform destroy -target=module.aws` to delete the setup and restore the previous state.
-
-
The following table lists the configurable parameters of the template and their default values:
-| Parameter | Description | Default |
-| ------------------------- | ------------------------------------------------------------ | ------------- |
-| `prefix` | The namespace prefix for all resources | dgraph |
-| `cidr` | The CIDR of the VPC | 10.20.0.0/16 |
-| `region` | The region to deploy the resources in | ap-south-1 |
-| `ha` | Enable or disable HA deployment of Dgraph | true |
-| `ingress_whitelist_cidrs` | The CIDRs whitelisted at the service Load Balancer | ["0.0.0.0/0"] |
-| `only_whitelist_local_ip` | "Only whitelist the IP of the executioner at the service Load Balancers | true |
-| `worker_nodes_count` | The number of worker nodes to provision with the EKS cluster | 3 |
-| `instance_types` | The list of instance types to run as worker nodes | ["m5.large"] |
-| `namespace` | The namespace to deploy the Dgraph pods to | dgraph |
-| `zero_replicas` | The number of Zero replicas to create. Overridden by the ha variable which when disabled leads to creation of only 1 Zero pod | 3 |
-| `zero_persistence` | If enabled mounts a persistent disk to the Zero pods | true |
-| `zero_storage_size_gb` | The size of the persistent disk to attach to the Zero pods in GiB | 10 |
+| Parameter | Description | Default |
+| ------------------------- | ------------------------------------------------------------------------------------------------------------------------------- | ------------- |
+| `prefix` | The namespace prefix for all resources | dgraph |
+| `cidr` | The CIDR of the VPC | 10.20.0.0/16 |
+| `region` | The region to deploy the resources in | ap-south-1 |
+| `ha` | Enable or disable HA deployment of Dgraph | true |
+| `ingress_whitelist_cidrs` | The CIDRs whitelisted at the service Load Balancer | ["0.0.0.0/0"] |
| `only_whitelist_local_ip` | Only whitelist the IP of the executioner at the service Load Balancers | true |
+| `worker_nodes_count` | The number of worker nodes to provision with the EKS cluster | 3 |
+| `instance_types` | The list of instance types to run as worker nodes | ["m5.large"] |
+| `namespace` | The namespace to deploy the Dgraph pods to | dgraph |
+| `zero_replicas` | The number of Zero replicas to create. Overridden by the ha variable which when disabled leads to creation of only 1 Zero pod | 3 |
+| `zero_persistence` | If enabled mounts a persistent disk to the Zero pods | true |
+| `zero_storage_size_gb` | The size of the persistent disk to attach to the Zero pods in GiB | 10 |
| `alpha_replicas` | The number of Alpha replicas to create. Overridden by the ha variable which when disabled leads to creation of only 1 Alpha pod | 3 |
-| `alpha_initialize_data` | If set, runs an init container to help with loading the data into Alpha | false |
-| `alpha_persistence` | If enabled, mounts a persistent disk to the Alpha pods | true |
-| `alpha_storage_size_gb` | The size of the persistent disk to attach to the Alpha pods in GiB | 10 |
-| `alpha_lru_size_mb` | The LRU cache to enable on Alpha pods in MiB | 2048 |
-
+| `alpha_initialize_data` | If set, runs an init container to help with loading the data into Alpha | false |
+| `alpha_persistence` | If enabled, mounts a persistent disk to the Alpha pods | true |
+| `alpha_storage_size_gb` | The size of the persistent disk to attach to the Alpha pods in GiB | 10 |
+| `alpha_lru_size_mb` | The LRU cache to enable on Alpha pods in MiB | 2048 |
-> NOTE:
+> NOTE:
>
> 1. If `ha` is set to `false`, the `worker_nodes_count` is overridden to `1`.
->
-> 2. If `only_whitelist_local_ip` is set to`true`, the `ingress_whitelist_cidrs is overridden` to local IP of the executioner.
->
+> 2. If `only_whitelist_local_ip` is set to `true`, the `ingress_whitelist_cidrs` is overridden to
+>    the local IP of the executioner.
> 3. The `kubeconfig` file is created in the root directory of this repository.
+> 4. One could use Helm to install the Kubernetes resources onto the cluster, in which case comment
+> out the `dgraph` module in `main.tf`.
+> 5. The number of `worker_nodes` needs to be more than the greater of replicas of Zero/Alpha when
+> `ha` is enabled to ensure the topological scheduling based on hostnames works.
+> 6. The hostnames of the service Load Balancers are part of the output of the run. Please use the
+> respective service ports in conjunction with the hostnames. TLS is not enabled.
+> 7. When `alpha_initialize_data` is set to `true`, an init container is provisioned to help with
+> loading the data as follows:
>
-> 4. One could use Helm to install the Kubernetes resources onto the cluster, in which case comment out the `dgraph` module in `main.tf`.
->
-> 5. The number of `worker_nodes` needs to be more than the greater of replicas of Zero/Alpha when `ha` is enabled to ensure the topological scheduling based on hostnames works.
->
-> 6. The hostnames of the service Load Balancers are part of the output of the run. Please use the respective service ports in conjunction with the hostnames. TLS is not enabled.
->
-> 7. When `alpha_initialize_data`is set to `true`, an init container is provisioned to help with loading the data as follows:
->
-> ```
+> ```bash
> # Initializing the Alphas:
> #
> # You may want to initialize the Alphas with data before starting, e.g.
@@ -106,7 +114,8 @@ The following table lists the configurable parameters of the template and their
> #
> # kubectl exec dgraph-alpha-0 -c init-alpha touch /dgraph/doneinit
> #
-> # Note that pod restarts cause re-execution of Init Containers. If persistance is # enabled /dgraph is persisted across pod restarts, the Init Container will exit
+> # Note that pod restarts cause re-execution of Init Containers. If persistence is
+> # enabled, /dgraph is persisted across pod restarts; the Init Container will exit
> # automatically when /dgraph/doneinit is present and proceed with starting
> # the Alpha process.
> ```
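+
+As a quick sketch, these variables can be overridden at `terraform apply` time (the variable names
+come from the table above; the values are illustrative only):
+
+```bash
+terraform apply \
+  -var='prefix=dgraph' \
+  -var='region=us-east-1' \
+  -var='ha=false' \
+  -var='ingress_whitelist_cidrs=["10.0.0.0/8"]'
+```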
diff --git a/contrib/config/terraform/kubernetes/modules/aws/modules/eks/outputs.tf b/contrib/config/terraform/kubernetes/modules/aws/modules/eks/outputs.tf
index 93eced0d988..d5eb4642823 100644
--- a/contrib/config/terraform/kubernetes/modules/aws/modules/eks/outputs.tf
+++ b/contrib/config/terraform/kubernetes/modules/aws/modules/eks/outputs.tf
@@ -21,7 +21,7 @@ apiVersion: v1
clusters:
- cluster:
server: ${aws_eks_cluster.eks.endpoint}
- certificate-authority-data: ${aws_eks_cluster.eks.certificate_authority.0.data}
+ certificate-authority-data: ${aws_eks_cluster.eks.certificate_authority[0].data}
name: kubernetes
contexts:
- context:
@@ -59,4 +59,4 @@ resource "local_file" "kubeconfig" {
output "kubeconfig_path" {
value = local.kubeconfig_path
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/kubernetes/modules/aws/modules/eks/variables.tf b/contrib/config/terraform/kubernetes/modules/aws/modules/eks/variables.tf
index ad874f557f0..cbf344b2a83 100644
--- a/contrib/config/terraform/kubernetes/modules/aws/modules/eks/variables.tf
+++ b/contrib/config/terraform/kubernetes/modules/aws/modules/eks/variables.tf
@@ -41,10 +41,4 @@ variable "cluster_subnet_ids" {
variable "db_subnet_ids" {
type = list
description = "The private subnet IDs"
-}
-
-variable "ingress_whitelist_cidrs" {
- type = list
- default = ["0.0.0.0/0"]
- description = "The IPs whitelisted on the load balancer"
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/nacl-config.tf b/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/nacl-config.tf
index cde44275f97..5cbd0e0a26d 100644
--- a/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/nacl-config.tf
+++ b/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/nacl-config.tf
@@ -1,6 +1,6 @@
resource "aws_network_acl" "lb_nacl" {
vpc_id = aws_vpc.vpc.id
- subnet_ids = aws_subnet.lb_subnets.*.id
+ subnet_ids = aws_subnet.lb_subnets[*].id
ingress {
@@ -89,7 +89,7 @@ resource "aws_network_acl" "nat_nacl" {
resource "aws_network_acl" "db_nacl" {
vpc_id = aws_vpc.vpc.id
- subnet_ids = aws_subnet.db_subnets.*.id
+ subnet_ids = aws_subnet.db_subnets[*].id
ingress {
from_port = 0
@@ -148,4 +148,4 @@ resource "aws_network_acl" "db_nacl" {
tags = map(
"Name", "${var.cluster_name}-db-nacl",
)
-}
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/outputs.tf b/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/outputs.tf
index 2541e7123f3..66d93c39179 100644
--- a/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/outputs.tf
+++ b/contrib/config/terraform/kubernetes/modules/aws/modules/vpc/outputs.tf
@@ -3,9 +3,9 @@ output "vpc_id" {
}
output "db_subnet_ids" {
- value = aws_subnet.db_subnets.*.id
+ value = aws_subnet.db_subnets[*].id
}
output "cluster_subnet_ids" {
- value = concat(aws_subnet.db_subnets.*.id, aws_subnet.lb_subnets.*.id)
-}
+ value = concat(aws_subnet.db_subnets[*].id, aws_subnet.lb_subnets[*].id)
+}
\ No newline at end of file
diff --git a/contrib/config/terraform/kubernetes/modules/dgraph/modules/alpha/templates/alpha_init.sh b/contrib/config/terraform/kubernetes/modules/dgraph/modules/alpha/templates/alpha_init.sh
index fba0ee4e78c..16a0412027d 100644
--- a/contrib/config/terraform/kubernetes/modules/dgraph/modules/alpha/templates/alpha_init.sh
+++ b/contrib/config/terraform/kubernetes/modules/dgraph/modules/alpha/templates/alpha_init.sh
@@ -2,4 +2,4 @@
trap "exit" SIGINT SIGTERM
echo "Write to /dgraph/doneinit when ready."
-until [ -f /dgraph/doneinit ]; do sleep 2; done
+until [[ -f /dgraph/doneinit ]]; do sleep 2; done
diff --git a/contrib/config/terraform/kubernetes/modules/dgraph/modules/zero/variables.tf b/contrib/config/terraform/kubernetes/modules/dgraph/modules/zero/variables.tf
index 0d5da6e5a4d..ffff551069f 100644
--- a/contrib/config/terraform/kubernetes/modules/dgraph/modules/zero/variables.tf
+++ b/contrib/config/terraform/kubernetes/modules/dgraph/modules/zero/variables.tf
@@ -142,13 +142,7 @@ variable "readiness_probe_failure_threshold" {
default = 6
}
-
-variable "termination_grace_period_seconds" {
- type = number
- default = 60
-}
-
variable "namespace_resource" {
type = any
default = null
-}
+}
\ No newline at end of file
diff --git a/contrib/config/vault/README.md b/contrib/config/vault/README.md
index cff53a6289d..3447f4cc150 100644
--- a/contrib/config/vault/README.md
+++ b/contrib/config/vault/README.md
@@ -2,4 +2,4 @@
This is a small guide and sample scripts to get started on your Vault journey with Dgraph.
-* [Dgraph Vault Integration using Docker Compose](./docker/README.md)
+- [Dgraph Vault Integration using Docker Compose](./docker/README.md)
diff --git a/contrib/config/vault/docker/README.md b/contrib/config/vault/docker/README.md
index 3d399693bfe..31cdc22136e 100644
--- a/contrib/config/vault/docker/README.md
+++ b/contrib/config/vault/docker/README.md
@@ -1,52 +1,59 @@
# HashiCorp Vault Integration: Docker
-This shows how to set up a local staging server for HashiCorp Vault and Dgraph. Through these steps below, you can create secrets for [Encryption at Rest](https://dgraph.io/docs/enterprise-features/encryption-at-rest/) and [Access Control Lists](https://dgraph.io/docs/enterprise-features/access-control-lists/). You can change the example secrets in [vault/payload_alpha_secrets.json](vault/payload_alpha_secrets.json) file.
+This shows how to set up a local staging server for HashiCorp Vault and Dgraph. Through the steps
+below, you can create secrets for
+[Encryption at Rest](https://dgraph.io/docs/enterprise-features/encryption-at-rest/) and
+[Access Control Lists](https://dgraph.io/docs/enterprise-features/access-control-lists/). You can
+change the example secrets in the [vault/payload_alpha_secrets.json](vault/payload_alpha_secrets.json)
+file.
This guide will demonstrate using best practices with two personas:
-* `admin` persona with privileged permissions to configure an auth method
-* `app` persona (`dgraph`) - a consumer of secrets stored in Vault
+- `admin` persona with privileged permissions to configure an auth method
+- `app` persona (`dgraph`) - a consumer of secrets stored in Vault
Steps using `bind_secret_id`:
-1. [Configure Dgraph and Vault Versions](#Step-1-configure-dgraph-and-vault-versions)
-2. [Launch unsealed Vault server](#Step-2-launch-unsealed-Vault-server)
-3. [Enable AppRole Auth and KV Secrets](#Step-3-enable-AppRole-Auth-and-KV-Secrets)
-4. [Create an `admin` policy](#Step-4-create-an-admin-policy)
-5. [Create an `admin` role with the attached policy](#Step-5-create-an-admin-role-with-the-attached-policy)
-6. [Retrieve the admin token](#Step-6-retrieve-the-admin-token)
-7. [Create a `dgraph` policy to access the secrets](#Step-7-create-a-dgraph-policy-to-access-the-secrets)
-8. [Create a `dgraph` role with the attached policy](#Step-8-create-a-dgraph-role-with-the-attached-policy)
-9. [Save secrets using admin persona](#Step-9-save-secrets-using-admin-persona)
-10. [Retrieve the `dgraph` token and save credentials](#Step-10-retrieve-the-dgraph-token-and-save-credentials)
-11. [Verify secrets access using app persona](#Step-11-verify-secrets-access-using-app-persona)
-12. [Launch Dgraph](#Step-12-launch-Dgraph)
-
-Alternative Steps using `bound_cidr_list` (see [Using HashiCorp Vault CIDR List for Authentication](#Using-hashicorp-vault-cidr-list-for-authentication)):
-
-1. [Configure Dgraph and Vault Versions](#Step-1-configure-dgraph-and-vault-versions)
-2. [Launch unsealed Vault server](#Step-2-launch-unsealed-Vault-server)
-3. [Enable AppRole Auth and KV Secrets](#Step-3-enable-AppRole-Auth-and-KV-Secrets)
-4. [Create an `admin` policy](#Step-4-create-an-admin-policy)
-5. [Create an `admin` role with the attached policy](#Step-5-create-an-admin-role-with-the-attached-policy)
-6. [Retrieve the admin token](#Step-6-retrieve-the-admin-token)
-7. [Create a `dgraph` policy to access the secrets](#Step-7-create-a-dgraph-policy-to-access-the-secrets)
-8. [Create a `dgraph` role using `bound_cidr_list`](#Step-8-create-a-dgraph-role-using-bound_cidr_list)
-9. [Save secrets using admin persona](#Step-9-save-secrets-using-admin-persona)
-10. [Retrieve the dgraph token using only the `role-id`](#Step-10-retrieve-the-dgraph-token-using-only-the-role-id)
-11. [Verify secrets access using app persona](#Step-11-verify-secrets-access-using-app-persona)
-12. [Launch Dgraph](#Step-12-launch-Dgraph)
+1. [Configure Dgraph and Vault Versions](#step-1-configure-dgraph-and-vault-versions)
+2. [Launch unsealed Vault server](#step-2-launch-unsealed-vault-server)
+3. [Enable AppRole Auth and KV Secrets](#step-3-enable-approle-auth-and-kv-secrets)
+4. [Create an `admin` policy](#step-4-create-an-admin-policy)
+5. [Create an `admin` role with the attached policy](#step-5-create-an-admin-role-with-the-attached-policy)
+6. [Retrieve the admin token](#step-6-retrieve-the-admin-token)
+7. [Create a `dgraph` policy to access the secrets](#step-7-create-a-dgraph-policy-to-access-the-secrets)
+8. [Create a `dgraph` role with the attached policy](#step-8-create-a-dgraph-role-with-the-attached-policy)
+9. [Save secrets using admin persona](#step-9-save-secrets-using-admin-persona)
+10. [Retrieve the `dgraph` token and save credentials](#step-10-retrieve-the-dgraph-token-and-save-credentials)
+11. [Verify secrets access using app persona](#step-11-verify-secrets-access-using-app-persona)
+12. [Launch Dgraph](#step-12-launch-dgraph)
+
+Alternative Steps using `bound_cidr_list` (see
+[Using HashiCorp Vault CIDR List for Authentication](#using-hashicorp-vault-cidr-list-for-authentication)):
+
+1. [Configure Dgraph and Vault Versions](#step-1-configure-dgraph-and-vault-versions)
+2. [Launch unsealed Vault server](#step-2-launch-unsealed-vault-server)
+3. [Enable AppRole Auth and KV Secrets](#step-3-enable-approle-auth-and-kv-secrets)
+4. [Create an `admin` policy](#step-4-create-an-admin-policy)
+5. [Create an `admin` role with the attached policy](#step-5-create-an-admin-role-with-the-attached-policy)
+6. [Retrieve the admin token](#step-6-retrieve-the-admin-token)
+7. [Create a `dgraph` policy to access the secrets](#step-7-create-a-dgraph-policy-to-access-the-secrets)
+8. [Create a `dgraph` role using `bound_cidr_list`](#step-8-create-a-dgraph-role-using-bound_cidr_list)
+9. [Save secrets using admin persona](#step-9-save-secrets-using-admin-persona)
+10. [Retrieve the dgraph token using only the `role-id`](#step-10-retrieve-the-dgraph-token-using-only-the-role-id)
+11. [Verify secrets access using app persona](#step-11-verify-secrets-access-using-app-persona)
+12. [Launch Dgraph](#step-12-launch-dgraph)
## Prerequisites
-* [Docker](https://docs.docker.com/engine/install/)
-* [Docker Compose](https://docs.docker.com/compose/install/)
-* [jq](https://stedolan.github.io/jq/)
-* [curl](https://curl.se/)
+- [Docker](https://docs.docker.com/engine/install/)
+- [Docker Compose](https://docs.docker.com/compose/install/)
+- [jq](https://stedolan.github.io/jq/)
+- [curl](https://curl.se/)
## Steps
-This configures an app role that requires log in with `role-id` and `secret-id` to login. This is the default role setting where `bind_secret_id` is enabled.
+This configures an AppRole that requires both a `role-id` and a `secret-id` to log in. This is
+the default role setting, where `bind_secret_id` is enabled.
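+
+For orientation, the AppRole login that this guide configures eventually looks like the following
+sketch (`$VAULT_ADDRESS` is the address used throughout this guide; the IDs are placeholders
+obtained in later steps):
+
+```bash
+curl --silent \
+  --request POST \
+  --data '{"role_id": "<role-id>", "secret_id": "<secret-id>"}' \
+  http://$VAULT_ADDRESS/v1/auth/approle/login | jq '.auth.client_token'
+```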
### Step 1: Configure Dgraph and Vault Versions
@@ -188,7 +195,6 @@ curl --silent \
http://$VAULT_ADDRESS/v1/sys/policies/acl/dgraph | jq
```
-
### Step 8: Create a `dgraph` role with the attached policy
```bash
@@ -211,7 +217,9 @@ curl --silent \
### Step 9: Save secrets using admin persona
-This will save secrets for both [Encryption at Rest](https://dgraph.io/docs/enterprise-features/encryption-at-rest/) and [Access Control Lists](https://dgraph.io/docs/enterprise-features/access-control-lists/).
+This will save secrets for both
+[Encryption at Rest](https://dgraph.io/docs/enterprise-features/encryption-at-rest/) and
+[Access Control Lists](https://dgraph.io/docs/enterprise-features/access-control-lists/).
```bash
curl --silent \
@@ -221,7 +229,10 @@ curl --silent \
http://$VAULT_ADDRESS/v1/secret/data/dgraph/alpha | jq
```
-**NOTE**: When updating K/V Version 2 secrets, be sure to increment the `options.cas` value to increase the version. For example, if updating the `enc_key` value to 32-bits, you would update `./vault/payload_alpha_secrets.json` to look like the following:
+**NOTE**: When updating K/V Version 2 secrets, be sure to increment the `options.cas` value to
+increase the version. For example, if updating the `enc_key` value to 32 bits, you would update
+`./vault/payload_alpha_secrets.json` to look like the following:
+
```json
{
"options": {
@@ -289,7 +300,9 @@ curl localhost:8080/health | jq -r '.[].ee_features | .[]' | sed 's/^/* /'
## Using HashiCorp Vault CIDR List for Authentication
-As an alternative, you can restrict access to a limited range of IP addresses and disable the requirement for a `secret-id`. In this scenario, we will set `bind_seccret_id` to `false`, and supply a list of IP address ranges for the `bound_cidr_list` key.
+As an alternative, you can restrict access to a limited range of IP addresses and disable the
+requirement for a `secret-id`. In this scenario, we will set `bind_secret_id` to `false`, and
+supply a list of IP address ranges for the `bound_cidr_list` key.
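+
+A sketch of the changed role creation in step 8 (the admin token variable and the CIDR below are
+illustrative; substitute the values obtained in the earlier steps):
+
+```bash
+# Hypothetical names: substitute your admin token and Vault address from the earlier steps.
+curl --silent \
+  --request POST \
+  --header "X-Vault-Token: $VAULT_ADMIN_TOKEN" \
+  --data '{"token_policies": "dgraph", "bind_secret_id": false, "bound_cidr_list": "10.0.0.0/8"}' \
+  http://$VAULT_ADDRESS/v1/auth/approle/role/dgraph
+```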
Only two steps will need to be changed, but otherwise the other steps are the same:
diff --git a/contrib/docker-build/README.md b/contrib/docker-build/README.md
index 415e04bedb3..8374cae96e1 100644
--- a/contrib/docker-build/README.md
+++ b/contrib/docker-build/README.md
@@ -1,13 +1,12 @@
# Docker build script
-This directory contains a Makefile that can be used to build Dgraph inside the
-official Dgraph Docker container. This is useful for situations when the host
-system cannot be used to build a binary that will work with the container (for
-example, if the host system has a different version of glibc).
+This directory contains a Makefile that can be used to build Dgraph inside the official Dgraph
+Docker container. This is useful for situations when the host system cannot be used to build a
+binary that will work with the container (for example, if the host system has a different version of
+glibc).
## Usage
-Run `make install` in this directory. The script will ask you for your password
-in order to change ownership of the compiled binary. By default, files written
-by Docker will be owned by root. This script also takes care of moving the
-binary to $GOPATH/bin.
+Run `make install` in this directory. The script will ask you for your password in order to change
+ownership of the compiled binary. By default, files written by Docker will be owned by root. This
+script also takes care of moving the binary to $GOPATH/bin.
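+
+For example (run from this directory, with Docker available):
+
+```bash
+make install   # builds dgraph inside the official image, then prompts for your password to chown the binary
+```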
diff --git a/contrib/docker-build/build.sh b/contrib/docker-build/build.sh
index d8b66d25d23..2baf0054eeb 100755
--- a/contrib/docker-build/build.sh
+++ b/contrib/docker-build/build.sh
@@ -3,5 +3,5 @@
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y build-essential git golang
-cd /dgraph/dgraph
+cd /dgraph/dgraph || exit
make
diff --git a/contrib/local-test/README.md b/contrib/local-test/README.md
index 44c731ae0fa..5fa4c096c61 100644
--- a/contrib/local-test/README.md
+++ b/contrib/local-test/README.md
@@ -2,18 +2,19 @@
A collection of make commands that enable:
-* hot reloading of built local images in a docker compose environment
-* creating/updating dql/graphql schemas
-* loading data in RDF and JSON encoding
-* running DQL/GraphQL queries/mutations
+- hot reloading of built local images in a docker compose environment
+- creating/updating dql/graphql schemas
+- loading data in RDF and JSON encoding
+- running DQL/GraphQL queries/mutations
Requirements
-* Docker
-* make
-* curl
-* [jq](https://stedolan.github.io/jq/download/) (optional, for formatting JSON results)
-* [gql](https://github.com/matthewmcneely/gql/tree/feature/add-query-and-variables-from-file/builds) (for running graphql queries)
+- Docker
+- make
+- curl
+- [jq](https://stedolan.github.io/jq/download/) (optional, for formatting JSON results)
+- [gql](https://github.com/matthewmcneely/gql/tree/feature/add-query-and-variables-from-file/builds)
+ (for running graphql queries)
One final requirement is to build a local image of dgraph from the source currently on your machine.
@@ -26,29 +27,44 @@ This will build a `dgraph/dgraph:local` image in your local Docker registry.
## Make targets
### `make help`
+
Lists all available make targets and a short description.
### `make up`
-Brings up a simple alpha and zero node using docker compose in your local Docker environment. The target then tails the log out from both containers. This target also launches a *[watchtower](https://containrrr.dev/watchtower/)* container that will automatically restart alpha and zero when it detects a new dgraph image (built via `cd .. && make image-local`).
-The process for hot-reloading development basically involves `make up`, modifying source on your machine, then `make image-local`. The changes in your source will show up in the locally deployed dgraph containers when watchtower restarts them.
+Brings up a simple alpha and zero node using docker compose in your local Docker environment. The
+target then tails the log output from both containers. This target also launches a
+_[watchtower](https://containrrr.dev/watchtower/)_ container that will automatically restart alpha
+and zero when it detects a new dgraph image (built via `cd .. && make image-local`).
+
+The process for hot-reloading development basically involves `make up`, modifying source on your
+machine, then `make image-local`. The changes in your source will show up in the locally deployed
+dgraph containers when watchtower restarts them.
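+
+A sketch of that loop:
+
+```bash
+make up                    # start zero/alpha and watchtower, tail the logs
+# ...edit Dgraph source on your machine...
+cd .. && make image-local  # rebuild dgraph/dgraph:local; watchtower restarts the containers
+```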
Note that this deployment is completely insecure—it's meant for local testing only.
### `make up-with-lambda`
-Brings up the alpha and zero containers along with the dgraph lambda container. Note this lambda container is based on `dgraph/dgraph-lambda:latest`. If you're trying to debug the lambda container, you'll need reference your local image in the docker compose file.
+
+Brings up the alpha and zero containers along with the dgraph lambda container. Note this lambda
+container is based on `dgraph/dgraph-lambda:latest`. If you're trying to debug the lambda container,
+you'll need to reference your local image in the docker compose file.
### `make down` and `make down-with-lambda`
+
Stops the containers.
### `make refresh`
-Restarts the containers if a new `dgraph/dgraph:local` image is available. This shouldn't be needed if the *watchtower* container is running correctly.
+
+Restarts the containers if a new `dgraph/dgraph:local` image is available. This shouldn't be needed
+if the _watchtower_ container is running correctly.
### `make schema-dql`
+
Updates dgraph with the schema defined in `schema.dql`.
Example schema.dql:
-```
+
+```dql
type Person {
name
boss_of
@@ -70,165 +86,178 @@ works_for: [uid] @reverse .
```
### `make schema-gql`
+
Updates dgraph with the schema defined in `schema.gql`
Example schema.gql:
+
```graphql
type Post {
- id: ID!
- title: String!
- text: String
- datePublished: DateTime
- author: Author!
+ id: ID!
+ title: String!
+ text: String
+ datePublished: DateTime
+ author: Author!
}
type Author {
- id: ID!
- name: String!
- posts: [Post!] @hasInverse(field: author)
+ id: ID!
+ name: String!
+ posts: [Post!] @hasInverse(field: author)
}
```
### `make drop-data`
+
Drops all data from the cluster, but not the schema.
### `make drop-all`
+
Drops all data and the schema from the cluster.
### `make load-data-gql`
-Loads JSON data defined in `gql-data.json`. This target is useful for loading data into schemas defined with GraphQL SDL.
+
+Loads JSON data defined in `gql-data.json`. This target is useful for loading data into schemas
+defined with GraphQL SDL.
Example gql-data.json:
+
```json
[
+ {
+ "uid": "_:katie_howgate",
+ "dgraph.type": "Author",
+ "Author.name": "Katie Howgate",
+ "Author.posts": [
+ {
+ "uid": "_:katie_howgate_1"
+ },
+ {
+ "uid": "_:katie_howgate_2"
+ }
+ ]
+ },
+ {
+ "uid": "_:timo_denk",
+ "dgraph.type": "Author",
+ "Author.name": "Timo Denk",
+ "Author.posts": [
+ {
+ "uid": "_:timo_denk_1"
+ },
+ {
+ "uid": "_:timo_denk_2"
+ }
+ ]
+ },
+ {
+ "uid": "_:katie_howgate_1",
+ "dgraph.type": "Post",
+ "Post.title": "Graph Theory 101",
+ "Post.text": "https://www.lancaster.ac.uk/stor-i-student-sites/katie-howgate/2021/04/27/graph-theory-101/",
+ "Post.datePublished": "2021-04-27",
+ "Post.author": {
+ "uid": "_:katie_howgate"
+ }
+ },
+ {
+ "uid": "_:katie_howgate_2",
+ "dgraph.type": "Post",
+ "Post.title": "Hypergraphs – not just a cool name!",
+ "Post.text": "https://www.lancaster.ac.uk/stor-i-student-sites/katie-howgate/2021/04/29/hypergraphs-not-just-a-cool-name/",
+ "Post.datePublished": "2021-04-29",
+ "Post.author": {
+ "uid": "_:katie_howgate"
+ }
+ },
+ {
+ "uid": "_:timo_denk_1",
+ "dgraph.type": "Post",
+ "Post.title": "Polynomial-time Approximation Schemes",
+ "Post.text": "https://timodenk.com/blog/ptas/",
+ "Post.datePublished": "2019-04-12",
+ "Post.author": {
+ "uid": "_:timo_denk"
+ }
+ },
+ {
+ "uid": "_:timo_denk_2",
+ "dgraph.type": "Post",
+ "Post.title": "Graph Theory Overview",
+ "Post.text": "https://timodenk.com/blog/graph-theory-overview/",
+ "Post.datePublished": "2017-08-03",
+ "Post.author": {
+ "uid": "_:timo_denk"
+ }
+ }
+]
+```
+
+### `make load-data-dql-json`
+
+Loads JSON data defined in `dql-data.json`. This target is useful for loading data into schemas
+defined with base dgraph types.
+
+Example dql-data.json:
+
+```json
+{
+ "set": [
{
- "uid": "_:katie_howgate",
- "dgraph.type": "Author",
- "Author.name": "Katie Howgate",
- "Author.posts": [
- {
- "uid": "_:katie_howgate_1"
- },
- {
- "uid": "_:katie_howgate_2"
- }
- ]
+ "uid": "_:company1",
+ "industry": "Machinery",
+ "dgraph.type": "Company",
+ "name": "CompanyABC"
},
{
- "uid": "_:timo_denk",
- "dgraph.type": "Author",
- "Author.name": "Timo Denk",
- "Author.posts": [
- {
- "uid": "_:timo_denk_1"
- },
- {
- "uid": "_:timo_denk_2"
- }
- ]
+ "uid": "_:company2",
+ "industry": "High Tech",
+ "dgraph.type": "Company",
+ "name": "The other company"
},
{
- "uid": "_:katie_howgate_1",
- "dgraph.type": "Post",
- "Post.title": "Graph Theory 101",
- "Post.text": "https://www.lancaster.ac.uk/stor-i-student-sites/katie-howgate/2021/04/27/graph-theory-101/",
- "Post.datePublished": "2021-04-27",
- "Post.author": {
- "uid": "_:katie_howgate"
- }
+ "uid": "_:jack",
+ "works_for": { "uid": "_:company1" },
+ "dgraph.type": "Person",
+ "name": "Jack"
},
{
- "uid": "_:katie_howgate_2",
- "dgraph.type": "Post",
- "Post.title": "Hypergraphs – not just a cool name!",
- "Post.text": "https://www.lancaster.ac.uk/stor-i-student-sites/katie-howgate/2021/04/29/hypergraphs-not-just-a-cool-name/",
- "Post.datePublished": "2021-04-29",
- "Post.author": {
- "uid": "_:katie_howgate"
- }
+ "uid": "_:ivy",
+ "works_for": { "uid": "_:company1" },
+ "boss_of": { "uid": "_:jack" },
+ "dgraph.type": "Person",
+ "name": "Ivy"
},
{
- "uid": "_:timo_denk_1",
- "dgraph.type": "Post",
- "Post.title": "Polynomial-time Approximation Schemes",
- "Post.text": "https://timodenk.com/blog/ptas/",
- "Post.datePublished": "2019-04-12",
- "Post.author": {
- "uid": "_:timo_denk"
- }
+ "uid": "_:zoe",
+ "works_for": { "uid": "_:company1" },
+ "dgraph.type": "Person",
+ "name": "Zoe"
},
{
- "uid": "_:timo_denk_2",
- "dgraph.type": "Post",
- "Post.title": "Graph Theory Overview",
- "Post.text": "https://timodenk.com/blog/graph-theory-overview/",
- "Post.datePublished": "2017-08-03",
- "Post.author": {
- "uid": "_:timo_denk"
- }
+ "uid": "_:jose",
+ "works_for": { "uid": "_:company2" },
+ "dgraph.type": "Person",
+ "name": "Jose"
+ },
+ {
+ "uid": "_:alexei",
+ "works_for": { "uid": "_:company2" },
+ "boss_of": { "uid": "_:jose" },
+ "dgraph.type": "Person",
+ "name": "Alexei"
}
-]
-```
-
-### `make load-data-dql-json`
-Loads JSON data defined in `dql-data.json`. This target is useful for loading data into schemas defined with base dgraph types.
-
-Example dql-data.json:
-```json
-{
- "set": [
- {
- "uid": "_:company1",
- "industry": "Machinery",
- "dgraph.type": "Company",
- "name": "CompanyABC"
- },
- {
- "uid": "_:company2",
- "industry": "High Tech",
- "dgraph.type": "Company",
- "name": "The other company"
- },
- {
- "uid": "_:jack",
- "works_for": { "uid": "_:company1"},
- "dgraph.type": "Person",
- "name": "Jack"
- },
- {
- "uid": "_:ivy",
- "works_for": { "uid": "_:company1"},
- "boss_of": { "uid": "_:jack"},
- "dgraph.type": "Person",
- "name": "Ivy"
- },
- {
- "uid": "_:zoe",
- "works_for": { "uid": "_:company1"},
- "dgraph.type": "Person",
- "name": "Zoe"
- },
- {
- "uid": "_:jose",
- "works_for": { "uid": "_:company2"},
- "dgraph.type": "Person",
- "name": "Jose"
- },
- {
- "uid": "_:alexei",
- "works_for": { "uid": "_:company2"},
- "boss_of": { "uid": "_:jose"},
- "dgraph.type": "Person",
- "name": "Alexei"
- }
- ]
+ ]
}
```
### `make load-data-dql-rdf`
-Loads RDF data defined in `dql-data.rdf`. This target is useful for loading data into schemas defined with base dgraph types.
+
+Loads RDF data defined in `dql-data.rdf`. This target is useful for loading data into schemas
+defined with base dgraph types.
Example dql-data.rdf:
+
```rdf
{
set {
@@ -269,10 +298,12 @@ Example dql-data.rdf:
```
### `make query-dql`
+
Runs the query defined in query.dql.
Example query.dql:
-```
+
+```dql
{
q(func: eq(name, "CompanyABC")) {
name
@@ -285,9 +316,11 @@ Example query.dql:
```
### `make query-gql`
+
Runs the query defined in query.gql and optional variables defined in variables.json.
Example query-gql:
+
```graphql
query QueryAuthor($order: PostOrder) {
queryAuthor {
@@ -304,10 +337,11 @@ query QueryAuthor($order: PostOrder) {
```
Example variables.json:
+
```json
{
- "order": {
- "desc": "datePublished"
- }
+ "order": {
+ "desc": "datePublished"
+ }
}
-```
\ No newline at end of file
+```
diff --git a/contrib/manual_tests/README.md b/contrib/manual_tests/README.md
index facd74b0373..19c1c565275 100644
--- a/contrib/manual_tests/README.md
+++ b/contrib/manual_tests/README.md
@@ -3,8 +3,8 @@
To run manual tests:
- Set `$DGRAPH_BIN` to the path of the Dgraph binary you want to test.
-- Set `$EXIT_ON_FAILURE` to `1` to stop testing immediately after a test fails,
- leaving Dgraph running and the test directory intact.
+- Set `$EXIT_ON_FAILURE` to `1` to stop testing immediately after a test fails, leaving Dgraph
+  running and the test directory intact (see the example after this list).
- Execute `./test.sh`.
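+
+For example (the binary path is illustrative):
+
+```bash
+DGRAPH_BIN="$HOME/go/bin/dgraph" EXIT_ON_FAILURE=1 ./test.sh
+```
+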
For long-running tests:
diff --git a/contrib/manual_tests/log.sh b/contrib/manual_tests/log.sh
index 78a080f2eef..098a1c4623f 100755
--- a/contrib/manual_tests/log.sh
+++ b/contrib/manual_tests/log.sh
@@ -1,21 +1,21 @@
#!/usr/bin/env bash
function _log_date() {
- date '+%Y-%m-%d %H:%M:%S'
+ date '+%Y-%m-%d %H:%M:%S'
}
function log::debug() {
- printf '%b\n' "\e[32m[DEBUG] $(_log_date) $*\e[0m"
+ printf '%b\n' "\e[32m[DEBUG] $(_log_date) $*\e[0m"
}
function log::info() {
- printf '%b\n' "\e[34m[ INFO] $(_log_date) $*\e[0m"
+ printf '%b\n' "\e[34m[ INFO] $(_log_date) $*\e[0m"
}
function log::warn() {
- printf '%b\n' "\e[33m[ WARN] $(_log_date) $*\e[0m"
+ printf '%b\n' "\e[33m[ WARN] $(_log_date) $*\e[0m"
}
function log::error() {
- printf '%b\n' "\e[31m[ERROR] $(_log_date) $*\e[0m"
+ printf '%b\n' "\e[31m[ERROR] $(_log_date) $*\e[0m"
}
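+
+# Example (after sourcing this file, as test.sh does):
+#   log::info "Cluster is up"   # prints a blue "[ INFO] <timestamp> Cluster is up" line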
diff --git a/contrib/manual_tests/test.sh b/contrib/manual_tests/test.sh
index 66a2e3f534c..e7ee87a8213 100755
--- a/contrib/manual_tests/test.sh
+++ b/contrib/manual_tests/test.sh
@@ -2,599 +2,599 @@
set -euo pipefail
-"$DGRAPH_BIN" version
+"${DGRAPH_BIN}" version
-readonly TEST_PATH="$PWD/_tmp"
+readonly TEST_PATH="${PWD}/_tmp"
-readonly DATA_PATH="$TEST_PATH/data"
-readonly LOGS_PATH="$TEST_PATH/logs"
-readonly DGRAPH_PATH="$TEST_PATH/dgraph"
+readonly DATA_PATH="${TEST_PATH}/data"
+readonly LOGS_PATH="${TEST_PATH}/logs"
+readonly DGRAPH_PATH="${TEST_PATH}/dgraph"
-readonly ENCRYPTION_KEY_PATH="$DGRAPH_PATH/encryption_key_file"
-readonly ACL_SECRET_PATH="$DGRAPH_PATH/acl_secret_file"
-readonly TLS_PATH="$DGRAPH_PATH/tls"
+readonly ENCRYPTION_KEY_PATH="${DGRAPH_PATH}/encryption_key_file"
+readonly ACL_SECRET_PATH="${DGRAPH_PATH}/acl_secret_file"
+readonly TLS_PATH="${DGRAPH_PATH}/tls"
readonly DATASET_1MILLION_FILE_URL='https://github.com/dgraph-io/benchmarks/blob/master/data/1million.rdf.gz?raw=true'
-readonly DATASET_1MILLION_FILE_PATH="$DATA_PATH/1million.rdf.gz"
+readonly DATASET_1MILLION_FILE_PATH="${DATA_PATH}/1million.rdf.gz"
readonly DATASET_1MILLION_SCHEMA_URL='https://github.com/dgraph-io/benchmarks/blob/master/data/1million.schema?raw=true'
-readonly DATASET_1MILLION_SCHEMA_PATH="$DATA_PATH/1million.schema"
+readonly DATASET_1MILLION_SCHEMA_PATH="${DATA_PATH}/1million.schema"
source "log.sh"
function dataset::1million::download() {
- if ! [ -f "$DATASET_1MILLION_FILE_PATH" ]; then
- log::debug "Downloading from $DATASET_1MILLION_FILE_URL."
- curl -L "$DATASET_1MILLION_FILE_URL" --output "$DATASET_1MILLION_FILE_PATH"
- fi
-
- if ! [ -f "$DATASET_1MILLION_SCHEMA_PATH" ]; then
- log::debug "Downloading from $DATASET_1MILLION_SCHEMA_URL."
- curl -L "$DATASET_1MILLION_SCHEMA_URL" --output "$DATASET_1MILLION_SCHEMA_PATH"
- fi
+ if ! [[ -f ${DATASET_1MILLION_FILE_PATH} ]]; then
+ log::debug "Downloading from ${DATASET_1MILLION_FILE_URL}."
+ curl -L "${DATASET_1MILLION_FILE_URL}" --output "${DATASET_1MILLION_FILE_PATH}"
+ fi
+
+ if ! [[ -f ${DATASET_1MILLION_SCHEMA_PATH} ]]; then
+ log::debug "Downloading from ${DATASET_1MILLION_SCHEMA_URL}."
+ curl -L "${DATASET_1MILLION_SCHEMA_URL}" --output "${DATASET_1MILLION_SCHEMA_PATH}"
+ fi
}
function dataset::1million::verify() {
- local count_names_exp=197408
- count_names_got=$(
- curl \
- -SsX POST \
- -H 'Content-Type: application/json' \
- -d '{ "query": "query { test(func: has(name@.)) { count(uid) } }" }' \
- 'localhost:8081/query' | jq '.data.test[0].count'
- )
-
- if [ "$count_names_got" -ne "$count_names_exp" ]; then
- log::error "Could not verify 1million, expected: $count_names_exp, got: $count_names_got"
- return 1
- fi
+ local count_names_exp=197408
+ count_names_got=$(
+ curl \
+ -SsX POST \
+ -H 'Content-Type: application/json' \
+ -d '{ "query": "query { test(func: has(name@.)) { count(uid) } }" }' \
+ 'localhost:8081/query' | jq '.data.test[0].count'
+ )
+
+ if [[ ${count_names_got} -ne ${count_names_exp} ]]; then
+ log::error "Could not verify 1million, expected: ${count_names_exp}, got: ${count_names_got}"
+ return 1
+ fi
}
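+# Kill any processes using TCP port $1 (no-op if the port is free).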
function portkill() {
- local pids
- if pids="$(lsof -nti ":$1")"; then
- echo "$pids" | xargs kill -9
- fi
+ local pids
+ if pids="$(lsof -nti ":$1")"; then
+ echo "${pids}" | xargs kill -9
+ fi
}
function dgraph::killall() {
- while pkill -x 'dgraph'; do
- log::debug 'Killing running Dgraph instances.'
- sleep 1
- done
+ while pkill -x 'dgraph'; do
+ log::debug 'Killing running Dgraph instances.'
+ sleep 1
+ done
}
function dgraph::start_zero() {
- local -r i="$i"
- log::debug "Starting Zero $i."
+	local -r i="$1"
+ log::debug "Starting Zero ${i}."
- local grpc_port=$((5080 + i))
- local http_port=$((6080 + i))
+ local grpc_port=$((5080 + i))
+ local http_port=$((6080 + i))
- for port in "$grpc_port" "$http_port"; do
- portkill "$port"
- done
+ for port in "${grpc_port}" "${http_port}"; do
+ portkill "${port}"
+ done
- local zero_args_default=(--cwd "$DGRAPH_PATH/zero$i" --raft="idx=$i" --port_offset "$i")
+ local zero_args_default=(--cwd "${DGRAPH_PATH}/zero${i}" --raft="idx=${i}" --port_offset "${i}")
- if [ "$i" -ne 1 ]; then
- zero_args_default+=(--peer 'localhost:5081')
- fi
+ if [[ ${i} -ne 1 ]]; then
+ zero_args_default+=(--peer 'localhost:5081')
+ fi
- "$DGRAPH_BIN" zero "${zero_args_default[@]}" "${@:2}" &>"$LOGS_PATH/zero$i" &
- sleep 1
+ "${DGRAPH_BIN}" zero "${zero_args_default[@]}" "${@:2}" &>"${LOGS_PATH}/zero${i}" &
+ sleep 1
}
function dgraph::start_zeros() {
- local -r n="$1"
- for i in $(seq "$n"); do
- dgraph::start_zero "$i" "${@:2}"
- done
+ local -r n="$1"
+ for i in $(seq "${n}"); do
+ dgraph::start_zero "${i}" "${@:2}"
+ done
}
function dgraph::start_alpha() {
- local -r i="$1"
- log::debug "Starting Alpha $i."
-
- local internal_port=$((7080 + i))
- local http_port=$((8080 + i))
- local grpc_port=$((9080 + i))
-
- for port in "$internal_port" "$http_port" "$grpc_port"; do
- portkill "$port"
- done
-
- "$DGRAPH_BIN" \
- alpha \
- --cwd "$DGRAPH_PATH/alpha$i" \
- --port_offset "$i" \
- --zero 'localhost:5081' \
- "${@:2}" &>"$LOGS_PATH/alpha$i" &
- sleep 1
+ local -r i="$1"
+ log::debug "Starting Alpha ${i}."
+
+ local internal_port=$((7080 + i))
+ local http_port=$((8080 + i))
+ local grpc_port=$((9080 + i))
+
+ for port in "${internal_port}" "${http_port}" "${grpc_port}"; do
+ portkill "${port}"
+ done
+
+ "${DGRAPH_BIN}" \
+ alpha \
+ --cwd "${DGRAPH_PATH}/alpha${i}" \
+ --port_offset "${i}" \
+ --zero 'localhost:5081' \
+ "${@:2}" &>"${LOGS_PATH}/alpha${i}" &
+ sleep 1
}
function dgraph::start_alphas() {
- local -r n="$1"
- for i in $(seq "$n"); do
- dgraph::start_alpha "$i" "${@:2}"
- done
+ local -r n="$1"
+ for i in $(seq "${n}"); do
+ dgraph::start_alpha "${i}" "${@:2}"
+ done
}
function dgraph::generate_encryption_key() {
- dd if=/dev/random bs=1 count=32 of="$ENCRYPTION_KEY_PATH"
+ dd if=/dev/random bs=1 count=32 of="${ENCRYPTION_KEY_PATH}"
}
function dgraph::generate_acl_secret() {
- dd if=/dev/random bs=1 count=256 of="$ACL_SECRET_PATH"
+ dd if=/dev/random bs=1 count=256 of="${ACL_SECRET_PATH}"
}
function dgraph::generate_tls() {
- "$DGRAPH_BIN" cert --cwd "$DGRAPH_PATH" --nodes 'localhost'
+ "${DGRAPH_BIN}" cert --cwd "${DGRAPH_PATH}" --nodes 'localhost'
}
function dgraph::healthcheck_zero() {
- local -r i="$1"
- local -r http_port=$((6080 + i))
- local response
-
- while true; do
- response="$(curl -Ss "localhost:$http_port/health")"
- if [ "$response" == "Please retry again, server is not ready to accept requests" ]; then
- log::warn "Zero $i is not ready, retrying in 1s."
- sleep 1
- else
- break
- fi
- done
-
- if [ "$response" != "OK" ]; then
- log::error "Zero $i is not healthy."
- echo "$response"
- return 1
- fi
-
- log::debug "Zero $i is healthy."
+ local -r i="$1"
+ local -r http_port=$((6080 + i))
+ local response
+
+ while true; do
+ response="$(curl -Ss "localhost:${http_port}/health")"
+ if [[ ${response} == "Please retry again, server is not ready to accept requests" ]]; then
+ log::warn "Zero ${i} is not ready, retrying in 1s."
+ sleep 1
+ else
+ break
+ fi
+ done
+
+ if [[ ${response} != "OK" ]]; then
+ log::error "Zero ${i} is not healthy."
+ echo "${response}"
+ return 1
+ fi
+
+ log::debug "Zero ${i} is healthy."
}
function dgraph::healthcheck_alpha() {
- local -r i="$1"
- local -r http_port=$((8080 + i))
- local response
-
- while true; do
- response="$(curl -Ss "localhost:$http_port/health")"
- if [ "$response" == "Please retry again, server is not ready to accept requests" ]; then
- log::warn "Alpha $i is not ready, retrying in 1s."
- sleep 1
- else
- break
- fi
- done
-
- if [ "$(echo "$response" | jq '.[0].status')" != '"healthy"' ]; then
- log::error "Alpha $i is not healthy."
- echo "$response" | jq || echo "$response"
- return 1
- fi
-
- log::debug "Alpha $i is healthy."
+ local -r i="$1"
+ local -r http_port=$((8080 + i))
+ local response
+
+ while true; do
+ response="$(curl -Ss "localhost:${http_port}/health")"
+ if [[ ${response} == "Please retry again, server is not ready to accept requests" ]]; then
+ log::warn "Alpha ${i} is not ready, retrying in 1s."
+ sleep 1
+ else
+ break
+ fi
+ done
+
+ if [[ "$(echo "${response}" | jq '.[0].status')" != '"healthy"' ]]; then
+ log::error "Alpha ${i} is not healthy."
+ echo "${response}" | jq || echo "${response}"
+ return 1
+ fi
+
+ log::debug "Alpha ${i} is healthy."
}
function dgraph::healthcheck_alpha_tls() {
- local -r i="$1"
- local -r http_port=$((8080 + i))
- local response
-
- while true; do
- response="$(curl --insecure -Ss "https://localhost:$http_port/health")"
- if [ "$response" == "Please retry again, server is not ready to accept requests" ]; then
- log::warn "Alpha $i is not ready, retrying in 1s."
- sleep 1
- else
- break
- fi
- done
-
- if [ "$(echo "$response" | jq '.[0].status')" != '"healthy"' ]; then
- log::error "Alpha $i is not healthy."
- echo "$response" | jq || echo "$response"
- return 1
- fi
-
- log::debug "Alpha $i is healthy."
+ local -r i="$1"
+ local -r http_port=$((8080 + i))
+ local response
+
+ while true; do
+ response="$(curl --insecure -Ss "https://localhost:${http_port}/health")"
+ if [[ ${response} == "Please retry again, server is not ready to accept requests" ]]; then
+ log::warn "Alpha ${i} is not ready, retrying in 1s."
+ sleep 1
+ else
+ break
+ fi
+ done
+
+ if [[ "$(echo "${response}" | jq '.[0].status')" != '"healthy"' ]]; then
+ log::error "Alpha ${i} is not healthy."
+ echo "${response}" | jq || echo "${response}"
+ return 1
+ fi
+
+ log::debug "Alpha ${i} is healthy."
}
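+# Run the `dgraph increment` tool against Alpha $1 and print the final counter value.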
function dgraph::increment() {
- local -r i="$1"
- local -r grpc_port=$((9080 + i))
- "$DGRAPH_BIN" increment --alpha "localhost:$grpc_port" "${@:2}" |
- grep -oP 'Counter VAL: \K\d+' |
- tail -1
+ local -r i="$1"
+ local -r grpc_port=$((9080 + i))
+ "${DGRAPH_BIN}" increment --alpha "localhost:${grpc_port}" "${@:2}" |
+ grep -oP 'Counter VAL: \K\d+' |
+ tail -1
}
function setup() {
- dgraph::killall
+ dgraph::killall
- log::debug 'Removing old test files.'
+ log::debug 'Removing old test files.'
- rm -rf "$LOGS_PATH"
- mkdir -p "$LOGS_PATH"
+ rm -rf "${LOGS_PATH}"
+ mkdir -p "${LOGS_PATH}"
- rm -rf "$DGRAPH_PATH"
- mkdir -p "$DGRAPH_PATH"
+ rm -rf "${DGRAPH_PATH}"
+ mkdir -p "${DGRAPH_PATH}"
- mkdir -p "$DATA_PATH"
+ mkdir -p "${DATA_PATH}"
}
function cleanup() {
- dgraph::killall
+ dgraph::killall
- log::debug 'Removing old test files.'
- rm -rf "$TEST_PATH"
+ log::debug 'Removing old test files.'
+ rm -rf "${TEST_PATH}"
}
function test::manual_start() {
- local -r n_zeros=3
- local -r n_alphas=3
-
- dgraph::start_zeros "$n_zeros"
- dgraph::start_alphas "$n_alphas"
-
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
-
- sleep 5
-
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha "$i"
- done
-
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i")"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ local -r n_zeros=3
+ local -r n_alphas=3
+
+ dgraph::start_zeros "${n_zeros}"
+ dgraph::start_alphas "${n_alphas}"
+
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
+
+ sleep 5
+
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha "${i}"
+ done
+
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}")"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
function test::manual_start_encryption() {
- dgraph::generate_encryption_key
+ dgraph::generate_encryption_key
- local -r n_zeros=3
- local -r n_alphas=3
+ local -r n_zeros=3
+ local -r n_alphas=3
- dgraph::start_zeros "$n_zeros"
- dgraph::start_alphas "$n_alphas" --encryption "key-file=$ENCRYPTION_KEY_PATH;"
+ dgraph::start_zeros "${n_zeros}"
+ dgraph::start_alphas "${n_alphas}" --encryption "key-file=${ENCRYPTION_KEY_PATH};"
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
- sleep 5
+ sleep 5
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha "$i"
- done
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha "${i}"
+ done
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i")"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}")"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
function test::manual_start_acl() {
- dgraph::generate_acl_secret
+ dgraph::generate_acl_secret
- local -r n_zeros=3
- local -r n_alphas=3
+ local -r n_zeros=3
+ local -r n_alphas=3
- dgraph::start_zeros "$n_zeros"
- dgraph::start_alphas "$n_alphas" --acl "secret-file=$ACL_SECRET_PATH;"
+ dgraph::start_zeros "${n_zeros}"
+ dgraph::start_alphas "${n_alphas}" --acl "secret-file=${ACL_SECRET_PATH};"
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
- sleep 5
+ sleep 5
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha "$i"
- done
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha "${i}"
+ done
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i" --user groot --password password)"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}" --user groot --password password)"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
# Test manual start with external TLS enabled.
function test::manual_start_tls() {
- dgraph::generate_tls
+ dgraph::generate_tls
- local -r n_zeros=3
- local -r n_alphas=3
+ local -r n_zeros=3
+ local -r n_alphas=3
- dgraph::start_zeros "$n_zeros"
- dgraph::start_alphas "$n_alphas" --tls "ca-cert=$TLS_PATH/ca.crt; server-cert=$TLS_PATH/node.crt; server-key=$TLS_PATH/node.key;"
+ dgraph::start_zeros "${n_zeros}"
+ dgraph::start_alphas "${n_alphas}" --tls "ca-cert=${TLS_PATH}/ca.crt; server-cert=${TLS_PATH}/node.crt; server-key=${TLS_PATH}/node.key;"
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
- sleep 5
+ sleep 5
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha_tls "$i"
- done
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha_tls "${i}"
+ done
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i" --tls "ca-cert=$TLS_PATH/ca.crt;")"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}" --tls "ca-cert=${TLS_PATH}/ca.crt;")"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
# Test manual start with both internal and external TLS enabled.
function test::manual_start_tls2() {
- dgraph::generate_tls
-
- local -r n_zeros=3
- local -r n_alphas=3
-
- for i in $(seq "$n_zeros"); do
- "$DGRAPH_BIN" cert --client "zero$i" --cwd "$DGRAPH_PATH"
- dgraph::start_zero "$i" \
- --tls "ca-cert=$TLS_PATH/ca.crt; internal-port=true; client-cert=$TLS_PATH/client.zero$i.crt; client-key=$TLS_PATH/client.zero$i.key; server-cert=$TLS_PATH/node.crt; server-key=$TLS_PATH/node.key;"
- done
-
- for i in $(seq "$n_alphas"); do
- "$DGRAPH_BIN" cert --client "alpha$i" --cwd "$DGRAPH_PATH"
- dgraph::start_alpha "$i" \
- --tls "ca-cert=$TLS_PATH/ca.crt; internal-port=true; client-cert=$TLS_PATH/client.alpha$i.crt; client-key=$TLS_PATH/client.alpha$i.key; server-cert=$TLS_PATH/node.crt; server-key=$TLS_PATH/node.key;"
- done
-
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
-
- sleep 5
-
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha_tls "$i"
- done
-
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i" --tls "ca-cert=$TLS_PATH/ca.crt;")"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ dgraph::generate_tls
+
+ local -r n_zeros=3
+ local -r n_alphas=3
+
+ for i in $(seq "${n_zeros}"); do
+ "${DGRAPH_BIN}" cert --client "zero${i}" --cwd "${DGRAPH_PATH}"
+ dgraph::start_zero "${i}" \
+ --tls "ca-cert=${TLS_PATH}/ca.crt; internal-port=true; client-cert=${TLS_PATH}/client.zero${i}.crt; client-key=${TLS_PATH}/client.zero${i}.key; server-cert=${TLS_PATH}/node.crt; server-key=${TLS_PATH}/node.key;"
+ done
+
+ for i in $(seq "${n_alphas}"); do
+ "${DGRAPH_BIN}" cert --client "alpha${i}" --cwd "${DGRAPH_PATH}"
+ dgraph::start_alpha "${i}" \
+ --tls "ca-cert=${TLS_PATH}/ca.crt; internal-port=true; client-cert=${TLS_PATH}/client.alpha${i}.crt; client-key=${TLS_PATH}/client.alpha${i}.key; server-cert=${TLS_PATH}/node.crt; server-key=${TLS_PATH}/node.key;"
+ done
+
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
+
+ sleep 5
+
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha_tls "${i}"
+ done
+
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}" --tls "ca-cert=${TLS_PATH}/ca.crt;")"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
function test::manual_start_encryption_acl_tls() {
- dgraph::generate_encryption_key
- dgraph::generate_acl_secret
- dgraph::generate_tls
-
- local -r n_zeros=3
- local -r n_alphas=3
-
- dgraph::start_zeros "$n_zeros"
- dgraph::start_alphas "$n_alphas" \
- --acl "secret-file=$ACL_SECRET_PATH;" \
- --encryption "key-file=$ENCRYPTION_KEY_PATH" \
- --tls "ca-cert=$TLS_PATH/ca.crt; server-cert=$TLS_PATH/node.crt; server-key=$TLS_PATH/node.key;"
-
- for i in $(seq "$n_zeros"); do
- dgraph::healthcheck_zero "$i"
- done
-
- sleep 5
-
- for i in $(seq "$n_alphas"); do
- dgraph::healthcheck_alpha_tls "$i"
- done
-
- local count
- for i in $(seq "$n_alphas"); do
- count="$(dgraph::increment "$i" --tls "ca-cert=$TLS_PATH/ca.crt;" --user groot --password password)"
- if [ "$i" -ne "$count" ]; then
- log::error "Expected increment: $i but got: $count"
- return 1
- fi
- done
+ dgraph::generate_encryption_key
+ dgraph::generate_acl_secret
+ dgraph::generate_tls
+
+ local -r n_zeros=3
+ local -r n_alphas=3
+
+ dgraph::start_zeros "${n_zeros}"
+ dgraph::start_alphas "${n_alphas}" \
+ --acl "secret-file=${ACL_SECRET_PATH};" \
+ --encryption "key-file=${ENCRYPTION_KEY_PATH}" \
+ --tls "ca-cert=${TLS_PATH}/ca.crt; server-cert=${TLS_PATH}/node.crt; server-key=${TLS_PATH}/node.key;"
+
+ for i in $(seq "${n_zeros}"); do
+ dgraph::healthcheck_zero "${i}"
+ done
+
+ sleep 5
+
+ for i in $(seq "${n_alphas}"); do
+ dgraph::healthcheck_alpha_tls "${i}"
+ done
+
+ local count
+ for i in $(seq "${n_alphas}"); do
+ count="$(dgraph::increment "${i}" --tls "ca-cert=${TLS_PATH}/ca.crt;" --user groot --password password)"
+ if [[ ${i} -ne ${count} ]]; then
+ log::error "Expected increment: ${i} but got: ${count}"
+ return 1
+ fi
+ done
}
function test::live_loader() {
- dataset::1million::download
+ dataset::1million::download
- dgraph::start_zeros 1
- dgraph::start_alphas 2
+ dgraph::start_zeros 1
+ dgraph::start_alphas 2
- sleep 5
+ sleep 5
- log::debug 'Running live loader.'
- "$DGRAPH_BIN" \
- live \
- --alpha 'localhost:9081' \
- --cwd "$DGRAPH_PATH/live" \
- --files "$DATASET_1MILLION_FILE_PATH" \
- --schema "$DATASET_1MILLION_SCHEMA_PATH" \
- --zero 'localhost:5081' &>"$LOGS_PATH/live"
+ log::debug 'Running live loader.'
+ "${DGRAPH_BIN}" \
+ live \
+ --alpha 'localhost:9081' \
+ --cwd "${DGRAPH_PATH}/live" \
+ --files "${DATASET_1MILLION_FILE_PATH}" \
+ --schema "${DATASET_1MILLION_SCHEMA_PATH}" \
+ --zero 'localhost:5081' &>"${LOGS_PATH}/live"
- dataset::1million::verify
+ dataset::1million::verify
}
function test::bulk_loader() {
- dataset::1million::download
+ dataset::1million::download
- dgraph::start_zeros 1
+ dgraph::start_zeros 1
- sleep 5
+ sleep 5
- log::debug 'Running bulk loader.'
- "$DGRAPH_BIN" \
- bulk \
- --cwd "$DGRAPH_PATH/bulk" \
- --files "$DATASET_1MILLION_FILE_PATH" \
- --schema "$DATASET_1MILLION_SCHEMA_PATH" \
- --map_shards 1 \
- --reduce_shards 1 \
- --zero 'localhost:5081' &>"$LOGS_PATH/bulk"
+ log::debug 'Running bulk loader.'
+ "${DGRAPH_BIN}" \
+ bulk \
+ --cwd "${DGRAPH_PATH}/bulk" \
+ --files "${DATASET_1MILLION_FILE_PATH}" \
+ --schema "${DATASET_1MILLION_SCHEMA_PATH}" \
+ --map_shards 1 \
+ --reduce_shards 1 \
+ --zero 'localhost:5081' &>"${LOGS_PATH}/bulk"
- mkdir -p "$DGRAPH_PATH/alpha1"
- cp -r "$DGRAPH_PATH/bulk/out/0/p" "$DGRAPH_PATH/alpha1"
+ mkdir -p "${DGRAPH_PATH}/alpha1"
+ cp -r "${DGRAPH_PATH}/bulk/out/0/p" "${DGRAPH_PATH}/alpha1"
- dgraph::start_alphas 1
- sleep 5
+ dgraph::start_alphas 1
+ sleep 5
- dataset::1million::verify
- log::info "Bulk load succeeded."
+ dataset::1million::verify
+ log::info "Bulk load succeeded."
- log::debug "Exporting data."
+ log::debug "Exporting data."
- local export_result
- export_result=$(curl -Ss 'localhost:8081/admin/export')
+ local export_result
+ export_result=$(curl -Ss 'localhost:8081/admin/export')
- if [ "$(echo "$export_result" | jq '.code')" != '"Success"' ]; then
- log::error 'Export failed.'
- echo "$export_result" | jq || echo "$export_result"
- return 1
- else
- log::info "Export succeeded."
- fi
+ if [[ "$(echo "${export_result}" | jq '.code')" != '"Success"' ]]; then
+ log::error 'Export failed.'
+ echo "${export_result}" | jq || echo "${export_result}"
+ return 1
+ else
+ log::info "Export succeeded."
+ fi
- log::debug "Backing up data."
+ log::debug "Backing up data."
- local -r backup_path="$TEST_PATH/backup"
- rm -rf "$backup_path"
- mkdir -p "$backup_path"
+ local -r backup_path="${TEST_PATH}/backup"
+ rm -rf "${backup_path}"
+ mkdir -p "${backup_path}"
- local backup_result
- backup_result=$(curl -SsX POST -H 'Content-Type: application/json' -d "
+ local backup_result
+ backup_result=$(curl -SsX POST -H 'Content-Type: application/json' -d "
{
- \"query\": \"mutation { backup(input: {destination: \\\"$backup_path\\\"}) { response { message code } } }\"
+ \"query\": \"mutation { backup(input: {destination: \\\"${backup_path}\\\"}) { response { message code } } }\"
}" 'http://localhost:8081/admin')
- if [ "$(echo "$backup_result" | jq '.data.backup.response.code')" != '"Success"' ]; then
- log::error 'Backup failed.'
- echo "$backup_result" | jq || echo "$backup_result"
- return 1
- else
- log::info "Backup succeeded."
- fi
+ if [[ "$(echo "${backup_result}" | jq '.data.backup.response.code')" != '"Success"' ]]; then
+ log::error 'Backup failed.'
+ echo "${backup_result}" | jq || echo "${backup_result}"
+ return 1
+ else
+ log::info "Backup succeeded."
+ fi
- setup
+ setup
- dgraph::start_zeros 1
+ dgraph::start_zeros 1
- sleep 5
+ sleep 5
- log::info "Restoring data."
- "$DGRAPH_BIN" \
- restore \
- --cwd "$DGRAPH_PATH/restore" \
- --location "$backup_path" \
- --postings "$DGRAPH_PATH" \
- --zero 'localhost:5081' &>"$LOGS_PATH/restore"
+ log::info "Restoring data."
+ "${DGRAPH_BIN}" \
+ restore \
+ --cwd "${DGRAPH_PATH}/restore" \
+ --location "${backup_path}" \
+ --postings "${DGRAPH_PATH}" \
+ --zero 'localhost:5081' &>"${LOGS_PATH}/restore"
- mkdir -p "$DGRAPH_PATH/alpha1"
- mv "$DGRAPH_PATH/p1" "$DGRAPH_PATH/alpha1/p"
+ mkdir -p "${DGRAPH_PATH}/alpha1"
+ mv "${DGRAPH_PATH}/p1" "${DGRAPH_PATH}/alpha1/p"
- dgraph::start_alphas 1
- sleep 5
+ dgraph::start_alphas 1
+ sleep 5
- dataset::1million::verify
- log::info "Restore succeeded."
+ dataset::1million::verify
+ log::info "Restore succeeded."
}
# Run `dgraph increment` in a loop with 1, 2, and 3 groups respectively and verify the result.
function testx::increment() {
- local -r increment_factor=100
-
- # Set replicas to 1 so that each Alpha forms its own group.
- dgraph::start_zeros 1 --replicas 1
- local alphas=()
-
- dgraph::start_alpha 1
- alphas+=("localhost:9081")
-
- for i in {1..20000}; do
- if [ "$i" -eq 5000 ]; then
- dgraph::start_alpha 2
- alphas+=("localhost:9082")
- elif [ "$i" -eq 10000 ]; then
- dgraph::start_alpha 3
- alphas+=("localhost:9083")
- fi
-
- # Pick an Alpha in a round-robin manner and run the increment tool on it.
- count="$(
- "$DGRAPH_BIN" increment --alpha "${alphas[$((i % ${#alphas[@]}))]}" --num "$increment_factor" |
- grep -oP 'Counter VAL: \K\d+' |
- tail -1
- )"
- if [ "$count" -ne $((i * increment_factor)) ]; then
- log::error "Increment error: expected: $count, got: $i"
- return 1
- fi
- log::debug "Increment: $count"
- done
+ local -r increment_factor=100
+
+ # Set replicas to 1 so that each Alpha forms its own group.
+ dgraph::start_zeros 1 --replicas 1
+ local alphas=()
+
+ dgraph::start_alpha 1
+ alphas+=("localhost:9081")
+
+ for i in {1..20000}; do
+ if [[ ${i} -eq 5000 ]]; then
+ dgraph::start_alpha 2
+ alphas+=("localhost:9082")
+ elif [[ ${i} -eq 10000 ]]; then
+ dgraph::start_alpha 3
+ alphas+=("localhost:9083")
+ fi
+
+ # Pick an Alpha in a round-robin manner and run the increment tool on it.
+ count="$(
+ "${DGRAPH_BIN}" increment --alpha "${alphas[$((i % ${#alphas[@]}))]}" --num "${increment_factor}" |
+ grep -oP 'Counter VAL: \K\d+' |
+ tail -1
+ )"
+ if [[ ${count} -ne $((i * increment_factor)) ]]; then
+			log::error "Increment error: expected: $((i * increment_factor)), got: ${count}"
+ return 1
+ fi
+ log::debug "Increment: ${count}"
+ done
}
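+# Run every function whose name matches the prefix in $1 (default `test::`) and print a
+# pass/fail summary; stops early when ${EXIT_ON_FAILURE} is 1.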
function dgraph::run_tests() {
- local passed=0
- local failed=0
-
- for test in $(compgen -A function "${1:-test::}"); do
- log::info "$test starting."
-
- setup
- if "$test"; then
- log::info "$test succeeded."
- ((passed += 1))
- else
- log::error "$test failed."
- ((failed += 1))
-
- if [ "${EXIT_ON_FAILURE:-0}" -eq 1 ]; then
- return 1
- fi
- fi
- done
-
- local -r summary="$passed tests passed, $failed failed."
- if [ "$failed" -ne 0 ]; then
- log::error "$summary"
- return 1
- else
- log::info "$summary"
- return 0
- fi
+ local passed=0
+ local failed=0
+
+ for test in $(compgen -A function "${1:-test::}"); do
+ log::info "${test} starting."
+
+ setup
+ if "${test}"; then
+ log::info "${test} succeeded."
+ ((passed += 1))
+ else
+ log::error "${test} failed."
+ ((failed += 1))
+
+ if [[ ${EXIT_ON_FAILURE:-0} -eq 1 ]]; then
+ return 1
+ fi
+ fi
+ done
+
+ local -r summary="${passed} tests passed, ${failed} failed."
+ if [[ ${failed} -ne 0 ]]; then
+ log::error "${summary}"
+ return 1
+ else
+ log::info "${summary}"
+ return 0
+ fi
}
function main() {
- cleanup
- dgraph::run_tests "$@"
- local status="$?"
- cleanup
- return $status
+ cleanup
+ dgraph::run_tests "$@"
+ local status="$?"
+ cleanup
+ return "${status}"
}
main "$@"
diff --git a/contrib/scripts/cover.sh b/contrib/scripts/cover.sh
index ce374e79bcf..bf7d58b8545 100755
--- a/contrib/scripts/cover.sh
+++ b/contrib/scripts/cover.sh
@@ -1,35 +1,34 @@
#!/bin/bash
-SRC="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/.."
+SRC="$(cd -P "$(dirname "${BASH_SOURCE[0]}")" && pwd)/.."
TMP=$(mktemp /tmp/dgraph-coverage-XXXXX.txt)
BUILD=$1
# If build variable is empty then we set it.
-if [ -z "$1" ]; then
- BUILD=$SRC/build
+if [[ -z $1 ]]; then
+ BUILD=${SRC}/build
fi
OUT=$2
-if [ -z "$OUT" ]; then
- OUT=$SRC/coverage.out
+if [[ -z ${OUT} ]]; then
+ OUT=${SRC}/coverage.out
fi
-rm -f $OUT
+rm -f "${OUT}"
set -e
-
# create coverage output
-echo 'mode: atomic' > $OUT
-for PKG in $(go list ./...|grep -v -E 'vendor|contrib|wiki|customtok'); do
- if [[ "$TRAVIS" == true ]]; then
- go test -v -timeout 25m -covermode=atomic -coverprofile=$TMP $PKG
- else
- go test -v -race -timeout 25m -covermode=atomic -coverprofile=$TMP $PKG | go-test-teamcity
- fi
- tail -n +2 $TMP >> $OUT
+echo 'mode: atomic' >"${OUT}"
+for PKG in $(go list ./... | grep -v -E 'vendor|contrib|wiki|customtok'); do
+ if [[ ${TRAVIS} == true ]]; then
+ go test -v -timeout 25m -covermode=atomic -coverprofile="${TMP}" "${PKG}"
+ else
+ go test -v -race -timeout 25m -covermode=atomic -coverprofile="${TMP}" "${PKG}" | go-test-teamcity
+ fi
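+ # strip the per-package "mode:" header line before appending to the merged profile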
+ tail -n +2 "${TMP}" >>"${OUT}"
done
# open in browser if not in a build environment
-if [ ! -z "$DISPLAY" ]; then
- go tool cover -html=$OUT
+if [[ -n ${DISPLAY} ]]; then
+ go tool cover -html="${OUT}"
fi
diff --git a/contrib/scripts/functions.sh b/contrib/scripts/functions.sh
index 7ee0c432440..36cb68887c8 100755
--- a/contrib/scripts/functions.sh
+++ b/contrib/scripts/functions.sh
@@ -7,53 +7,53 @@ set -e
# May be called with an argument which is a docker compose file
# to use *instead of* the default docker-compose.yml.
function restartCluster {
- if [[ -z $1 ]]; then
- compose_file="docker-compose.yml"
- else
- compose_file="$(readlink -f $1)"
- fi
-
- basedir=$(dirname "${BASH_SOURCE[0]}")/../..
- pushd $basedir/dgraph >/dev/null
- echo "Rebuilding dgraph ..."
-
- docker_compose_gopath="${GOPATH:-$(go env GOPATH)}"
- make install
-
- if [[ "$OSTYPE" == "darwin"* ]]; then
- if !(AVAILABLE_RAM=$(cat ~/Library/Group\ Containers/group.com.docker/settings.json | grep memoryMiB | grep -oe "[0-9]\+") && test $AVAILABLE_RAM -ge 6144); then
- echo -e "\e[33mWarning: You may not have allocated enough memory for Docker on Mac. Please increase the allocated RAM to at least 6GB with a 4GB swap. See https://docs.docker.com/docker-for-mac/#resources \e[0m"
- fi
- docker_compose_gopath=`pwd`/../osx-docker-gopath
-
- # FIXME: read the go version from a constant
- docker run --rm \
- -v dgraph_gopath:/go \
- -v dgraph_gocache:/root/.cache/go-build \
- -v `pwd`/..:/app \
- -w /app/dgraph \
- golang:1.19.5 \
- go build -o /app/osx-docker-gopath/bin/dgraph
- fi
-
- docker ps -a --filter label="cluster=test" --format "{{.Names}}" | xargs -r docker rm -f
- GOPATH=$docker_compose_gopath docker-compose -p dgraph -f $compose_file up --force-recreate --build --remove-orphans -d || exit 1
- popd >/dev/null
-
- $basedir/contrib/wait-for-it.sh -t 60 localhost:6180 || exit 1
- $basedir/contrib/wait-for-it.sh -t 60 localhost:9180 || exit 1
- sleep 10 || exit 1
+ if [[ -z $1 ]]; then
+ compose_file="docker-compose.yml"
+ else
+ compose_file="$(readlink -f "$1")"
+ fi
+
+ basedir=$(dirname "${BASH_SOURCE[0]}")/../..
+ pushd "${basedir}"/dgraph >/dev/null
+ echo "Rebuilding dgraph ..."
+
+ docker_compose_gopath="${GOPATH:-$(go env GOPATH)}"
+ make install
+
+ if [[ ${OSTYPE} == "darwin"* ]]; then
+ if ! (AVAILABLE_RAM=$(grep memoryMiB ~/Library/Group\ Containers/group.com.docker/settings.json | grep -oe "[0-9]\+") && test "${AVAILABLE_RAM}" -ge 6144); then
+ echo -e "\e[33mWarning: You may not have allocated enough memory for Docker on Mac. Please increase the allocated RAM to at least 6GB with a 4GB swap. See https://docs.docker.com/docker-for-mac/#resources \e[0m"
+ fi
+ docker_compose_gopath=$(pwd)/../osx-docker-gopath
+
+ # FIXME: read the go version from a constant
+ docker run --rm \
+ -v dgraph_gopath:/go \
+ -v dgraph_gocache:/root/.cache/go-build \
+ -v "$(pwd)"/..:/app \
+ -w /app/dgraph \
+ golang:1.19.5 \
+ go build -o /app/osx-docker-gopath/bin/dgraph
+ fi
+
+ docker ps -a --filter label="cluster=test" --format "{{.Names}}" | xargs -r docker rm -f
+ GOPATH=${docker_compose_gopath} docker-compose -p dgraph -f "${compose_file}" up --force-recreate --build --remove-orphans -d || exit 1
+ popd >/dev/null
+
+ "${basedir}"/contrib/wait-for-it.sh -t 60 localhost:6180 || exit 1
+ "${basedir}"/contrib/wait-for-it.sh -t 60 localhost:9180 || exit 1
+ sleep 10 || exit 1
}
function stopCluster {
- docker ps --filter label="cluster=test" --format "{{.Names}}" \
- | xargs -r docker stop | sed 's/^/Stopped /'
- docker ps -a --filter label="cluster=test" --format "{{.Names}}" \
- | xargs -r docker rm | sed 's/^/Removed /'
+ docker ps --filter label="cluster=test" --format "{{.Names}}" |
+ xargs -r docker stop | sed 's/^/Stopped /'
+ docker ps -a --filter label="cluster=test" --format "{{.Names}}" |
+ xargs -r docker rm | sed 's/^/Removed /'
}
function loginWithGroot() {
- curl -s -XPOST localhost:8180/login -d '{"userid": "groot","password": "password"}' \
- | python3 -c \
- "import json; resp = input(); data = json.loads(resp); print(data['data']['accessJWT'])"
+ curl -s -XPOST localhost:8180/login -d '{"userid": "groot","password": "password"}' |
+ python3 -c \
+ "import json; resp = input(); data = json.loads(resp); print(data['data']['accessJWT'])"
}
diff --git a/contrib/scripts/goldendata-queries.sh b/contrib/scripts/goldendata-queries.sh
index d77766a8e0e..9fecca533ed 100755
--- a/contrib/scripts/goldendata-queries.sh
+++ b/contrib/scripts/goldendata-queries.sh
@@ -1,46 +1,44 @@
#!/bin/bash
basedir=$(dirname "${BASH_SOURCE[0]}")/../..
-source $basedir/contrib/scripts/functions.sh
-pushd $(dirname "${BASH_SOURCE[0]}")/queries &> /dev/null
+source "${basedir}"/contrib/scripts/functions.sh
+pushd "$(dirname "${BASH_SOURCE[0]}")/queries" &>/dev/null
function run_index_test {
- local max_attempts=${ATTEMPTS-5}
- local timeout=${TIMEOUT-1}
- local attempt=0
- local exitCode=0
-
- X=$1
- GREPFOR=$2
- ANS=$3
- echo "Running test: ${X}"
- while (( $attempt < $max_attempts ))
- do
- set +e
- accessToken=`loginWithGroot`
- N=`curl -s -H 'Content-Type: application/dql' localhost:8180/query -XPOST -d @${X}.in -H "X-Dgraph-AccessToken: $accessToken"`
- exitCode=$?
-
- set -e
-
- if [[ $exitCode == 0 ]]
- then
- break
- fi
-
- echo "Failure! Retrying in $timeout.." 1>&2
- sleep $timeout
- attempt=$(( attempt + 1 ))
- timeout=$(( timeout * 2 ))
- done
-
- NUM=$(echo $N | python3 -m json.tool | grep $GREPFOR | wc -l)
- if [[ ! "$NUM" -eq "$ANS" ]]; then
- echo "Index test failed: ${X} Expected: $ANS Got: $NUM"
- exit 1
- else
- echo -e "Index test passed: ${X}\n"
- fi
+ local max_attempts=${ATTEMPTS-5}
+ local timeout=${TIMEOUT-1}
+ local attempt=0
+ local exitCode=0
+
+ X=$1
+ GREPFOR=$2
+ ANS=$3
+ echo "Running test: ${X}"
+ while ((attempt < max_attempts)); do
+ set +e
+ accessToken=$(loginWithGroot)
+ N=$(curl -s -H 'Content-Type: application/dql' localhost:8180/query -XPOST -d @"${X}".in -H "X-Dgraph-AccessToken: ${accessToken}")
+ exitCode=$?
+
+ set -e
+
+ if [[ ${exitCode} == 0 ]]; then
+ break
+ fi
+
+ echo "Failure! Retrying in ${timeout}.." 1>&2
+ sleep "${timeout}"
+ attempt=$((attempt + 1))
+ timeout=$((timeout * 2))
+ done
+
+ NUM=$(echo "${N}" | python3 -m json.tool | grep "${GREPFOR}" | wc -l)
+ if [[ ${NUM} -ne ${ANS} ]]; then
+ echo "Index test failed: ${X} Expected: ${ANS} Got: ${NUM}"
+ exit 1
+ else
+ echo -e "Index test passed: ${X}\n"
+ fi
}
echo -e "Running some queries and checking count of results returned."
@@ -54,5 +52,4 @@ run_index_test releasedate_sort_first_offset release_date 2316
run_index_test releasedate_geq release_date 60992
run_index_test gen_anyof_good_bad name 1104
-popd &> /dev/null
-
+popd &>/dev/null
diff --git a/contrib/scripts/install-dependencies.sh b/contrib/scripts/install-dependencies.sh
index e02a0603844..4bcd9be184c 100755
--- a/contrib/scripts/install-dependencies.sh
+++ b/contrib/scripts/install-dependencies.sh
@@ -11,4 +11,3 @@ govendor fetch github.com/prometheus/client_golang/prometheus/...@v0.9.2
govendor fetch google.golang.org/grpc/...@v1.13.0
# Vendor dgo (latest version before API changes).
govendor fetch github.com/dgraph-io/dgo...@v1.0.0
-
diff --git a/contrib/scripts/load-test.sh b/contrib/scripts/load-test.sh
index 86a1b1f6d26..68fcf124eee 100755
--- a/contrib/scripts/load-test.sh
+++ b/contrib/scripts/load-test.sh
@@ -5,12 +5,12 @@ REQUIRED_MEM=$((20 * ONE_GB))
set -e
-total_mem_kb=`cat /proc/meminfo | awk '/MemTotal:/ {print $2}'`
-if [[ $total_mem_kb -lt $((REQUIRED_MEM / 1024)) ]]; then
- printf >&2 "Load test requires system with at least %dGB of memory\n" \
- $((REQUIRED_MEM / ONE_GB))
- exit 1
+total_mem_kb=$(awk '/MemTotal:/ {print $2}' /proc/meminfo)
+if [[ ${total_mem_kb} -lt $((REQUIRED_MEM / 1024)) ]]; then
+ printf >&2 "Load test requires system with at least %dGB of memory\n" \
+ $((REQUIRED_MEM / ONE_GB))
+ exit 1
fi
-bash contrib/scripts/loader.sh $1
-bash contrib/scripts/transactions.sh $1
+bash contrib/scripts/loader.sh "$1"
+bash contrib/scripts/transactions.sh "$1"
diff --git a/contrib/scripts/loader.sh b/contrib/scripts/loader.sh
index 52e7d37ae86..0987b580028 100755
--- a/contrib/scripts/loader.sh
+++ b/contrib/scripts/loader.sh
@@ -1,42 +1,42 @@
#!/bin/bash
basedir=$(dirname "${BASH_SOURCE[0]}")/../..
-goldendata=$(pwd)/$basedir/systest/data/goldendata.rdf.gz
+goldendata=$(pwd)/${basedir}/systest/data/goldendata.rdf.gz
set -e
-source $basedir/contrib/scripts/functions.sh
+source "${basedir}"/contrib/scripts/functions.sh
restartCluster
# Create a temporary directory to use for running live loader.
-tmpdir=`mktemp --tmpdir -d loader.tmp-XXXXXX`
-trap "rm -rf $tmpdir" EXIT
-pushd $tmpdir
-echo "Inside `pwd`"
+tmpdir=$(mktemp --tmpdir -d loader.tmp-XXXXXX)
+trap "rm -rf ${tmpdir}" EXIT
+pushd "${tmpdir}"
+echo "Inside $(pwd)"
# log file size.
-ls -laH $goldendata
+ls -laH "${goldendata}"
echo "Setting schema."
while true; do
- accessJWT=`loginWithGroot`
- curl -s -XPOST --output alter.txt -d '
+ accessJWT=$(loginWithGroot)
+ curl -s -XPOST --output alter.txt -d '
name: string @index(term) @lang .
initial_release_date: datetime @index(year) .
- ' "http://localhost:8180/alter" -H "X-Dgraph-AccessToken: $accessJWT"
- cat alter.txt
- echo
- cat alter.txt | grep -iq "success" && break
- echo "Retrying..."
- sleep 3
+ ' "http://localhost:8180/alter" -H "X-Dgraph-AccessToken: ${accessJWT}"
+ cat alter.txt
+ echo
+ grep -iq "success" alter.txt && break
+ echo "Retrying..."
+ sleep 3
done
rm -f alter.txt
echo -e "\nRunning dgraph live."
-dgraph live -f $goldendata -a "127.0.0.1:9180" -z "127.0.0.1:5180" -c 10 -u groot -p password
+dgraph live -f "${goldendata}" -a "127.0.0.1:9180" -z "127.0.0.1:5180" -c 10 -u groot -p password
popd
-rm -rf $tmpdir
+rm -rf "${tmpdir}"
echo "Running queries"
-$basedir/contrib/scripts/goldendata-queries.sh
+"${basedir}"/contrib/scripts/goldendata-queries.sh
stopCluster
diff --git a/contrib/scripts/transactions.sh b/contrib/scripts/transactions.sh
index a766e6a2edb..19c40ad0f11 100755
--- a/contrib/scripts/transactions.sh
+++ b/contrib/scripts/transactions.sh
@@ -1,29 +1,29 @@
#!/bin/bash
basedir=$(dirname "${BASH_SOURCE[0]}")/../..
-contrib=$basedir/contrib
+contrib=${basedir}/contrib
set -e
# go test -v $contrib/integration/testtxn/main_test.go
-source $contrib/scripts/functions.sh
+source "${contrib}"/scripts/functions.sh
restartCluster
echo "* Running transaction tests."
echo "* Running bank tests"
-go run $contrib/integration/bank/main.go --alpha=localhost:9180,localhost:9182,localhost:9183 --verbose=false
+go run "${contrib}"/integration/bank/main.go --alpha=localhost:9180,localhost:9182,localhost:9183 --verbose=false
echo "* Running account upsert tests"
-go run $contrib/integration/acctupsert/main.go --alpha=localhost:9180
+go run "${contrib}"/integration/acctupsert/main.go --alpha=localhost:9180
echo "* Running sentence swap tests"
-pushd $contrib/integration/swap
+pushd "${contrib}"/integration/swap
go build . && ./swap --alpha=localhost:9180
popd
echo "* Running mutate from #1750."
-pushd $contrib/integration/mutates
+pushd "${contrib}"/integration/mutates
go build . && ./mutates --add --alpha=localhost:9180
./mutates --alpha=localhost:9180
popd
diff --git a/contrib/standalone/hooks/build b/contrib/standalone/hooks/build
index c9af4522026..e5d3368f085 100755
--- a/contrib/standalone/hooks/build
+++ b/contrib/standalone/hooks/build
@@ -1,3 +1,3 @@
#!/bin/bash
# Used for Makefile or Docker Hub builds
-docker build -t dgraph/standalone:${DGRAPH_VERSION} --build-arg DGRAPH_VERSION=${DGRAPH_VERSION} .
+docker build -t dgraph/standalone:"${DGRAPH_VERSION}" --build-arg DGRAPH_VERSION="${DGRAPH_VERSION}" .
diff --git a/contrib/standalone/run.sh b/contrib/standalone/run.sh
index 454c3e7b154..e486cca0009 100644
--- a/contrib/standalone/run.sh
+++ b/contrib/standalone/run.sh
@@ -13,4 +13,5 @@ export DGRAPH_ALPHA_WHITELIST=0.0.0.0/0
export DGRAPH_ALPHA_SECURITY='whitelist=0.0.0.0/0'
# TODO properly handle SIGTERM for all three processes.
-dgraph zero & dgraph alpha
+dgraph zero &
+dgraph alpha
diff --git a/contrib/systemd/centos/README.md b/contrib/systemd/centos/README.md
index 16173052166..0d2263db543 100644
--- a/contrib/systemd/centos/README.md
+++ b/contrib/systemd/centos/README.md
@@ -20,8 +20,8 @@ mkdir -p /var/lib/dgraph/{p,w,zw}
chown -R dgraph:dgraph /var/{lib,log}/dgraph
```
-Next, copy the `systemd` unit files, i.e. `dgraph-alpha.service`, `dgraph-zero.service`,
-and `dgraph-ui.service`, in this directory to `/etc/systemd/system/`.
+Next, copy the `systemd` unit files, i.e. `dgraph-alpha.service`, `dgraph-zero.service`, and
+`dgraph-ui.service`, in this directory to `/etc/systemd/system/`.
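+
+For example, from within this directory:
+
+```Bash
+cp dgraph-alpha.service dgraph-zero.service dgraph-ui.service /etc/systemd/system/
+```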
> **NOTE** These unit files expect that Dgraph is installed as `/usr/local/bin/dgraph`.
@@ -39,8 +39,7 @@ systemctl enable dgraph-alpha
systemctl start dgraph-alpha
```
-The `dgraph-ui` service is optional and, unlike `dgraph-zero`, will not be started
-automatically.
+The `dgraph-ui` service is optional and, unlike `dgraph-zero`, will not be started automatically.
```Bash
systemctl enable dgraph-ui
diff --git a/contrib/systemd/centos/add_dgraph_account.sh b/contrib/systemd/centos/add_dgraph_account.sh
index 0e6a867e42e..80d02f21999 100755
--- a/contrib/systemd/centos/add_dgraph_account.sh
+++ b/contrib/systemd/centos/add_dgraph_account.sh
@@ -1,16 +1,16 @@
#!/usr/bin/env bash
- sudo_cmd=""
- if hash sudo 2>/dev/null; then
- sudo_cmd="sudo"
- echo "Requires sudo permission to install Dgraph in Systemd."
- if ! $sudo_cmd -v; then
- print_error "Need sudo privileges to complete installation."
- exit 1;
- fi
+sudo_cmd=""
+if hash sudo 2>/dev/null; then
+ sudo_cmd="sudo"
+ echo "Requires sudo permission to install Dgraph in Systemd."
+ if ! ${sudo_cmd} -v; then
+ print_error "Need sudo privileges to complete installation."
+ exit 1
fi
+fi
-$sudo_cmd groupadd --system dgraph
-$sudo_cmd useradd --system -d /var/lib/dgraph -s /bin/false -g dgraph dgraph
-$sudo_cmd mkdir -p /var/log/dgraph
-$sudo_cmd mkdir -p /var/lib/dgraph/{p,w,zw}
-$sudo_cmd chown -R dgraph:dgraph /var/{lib,log}/dgraph
+${sudo_cmd} groupadd --system dgraph
+${sudo_cmd} useradd --system -d /var/lib/dgraph -s /bin/false -g dgraph dgraph
+${sudo_cmd} mkdir -p /var/log/dgraph
+${sudo_cmd} mkdir -p /var/lib/dgraph/{p,w,zw}
+${sudo_cmd} chown -R dgraph:dgraph /var/{lib,log}/dgraph
diff --git a/contrib/systemd/ha_cluster/README.md b/contrib/systemd/ha_cluster/README.md
index 50193cdc1b5..e40a5dd8a91 100644
--- a/contrib/systemd/ha_cluster/README.md
+++ b/contrib/systemd/ha_cluster/README.md
@@ -1,15 +1,17 @@
# Systemd Configuration for a HA Dgraph Cluster
-This following document describes how to configure several nodes that are managed through [systemd](https://systemd.io/).
+The following document describes how to configure several nodes that are managed through
+[systemd](https://systemd.io/).
## Overview
You will configure the following types of Dgraph nodes:
-* zero nodes
- * zero leader node - an initial leader node configured at start of cluster, e.g. `zero-0`
- * zero peer nodes - peer nodes, e.g. `zero-1`, `zero-2`, that point to the zero leader
-* alpha nodes - configured similarly, e.g. `alpha-0`, `alpha-1`, `alpha-2`, that point to list of all zero nodes
+- zero nodes
+  - zero leader node - an initial leader node configured at the start of the cluster, e.g. `zero-0`
+ - zero peer nodes - peer nodes, e.g. `zero-1`, `zero-2`, that point to the zero leader
+- alpha nodes - configured similarly, e.g. `alpha-0`, `alpha-1`, `alpha-2`, that point to the list
+  of all zero nodes
> **NOTE** These commands are run as root using bash shell.
@@ -33,10 +35,12 @@ chown --recursive dgraph:dgraph /var/{lib,log}/dgraph
### Configure First Zero Node
-Edit the file [dgraph-zero-0.service](dgraph-zero-0.service) as necessary. There are three parameters and include the hostname:
+Edit the file [dgraph-zero-0.service](dgraph-zero-0.service) as necessary. Two parameters need to
+be set, in addition to the hostname:
-* `--replicas` - total number of zeros
-* `--idx` - initial zero node will be `1`, and each zero node added afterward will have the `idx` increased by `1`
+- `--replicas` - total number of zeros
+- `--idx` - initial zero node will be `1`, and each zero node added afterward will have the `idx`
+ increased by `1`
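+
+For example, with three zeros the first zero's resulting `ExecStart` command might look roughly
+like this (a sketch; the hostname, paths, and counts are illustrative):
+
+```bash
+/usr/local/bin/dgraph zero --my=$(hostname):5080 --wal /var/lib/dgraph/zw --raft "idx=1" --replicas 3
+```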
Copy the file to `/etc/systemd/system/dgraph-zero.service` and run the following:
@@ -47,7 +51,9 @@ systemctl start dgraph-zero
### Configure Second Zero Node
-This process is similar to previous step. Edit the file [dgraph-zero-1.service](dgraph-zero-1.service) as required. Replace the string `{{ zero-0 }}` to match the hostname of the zero leader, such as `zero-0`. The `idx` will be set to `2`
+This process is similar to the previous step. Edit the file
+[dgraph-zero-1.service](dgraph-zero-1.service) as required. Replace the string `{{ zero-0 }}` with
+the hostname of the zero leader, such as `zero-0`. The `idx` will be set to `2`.
Copy the file to `/etc/systemd/system/dgraph-zero.service` and run the following:
@@ -58,7 +64,8 @@ systemctl start dgraph-zero
### Configure Third Zero Node
-For the third zero node, [dgraph-zero-2.service](dgraph-zero-2.service), this is configured in the same manner as the second zero node with the `idx` set to `3`
+The third zero node, [dgraph-zero-2.service](dgraph-zero-2.service), is configured in the same
+manner as the second, with `idx` set to `3`.
Copy the file to `/etc/systemd/system/dgraph-zero.service` and run the following:
@@ -69,7 +76,10 @@ systemctl start dgraph-zero
### Configure Firewall for Zero Ports
-For zero you will want to open up port `5080` (GRPC). The port `6080` (HTTP) is optional admin port that is not required by clients. For further information, see https://dgraph.io/docs/deploy/ports-usage/. This process will vary depending on firewall you are using. Some examples below:
+For zero you will want to open up port `5080` (GRPC). Port `6080` (HTTP) is an optional admin port
+that is not required by clients. For further information, see
+https://dgraph.io/docs/deploy/ports-usage/. This process will vary depending on the firewall you
+are using. Some examples are below:
On **Ubuntu 18.04**:
@@ -100,9 +110,12 @@ mkdir --parents /var/{log/dgraph,lib/dgraph/{w,p}}
chown --recursive dgraph:dgraph /var/{lib,log}/dgraph
```
-Edit the file [dgraph-alpha.service](dgraph-alpha.service) as required. For the `--zero` parameter, you want to create a list that matches all the zeros in your cluster, so that when `{{ zero-0 }}`, `{{ zero-1 }}`, and `{{ zero-2 }}` are replaced, you will have a string something like this (adjusted to your organization's domain):
+Edit the file [dgraph-alpha.service](dgraph-alpha.service) as required. For the `--zero` parameter,
+you want to create a list that matches all the zeros in your cluster, so that when `{{ zero-0 }}`,
+`{{ zero-1 }}`, and `{{ zero-2 }}` are replaced, you will have a string something like this
+(adjusted to your organization's domain):
-```
+```bash
--zero zero-0:5080,zero-1:5080,zero-2:5080
```
@@ -115,7 +128,9 @@ systemctl start dgraph-alpha
### Configure Firewall for Alpha Ports
-For alpha you will want to open up ports `7080` (GRPC), `8080` (HTTP/S), and `9080` (GRPC). For further information, see: https://dgraph.io/docs/deploy/ports-usage/. This process will vary depending on firewall you are using. Some examples below:
+For alpha you will want to open up ports `7080` (GRPC), `8080` (HTTP/S), and `9080` (GRPC). For
+further information, see https://dgraph.io/docs/deploy/ports-usage/. This process will vary
+depending on the firewall you are using. Some examples are below:
On **Ubuntu 18.04**:
@@ -129,7 +144,6 @@ ufw allow from any to any port 9080 proto tcp
On **CentOS 8**:
-
```bash
# NOTE: public zone is the default and includes NIC used to access service
# enable internal port
diff --git a/contrib/systemd/ha_cluster/tests/README.md b/contrib/systemd/ha_cluster/tests/README.md
index 618844e5b5e..885ed51ba70 100644
--- a/contrib/systemd/ha_cluster/tests/README.md
+++ b/contrib/systemd/ha_cluster/tests/README.md
@@ -4,7 +4,8 @@ These are tests to both demonstrate and test functionality of systemd units to m
## Requirements
-* HashiCorp [Vagrant](https://www.vagrantup.com/) - automation to manage virtual machine systems and provision them.
+- HashiCorp [Vagrant](https://www.vagrantup.com/) - automation to manage virtual machine systems and
+ provision them.
## Instructions
@@ -27,14 +28,15 @@ vagrant up
#### Using libvirt Provider
-If you running on Linux and would like to use KVM for a speedier Vagrant experience, you can install the `vagrant-libvirt` plugin (see [Installation](https://github.com/vagrant-libvirt/vagrant-libvirt#installation)) and run this:
+If you are running on Linux and would like to use KVM for a speedier Vagrant experience, you can
+install the `vagrant-libvirt` plugin (see
+[Installation](https://github.com/vagrant-libvirt/vagrant-libvirt#installation)) and run this:
```bash
export VAGRANT_DEFAULT_PROVIDER=libvirt
vagrant up
```
-
### Logging Into the System
You can log into the guest virtual machines with SSH.
@@ -73,6 +75,7 @@ vagrant ssh zero-0 --command "sudo journalctl -u dgraph-zero"
# get logs from alpha0
vagrant ssh alpha-0 --command "sudo journalctl -u dgraph-alpha"
```
+
### Cleanup and Destroy VMs
```bash
@@ -83,7 +86,9 @@ vagrant destroy --force
### Configuration
-The configuration is a `hosts` file format, space-delimited. This defines both the hostnames and virtual IP address used to create the virtual guests. Vagrant in combination with the underlying virtual machine provider will create a virtual network accessible by the host.
+The configuration is a `hosts` file format, space-delimited. This defines both the hostnames and
+virtual IP addresses used to create the virtual guests. Vagrant, in combination with the underlying
+virtual machine provider, will create a virtual network accessible by the host.
```host
@@ -95,11 +100,14 @@ You can use `default` for one system to be designated as the default for `vagran
#### Dgraph Version
-By default, the latest Dgraph version will be used to for the version. If you want to use another version, you can set the environment variable `DGRAPH_VERSION` for the desired version.
+By default, the latest Dgraph version is used. If you want to use another version, set the
+environment variable `DGRAPH_VERSION` to the desired version.
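+
+For example (the version shown is illustrative):
+
+```bash
+export DGRAPH_VERSION=v21.03.0
+vagrant up
+```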
### Windows Environment
-On Windows, for either Hyper/V or Virtualbox providers, for convenience you can specify username `SMB_USER` and password `SMB_PASSWD` before running `vagrant up`, so that you won't get prompted 6 times for username and password.
+On Windows, for either Hyper/V or Virtualbox providers, for convenience you can specify username
+`SMB_USER` and password `SMB_PASSWD` before running `vagrant up`, so that you won't get prompted 6
+times for username and password.
> **NOTE**: Setting a password in an environment variable is not considered a security best practice.
@@ -116,22 +124,24 @@ vagrant up
## Environments Tested
-* Guest OS
- * [Cent OS 8](https://app.vagrantup.com/generic/boxes/centos8) from [Roboxes](https://roboxes.org/)
- * [Ubuntu 18.04](https://app.vagrantup.com/generic/boxes/ubuntu1804) from [Roboxes](https://roboxes.org/)
-* Providers
- * [libvirt](https://github.com/vagrant-libvirt/vagrant-libvirt) (KVM) on Ubuntu 19.10
- * [VirtualBox](https://www.vagrantup.com/docs/providers/virtualbox) on Win10 Home, Mac OS X 10.14
- * [Hyper/V](https://www.vagrantup.com/docs/providers/hyperv) on Win10 Pro
+- Guest OS
+ - [CentOS 8](https://app.vagrantup.com/generic/boxes/centos8) from
+ [Roboxes](https://roboxes.org/)
+ - [Ubuntu 18.04](https://app.vagrantup.com/generic/boxes/ubuntu1804) from
+ [Roboxes](https://roboxes.org/)
+- Providers
+ - [libvirt](https://github.com/vagrant-libvirt/vagrant-libvirt) (KVM) on Ubuntu 19.10
+ - [VirtualBox](https://www.vagrantup.com/docs/providers/virtualbox) on Win10 Home, Mac OS X 10.14
+ - [Hyper/V](https://www.vagrantup.com/docs/providers/hyperv) on Win10 Pro
## Resources
-* Vagrant
- * Util API: https://www.rubydoc.info/github/hashicorp/vagrant/Vagrant/Util/Platform
- * Multi-Machine: https://www.vagrantup.com/docs/multi-machine
- * Synced Folders: https://www.vagrantup.com/docs/synced-folders
- * lib-virt: https://github.com/vagrant-libvirt/vagrant-libvirt#synced-folders
- * Provisioning: https://www.vagrantup.com/docs/provisioning
-* Dgraph
- * Documentation: https://dgraph.io/docs/
- * Community: https://discuss.dgraph.io/
+- Vagrant
+ - Util API: https://www.rubydoc.info/github/hashicorp/vagrant/Vagrant/Util/Platform
+ - Multi-Machine: https://www.vagrantup.com/docs/multi-machine
+ - Synced Folders: https://www.vagrantup.com/docs/synced-folders
+ - lib-virt: https://github.com/vagrant-libvirt/vagrant-libvirt#synced-folders
+ - Provisioning: https://www.vagrantup.com/docs/provisioning
+- Dgraph
+ - Documentation: https://dgraph.io/docs/
+ - Community: https://discuss.dgraph.io/
diff --git a/contrib/systemd/ha_cluster/tests/centos8/provision.sh b/contrib/systemd/ha_cluster/tests/centos8/provision.sh
index 76ad8677d5e..9510ddf7ae1 100755
--- a/contrib/systemd/ha_cluster/tests/centos8/provision.sh
+++ b/contrib/systemd/ha_cluster/tests/centos8/provision.sh
@@ -4,52 +4,55 @@
# main
################################
main() {
- if [[ $1 =~ h(elp)?|\? ]]; then usage; fi
- if (( $# != 1 )); then usage; fi
- REPLICAS=$1
+ if [[ $1 =~ ^(-h|--help|h(elp)?|\?)$ ]]; then usage; fi
+ if (($# != 1)); then usage; fi
+ REPLICAS=$1
- echo "RUNNING script"
+ echo "RUNNING script"
- setup_hosts
- install_dgraph
- setup_user_group
- setup_systemd
- setup_firewall
+ setup_hosts
+ install_dgraph
+ setup_user_group
+ setup_systemd
+ setup_firewall
}
#####
# usage
################################
usage() {
- printf " Usage: \n\t$0 [REPLICAS]\n\n" >&2
- exit 1
+ printf " Usage: \n\t$0 [REPLICAS]\n\n" >&2
+ exit 1
}
#####
# install_dgraph - installer script from https://get.dgraph.io
################################
install_dgraph() {
- [[ -z "$DGRAPH_VERSION" ]] && { echo 'DGRAPH_VERSION not specified. Aborting' 2>&1 ; return 1; }
- echo "INFO: Installing Dgraph with 'curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="$DGRAPH_VERSION" bash'"
- curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="$DGRAPH_VERSION" bash
+ [[ -z ${DGRAPH_VERSION} ]] && {
+ echo 'DGRAPH_VERSION not specified. Aborting' >&2
+ return 1
+ }
+ echo "INFO: Installing Dgraph with 'curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION=""${DGRAPH_VERSION}"" bash'"
+ curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="${DGRAPH_VERSION}" bash
}
#####
# setup_hosts - configure /etc/hosts in absence of DNS
################################
setup_hosts() {
- CONFIG_FILE=/vagrant/hosts
- if [[ ! -f /vagrant/hosts ]]; then
- echo "INFO: '$CONFIG_FILE' does not exist. Skipping configuring /etc/hosts"
- return 1
- fi
-
- while read -a LINE; do
- ## append to hosts entry if it doesn't exist
- if ! grep -q "${LINE[1]}" /etc/hosts; then
- printf "%s %s \n" ${LINE[*]} >> /etc/hosts
- fi
- done < $CONFIG_FILE
+ CONFIG_FILE=/vagrant/hosts
+ if [[ ! -f /vagrant/hosts ]]; then
+ echo "INFO: '${CONFIG_FILE}' does not exist. Skipping configuring /etc/hosts"
+ return 1
+ fi
+
+ while read -r -a LINE; do
+ ## append to hosts entry if it doesn't exist
+ if ! grep -q "${LINE[1]}" /etc/hosts; then
+ printf "%s %s \n" "${LINE[*]}" >>/etc/hosts
+ fi
+ done <"${CONFIG_FILE}"
}
#####
@@ -64,120 +67,120 @@ setup_user_group() {
# setup_firewall on Ubuntu 18.04 and CentOS 8
################################
setup_firewall() {
- case $(hostname) in
- *zero*)
- PORTS=(5080 6080)
- ;;
- *alpha*)
- PORTS=(7080 8080 9080)
- ;;
- esac
-
- if grep -q centos /etc/os-release; then
- if /usr/bin/firewall-cmd --state 2>&1 | grep -q "^running$"; then
- for PORT in ${PORTS[*]}; do
- firewall-cmd --zone=public --permanent --add-port=$PORT/tcp
- firewall-cmd --reload
- done
- fi
- elif grep -iq ubuntu /etc/os-release; then
- if /usr/sbin/ufw status | grep -wq active; then
- for PORT in ${PORTS[*]}; do
- ufw allow from any to any port $PORT proto tcp
- done
- fi
- fi
+ case $(hostname) in
+ *zero*)
+ PORTS=(5080 6080)
+ ;;
+ *alpha*)
+ PORTS=(7080 8080 9080)
+ ;;
+ esac
+
+ if grep -q centos /etc/os-release; then
+ if /usr/bin/firewall-cmd --state 2>&1 | grep -q "^running$"; then
+ for PORT in "${PORTS[@]}"; do
+ firewall-cmd --zone=public --permanent --add-port="${PORT}"/tcp
+ firewall-cmd --reload
+ done
+ fi
+ elif grep -iq ubuntu /etc/os-release; then
+ if /usr/sbin/ufw status | grep -wq active; then
+ for PORT in "${PORTS[@]}"; do
+ ufw allow from any to any port "${PORT}" proto tcp
+ done
+ fi
+ fi
}
#####
# setup_systemd_zero - setup dir and systemd unit for zero leader or peer
################################
setup_systemd_zero() {
- TYPE=${1:-"peer"}
- LDR="zero-0:5080"
- WAL=/var/lib/dgraph/zw
- IDX=$(( $(grep -o '[0-9]' <<< $HOSTNAME) + 1 ))
- if [[ $TYPE == "leader" ]]; then
- EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --wal $WAL
- --raft="idx=$IDX" --replicas $REPLICAS'"
- else
- EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --peer $LDR --wal $WAL
- --raft="idx=$IDX" --replicas $REPLICAS'"
- fi
-
- mkdir -p /var/{log/dgraph,lib/dgraph/zw}
- chown -R dgraph:dgraph /var/{lib,log}/dgraph
-
- install_systemd_unit "zero" "$EXEC"
+ TYPE=${1:-"peer"}
+ LDR="zero-0:5080"
+ WAL=/var/lib/dgraph/zw
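+ # derive the raft index from the digit in the hostname, e.g. zero-1 -> idx=2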
+ IDX=$(($(grep -o '[0-9]' <<<"${HOSTNAME}") + 1))
+ if [[ ${TYPE} == "leader" ]]; then
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --wal ${WAL}
+ --raft="idx=${IDX}" --replicas ${REPLICAS}'"
+ else
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --peer ${LDR} --wal ${WAL}
+ --raft="idx=${IDX}" --replicas ${REPLICAS}'"
+ fi
+
+ mkdir -p /var/{log/dgraph,lib/dgraph/zw}
+ chown -R dgraph:dgraph /var/{lib,log}/dgraph
+
+ install_systemd_unit "zero" "${EXEC}"
}
#####
# setup_systemd_alpha - setup dir and systemd unit for alpha
################################
setup_systemd_alpha() {
- WAL=/var/lib/dgraph/w
- POSTINGS=/var/lib/dgraph/p
- # build array based on number of replicas
- for (( I=0; I <= $REPLICAS-1; I++)); do ZEROS+=("zero-$I:5080");done
- IFS=, eval 'ZERO_LIST="${ZEROS[*]}"' # join by ','
+ WAL=/var/lib/dgraph/w
+ POSTINGS=/var/lib/dgraph/p
+ # build array based on number of replicas
+ for ((I = 0; I <= REPLICAS - 1; I++)); do ZEROS+=("zero-${I}:5080"); done
+ IFS=, eval 'ZERO_LIST="${ZEROS[*]}"' # join by ','
- EXEC="/bin/bash -c '/usr/local/bin/dgraph alpha --my=\$(hostname):7080 --zero $ZERO_LIST --postings $POSTINGS --wal $WAL'"
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph alpha --my=\$(hostname):7080 --zero ${ZERO_LIST} --postings ${POSTINGS} --wal ${WAL}'"
- mkdir -p /var/{log/dgraph,lib/dgraph/{w,p}}
- chown -R dgraph:dgraph /var/{lib,log}/dgraph
+ mkdir -p /var/{log/dgraph,lib/dgraph/{w,p}}
+ chown -R dgraph:dgraph /var/{lib,log}/dgraph
- install_systemd_unit "alpha" "$EXEC"
+ install_systemd_unit "alpha" "${EXEC}"
}
#####
# install_systemd_unit - config systemd unit give exec str and service type
################################
install_systemd_unit() {
- TYPE=$1
- EXEC=$2
-
- if [[ ! -f /etc/systemd/system/dgraph-$TYPE.service ]]; then
- cat <<-EOF > /etc/systemd/system/dgraph-$TYPE.service
-[Unit]
-Description=dgraph $TYPE server
-Wants=network.target
-After=network.target
-
-[Service]
-Type=simple
-WorkingDirectory=/var/lib/dgraph
-Restart=on-failure
-ExecStart=$EXEC
-StandardOutput=journal
-StandardError=journal
-User=dgraph
-Group=dgraph
-
-[Install]
-WantedBy=multi-user.target
-EOF
- systemctl enable dgraph-$TYPE
- systemctl start dgraph-$TYPE
- else
- echo "Skipping as 'dgraph-$TYPE.service' already exists"
- fi
+ TYPE=$1
+ EXEC=$2
+
+ if [[ ! -f /etc/systemd/system/dgraph-${TYPE}.service ]]; then
+ cat <<-EOF >/etc/systemd/system/dgraph-"${TYPE}".service
+ [Unit]
+ Description=dgraph ${TYPE} server
+ Wants=network.target
+ After=network.target
+
+ [Service]
+ Type=simple
+ WorkingDirectory=/var/lib/dgraph
+ Restart=on-failure
+ ExecStart=${EXEC}
+ StandardOutput=journal
+ StandardError=journal
+ User=dgraph
+ Group=dgraph
+
+ [Install]
+ WantedBy=multi-user.target
+ EOF
+ systemctl enable dgraph-"${TYPE}"
+ systemctl start dgraph-"${TYPE}"
+ else
+ echo "Skipping as 'dgraph-${TYPE}.service' already exists"
+ fi
}
#####
# setup_systemd - configure systemd unit based on hostname
################################
setup_systemd() {
- case $(hostname) in
- *zero-0*)
- setup_systemd_zero "leader"
- ;;
- *zero-[1-9]*)
- setup_systemd_zero "peer"
- ;;
- *alpha*)
- setup_systemd_alpha
- ;;
- esac
+ case $(hostname) in
+ *zero-0*)
+ setup_systemd_zero "leader"
+ ;;
+ *zero-[1-9]*)
+ setup_systemd_zero "peer"
+ ;;
+ *alpha*)
+ setup_systemd_alpha
+ ;;
+ esac
}
main "$@"
diff --git a/contrib/systemd/ha_cluster/tests/ubuntu1804/provision.sh b/contrib/systemd/ha_cluster/tests/ubuntu1804/provision.sh
index 76ad8677d5e..9510ddf7ae1 100755
--- a/contrib/systemd/ha_cluster/tests/ubuntu1804/provision.sh
+++ b/contrib/systemd/ha_cluster/tests/ubuntu1804/provision.sh
@@ -4,52 +4,55 @@
# main
################################
main() {
- if [[ $1 =~ h(elp)?|\? ]]; then usage; fi
- if (( $# != 1 )); then usage; fi
- REPLICAS=$1
+ if [[ $1 =~ ^(-h|--help|h(elp)?|\?)$ ]]; then usage; fi
+ if (($# != 1)); then usage; fi
+ REPLICAS=$1
- echo "RUNNING script"
+ echo "RUNNING script"
- setup_hosts
- install_dgraph
- setup_user_group
- setup_systemd
- setup_firewall
+ setup_hosts
+ install_dgraph
+ setup_user_group
+ setup_systemd
+ setup_firewall
}
#####
# usage
################################
usage() {
- printf " Usage: \n\t$0 [REPLICAS]\n\n" >&2
- exit 1
+ printf " Usage: \n\t$0 [REPLICAS]\n\n" >&2
+ exit 1
}
#####
# install_dgraph - installer script from https://get.dgraph.io
################################
install_dgraph() {
- [[ -z "$DGRAPH_VERSION" ]] && { echo 'DGRAPH_VERSION not specified. Aborting' 2>&1 ; return 1; }
- echo "INFO: Installing Dgraph with 'curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="$DGRAPH_VERSION" bash'"
- curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="$DGRAPH_VERSION" bash
+ [[ -z ${DGRAPH_VERSION} ]] && {
+ echo 'DGRAPH_VERSION not specified. Aborting' >&2
+ return 1
+ }
+ echo "INFO: Installing Dgraph with 'curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION=""${DGRAPH_VERSION}"" bash'"
+ curl -sSf https://get.dgraph.io | ACCEPT_LICENSE="y" VERSION="${DGRAPH_VERSION}" bash
}
#####
# setup_hosts - configure /etc/hosts in absence of DNS
################################
setup_hosts() {
- CONFIG_FILE=/vagrant/hosts
- if [[ ! -f /vagrant/hosts ]]; then
- echo "INFO: '$CONFIG_FILE' does not exist. Skipping configuring /etc/hosts"
- return 1
- fi
-
- while read -a LINE; do
- ## append to hosts entry if it doesn't exist
- if ! grep -q "${LINE[1]}" /etc/hosts; then
- printf "%s %s \n" ${LINE[*]} >> /etc/hosts
- fi
- done < $CONFIG_FILE
+ CONFIG_FILE=/vagrant/hosts
+ if [[ ! -f /vagrant/hosts ]]; then
+ echo "INFO: '${CONFIG_FILE}' does not exist. Skipping configuring /etc/hosts"
+ return 1
+ fi
+
+ while read -r -a LINE; do
+ ## append to hosts entry if it doesn't exist
+ if ! grep -q "${LINE[1]}" /etc/hosts; then
+ printf "%s %s \n" "${LINE[*]}" >>/etc/hosts
+ fi
+ done <"${CONFIG_FILE}"
}
#####
@@ -64,120 +67,120 @@ setup_user_group() {
# setup_firewall on Ubuntu 18.04 and CentOS 8
################################
setup_firewall() {
- case $(hostname) in
- *zero*)
- PORTS=(5080 6080)
- ;;
- *alpha*)
- PORTS=(7080 8080 9080)
- ;;
- esac
-
- if grep -q centos /etc/os-release; then
- if /usr/bin/firewall-cmd --state 2>&1 | grep -q "^running$"; then
- for PORT in ${PORTS[*]}; do
- firewall-cmd --zone=public --permanent --add-port=$PORT/tcp
- firewall-cmd --reload
- done
- fi
- elif grep -iq ubuntu /etc/os-release; then
- if /usr/sbin/ufw status | grep -wq active; then
- for PORT in ${PORTS[*]}; do
- ufw allow from any to any port $PORT proto tcp
- done
- fi
- fi
+ case $(hostname) in
+ *zero*)
+ PORTS=(5080 6080)
+ ;;
+ *alpha*)
+ PORTS=(7080 8080 9080)
+ ;;
+ esac
+
+ if grep -q centos /etc/os-release; then
+ if /usr/bin/firewall-cmd --state 2>&1 | grep -q "^running$"; then
+ for PORT in "${PORTS[@]}"; do
+ firewall-cmd --zone=public --permanent --add-port="${PORT}"/tcp
+ firewall-cmd --reload
+ done
+ fi
+ elif grep -iq ubuntu /etc/os-release; then
+ if /usr/sbin/ufw status | grep -wq active; then
+ for PORT in "${PORTS[@]}"; do
+ ufw allow from any to any port "${PORT}" proto tcp
+ done
+ fi
+ fi
}
#####
# setup_systemd_zero - setup dir and systemd unit for zero leader or peer
################################
setup_systemd_zero() {
- TYPE=${1:-"peer"}
- LDR="zero-0:5080"
- WAL=/var/lib/dgraph/zw
- IDX=$(( $(grep -o '[0-9]' <<< $HOSTNAME) + 1 ))
- if [[ $TYPE == "leader" ]]; then
- EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --wal $WAL
- --raft="idx=$IDX" --replicas $REPLICAS'"
- else
- EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --peer $LDR --wal $WAL
- --raft="idx=$IDX" --replicas $REPLICAS'"
- fi
-
- mkdir -p /var/{log/dgraph,lib/dgraph/zw}
- chown -R dgraph:dgraph /var/{lib,log}/dgraph
-
- install_systemd_unit "zero" "$EXEC"
+ TYPE=${1:-"peer"}
+ LDR="zero-0:5080"
+ WAL=/var/lib/dgraph/zw
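+ # derive the raft index from the digit in the hostname, e.g. zero-1 -> idx=2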
+ IDX=$(($(grep -o '[0-9]' <<<"${HOSTNAME}") + 1))
+ if [[ ${TYPE} == "leader" ]]; then
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --wal ${WAL}
+ --raft="idx=${IDX}" --replicas ${REPLICAS}'"
+ else
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph zero --my=\$(hostname):5080 --peer ${LDR} --wal ${WAL}
+ --raft="idx=${IDX}" --replicas ${REPLICAS}'"
+ fi
+
+ mkdir -p /var/{log/dgraph,lib/dgraph/zw}
+ chown -R dgraph:dgraph /var/{lib,log}/dgraph
+
+ install_systemd_unit "zero" "${EXEC}"
}
#####
# setup_systemd_alpha - setup dir and systemd unit for alpha
################################
setup_systemd_alpha() {
- WAL=/var/lib/dgraph/w
- POSTINGS=/var/lib/dgraph/p
- # build array based on number of replicas
- for (( I=0; I <= $REPLICAS-1; I++)); do ZEROS+=("zero-$I:5080");done
- IFS=, eval 'ZERO_LIST="${ZEROS[*]}"' # join by ','
+ WAL=/var/lib/dgraph/w
+ POSTINGS=/var/lib/dgraph/p
+ # build array based on number of replicas
+ for ((I = 0; I <= REPLICAS - 1; I++)); do ZEROS+=("zero-${I}:5080"); done
+ IFS=, eval 'ZERO_LIST="${ZEROS[*]}"' # join by ','
- EXEC="/bin/bash -c '/usr/local/bin/dgraph alpha --my=\$(hostname):7080 --zero $ZERO_LIST --postings $POSTINGS --wal $WAL'"
+ EXEC="/bin/bash -c '/usr/local/bin/dgraph alpha --my=\$(hostname):7080 --zero ${ZERO_LIST} --postings ${POSTINGS} --wal ${WAL}'"
- mkdir -p /var/{log/dgraph,lib/dgraph/{w,p}}
- chown -R dgraph:dgraph /var/{lib,log}/dgraph
+ mkdir -p /var/{log/dgraph,lib/dgraph/{w,p}}
+ chown -R dgraph:dgraph /var/{lib,log}/dgraph
- install_systemd_unit "alpha" "$EXEC"
+ install_systemd_unit "alpha" "${EXEC}"
}
#####
# install_systemd_unit - config systemd unit give exec str and service type
################################
install_systemd_unit() {
- TYPE=$1
- EXEC=$2
-
- if [[ ! -f /etc/systemd/system/dgraph-$TYPE.service ]]; then
- cat <<-EOF > /etc/systemd/system/dgraph-$TYPE.service
-[Unit]
-Description=dgraph $TYPE server
-Wants=network.target
-After=network.target
-
-[Service]
-Type=simple
-WorkingDirectory=/var/lib/dgraph
-Restart=on-failure
-ExecStart=$EXEC
-StandardOutput=journal
-StandardError=journal
-User=dgraph
-Group=dgraph
-
-[Install]
-WantedBy=multi-user.target
-EOF
- systemctl enable dgraph-$TYPE
- systemctl start dgraph-$TYPE
- else
- echo "Skipping as 'dgraph-$TYPE.service' already exists"
- fi
+ TYPE=$1
+ EXEC=$2
+
+ if [[ ! -f /etc/systemd/system/dgraph-${TYPE}.service ]]; then
+ cat <<-EOF >/etc/systemd/system/dgraph-"${TYPE}".service
+ [Unit]
+ Description=dgraph ${TYPE} server
+ Wants=network.target
+ After=network.target
+
+ [Service]
+ Type=simple
+ WorkingDirectory=/var/lib/dgraph
+ Restart=on-failure
+ ExecStart=${EXEC}
+ StandardOutput=journal
+ StandardError=journal
+ User=dgraph
+ Group=dgraph
+
+ [Install]
+ WantedBy=multi-user.target
+ EOF
+ systemctl enable dgraph-"${TYPE}"
+ systemctl start dgraph-"${TYPE}"
+ else
+ echo "Skipping as 'dgraph-${TYPE}.service' already exists"
+ fi
}
#####
# setup_systemd - configure systemd unit based on hostname
################################
setup_systemd() {
- case $(hostname) in
- *zero-0*)
- setup_systemd_zero "leader"
- ;;
- *zero-[1-9]*)
- setup_systemd_zero "peer"
- ;;
- *alpha*)
- setup_systemd_alpha
- ;;
- esac
+ case $(hostname) in
+ *zero-0*)
+ setup_systemd_zero "leader"
+ ;;
+ *zero-[1-9]*)
+ setup_systemd_zero "peer"
+ ;;
+ *alpha*)
+ setup_systemd_alpha
+ ;;
+ esac
}
main "$@"
diff --git a/contrib/tlstest/README.md b/contrib/tlstest/README.md
index d55e4c10a96..70622d9d94e 100644
--- a/contrib/tlstest/README.md
+++ b/contrib/tlstest/README.md
@@ -2,19 +2,28 @@
This directory contains several scripts that help with testing of TLS functionality in dgraph.
-- `Makefile` - cleans up the directory, creates CA, client and server keys and signed certs, executes the tests
+- `Makefile` - cleans up the directory, creates CA, client and server keys and signed certs,
+ executes the tests
- `server_nopass.sh` - starts a server that uses an unencrypted private key
-- `server_nopass_client_auth.sh` - starts server that use unencryped private key, and require client authentication
+- `server_nopass_client_auth.sh` - starts a server that uses an unencrypted private key, and
+ requires client authentication
- `server_pass.sh` - starts a server that uses an encrypted/password-protected private key
- `server_11.sh` - starts a server with the maximum TLS version set to 1.1
- `client_nopass.sh` - executes dgraph-live-loader configured to use an unencrypted private key
-- `client_pass.sh` - executes dgraph-live-loader configured to use encrypted/password protected private key
+- `client_pass.sh` - executes dgraph-live-loader configured to use an encrypted/password-protected
+ private key
- `client_nocert.sh` - executes dgraph-live-loader without a configured client certificate
- `client_12.sh` - executes dgraph-live-loader with the minimum TLS version set to 1.2
## Notes
-Go x509 package supports only encrypted private keys conaining "DEK-Info". By default, openssl doesn't include it in generated keys. Fortunately, if encryption method is explicitly set in the command line, openssl adds "DEK-Info" header.
-`server_pass.sh` should be used with `client_pass.sh`. This enable testing of `tls_server_name` configuration option. Mixing `_pass` and `_nopass` client/server shows that server name is verified by the client.
+The Go x509 package supports only encrypted private keys containing "DEK-Info". By default, openssl
+doesn't include it in generated keys. Fortunately, if the encryption method is explicitly set on the
+command line, openssl adds the "DEK-Info" header.
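+
+For example, generating a key with an explicitly chosen cipher produces the "DEK-Info" header (a
+sketch; the key size and passphrase are illustrative):
+
+```bash
+openssl genrsa -aes256 -passout pass:secret -out server_pass.key 2048
+```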
-For testing purposes, DNS names for server1.dgraph.io and server2.dgraph.io has to be resolvable. Editing /etc/hosts is the simplest way to achieve this.
+`server_pass.sh` should be used with `client_pass.sh`. This enables testing of the
+`tls_server_name` configuration option. Mixing `_pass` and `_nopass` client/server shows that the
+server name is verified by the client.
+
+For testing purposes, the DNS names server1.dgraph.io and server2.dgraph.io have to be resolvable.
+Editing /etc/hosts is the simplest way to achieve this.
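+
+A minimal `/etc/hosts` entry for local testing might look like this (assuming both names should
+resolve to the local machine):
+
+```
+127.0.0.1 server1.dgraph.io server2.dgraph.io
+```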
diff --git a/contrib/tlstest/alpha_notls.sh b/contrib/tlstest/alpha_notls.sh
index f9d5b73527c..84993aa5047 100755
--- a/contrib/tlstest/alpha_notls.sh
+++ b/contrib/tlstest/alpha_notls.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
-$DGRAPH_BIN alpha --zero 127.0.0.1:5081 &> alpha.log
+${DGRAPH_BIN} alpha --zero 127.0.0.1:5081 &>alpha.log
diff --git a/contrib/tlstest/alpha_tls.sh b/contrib/tlstest/alpha_tls.sh
index 97d3bd2fec7..29fb3078638 100755
--- a/contrib/tlstest/alpha_tls.sh
+++ b/contrib/tlstest/alpha_tls.sh
@@ -1,4 +1,4 @@
#!/bin/bash
set -e
-$DGRAPH_BIN alpha --tls "ca-cert=$PWD/tls/ca.crt; server-cert=$PWD/tls/node.crt; server-key=$PWD/tls/node.key;" --zero 127.0.0.1:5081 &> alpha.log
+${DGRAPH_BIN} alpha --tls "ca-cert=${PWD}/tls/ca.crt; server-cert=${PWD}/tls/node.crt; server-key=${PWD}/tls/node.key;" --zero 127.0.0.1:5081 &>alpha.log
diff --git a/contrib/tlstest/alpha_tls_auth.sh b/contrib/tlstest/alpha_tls_auth.sh
index c20b79f872c..fe5f64ecf06 100755
--- a/contrib/tlstest/alpha_tls_auth.sh
+++ b/contrib/tlstest/alpha_tls_auth.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
-$DGRAPH_BIN alpha --tls "ca-cert=$PWD/tls/ca.crt; server-cert=$PWD/tls/node.crt; server-key=$PWD/tls/node.key; client-auth-type=REQUIREANDVERIFY;" --zero 127.0.0.1:5081 &> alpha.log
+${DGRAPH_BIN} alpha --tls "ca-cert=${PWD}/tls/ca.crt; server-cert=${PWD}/tls/node.crt; server-key=${PWD}/tls/node.key; client-auth-type=REQUIREANDVERIFY;" --zero 127.0.0.1:5081 &>alpha.log
diff --git a/contrib/tlstest/live_notls.sh b/contrib/tlstest/live_notls.sh
index b21b3d52ac6..bab4e2c7dc6 100755
--- a/contrib/tlstest/live_notls.sh
+++ b/contrib/tlstest/live_notls.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
-$DGRAPH_BIN live -d localhost:9080 -r data.rdf.gz -z 127.0.0.1:5081
+${DGRAPH_BIN} live -d localhost:9080 -r data.rdf.gz -z 127.0.0.1:5081
diff --git a/contrib/tlstest/live_tls.sh b/contrib/tlstest/live_tls.sh
index b21b3d52ac6..bab4e2c7dc6 100755
--- a/contrib/tlstest/live_tls.sh
+++ b/contrib/tlstest/live_tls.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
-$DGRAPH_BIN live -d localhost:9080 -r data.rdf.gz -z 127.0.0.1:5081
+${DGRAPH_BIN} live -d localhost:9080 -r data.rdf.gz -z 127.0.0.1:5081
diff --git a/contrib/tlstest/live_tls_auth.sh b/contrib/tlstest/live_tls_auth.sh
index 35ff84e91b7..87333a92f93 100755
--- a/contrib/tlstest/live_tls_auth.sh
+++ b/contrib/tlstest/live_tls_auth.sh
@@ -1,3 +1,3 @@
#!/bin/bash
set -e
-$DGRAPH_BIN live -d localhost:9080 --tls "server-name=localhost;" -r data.rdf.gz -z 127.0.0.1:5081
+${DGRAPH_BIN} live -d localhost:9080 --tls "server-name=localhost;" -r data.rdf.gz -z 127.0.0.1:5081
diff --git a/contrib/tlstest/run.sh b/contrib/tlstest/run.sh
index a5d5a42e96d..a0d13be0066 100755
--- a/contrib/tlstest/run.sh
+++ b/contrib/tlstest/run.sh
@@ -1,7 +1,7 @@
#!/bin/bash
dir=$(dirname "${BASH_SOURCE[0]}")
-pushd $dir
+pushd "${dir}"
set -e
make test
popd
diff --git a/contrib/tlstest/server_nopass.sh b/contrib/tlstest/server_nopass.sh
index 1da70b0eeca..aff2e66db86 100755
--- a/contrib/tlstest/server_nopass.sh
+++ b/contrib/tlstest/server_nopass.sh
@@ -1,4 +1,4 @@
#!/bin/bash
../../dgraph/dgraph alpha --tls "ca-cert=ca.crt; client-cert=server.crt; client-key=server.key" \
---zero 127.0.0.1:5081 &> dgraph.log
+ --zero 127.0.0.1:5081 &>dgraph.log
diff --git a/contrib/tlstest/server_pass.sh b/contrib/tlstest/server_pass.sh
index f75389a1c39..551b914f993 100755
--- a/contrib/tlstest/server_pass.sh
+++ b/contrib/tlstest/server_pass.sh
@@ -1,3 +1,3 @@
#!/bin/bash
-../../dgraph/dgraph alpha --tls "ca-cert=ca.crt; client-cert=server_pass.crt; client-key=server_pass.key;" --zero 127.0.0.1:5081 &> dgraph.log
+../../dgraph/dgraph alpha --tls "ca-cert=ca.crt; client-cert=server_pass.crt; client-key=server_pass.key;" --zero 127.0.0.1:5081 &>dgraph.log
diff --git a/contrib/tlstest/test.sh b/contrib/tlstest/test.sh
index 0c8870b1b3c..e97df9c3813 100755
--- a/contrib/tlstest/test.sh
+++ b/contrib/tlstest/test.sh
@@ -2,32 +2,32 @@
trap "cleanup" EXIT
cleanup() {
- killall -KILL dgraph >/dev/null 2>/dev/null
+ killall -KILL dgraph >/dev/null 2>/dev/null
}
ALPHA=$1
LIVE=$2
EXPECTED=$3
-$DGRAPH_BIN zero -w zw -o 1 > zero.log 2>&1 &
+${DGRAPH_BIN} zero -w zw -o 1 >zero.log 2>&1 &
sleep 5
-$ALPHA >/dev/null 2>&1 &
+${ALPHA} >/dev/null 2>&1 &
-if [ "x$RELOAD_TEST" != "x" ]; then
- trap '' HUP
- rm -f ./tls/ca.key
- $DGRAPH_BIN cert -d $PWD/tls -n localhost -c live --force
- killall -HUP dgraph >/dev/null 2>/dev/null
- sleep 3
+if [[ "x${RELOAD_TEST}" != "x" ]]; then
+ trap '' HUP
+ rm -f ./tls/ca.key
+ ${DGRAPH_BIN} cert -d "${PWD}"/tls -n localhost -c live --force
+ killall -HUP dgraph >/dev/null 2>/dev/null
+ sleep 3
fi
-timeout 30s $LIVE > live.log 2>&1
+timeout 30s "${LIVE}" >live.log 2>&1
RESULT=$?
-if [ $RESULT != $EXPECTED ]; then
- echo "$ALPHA <-> $LIVE, Result: $RESULT != Expected: $EXPECTED"
- exit 1
+if [[ ${RESULT} != "${EXPECTED}" ]]; then
+ echo "${ALPHA} <-> ${LIVE}, Result: ${RESULT} != Expected: ${EXPECTED}"
+ exit 1
fi
exit 0
diff --git a/contrib/tlstest/test_reload.sh b/contrib/tlstest/test_reload.sh
index 97e2a6bee81..1f7558fcc0d 100755
--- a/contrib/tlstest/test_reload.sh
+++ b/contrib/tlstest/test_reload.sh
@@ -3,31 +3,31 @@
trap "cleanup" EXIT
cleanup() {
- killall -9 dgraph >/dev/null 2>/dev/null
+ killall -9 dgraph >/dev/null 2>/dev/null
}
ALPHA=./alpha_tls.sh
LIVE=./live_tls.sh
EXPECTED=1
-$DGRAPH_BIN zero -w zw -o 1 > zero.log 2>&1 &
+${DGRAPH_BIN} zero -w zw -o 1 >zero.log 2>&1 &
sleep 5
# start the server
-$ALPHA > /dev/null 2>&1 &
-timeout 30s $LIVE > /dev/null 2>&1
+${ALPHA} >/dev/null 2>&1 &
+timeout 30s "${LIVE}" >/dev/null 2>&1
RESULT=$?
# regenerate TLS certificate
rm -f ./tls/ca.key
-$DGRAPH_BIN cert -d $PWD/tls -n localhost -c live --force
-pkill -HUP dgraph > /dev/null 2>&1
+${DGRAPH_BIN} cert -d "${PWD}"/tls -n localhost -c live --force
+pkill -HUP dgraph >/dev/null 2>&1
# try to connect again
-timeout 30s $LIVE > /dev/null 2>&1
+timeout 30s "${LIVE}" >/dev/null 2>&1
RESULT=$?
-if [ $RESULT == $EXPECTED ]; then
+if [[ ${RESULT} == "${EXPECTED}" ]]; then
exit 0
else
echo "Error while reloading TLS certificate"
diff --git a/contrib/wait-for-it.sh b/contrib/wait-for-it.sh
index 1b491608e73..822ce15816e 100755
--- a/contrib/wait-for-it.sh
+++ b/contrib/wait-for-it.sh
@@ -22,15 +22,14 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
-cmdname=$(basename $0)
+cmdname=$(basename "$0")
-echoerr() { if [[ $QUIET -ne 1 ]]; then echo "$@" 1>&2; fi }
+echoerr() { if [[ ${QUIET} -ne 1 ]]; then echo "$@" 1>&2; fi; }
-usage()
-{
- cat << USAGE >&2
+usage() {
+ cat <<USAGE >&2
Usage:
- $cmdname host:port [-s] [-t timeout] [-- command args]
+ ${cmdname} host:port [-s] [-t timeout] [-- command args]
-h HOST | --host=HOST Host or IP under test
-p PORT | --port=PORT TCP port under test
Alternatively, you specify the host and port as host:port
@@ -40,121 +39,117 @@ Usage:
Timeout in seconds, zero for no timeout
-- COMMAND ARGS Execute command with args after the test finishes
USAGE
- exit 1
+ exit 1
}
-wait_for()
-{
- if [[ $TIMEOUT -gt 0 ]]; then
- echoerr "$cmdname: waiting $TIMEOUT seconds for $HOST:$PORT"
- else
- echoerr "$cmdname: waiting for $HOST:$PORT without a timeout"
- fi
- start_ts=$(date +%s)
- while :
- do
- if [[ $ISBUSY -eq 1 ]]; then
- nc -z $HOST $PORT
- result=$?
- else
- (echo > /dev/tcp/$HOST/$PORT) >/dev/null 2>&1
- result=$?
- fi
- if [[ $result -eq 0 ]]; then
- end_ts=$(date +%s)
- echoerr "$cmdname: $HOST:$PORT is available after $((end_ts - start_ts)) seconds"
- break
- fi
- sleep 1
- done
- return $result
+wait_for() {
+ if [[ ${TIMEOUT} -gt 0 ]]; then
+ echoerr "${cmdname}: waiting ${TIMEOUT} seconds for ${HOST}:${PORT}"
+ else
+ echoerr "${cmdname}: waiting for ${HOST}:${PORT} without a timeout"
+ fi
+ start_ts=$(date +%s)
+ while :; do
+ if [[ ${ISBUSY} -eq 1 ]]; then
+ nc -z "${HOST}" "${PORT}"
+ result=$?
+ else
+ (echo >/dev/tcp/"${HOST}"/"${PORT}") >/dev/null 2>&1
+ result=$?
+ fi
+ if [[ ${result} -eq 0 ]]; then
+ end_ts=$(date +%s)
+ echoerr "${cmdname}: ${HOST}:${PORT} is available after $((end_ts - start_ts)) seconds"
+ break
+ fi
+ sleep 1
+ done
+ return "${result}"
}
-wait_for_wrapper()
-{
- # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
- if [[ $QUIET -eq 1 ]]; then
- timeout $BUSYTIMEFLAG $TIMEOUT $0 --quiet --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
- else
- timeout $BUSYTIMEFLAG $TIMEOUT $0 --child --host=$HOST --port=$PORT --timeout=$TIMEOUT &
- fi
- PID=$!
- trap "kill -INT -$PID" INT
- wait $PID
- RESULT=$?
- if [[ $RESULT -ne 0 ]]; then
- echoerr "$cmdname: timeout occurred after waiting $TIMEOUT seconds for $HOST:$PORT"
- fi
- return $RESULT
+wait_for_wrapper() {
+ # In order to support SIGINT during timeout: http://unix.stackexchange.com/a/57692
+ if [[ ${QUIET} -eq 1 ]]; then
+ timeout "${BUSYTIMEFLAG}" "${TIMEOUT}" "$0" --quiet --child --host="${HOST}" --port="${PORT}" --timeout="${TIMEOUT}" &
+ else
+ timeout "${BUSYTIMEFLAG}" "${TIMEOUT}" "$0" --child --host="${HOST}" --port="${PORT}" --timeout="${TIMEOUT}" &
+ fi
+ PID=$!
+ trap "kill -INT -${PID}" INT
+ wait "${PID}"
+ RESULT=$?
+ if [[ ${RESULT} -ne 0 ]]; then
+ echoerr "${cmdname}: timeout occurred after waiting ${TIMEOUT} seconds for ${HOST}:${PORT}"
+ fi
+ return "${RESULT}"
}
# process arguments
-while [[ $# -gt 0 ]]
-do
- case "$1" in
- *:* )
- hostport=(${1//:/ })
- HOST=${hostport[0]}
- PORT=${hostport[1]}
- shift 1
- ;;
- --child)
- CHILD=1
- shift 1
- ;;
- -q | --quiet)
- QUIET=1
- shift 1
- ;;
- -s | --strict)
- STRICT=1
- shift 1
- ;;
- -h)
- HOST="$2"
- if [[ $HOST == "" ]]; then break; fi
- shift 2
- ;;
- --host=*)
- HOST="${1#*=}"
- shift 1
- ;;
- -p)
- PORT="$2"
- if [[ $PORT == "" ]]; then break; fi
- shift 2
- ;;
- --port=*)
- PORT="${1#*=}"
- shift 1
- ;;
- -t)
- TIMEOUT="$2"
- if [[ $TIMEOUT == "" ]]; then break; fi
- shift 2
- ;;
- --timeout=*)
- TIMEOUT="${1#*=}"
- shift 1
- ;;
- --)
- shift
- CLI=("$@")
- break
- ;;
- --help)
- usage
- ;;
- *)
- echoerr "Unknown argument: $1"
- usage
- ;;
- esac
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ *:*)
+ hostport=(${1//:/ })
+ HOST=${hostport[0]}
+ PORT=${hostport[1]}
+ shift 1
+ ;;
+ --child)
+ CHILD=1
+ shift 1
+ ;;
+ -q | --quiet)
+ QUIET=1
+ shift 1
+ ;;
+ -s | --strict)
+ STRICT=1
+ shift 1
+ ;;
+ -h)
+ HOST="$2"
+ if [[ ${HOST} == "" ]]; then break; fi
+ shift 2
+ ;;
+ --host=*)
+ HOST="${1#*=}"
+ shift 1
+ ;;
+ -p)
+ PORT="$2"
+ if [[ ${PORT} == "" ]]; then break; fi
+ shift 2
+ ;;
+ --port=*)
+ PORT="${1#*=}"
+ shift 1
+ ;;
+ -t)
+ TIMEOUT="$2"
+ if [[ ${TIMEOUT} == "" ]]; then break; fi
+ shift 2
+ ;;
+ --timeout=*)
+ TIMEOUT="${1#*=}"
+ shift 1
+ ;;
+ --)
+ shift
+ CLI=("$@")
+ break
+ ;;
+ --help)
+ usage
+ ;;
+ *)
+ echoerr "Unknown argument: $1"
+ usage
+ ;;
+ esac
done
-if [[ "$HOST" == "" || "$PORT" == "" ]]; then
- echoerr "Error: you need to provide a host and port to test."
- usage
+if [[ ${HOST} == "" || ${PORT} == "" ]]; then
+ echoerr "Error: you need to provide a host and port to test."
+ usage
fi
TIMEOUT=${TIMEOUT:-15}
@@ -165,34 +160,34 @@ QUIET=${QUIET:-0}
# check to see if timeout is from busybox?
TIMEOUT_PATH=$(realpath $(which timeout))
-if [[ $TIMEOUT_PATH =~ "busybox" ]]; then
- ISBUSY=1
- BUSYTIMEFLAG="-t"
+if [[ ${TIMEOUT_PATH} =~ "busybox" ]]; then
+ ISBUSY=1
+ BUSYTIMEFLAG="-t"
else
- ISBUSY=0
- BUSYTIMEFLAG=""
+ ISBUSY=0
+ BUSYTIMEFLAG=""
fi
-if [[ $CHILD -gt 0 ]]; then
- wait_for
- RESULT=$?
- exit $RESULT
+if [[ ${CHILD} -gt 0 ]]; then
+ wait_for
+ RESULT=$?
+ exit "${RESULT}"
else
- if [[ $TIMEOUT -gt 0 ]]; then
- wait_for_wrapper
- RESULT=$?
- else
- wait_for
- RESULT=$?
- fi
+ if [[ ${TIMEOUT} -gt 0 ]]; then
+ wait_for_wrapper
+ RESULT=$?
+ else
+ wait_for
+ RESULT=$?
+ fi
fi
-if [[ $CLI != "" ]]; then
- if [[ $RESULT -ne 0 && $STRICT -eq 1 ]]; then
- echoerr "$cmdname: strict mode, refusing to execute subprocess"
- exit $RESULT
- fi
- exec "${CLI[@]}"
+if [[ ${CLI} != "" ]]; then
+ if [[ ${RESULT} -ne 0 && ${STRICT} -eq 1 ]]; then
+ echoerr "${cmdname}: strict mode, refusing to execute subprocess"
+ exit "${RESULT}"
+ fi
+ exec "${CLI[@]}"
else
- exit $RESULT
-fi
\ No newline at end of file
+ exit "${RESULT}"
+fi
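Editor's note: the wrapper above re-execs the script with `--child` under `timeout`, so the probe
can be killed on expiry while Ctrl-C still reaches it through the `trap`. A minimal, self-contained
sketch of the same pattern (the `nc`-based probe and variable names are illustrative, not the
script's exact logic):

```bash
#!/usr/bin/env bash
# Sketch of the self-exec timeout pattern, assuming GNU or busybox `timeout`.
HOST=localhost PORT=8080 TIMEOUT=15

wait_for() {
  # poll until the TCP port accepts connections
  until nc -z "${HOST}" "${PORT}"; do sleep 1; done
}

if [[ $1 == --child ]]; then
  wait_for
  exit $?
fi

# old busybox `timeout` wanted the duration after a -t flag
BUSYTIMEFLAG=""
if [[ $(realpath "$(command -v timeout)") =~ busybox ]]; then
  BUSYTIMEFLAG="-t"
fi

# run the probe as a re-exec'd child so SIGINT can interrupt the wait;
# BUSYTIMEFLAG is intentionally unquoted so it disappears when empty
timeout ${BUSYTIMEFLAG} "${TIMEOUT}" "$0" --child &
PID=$!
trap 'kill -INT "${PID}"' INT
wait "${PID}"
echo "probe exited with status $?"
```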
diff --git a/dgraph/cmd/alpha/mutations_mode/docker-compose.yml b/dgraph/cmd/alpha/mutations_mode/docker-compose.yml
index 6f4f4f91006..5a92867f022 100644
--- a/dgraph/cmd/alpha/mutations_mode/docker-compose.yml
+++ b/dgraph/cmd/alpha/mutations_mode/docker-compose.yml
@@ -9,98 +9,101 @@ services:
labels:
cluster: test
ports:
- - "8080"
- - "9080"
+ - "8080"
+ - "9080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080,zero2:5080,zero3:5080
- --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --limit "mutations=disallow;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --logtostderr -v=2 --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --limit "mutations=disallow;"
alpha2:
image: dgraph/dgraph:local
working_dir: /data/alpha2
labels:
cluster: test
ports:
- - "8080"
- - "9080"
+ - "8080"
+ - "9080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha2:7080 --zero=zero1:5080,zero2:5080,zero3:5080
- --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --limit "mutations=strict;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha2:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --logtostderr -v=2 --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --limit "mutations=strict;"
alpha3:
image: dgraph/dgraph:local
working_dir: /data/alpha3
labels:
cluster: test
ports:
- - "8080"
- - "9080"
+ - "8080"
+ - "9080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha3:7080 --zero=zero1:5080,zero2:5080,zero3:5080
- --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --limit "mutations=strict;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha3:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --logtostderr -v=2 --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --limit "mutations=strict;"
zero1:
image: dgraph/dgraph:local
working_dir: /data/zero1
labels:
cluster: test
ports:
- - "5080"
- - "6080"
+ - "5080"
+ - "6080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr
- -v=2 --bindall
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft
+ "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
zero2:
image: dgraph/dgraph:local
working_dir: /data/zero2
depends_on:
- - zero1
+ - zero1
labels:
cluster: test
ports:
- - "5080"
- - "6080"
+ - "5080"
+ - "6080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft "idx=2;" --my=zero2:5080 --replicas=1 --logtostderr
- -v=2 --peer=zero1:5080
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft
+ "idx=2;" --my=zero2:5080 --replicas=1 --logtostderr -v=2 --peer=zero1:5080
zero3:
image: dgraph/dgraph:local
working_dir: /data/zero3
depends_on:
- - zero2
+ - zero2
labels:
cluster: test
ports:
- - "5080"
- - "6080"
+ - "5080"
+ - "6080"
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft "idx=3;" --my=zero3:5080 --replicas=1 --logtostderr
- -v=2 --peer=zero1:5080
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft
+ "idx=3;" --my=zero3:5080 --replicas=1 --logtostderr -v=2 --peer=zero1:5080
volumes: {}
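Editor's note on the reformatted `command:` entries above: the value is now a plain multi-line YAML
scalar, and YAML joins its continuation lines with single spaces, so the container still receives
the same one-line command. Assuming Docker Compose v2 and the referenced environment variables are
set, the joined string can be inspected with:

```bash
# Render the effective configuration; the multi-line command: scalar comes back
# as a single line, confirming the reformatting did not change the command string.
docker compose -f dgraph/cmd/alpha/mutations_mode/docker-compose.yml config | grep -A 1 'command:'
```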
diff --git a/dgraph/cmd/alpha/testrun.sh b/dgraph/cmd/alpha/testrun.sh
index 95f20cef4a1..b522c31dcfe 100755
--- a/dgraph/cmd/alpha/testrun.sh
+++ b/dgraph/cmd/alpha/testrun.sh
@@ -2,60 +2,63 @@
set -e
-dir="$HOME/dgraph"
+dir="${HOME}/dgraph"
# We display an error to the user if Dgraph isn't installed.
if ! hash dgraph 2>/dev/null; then
echo "Please install Dgraph and try again."
- exit 1
+ exit 1
fi
# Double quotes are used to store each command in a variable so it can be run later with eval. `${i}` is how a variable is expanded inside a double-quoted string, and any inner double quotes have to be escaped. tee is used in append mode so output goes to the log file as well as stdout.
-i=1;
+i=1
server1="dgraph --config testrun/conf1.yaml 2>&1 | tee -a dgraph1.log &"
-i=2;
+i=2
server2="dgraph --config testrun/conf2.yaml 2>&1 | tee -a dgraph2.log &"
-i=3;
+i=3
server3="dgraph --config testrun/conf3.yaml 2>&1 | tee -a dgraph3.log &"
function checkServer {
port=$1
# Status evaluates if there is a process running on $port.
- status=$(nc -z 127.0.0.1 $port; echo $?)
+ status=$(
+ nc -z 127.0.0.1 "${port}"
+ echo $?
+ )
# If status is 1, we restart the relevant server
- if [ $status -ne 0 ]; then
- if [ $port -eq "8080" ]; then
- echo "Restarting server 1"
- eval $server1
- fi
- if [ $port -eq "8082" ]; then
- echo "Restarting server 2"
- eval $server2
- fi
- if [ $port -eq "8084" ]; then
- echo "Restarting server 3"
- eval $server3
- fi
+ if [[ ${status} -ne 0 ]]; then
+ if [[ ${port} -eq "8080" ]]; then
+ echo "Restarting server 1"
+ eval "${server1}"
+ fi
+ if [[ ${port} -eq "8082" ]]; then
+ echo "Restarting server 2"
+ eval "${server2}"
+ fi
+ if [[ ${port} -eq "8084" ]]; then
+ echo "Restarting server 3"
+ eval "${server3}"
+ fi
fi
}
# Kill already running Dgraph processes.
-if pgrep "dgraph" > /dev/null; then
+if pgrep "dgraph" >/dev/null; then
killall dgraph
fi
# Start the servers.
echo "Starting server 1"
-eval $server1
+eval "${server1}"
# Let's wait for the first server to boot up because it will form the cluster.
sleep 5
echo "Starting server 2"
-eval $server2
+eval "${server2}"
sleep 5
echo "Starting server 3"
-eval $server3
+eval "${server3}"
# Check that the servers should be running every 30 seconds.
while true; do
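Editor's note: `checkServer` treats a non-zero `nc -z` exit status as a closed port and re-`eval`s
the saved launch command for that server. The probe-and-restart step looks like this in isolation
(a sketch; the port and command are taken from the script above):

```bash
# Probe a local port and run a restart command when it is closed.
check_and_restart() {
  local port=$1 restart_cmd=$2
  if ! nc -z 127.0.0.1 "${port}"; then
    echo "port ${port} closed, restarting"
    eval "${restart_cmd}"
  fi
}

check_and_restart 8080 'dgraph --config testrun/conf1.yaml 2>&1 | tee -a dgraph1.log &'
```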
diff --git a/dgraph/cmd/alpha/testrun/conf1.yaml b/dgraph/cmd/alpha/testrun/conf1.yaml
index 49202e73fe0..d30d6330c5d 100644
--- a/dgraph/cmd/alpha/testrun/conf1.yaml
+++ b/dgraph/cmd/alpha/testrun/conf1.yaml
@@ -21,4 +21,3 @@ groups: 0,1
nomutations: true
group_conf: groups.conf
-
diff --git a/dgraph/cmd/alpha/testrun/conf2.yaml b/dgraph/cmd/alpha/testrun/conf2.yaml
index e6a28cd0481..4dcef8f4a1b 100644
--- a/dgraph/cmd/alpha/testrun/conf2.yaml
+++ b/dgraph/cmd/alpha/testrun/conf2.yaml
@@ -22,4 +22,3 @@ nomutations: true
group_conf: groups.conf
peer: localhost:12345
-
diff --git a/dgraph/cmd/alpha/testrun/conf3.yaml b/dgraph/cmd/alpha/testrun/conf3.yaml
index d1cf212382c..83c49fcd93c 100644
--- a/dgraph/cmd/alpha/testrun/conf3.yaml
+++ b/dgraph/cmd/alpha/testrun/conf3.yaml
@@ -22,4 +22,3 @@ nomutations: true
group_conf: groups.conf
peer: localhost:12345
-
diff --git a/dgraph/cmd/alpha/thoughts.md b/dgraph/cmd/alpha/thoughts.md
index 548bc3ee9e7..653fa923e10 100644
--- a/dgraph/cmd/alpha/thoughts.md
+++ b/dgraph/cmd/alpha/thoughts.md
@@ -2,20 +2,10 @@
2. sync.WaitGroup.
-func handle(..) {
- wg.Add(1)
- ...
- wg.Done()
-}
+    func handle(..) {
+      wg.Add(1)
+      ...
+      wg.Done()
+    }
-func main() {
- wg := new(sync.WaitGroup)
- for i := 0; i < N; i++ {
- go handle(..)
- }
- wg.Wait()
-}
+    func main() {
+      wg := new(sync.WaitGroup)
+      for i := 0; i < N; i++ {
+        go handle(..)
+      }
+      wg.Wait()
+    }
-The above wouldn't work, because goroutines don't necessarily get scheduled immediately.
-So, wg.Add(1) wouldn't get called, which means wg.Wait() wouldn't block, and the program
-would finish execution before goroutines had a chance to be run.
+The above wouldn't work, because goroutines don't necessarily get scheduled immediately. So,
+wg.Add(1) wouldn't get called, which means wg.Wait() wouldn't block, and the program would finish
+execution before goroutines had a chance to be run.
diff --git a/dgraph/cmd/bulk/speed_tests/run.sh b/dgraph/cmd/bulk/speed_tests/run.sh
index 9ff222d41f5..b1d6dcd28f2 100755
--- a/dgraph/cmd/bulk/speed_tests/run.sh
+++ b/dgraph/cmd/bulk/speed_tests/run.sh
@@ -6,15 +6,15 @@ scriptDir=$(dirname "$(readlink -f "$0")")
while [[ $# -gt 1 ]]; do
key="$1"
- case $key in
- --tmp)
- tmp="$2"
- shift
- ;;
- *)
- echo "unknown option $1"
- exit 1
- ;;
+ case ${key} in
+ --tmp)
+ tmp="$2"
+ shift
+ ;;
+ *)
+ echo "unknown option $1"
+ exit 1
+ ;;
esac
shift
done
@@ -24,17 +24,20 @@ tmp=${tmp:-tmp}
go install -race github.com/hypermodeinc/dgraph/cmd/dgraph-bulk-loader
function run_test {
- [[ $# == 2 ]] || { echo "bad args"; exit 1; }
+ [[ $# == 2 ]] || {
+ echo "bad args"
+ exit 1
+ }
schema=$1
rdfs=$2
- rm -rf $tmp
- mkdir $tmp
+ rm -rf "${tmp}"
+ mkdir "${tmp}"
- echo "$schema" > $tmp/sch.schema
+ echo "${schema}" >"${tmp}"/sch.schema
# Run bulk loader.
- $(go env GOPATH)/bin/dgraph-bulk-loader -map_shards=5 -reduce_shards=2 -shufflers=2 -mapoutput_mb=15 -tmp "$tmp/tmp" -out "$tmp/out" -l "$tmp/LEASE" -s "$tmp/sch.schema" -r "$rdfs"
+ $(go env GOPATH)/bin/dgraph-bulk-loader -map_shards=5 -reduce_shards=2 -shufflers=2 -mapoutput_mb=15 -tmp "${tmp}/tmp" -out "${tmp}/out" -l "${tmp}/LEASE" -s "${tmp}/sch.schema" -r "${rdfs}"
}
echo "========================="
diff --git a/dgraph/cmd/bulk/split_gz.sh b/dgraph/cmd/bulk/split_gz.sh
index 59627e0ca20..bc61b138918 100755
--- a/dgraph/cmd/bulk/split_gz.sh
+++ b/dgraph/cmd/bulk/split_gz.sh
@@ -3,22 +3,22 @@
set -e
if [[ $# != 2 ]]; then
- echo "Usage: $0 "
- exit 1
+ echo "Usage: $0 "
+ exit 1
fi
inDir=$1
outDir=$2
-rm -rf $outDir
-mkdir $outDir
-for inputFile in $inDir/*.rdf.gz; do
- echo Processing: $inputFile
- base=$(basename $inputFile | cut -f 1 -d '.')
- gunzip < $inputFile | split --lines=10000000 - $outDir/$base$(echo _)
+rm -rf "${outDir}"
+mkdir "${outDir}"
+for inputFile in ${inDir}/*.rdf.gz; do
+ echo Processing: "${inputFile}"
+ base=$(basename "${inputFile}" | cut -f 1 -d '.')
+ gunzip <"${inputFile}" | split --lines=10000000 - "${outDir}"/"${base}"$(echo _)
done
-for chunkedFile in $outDir/*; do
- echo "Zipping: $chunkedFile"
- gzip -S .rdf.gz $chunkedFile &
+for chunkedFile in ${outDir}/*; do
+ echo "Zipping: ${chunkedFile}"
+ gzip -S .rdf.gz "${chunkedFile}" &
done
wait
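Editor's note: the script streams each archive through `split` so the data never exists
uncompressed on disk in one piece, then recompresses the chunks in parallel and `wait`s for all
background jobs. The core pipeline, standalone (paths are illustrative; `split --lines` assumes GNU
coreutils):

```bash
# Split one gzipped RDF file into 10M-line chunks, then recompress in parallel.
in=data/movies.rdf.gz out=chunks
base=$(basename "${in}" | cut -f 1 -d '.')
mkdir -p "${out}"
gunzip <"${in}" | split --lines=10000000 - "${out}/${base}_"
for chunk in "${out}"/*; do
  gzip -S .rdf.gz "${chunk}" & # -S renames each chunk to <name>.rdf.gz
done
wait
```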
diff --git a/dgraph/cmd/bulk/systest/run.sh b/dgraph/cmd/bulk/systest/run.sh
index 42c22071c25..d01205626e9 100755
--- a/dgraph/cmd/bulk/systest/run.sh
+++ b/dgraph/cmd/bulk/systest/run.sh
@@ -11,15 +11,15 @@ go install github.com/hypermodeinc/dgraph/cmd/dgraphzero
echo "Done."
fail=false
-for suite in $script_dir/suite*; do
- echo Running test suite: $(basename $suite)
+for suite in ${script_dir}/suite*; do
+ echo Running test suite: $(basename "${suite}")
rm -rf tmp
mkdir tmp
pushd tmp >/dev/null
mkdir dg
pushd dg >/dev/null
- $(go env GOPATH)/bin/dgraph-bulk-loader -r $suite/rdfs.rdf -s $suite/schema.txt >/dev/null 2>&1
+ $(go env GOPATH)/bin/dgraph-bulk-loader -r "${suite}"/rdfs.rdf -s "${suite}"/schema.txt >/dev/null 2>&1
mv out/0 p
popd >/dev/null
@@ -37,20 +37,19 @@ for suite in $script_dir/suite*; do
sleep 2
popd >/dev/null # out of tmp
- result=$(curl --silent -H "Content-Type: application/dql" localhost:8080/query -XPOST -d @$suite/query.json)
- if ! $(jq --argfile a <(echo $result) --argfile b $suite/result.json -n 'def post_recurse(f): def r: (f | select(. != null) | r), .; r; def post_recurse: post_recurse(.[]?); ($a | (post_recurse | arrays) |= sort) as $a | ($b | (post_recurse | arrays) |= sort) as $b | $a == $b')
- then
+ result=$(curl --silent -H "Content-Type: application/dql" localhost:8080/query -XPOST -d @"${suite}"/query.json)
+ if ! $(jq --argfile a <(echo "${result}") --argfile b "${suite}"/result.json -n 'def post_recurse(f): def r: (f | select(. != null) | r), .; r; def post_recurse: post_recurse(.[]?); ($a | (post_recurse | arrays) |= sort) as $a | ($b | (post_recurse | arrays) |= sort) as $b | $a == $b'); then
echo "Actual result doesn't match expected result:"
- echo "Actual: $result"
- echo "Expected: $(cat $suite/result.json)"
+ echo "Actual: ${result}"
+ echo "Expected: $(cat "${suite}"/result.json)"
fail=true
fi
- kill $dgPid
- kill $dgzPid
+ kill "${dgPid}"
+ kill "${dgzPid}"
sleep 2
done
-if $fail; then
+if ${fail}; then
exit 1
fi
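Editor's note: the `jq` filter compares the live query result against the stored `result.json`
after recursively sorting every nested array, making the check order-insensitive. Note that
`--argfile` is deprecated in newer jq releases; an equivalent check with `--slurpfile` might look
like this (file names are hypothetical):

```bash
# Order-insensitive JSON equality: recursively sort every array, then compare.
# -e makes jq exit non-zero when the comparison yields false.
actual=actual.json expected=expected.json
jq -n -e --slurpfile a "${actual}" --slurpfile b "${expected}" \
  'def post_recurse(f): def r: (f | select(. != null) | r), .; r;
   def post_recurse: post_recurse(.[]?);
   ($a[0] | (post_recurse | arrays) |= sort) as $x |
   ($b[0] | (post_recurse | arrays) |= sort) as $y |
   $x == $y'
```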
diff --git a/dgraph/cmd/bulk/systest/test-bulk-schema.sh b/dgraph/cmd/bulk/systest/test-bulk-schema.sh
index 7dcb8d47fcf..46909d85ed9 100755
--- a/dgraph/cmd/bulk/systest/test-bulk-schema.sh
+++ b/dgraph/cmd/bulk/systest/test-bulk-schema.sh
@@ -3,110 +3,108 @@
readonly ME=${0##*/}
readonly SRCROOT=$(git rev-parse --show-toplevel)
-readonly DOCKER_CONF=$SRCROOT/dgraph/cmd/bulk/systest/docker-compose.yml
+readonly DOCKER_CONF=${SRCROOT}/dgraph/cmd/bulk/systest/docker-compose.yml
declare -ri ZERO_PORT=5180 HTTP_PORT=8180
-INFO() { echo "$ME: $@"; }
-ERROR() { echo >&2 "$ME: $@"; }
-FATAL() { ERROR "$@"; exit 1; }
+INFO() { echo "${ME}: $@"; }
+ERROR() { echo >&2 "${ME}: $@"; }
+FATAL() {
+ ERROR "$@"
+ exit 1
+}
function DockerCompose {
- docker-compose -p dgraph "$@"
+ docker-compose -p dgraph "$@"
}
set -e
INFO "rebuilding dgraph"
-cd $SRCROOT
+cd "${SRCROOT}"
make install >/dev/null
INFO "running bulk load schema test"
-WORKDIR=$(mktemp --tmpdir -d $ME.tmp-XXXXXX)
-INFO "using workdir $WORKDIR"
-cd $WORKDIR
+WORKDIR=$(mktemp --tmpdir -d "${ME}".tmp-XXXXXX)
+INFO "using workdir ${WORKDIR}"
+cd "${WORKDIR}"
-LOGFILE=$WORKDIR/output.log
+LOGFILE=${WORKDIR}/output.log
trap ErrorExit EXIT
-function ErrorExit
-{
- local ev=$?
- if [[ $ev -ne 0 ]]; then
- ERROR "*** unexpected error ***"
- if [[ -e $LOGFILE ]]; then
- tail -40 $LOGFILE
- fi
- fi
- if [[ ! $DEBUG ]]; then
- rm -rf $WORKDIR
- fi
- exit $ev
+function ErrorExit {
+ local ev=$?
+ if [[ ${ev} -ne 0 ]]; then
+ ERROR "*** unexpected error ***"
+ if [[ -e ${LOGFILE} ]]; then
+ tail -40 "${LOGFILE}"
+ fi
+ fi
+ if [[ -z ${DEBUG} ]]; then
+ rm -rf "${WORKDIR}"
+ fi
+ exit "${ev}"
}
-function StartZero
-{
- INFO "starting zero container"
- DockerCompose -f $DOCKER_CONF up --force-recreate --remove-orphans -d zero1
- TIMEOUT=10
- while [[ $TIMEOUT > 0 ]]; do
- if docker logs zero1 2>&1 | grep -q 'CID set'; then
- return
- else
- TIMEOUT=$((TIMEOUT - 1))
- sleep 1
- fi
- done
- FATAL "failed to start zero"
+function StartZero {
+ INFO "starting zero container"
+ DockerCompose -f "${DOCKER_CONF}" up --force-recreate --remove-orphans -d zero1
+ TIMEOUT=10
+ while [[ ${TIMEOUT} > 0 ]]; do
+ if docker logs zero1 2>&1 | grep -q 'CID set'; then
+ return
+ else
+ TIMEOUT=$((TIMEOUT - 1))
+ sleep 1
+ fi
+ done
+ FATAL "failed to start zero"
}
-function StartAlpha
-{
- local p_dir=$1
-
- INFO "starting alpha container"
- DockerCompose -f $DOCKER_CONF up --force-recreate --remove-orphans --no-start alpha1
- if [[ $p_dir ]]; then
- docker cp $p_dir alpha1:/data/alpha1/
- fi
- DockerCompose -f $DOCKER_CONF up -d --remove-orphans alpha1
-
- TIMEOUT=10
- while [[ $TIMEOUT > 0 ]]; do
- if docker logs alpha1 2>&1 | grep -q 'Got Zero leader'; then
- return
- else
- TIMEOUT=$((TIMEOUT - 1))
- sleep 1
- fi
- done
- FATAL "failed to start alpha"
+function StartAlpha {
+ local p_dir=$1
+
+ INFO "starting alpha container"
+ DockerCompose -f "${DOCKER_CONF}" up --force-recreate --remove-orphans --no-start alpha1
+ if [[ -n ${p_dir} ]]; then
+ docker cp "${p_dir}" alpha1:/data/alpha1/
+ fi
+ DockerCompose -f "${DOCKER_CONF}" up -d --remove-orphans alpha1
+
+ TIMEOUT=10
+ while [[ ${TIMEOUT} > 0 ]]; do
+ if docker logs alpha1 2>&1 | grep -q 'Got Zero leader'; then
+ return
+ else
+ TIMEOUT=$((TIMEOUT - 1))
+ sleep 1
+ fi
+ done
+ FATAL "failed to start alpha"
}
-function ResetCluster
-{
- INFO "restarting cluster with only one zero and alpha"
- DockerCompose -f $DOCKER_CONF down --remove-orphans
- StartZero
- StartAlpha
+function ResetCluster {
+ INFO "restarting cluster with only one zero and alpha"
+ DockerCompose -f "${DOCKER_CONF}" down --remove-orphans
+ StartZero
+ StartAlpha
}
-function UpdateDatabase
-{
- INFO "adding predicate with default type to schema"
- curl localhost:$HTTP_PORT/alter -X POST -d$'
+function UpdateDatabase {
+ INFO "adding predicate with default type to schema"
+ curl localhost:"${HTTP_PORT}"/alter -X POST -d$'
predicate_with_no_uid_count:string .
predicate_with_default_type:default .
predicate_with_index_no_uid_count:string @index(exact) .
' &>/dev/null
- # Wait for background indexing to finish.
- # TODO: Use better way of waiting once it's available.
- sleep 5
+ # Wait for background indexing to finish.
+ # TODO: Use better way of waiting once it's available.
+ sleep 5
- curl -H "Content-Type: application/rdf" localhost:$HTTP_PORT/mutate?commitNow=true -X POST -d $'
+ curl -H "Content-Type: application/rdf" localhost:"${HTTP_PORT}"/mutate?commitNow=true -X POST -d $'
{
set {
_:company1 "CompanyABC" .
@@ -115,90 +113,85 @@ predicate_with_index_no_uid_count:string @index(exact) .
' &>/dev/null
}
-function QuerySchema
-{
- INFO "running schema query"
- local out_file="schema.out"
- curl -sS -H "Content-Type: application/dql" localhost:$HTTP_PORT/query -XPOST -d'schema(pred:[genre,language,name,revenue,predicate_with_default_type,predicate_with_index_no_uid_count,predicate_with_no_uid_count]) {}' | python3 -c "import json,sys; d=json.load(sys.stdin); json.dump(d['data'],sys.stdout,sort_keys=True,indent=2)" > $out_file
- echo >> $out_file
+function QuerySchema {
+ INFO "running schema query"
+ local out_file="schema.out"
+ curl -sS -H "Content-Type: application/dql" localhost:"${HTTP_PORT}"/query -XPOST -d'schema(pred:[genre,language,name,revenue,predicate_with_default_type,predicate_with_index_no_uid_count,predicate_with_no_uid_count]) {}' | python3 -c "import json,sys; d=json.load(sys.stdin); json.dump(d['data'],sys.stdout,sort_keys=True,indent=2)" >"${out_file}"
+ echo >>"${out_file}"
}
-function DoExport
-{
- INFO "running export"
- docker exec alpha1 curl -Ss -H "Content-Type: application/json" localhost:$HTTP_PORT/admin -XPOST -d '{ "query": "mutation { export(input: {format: \"rdf\"}) { response { code message } }}" }' &>/dev/null
- sleep 2
- docker cp alpha1:/data/alpha1/export .
- sleep 1
+function DoExport {
+ INFO "running export"
+ docker exec alpha1 curl -Ss -H "Content-Type: application/json" localhost:"${HTTP_PORT}"/admin -XPOST -d '{ "query": "mutation { export(input: {format: \"rdf\"}) { response { code message } }}" }' &>/dev/null
+ sleep 2
+ docker cp alpha1:/data/alpha1/export .
+ sleep 1
}
-function BulkLoadExportedData
-{
- INFO "bulk loading exported data"
- # using a random HTTP port for pprof to avoid collisions with other processes
- HTTPPORT=$(( ( RANDOM % 1000 ) + 8080 ))
- dgraph bulk -z localhost:$ZERO_PORT --http "localhost:$HTTPPORT"\
- -s ../dir1/export/*/g01.schema.gz \
- -f ../dir1/export/*/g01.rdf.gz \
- >$LOGFILE 2>&1 </dev/null
+ >"${LOGFILE}" 2>&1 </dev/null
- cat > fixture.schema <<EOF
+ cat >fixture.schema <<EOF
- cat > fixture.rdf <<EOF
+ cat >fixture.rdf <<EOF
_:et <name> "E.T. the Extra-Terrestrial" .
_:et <genre> "Science Fiction" .
_:et <revenue> "792.9" .
EOF
- dgraph bulk -z localhost:$ZERO_PORT -s fixture.schema -f fixture.rdf \
- >$LOGFILE 2>&1 </dev/null
+ dgraph bulk -z localhost:${ZERO_PORT} -s fixture.schema -f fixture.rdf \
+ >"${LOGFILE}" 2>&1 </dev/null
- cat > fixture.schema <<EOF
+ cat >fixture.schema <<EOF
- cat > fixture.rdf <<EOF
+ cat >fixture.rdf <<EOF
_:et <name> "E.T. the Extra-Terrestrial" .
_:et <genre> "Science Fiction" .
_:et <revenue> "792.9" .
EOF
- dgraph bulk -z localhost:$ZERO_PORT -s fixture.schema -f fixture.rdf \
- --map_shards 2 --reduce_shards 2 \
- >$LOGFILE 2>&1 </dev/null
+ dgraph bulk -z localhost:${ZERO_PORT} -s fixture.schema -f fixture.rdf \
+ --map_shards 2 --reduce_shards 2 \
+ >"${LOGFILE}" 2>&1 </dev/null
- dgraph debug -p out/0/p 2>|/dev/null | grep '{s}' | cut -d' ' -f3 > all_dbs.out
- dgraph debug -p out/1/p 2>|/dev/null | grep '{s}' | cut -d' ' -f3 >> all_dbs.out
- diff <(LC_ALL=C sort all_dbs.out | uniq -c) - <<EOF
+ dgraph debug -p out/0/p 2>|/dev/null | grep '{s}' | cut -d' ' -f3 >all_dbs.out
+ dgraph debug -p out/1/p 2>|/dev/null | grep '{s}' | cut -d' ' -f3 >>all_dbs.out
+ diff <(LC_ALL=C sort all_dbs.out | uniq -c) - <<EOF
diff --git a/dgraph/cmd/migrate/README.md b/dgraph/cmd/migrate/README.md
password =
db =
```
-
Export the SQL database into a schema and RDF file, e.g. the schema.txt and sql.rdf file below
-```
+
+```bash
dgraph migrate --config config.properties --output_schema schema.txt --output_data sql.rdf
```
-If you are connecting to a remote DB (something hosted on AWS, GCP, etc...), you need to pass the following flags
-```
+If you are connecting to a remote DB (something hosted on AWS, GCP, etc.), you need to pass the
+following flags:
+
+```bash
-- host
-- port
+```
+
Import the data into Dgraph with the live loader (the example below is connecting to the Dgraph zero and alpha servers running on the default ports)
-```
+
+```bash
dgraph live -z localhost:5080 -a localhost:9080 --files sql.rdf --format=rdf --schema schema.txt
-```
+```
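Editor's note: assuming the remote-connection flags follow the `--host`/`--port` spelling listed
above, a remote export might look like this (host and port values are placeholders):

```bash
# Hypothetical remote MySQL endpoint; substitute your own host and port.
dgraph migrate --config config.properties --host db.example.internal --port 3306 \
  --output_schema schema.txt --output_data sql.rdf
```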
diff --git a/dgraph/docker-compose.yml b/dgraph/docker-compose.yml
index 2d468fc0c60..620e21b7ed7 100644
--- a/dgraph/docker-compose.yml
+++ b/dgraph/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --my=zero1:5080 --replicas 3 --raft="idx=1" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --my=zero1:5080 --replicas 3 --raft="idx=1" --logtostderr -v=2 --bindall --expose_trace
+ --profile_mode block --block_rate 10
zero2:
image: dgraph/dgraph:local
@@ -32,7 +35,9 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --my=zero2:5080 --replicas 3 --raft="idx=2" --logtostderr -v=2 --peer=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --my=zero2:5080 --replicas 3 --raft="idx=2" --logtostderr -v=2 --peer=zero1:5080
zero3:
image: dgraph/dgraph:local
@@ -50,7 +55,9 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --my=zero3:5080 --replicas 3 --raft="idx=3" --logtostderr -v=2 --peer=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --my=zero3:5080 --replicas 3 --raft="idx=3" --logtostderr -v=2 --peer=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -74,9 +81,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha1:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha1:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
alpha2:
image: dgraph/dgraph:local
@@ -102,9 +112,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha2:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha2:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
alpha3:
image: dgraph/dgraph:local
@@ -130,9 +143,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha3:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha3:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
alpha4:
image: dgraph/dgraph:local
@@ -158,9 +174,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha4:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha4:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
alpha5:
image: dgraph/dgraph:local
@@ -186,9 +205,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha5:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha5:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
alpha6:
image: dgraph/dgraph:local
@@ -214,9 +236,12 @@ services:
labels:
cluster: test
service: alpha
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha6:7080 --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --encryption "key-file=/dgraph-enc/enc-key;" --my=alpha6:7080
+ --zero=zero1:5080,zero2:5080,zero3:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=20s;"
minio:
image: minio/minio:latest
diff --git a/dgraphtest/README.md b/dgraphtest/README.md
index bc41996c7c8..b7c8c10a1cd 100644
--- a/dgraphtest/README.md
+++ b/dgraphtest/README.md
@@ -3,15 +3,18 @@
### Setup Env Variables
1. set `TEST_DGRAPH_CLOUD_CLUSTER_URL` to the URL that the dgo client should use
-2. set `TEST_DGRAPH_CLOUD_CLUSTER_TOKEN` to one of API keys that you can generate on the `Settings` page on the cloud UI
-3. `TEST_DGRAPH_CLOUD_ACL` to `false` if ACLs are disabled. By default, ACLs are assumed to be enabled.
+2. set `TEST_DGRAPH_CLOUD_CLUSTER_TOKEN` to one of the API keys that you can generate on the
+   `Settings` page on the cloud UI
+3. set `TEST_DGRAPH_CLOUD_ACL` to `false` if ACLs are disabled. By default, ACLs are assumed to be
+   enabled.
### Schema Mode
-The tests require the `Schema Mode` to be set to `Flexible` from the `Settings` page on the cloud UI.
+The tests require the `Schema Mode` to be set to `Flexible` from the `Settings` page on the cloud
+UI.
### Running Tests
-```
+```bash
go test -tags=cloud ./...
```
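Editor's note: putting the three variables together, a typical local run might look like this (the
URL and key are placeholders for your own cluster's values):

```bash
export TEST_DGRAPH_CLOUD_CLUSTER_URL="https://your-cluster.cloud.dgraph.io/graphql"
export TEST_DGRAPH_CLOUD_CLUSTER_TOKEN="your-api-key"
export TEST_DGRAPH_CLOUD_ACL="false" # only if ACLs are disabled on the cluster
go test -tags=cloud ./...
```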
diff --git a/dql/README.md b/dql/README.md
index ea0c9b13a7b..c201a69068c 100644
--- a/dql/README.md
+++ b/dql/README.md
@@ -1,8 +1,8 @@
-Comparing the old and new methods, we find that using slice makes the parsing 20% faster on
-average than using channels. Also, using slices allows the parser to backtrack and peek the
-tokens which couldn't be done using channels as each token can only be consumed once.
+Comparing the old and new methods, we find that using slice makes the parsing 20% faster on average
+than using channels. Also, using slices allows the parser to backtrack and peek the tokens which
+couldn't be done using channels as each token can only be consumed once.
-```
+```bash
Name unit Old New Improvement
----------------------------------------------------------------------
Benchmark_Filters-4 ns/op 14007 9634 31 %
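Editor's note: to reproduce numbers like the ones in this table against the current parser, the
package benchmarks can be run directly (the package path is assumed from the repo layout):

```bash
# -benchmem adds allocation statistics alongside ns/op.
go test -bench=. -benchmem ./dql/...
```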
diff --git a/ee/README.md b/ee/README.md
index 8282dffb902..4130acb636e 100644
--- a/ee/README.md
+++ b/ee/README.md
@@ -1,4 +1,5 @@
# Dgraph Enterprise Edition (EE)
-The files stored here correspond to the Dgraph Enterprise Edition features, which are under the [Dgraph Community License](https://github.com/hypermodeinc/dgraph/blob/main/licenses/DCL.txt) (_not_ the Apache 2 License).
-
+The files stored here correspond to the Dgraph Enterprise Edition features, which are under the
+[Dgraph Community License](https://github.com/hypermodeinc/dgraph/blob/main/licenses/DCL.txt) (_not_
+the Apache 2 License).
diff --git a/go.mod b/go.mod
index 906f0bff6f1..7447bc92743 100644
--- a/go.mod
+++ b/go.mod
@@ -1,6 +1,7 @@
module github.com/hypermodeinc/dgraph/v24
go 1.22.7
+toolchain go1.23.4
require (
contrib.go.opencensus.io/exporter/jaeger v0.2.1
@@ -164,4 +165,4 @@ require (
gotest.tools/v3 v3.5.1 // indirect
)
-retract v24.0.3 // should have been a minor release instead of a patch
+retract v24.0.3 // should have been a minor release instead of a patch
\ No newline at end of file
diff --git a/graphql/bench/README.md b/graphql/bench/README.md
index adf66451425..51efce81c29 100644
--- a/graphql/bench/README.md
+++ b/graphql/bench/README.md
@@ -1,14 +1,15 @@
-Compare performance of Auth vs Non-Auth Queries and Mutation.
-Queries were benchmarked against pre generated dataset. We had two cases: Single Level Query and Deep Query
-For Mutation we benchmarked add, delete and Multi level Mutation.
-We also compared the overhead of adding auth rules.
-Results and other details are mentioned here
+Compare performance of Auth vs Non-Auth Queries and Mutation. Queries were benchmarked against a
+pre-generated dataset. We had two cases: Single Level Query and Deep Query. For Mutation, we
+benchmarked add, delete and Multi level Mutation. We also compared the overhead of adding auth
+rules. Results and other details are mentioned here.
-To regenerate the benchmark results run it once with Non-Auth schema `schema.graphql`
-and compare the result by generating the benchmark with Auth schema `schema_auth.graphql`.
+To regenerate the benchmark results run it once with Non-Auth schema `schema.graphql` and compare
+the result by generating the benchmark with Auth schema `schema_auth.graphql`.
**GraphQL pre and post processing time:**
-````
+
+```bash
Auth:
Benchmark Name | Pre Time | Post Time | Ratio of Processing Time by Actual Time
BenchmarkNestedQuery 144549ns 1410978ns 0.14%
@@ -20,22 +21,24 @@ Benchmark Name | Pre Time | Post Time | Ratio of Process
BenchmarkNestedQuery 117319ns 716261089ns 26.65%
BenchmarkOneLevelMutation 29643908ns 83077638ns 2.6%
BenchmarkMultiLevelMutation 20579295ns 53566488ns 6.2%
-````
+```
+
**Summary**:
-````
+
+```bash
Query:
Running the Benchmark:
Command: go test -bench=. -benchtime=60s
- go test -bench=. -benchtime=60s
- goos: linux
- goarch: amd64
- pkg: github.com/dgraph-io/dgraph/graphql/e2e/auth/bench
+ go test -bench=. -benchtime=60s
+ goos: linux
+ goarch: amd64
+ pkg: github.com/dgraph-io/dgraph/graphql/e2e/auth/bench
Auth
- BenchmarkNestedQuery-8 88 815315761 ns/op
- BenchmarkOneLevelQuery-8 4357 15626384 ns/op
+ BenchmarkNestedQuery-8 88 815315761 ns/op
+ BenchmarkOneLevelQuery-8 4357 15626384 ns/op
Non-Auth
- BenchmarkNestedQuery-8 33 2218877846 ns/op
- BenchmarkOneLevelQuery-8 4446 16100509 ns/op
+ BenchmarkNestedQuery-8 33 2218877846 ns/op
+ BenchmarkOneLevelQuery-8 4446 16100509 ns/op
Mutation:
@@ -55,4 +58,5 @@ BenchmarkMutation: 0.464559706s
BenchmarkMultiLevelMutation: 1.440681796s
BenchmarkOneLevelMutation:
Add Time: 9.549761333s
-Delete Time: 1.200276696s
\ No newline at end of file
+Delete Time: 1.200276696s
+```
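Editor's note: since both schema variants run under the same `go test -bench` command, the auth
overhead can be summarized by capturing each run and comparing them with `benchstat` (an optional
tool from golang.org/x/perf, not part of this repo):

```bash
go test -bench=. -benchtime=60s | tee nonauth.txt # with schema.graphql applied
go test -bench=. -benchtime=60s | tee auth.txt    # with schema_auth.graphql applied
benchstat nonauth.txt auth.txt
```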
diff --git a/graphql/e2e/admin_auth/poorman_auth/docker-compose.yml b/graphql/e2e/admin_auth/poorman_auth/docker-compose.yml
index 2841da69d41..eacaf02af6b 100644
--- a/graphql/e2e/admin_auth/poorman_auth/docker-compose.yml
+++ b/graphql/e2e/admin_auth/poorman_auth/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --my=zero1:5080 --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --my=zero1:5080 --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate
+ 10
alpha1:
image: dgraph/dgraph:local
@@ -30,6 +33,8 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16; token=itIsSecret;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;
+ token=itIsSecret;" --trace "ratio=1.0;"
diff --git a/graphql/e2e/admin_auth/poorman_auth_with_acl/docker-compose.yml b/graphql/e2e/admin_auth/poorman_auth_with_acl/docker-compose.yml
index e4197940f43..a79d5449066 100644
--- a/graphql/e2e/admin_auth/poorman_auth_with_acl/docker-compose.yml
+++ b/graphql/e2e/admin_auth/poorman_auth_with_acl/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --my=zero1:5080 --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --my=zero1:5080 --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate
+ 10
alpha1:
image: dgraph/dgraph:local
@@ -34,7 +37,9 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16; token=itIsSecret;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=3s;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10
+ --logtostderr -v=2 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;
+ token=itIsSecret;" --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=3s;" --trace
+ "ratio=1.0;"
diff --git a/graphql/e2e/auth/debug_off/docker-compose.yml b/graphql/e2e/auth/debug_off/docker-compose.yml
index 8e81c087440..3ef0d1699bb 100644
--- a/graphql/e2e/auth/debug_off/docker-compose.yml
+++ b/graphql/e2e/auth/debug_off/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10 --my=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ --my=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -30,6 +33,8 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3 --my=alpha1:7080
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3
+ --my=alpha1:7080 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --trace
+ "ratio=1.0;"
diff --git a/graphql/e2e/auth/docker-compose.yml b/graphql/e2e/auth/docker-compose.yml
index 42c182bdd2f..1a170d44a84 100644
--- a/graphql/e2e/auth/docker-compose.yml
+++ b/graphql/e2e/auth/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10 --my=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ --my=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -30,7 +33,8 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3 --my=alpha1:7080
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --graphql "debug=true;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3
+ --my=alpha1:7080 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --graphql
+ "debug=true;" --trace "ratio=1.0;"
diff --git a/graphql/e2e/auth_closed_by_default/docker-compose.yml b/graphql/e2e/auth_closed_by_default/docker-compose.yml
index 6643a992a50..1a170d44a84 100644
--- a/graphql/e2e/auth_closed_by_default/docker-compose.yml
+++ b/graphql/e2e/auth_closed_by_default/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10 --my=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ --my=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -30,7 +33,8 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3 --my=alpha1:7080
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --graphql "debug=true;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=3
+ --my=alpha1:7080 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --graphql
+ "debug=true;" --trace "ratio=1.0;"
diff --git a/graphql/e2e/common/README.md b/graphql/e2e/common/README.md
index 74c2fc1570e..71f3cf1cc42 100644
--- a/graphql/e2e/common/README.md
+++ b/graphql/e2e/common/README.md
@@ -1 +1,3 @@
-This package contains test functions which are called by other packages. The intention of this package is to contain all the end to end tests which can be run with different configuration options like schema, encoding, http method etc.
\ No newline at end of file
+This package contains test functions which are called by other packages. The intention of this
+package is to contain all the end-to-end tests which can be run with different configuration
+options like schema, encoding, HTTP method, etc.
diff --git a/graphql/e2e/common/error_test.yaml b/graphql/e2e/common/error_test.yaml
index b17cb0d4f91..6d10599d988 100644
--- a/graphql/e2e/common/error_test.yaml
+++ b/graphql/e2e/common/error_test.yaml
@@ -1,5 +1,4 @@
--
- name: "Unknown root field"
+- name: Unknown root field
gqlrequest: |
query {
getAuthorszzz(id: "0x1") { name }
@@ -7,12 +6,16 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Cannot query field \"getAuthorszzz\" on type \"Query\". Did you mean
- \"getAuthor\" or \"getauthor1\"?",
- "locations": [ { "line": 2, "column": 3 } ] } ]
+ [
+ {
+ "message":
+ Cannot query field "getAuthorszzz" on type "Query". Did you mean "getAuthor" or
+ "getauthor1"/?,
+ "locations": [{ "line": 2, "column": 3 }],
+ },
+ ]
--
- name: "Unknown field"
+- name: Unknown field
gqlrequest: |
query {
getAuthor(id: "0x1") { namezzz }
@@ -20,11 +23,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Cannot query field \"namezzz\" on type \"Author\". Did you mean \"name\"?",
- "locations": [ { "line": 2, "column": 26 } ] } ]
+ [
+ {
+ "message": 'Cannot query field "namezzz" on type "Author". Did you mean "name"?',
+ "locations": [{ "line": 2, "column": 26 }],
+ },
+ ]
--
- name: "Undefined variable"
+- name: Undefined variable
gqlrequest: |
query {
getAuthor(id: $theID) { name }
@@ -32,11 +38,9 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Variable \"$theID\" is not defined.",
- "locations": [ { "line": 2, "column": 17 } ] } ]
+ [{ "message": Variable "$theID" is not defined., "locations": [{ "line": 2, "column": 17 }] }]
--
- name: "input of wrong type"
+- name: input of wrong type
gqlrequest: |
query {
queryAuthor(filter: { reputation: { le: "hi there" } }) { name }
@@ -44,11 +48,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Expected type Float, found \"hi there\".",
- "locations": [ { "line": 2, "column": 44 } ] } ]
+ [
+ {
+ "message": 'Expected type Float, found "hi there".',
+ "locations": [{ "line": 2, "column": 44 }],
+ },
+ ]
--
- name: "unknown variable type"
+- name: unknown variable type
gqlrequest: |
query queryAuthor($filter: AuthorFiltarzzz!) {
queryAuthor(filter: $filter) { name }
@@ -56,40 +63,40 @@
gqlvariables: |
{ "filter": "type was wrong" }
errors:
- [ { "message": "Variable type provided AuthorFiltarzzz! is incompatible with expected
- type AuthorFilter",
- "locations": [{ "line": 2, "column": 23}]},
- { "message": "Variable \"$filter\" of type \"AuthorFiltarzzz!\" used in position
- expecting type \"AuthorFilter\".",
- "locations": [ { "line": 2, "column": 23 } ] },
- { "message": "Unknown type \"AuthorFiltarzzz\".",
- "locations": [ { "line": 1, "column": 1 } ] } ]
+ [
+ {
+ message":
+ Variable type provided AuthorFiltarzzz! is incompatible with expected type AuthorFilter,
+ "locations": [{ "line": 2, "column": 23 }],
+ },
+ {
+ "message":
+ Variable "$filter" of type "AuthorFiltarzzz!" used in position expecting type
+ "AuthorFilter".,
+ "locations": [{ "line": 2, "column": 23 }],
+ },
+ { "message": Unknown type "AuthorFiltarzzz"., "locations": [{ "line": 1, "column": 1 }] },
+ ]
--
- name: "variable of wrong type"
+- name: variable of wrong type
gqlrequest: |
query queryAuthor($filter: AuthorFilter!) {
queryAuthor(filter: $filter) { name }
}
gqlvariables: |
{ "filter": 57 }
- errors:
- [ { "message": "must be a AuthorFilter",
- "path": [ "variable", "filter"] } ]
+ errors: [{ "message": must be a AuthorFilter, "path": [variable, filter] }]
--
- name: "variable field of wrong type"
+- name: variable field of wrong type
gqlrequest: |
query queryAuthor($filter: AuthorFilter!) {
queryAuthor(filter: $filter) { name }
}
gqlvariables: |
{ }
- errors:
- [ { "message": "must be defined",
- "path": [ "variable", "filter"] } ]
--
- name: "subscription on type without @withSubscription directive should return error"
+ errors: [{ "message": must be defined, "path": [variable, filter] }]
+
+- name: subscription on type without @withSubscription directive should return error
gqlrequest: |
subscription {
getAuthor(id: "0x1") { name }
@@ -97,11 +104,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Cannot query field \"getAuthor\" on type \"Subscription\".",
- "locations": [ { "line": 2, "column": 3 } ] } ]
+ [
+ {
+ "message": Cannot query field "getAuthor" on type "Subscription".,
+ "locations": [{ "line": 2, "column": 3 }],
+ },
+ ]
--
- name: "@cascade only accepts those fields as a argument, which are present in given type"
+- name: "@cascade only accepts those fields as a argument, which are present in given type"
gqlrequest: |
query {
queryAuthor @cascade(fields:["title"]){
@@ -112,12 +122,16 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Field `title` is not present in type `Author`. You can only use fields in cascade which are in type `Author`",
- "locations": [{ "line": 2, "column": 16}]
- } ]
+ [
+ {
+ "message":
+ Field `title` is not present in type `Author`. You can only use fields in cascade which
+ are in type `Author`,
+ "locations": [{ "line": 2, "column": 16 }],
+ },
+ ]
--
- name: "Out of range error for int32 type"
+- name: Out of range error for int32 type
gqlrequest: |
mutation {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numLikes:2147483648}]){
@@ -133,11 +147,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Out of range value '2147483648', for type `Int`",
- "locations": [ { "line": 2, "column": 63 } ] } ]
+ [
+ {
+ "message": "Out of range value '2147483648', for type `Int`",
+ "locations": [{ "line": 2, "column": 63 }],
+ },
+ ]
--
- name: "Out of range error for int64 type"
+- name: Out of range error for int64 type
gqlrequest: |
mutation {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numViews:9223372036854775808}]){
@@ -153,11 +170,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Out of range value '9223372036854775808', for type `Int64`",
- "locations": [ { "line": 2, "column": 63 } ] } ]
+ [
+ {
+ "message": "Out of range value '9223372036854775808', for type `Int64`",
+ "locations": [{ "line": 2, "column": 63 }],
+ },
+ ]
--
- name: "@cascade only accepts numUids or given type name as arguments for add or update payload "
+- name: "@cascade only accepts numUids or given type name as arguments for add or update payload"
gqlrequest: |
mutation {
addAuthor(input:[{name:"jatin"}]) @cascade(fields:["name"]) {
@@ -169,12 +189,16 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Field `name` is not present in type `AddAuthorPayload`. You can only use fields in cascade which are in type `AddAuthorPayload`",
- "locations": [{ "line": 2, "column": 38}]
- } ]
+ [
+ {
+ "message":
+ Field `name` is not present in type `AddAuthorPayload`. You can only use fields in cascade
+ which are in type `AddAuthorPayload`,
+ "locations": [{ "line": 2, "column": 38 }],
+ },
+ ]
--
- name: "String value is Incompatible with Int32 type given in variable"
+- name: String value is Incompatible with Int32 type given in variable
gqlrequest: |
mutation($numLikes:Int) {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numLikes:$numLikes}]){
@@ -189,12 +213,9 @@
}
gqlvariables: |
{ "numLikes": "21474836" }
- errors:
- [ { "message": "cannot use string as Int",
- "path": [ "variable","numLikes" ] } ]
+ errors: [{ "message": cannot use string as Int, "path": [variable, numLikes] }]
--
- name: "Float value is Incompatible with Int64 type"
+- name: Float value is Incompatible with Int64 type
gqlrequest: |
mutation {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numViews:180143985094.0}]){
@@ -210,11 +231,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Type mismatched for Value `180143985094.0`, expected: Int64, got: 'Float'",
- "locations": [ { "line": 2, "column": 63 } ] } ]
+ [
+ {
+ "message": "Type mismatched for Value `180143985094.0`, expected: Int64, got: 'Float'",
+ "locations": [{ "line": 2, "column": 63 }],
+ },
+ ]
--
- name: "Out of range error for int32 type given in variable"
+- name: Out of range error for int32 type given in variable
gqlrequest: |
mutation($numLikes:Int) {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numLikes:$numLikes}]){
@@ -230,11 +254,9 @@
gqlvariables: |
{ "numLikes": 2147483648 }
errors:
- [ { "message": "Out of range value '2147483648', for type `Int`",
- "path": [ "variable","numLikes" ] } ]
+ [{ "message": "Out of range value '2147483648', for type `Int`", "path": [variable, numLikes] }]
--
- name: "Out of range error for int64 type in variable"
+- name: Out of range error for int64 type in variable
gqlrequest: |
mutation($numViews:Int64) {
addPost(input:[{title:"Dgraph",author:{name:"Bob"},numViews:$numViews}]){
@@ -250,11 +272,14 @@
gqlvariables: |
{ "numViews":9223372036854775808}
errors:
- [ { "message": "Out of range value '9223372036854775808', for type `Int64`",
- "path": [ "variable", "numViews" ] } ]
+ [
+ {
+ "message": "Out of range value '9223372036854775808', for type `Int64`",
+ "path": [variable, numViews],
+ },
+ ]
--
- name: "Float value is Incompatible with Int64 type given in variable"
+- name: Float value is Incompatible with Int64 type given in variable
gqlrequest: |
mutation addPost($Post: [AddPostInput!]!){
addPost(input:$Post){
@@ -275,11 +300,14 @@
} ]
}
errors:
- [ { "message": "Type mismatched for Value `180143985094.0`, expected:`Int64`",
- "path": [ "variable", "Post",0.0,"numViews" ] } ]
+ [
+ {
+ "message": "Type mismatched for Value `180143985094.0`, expected:`Int64`",
+ "path": [variable, Post, 0.0, numViews],
+ },
+ ]
--
- name: "Error for int64 value given in list as variable"
+- name: Error for int64 value given in list as variable
gqlrequest: |
mutation addpost1($Post: [Addpost1Input!]!){
addpost1(input:$Post){
@@ -296,10 +324,14 @@
} ]
}
errors:
- [ { "message": "Type mismatched for Value `180143985094.0`, expected:`Int64`",
- "path": [ "variable", "Post",0.0,"likesByMonth",0.0 ] } ]
+ [
+ {
+ "message": "Type mismatched for Value `180143985094.0`, expected:`Int64`",
+ "path": [variable, Post, 0.0, likesByMonth, 0.0],
+ },
+ ]
-- name: "Error for int64 value given in list"
+- name: Error for int64 value given in list
gqlrequest: |
mutation {
addpost1(input:[{title:"Dgraph",likesByMonth: [180143985094.0,33,1,66]}]){
@@ -312,11 +344,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Type mismatched for Value `180143985094.0`, expected: Int64, got: 'Float'",
- "locations": [ { "line": 2, "column": 50 } ] } ]
+ [
+ {
+ "message": "Type mismatched for Value `180143985094.0`, expected: Int64, got: 'Float'",
+ "locations": [{ "line": 2, "column": 50 }],
+ },
+ ]
--
- name: "Error for int value given in list as variable"
+- name: Error for int value given in list as variable
gqlrequest: |
mutation addpost1($Post: [Addpost1Input!]!){
addpost1(input:$Post){
@@ -333,10 +368,14 @@
} ]
}
errors:
- [ { "message": "Out of range value '2147483648', for type `Int`",
- "path": [ "variable", "Post",0.0,"commentsByMonth",0.0 ] } ]
+ [
+ {
+ "message": "Out of range value '2147483648', for type `Int`",
+ "path": [variable, Post, 0.0, commentsByMonth, 0.0],
+ },
+ ]
-- name: "Error for int value given in list"
+- name: Error for int value given in list
gqlrequest: |
mutation {
addpost1(input:[{title:"Dgraph",commentsByMonth: [2147483648,33,1,66]}]){
@@ -349,10 +388,14 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Out of range value '2147483648', for type `Int`",
- "locations": [ { "line": 2, "column": 53 } ] } ]
+ [
+ {
+ "message": "Out of range value '2147483648', for type `Int`",
+ "locations": [{ "line": 2, "column": 53 }],
+ },
+ ]
-- name: "Error when multiple filter functions are used"
+- name: Error when multiple filter functions are used
gqlrequest: |
query {
queryBook(filter:{bookId: {eq:2 le:2}})
@@ -363,11 +406,16 @@
gqlvariables: |
{ }
errors:
- [ { "message": "Int64Filter filter expects only one filter function, got: 2",
- "locations": [ { "line": 2, "column": 29 } ] } ]
+ [
+ {
+ "message": "Int64Filter filter expects only one filter function, got: 2",
+ "locations": [{ "line": 2, "column": 29 }],
+ },
+ ]
--
- name: "@cascade only accepts those fields as a argument, which are present in given type at both root and deep levels"
+- name:
+ "@cascade only accepts those fields as a argument, which are present in given type at both root
+ and deep levels"
gqlrequest: |
query {
queryAuthor @cascade(fields: ["dob","reputation"]) {
@@ -380,12 +428,18 @@
}
}
errors:
- [ { "message": "Field `text1` is not present in type `Post`. You can only use fields in cascade which are in type `Post`",
- "locations": [{ "line": 5, "column": 10}]
- } ]
+ [
+ {
+ "message":
+ Field `text1` is not present in type `Post`. You can only use fields in cascade which are
+ in type `Post`,
+ "locations": [{ "line": 5, "column": 10 }],
+ },
+ ]
--
- name: "@cascade only accepts those fields as a argument, which are present in given type at deep level using variables"
+- name:
+ "@cascade only accepts those fields as a argument, which are present in given type at deep level
+ using variables"
gqlrequest: |
query($fieldsRoot: [String], $fieldsDeep: [String]) {
queryAuthor @cascade(fields: $fieldsRoot) {
@@ -408,12 +462,18 @@
]
}
errors:
- [ { "message": "input: variables.fieldsDeep.text1 Field `text1` is not present in type `Post`. You can only use fields in cascade which are in type `Post`",
- "locations": [{ "line": 5, "column": 10}]
- } ]
+ [
+ {
+ "message":
+ "input: variables.fieldsDeep.text1 Field `text1` is not present in type `Post`. You can
+ only use fields in cascade which are in type `Post`",
+ "locations": [{ "line": 5, "column": 10 }],
+ },
+ ]
--
- name: "@cascade only accepts those fields as a argument, which are present in given type at root level using variables"
+- name:
+ "@cascade only accepts those fields as a argument, which are present in given type at root level
+ using variables"
gqlrequest: |
query($fieldsRoot: [String], $fieldsDeep: [String]) {
queryAuthor @cascade(fields: $fieldsRoot) {
@@ -436,6 +496,11 @@
]
}
errors:
- [ { "message": "input: variables.fieldsRoot.reputation1 Field `reputation1` is not present in type `Author`. You can only use fields in cascade which are in type `Author`",
- "locations": [{ "line": 2, "column": 15}]
- } ]
+ [
+ {
+ "message":
+ "input: variables.fieldsRoot.reputation1 Field `reputation1` is not present in type
+ `Author`. You can only use fields in cascade which are in type `Author`",
+ "locations": [{ "line": 2, "column": 15 }],
+ },
+ ]
diff --git a/graphql/e2e/custom_logic/README.md b/graphql/e2e/custom_logic/README.md
index 7bff03f161d..80f035d5a2b 100644
--- a/graphql/e2e/custom_logic/README.md
+++ b/graphql/e2e/custom_logic/README.md
@@ -1,8 +1,7 @@
-The test file should be run after bringing up the docker containers via docker-compose.
-Since the tests rely on a mock server, which is implemented via cmd/main.go, run the following
-command.
+The test file should be run after bringing up the docker containers via docker-compose. Since the
+tests rely on a mock server, which is implemented via cmd/main.go, run the following command.
-```
+```bash
docker-compose up --build
```
diff --git a/graphql/e2e/custom_logic/cmd/go.mod b/graphql/e2e/custom_logic/cmd/go.mod
index 41e29f4740c..dead160ff45 100644
--- a/graphql/e2e/custom_logic/cmd/go.mod
+++ b/graphql/e2e/custom_logic/cmd/go.mod
@@ -1,8 +1,8 @@
module github.com/hypermodeinc/dgraph/graphql/e2e/custom_logic/cmd
-go 1.19
+go 1.22.7
require (
github.com/graph-gophers/graphql-go v1.5.0
gopkg.in/yaml.v2 v2.4.0
-)
\ No newline at end of file
+)
diff --git a/graphql/e2e/custom_logic/cmd/graphqlresponse.yaml b/graphql/e2e/custom_logic/cmd/graphqlresponse.yaml
index 91ac57accd7..5a63b4e0520 100644
--- a/graphql/e2e/custom_logic/cmd/graphqlresponse.yaml
+++ b/graphql/e2e/custom_logic/cmd/graphqlresponse.yaml
@@ -1,5 +1,5 @@
-- name: "validinputfield"
- description: "Test case for validating remote input type with valid local input type."
+- name: validinputfield
+ description: Test case for validating remote input type with valid local input type
schema: |
type Country {
code: String
@@ -15,70 +15,71 @@
countries(filter: CountryInput!): [Country!]!
}
-- name: "invalidfield"
- description: "Test case for validating remote input type with invalid local input type."
+- name: invalidfield
+ description: Test case for validating remote input type with invalid local input type
schema: |
- type Country {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- input CountryInput {
- code: Int!
- name: String!
- states: [StateInput]
- }
+ input CountryInput {
+ code: Int!
+ name: String!
+ states: [StateInput]
+ }
- input StateInput {
- code: String!
- name: String!
- }
+ input StateInput {
+ code: String!
+ name: String!
+ }
- type Query{
- countries(filter: CountryInput!): [Country!]!
- }
+ type Query{
+ countries(filter: CountryInput!): [Country!]!
+ }
-- name: "nestedinvalid"
- description: "Test case to type check nested types."
+- name: nestedinvalid
+ description: Test case to type check nested types
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- input CountryInput {
- code: String!
- name: String!
- states: [StateInput]
- }
+ input CountryInput {
+ code: String!
+ name: String!
+ states: [StateInput]
+ }
- input StateInput {
- code: String!
- name: Int!
- }
+ input StateInput {
+ code: String!
+ name: Int!
+ }
- type Query{
- countries(filter: CountryInput!): [Country!]!
- }
+ type Query{
+ countries(filter: CountryInput!): [Country!]!
+ }
-- name: "validcountry"
- description: "Test case to check return type is valid and results are properly rewritten by the dgraph"
+- name: validcountry
+ description:
+    Test case to check return type is valid and results are properly rewritten by Dgraph
schema: |
type Country {
code: String
@@ -89,8 +90,8 @@
country(code: ID!): Country!
}
-- name: "argsonfields"
- description: "Test case to check args on fields can be passed by Dgraph"
+- name: argsonfields
+ description: Test case to check args on fields can be passed by Dgraph
schema: |
type Country {
code(size: Int!): String
@@ -117,36 +118,36 @@
}
}
-- name: "validcountrywitherror"
- description: "Test case to validate dgraph can handle both valid data and error"
+- name: validcountrywitherror
+ description: Test case to validate dgraph can handle both valid data and error
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- input CountryInput {
- code: String!
- name: String!
- states: [StateInput]
- }
+ input CountryInput {
+ code: String!
+ name: String!
+ states: [StateInput]
+ }
- input StateInput {
- code: String!
- name: String!
- }
+ input StateInput {
+ code: String!
+ name: String!
+ }
- type Query{
- country(code: ID!): Country!
- }
+ type Query{
+ country(code: ID!): Country!
+ }
request: |
query($id: ID!) { country(code: $id) {
code
@@ -169,8 +170,8 @@
variables: |
{"id":"BI"}
-- name: "validcountries"
- description: "Test case to validate return multiple entities as part of graphql response"
+- name: validcountries
+  description: Test case to validate returning multiple entities as part of a graphql response
schema: |
type Country {
code: String
@@ -181,36 +182,36 @@
validCountries(code: ID!): [Country]
}
-- name: "graphqlerr"
- description: "Test case to validate whether dgraph can handle graphql error"
+- name: graphqlerr
+ description: Test case to validate whether dgraph can handle graphql error
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- input CountryInput {
- code: String!
- name: String!
- states: [StateInput]
- }
+ input CountryInput {
+ code: String!
+ name: String!
+ states: [StateInput]
+ }
- input StateInput {
- code: String!
- name: String!
- }
+ input StateInput {
+ code: String!
+ name: String!
+ }
- type Query{
- country(code: ID!): [Country]
- }
+ type Query{
+ country(code: ID!): [Country]
+ }
request: |
query($id: ID!) { country(code: $id) {
code
@@ -227,8 +228,8 @@
variables: |
{"id":"BI"}
-- name: "setcountry"
- description: "Test case to validate graphql mutation"
+- name: setcountry
+ description: Test case to validate graphql mutation
schema: |
type Country {
code: String
@@ -292,40 +293,40 @@
variables: |
{"input":{"code":"IN","name":"India","states":[{"code":"RJ","name":"Rajasthan"},{"code":"KA","name":"Karnataka"}]}}
-- name: "updatecountries"
- description: "Test case to validate custom logic mutation update"
+- name: updatecountries
+ description: Test case to validate custom logic mutation update
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- input CountryInput {
- code: String!
- name: String!
- states: [StateInput]
- }
+ input CountryInput {
+ code: String!
+ name: String!
+ states: [StateInput]
+ }
- input StateInput {
- code: String!
- name: String!
- }
+ input StateInput {
+ code: String!
+ name: String!
+ }
- type Mutation{
- updateCountries(name: String, std: Int): [Country!]!
- }
+ type Mutation{
+ updateCountries(name: String, std: Int): [Country!]!
+ }
- type Query{
- country(code: ID!): [Country]
- }
+ type Query{
+ country(code: ID!): [Country]
+ }
request: |
mutation($name: String, $std: Int) { updateCountries(name: $name, std: $std) {
name
@@ -355,7 +356,7 @@
%s(%s:ID!): String
}
-- name: "introspectionresults"
+- name: introspectionresults
schema: |
input UserInput{
id: ID!
@@ -437,7 +438,7 @@
getPosts(input: [PostFilterInput]): [Post!]
}
-- name: "carsschema"
+- name: carsschema
schema: |
type Car {
id: ID!
@@ -470,41 +471,41 @@
- name: invalidargument
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- type Query{
- country(no_code: ID!): Country!
- }
+ type Query{
+ country(no_code: ID!): Country!
+ }
- name: invalidtype
schema: |
- type Country @remote {
- code: String
- name: String
- states: [State]
- std: Int
- }
+ type Country @remote {
+ code: String
+ name: String
+ states: [State]
+ std: Int
+ }
- type State @remote {
- code: String
- name: String
- country: Country
- }
+ type State @remote {
+ code: String
+ name: String
+ country: Country
+ }
- type Query{
- country(code: Int!): Country!
- }
+ type Query{
+ country(code: Int!): Country!
+ }
- name: invalidinputbatchedfield
schema: |
@@ -526,4 +527,4 @@
}
type Query{
getPosts(input: PostFilterInput): [Post!]
- }
\ No newline at end of file
+ }
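
The schema bodies in graphqlresponse.yaml sit under `schema: |` literal block scalars, so the
uniform re-indentation above is content-neutral: a literal scalar strips the base indentation
detected from its first non-empty line. A sketch of that equivalence (again assuming js-yaml):

```js
// Sketch only: a uniformly re-indented `schema: |` block parses to the
// identical string, because the detected base indentation is stripped.
const yaml = require("js-yaml")

const fourSpace = "schema: |\n    type Query {\n      hello: String\n    }\n"
const sixSpace = "schema: |\n      type Query {\n        hello: String\n      }\n"

console.log(yaml.load(fourSpace).schema === yaml.load(sixSpace).schema) // true
```
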
diff --git a/graphql/e2e/custom_logic/cmd/index.js b/graphql/e2e/custom_logic/cmd/index.js
index 448d471bde7..b0070bffae3 100644
--- a/graphql/e2e/custom_logic/cmd/index.js
+++ b/graphql/e2e/custom_logic/cmd/index.js
@@ -1,9 +1,9 @@
-const graphql = require("graphql");
+const graphql = require("graphql")
// build internal graphql schema.
-const graphqlSchemaObj = graphql.buildSchema(process.argv[2]);
+const graphqlSchemaObj = graphql.buildSchema(process.argv[2])
// introspect and print the introspection result to stdout.
graphql.graphql(graphqlSchemaObj, graphql.introspectionQuery).then((res) => {
- console.log(JSON.stringify(res))
-})
\ No newline at end of file
+ console.log(JSON.stringify(res))
+})
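
cmd/index.js still calls the `graphql.introspectionQuery` string export, which newer graphql-js
releases removed in favor of `getIntrospectionQuery()`. If the mock server's graphql dependency is
ever bumped, a hedged sketch of the equivalent on a modern graphql-js (assuming the v16-style
arguments object) would be:

```js
// Hypothetical modernization of cmd/index.js; the version pinned by the
// mock server may still need the legacy positional graphql() API.
const { buildSchema, graphql, getIntrospectionQuery } = require("graphql")

// Build an executable schema from the SDL passed on the command line.
const schema = buildSchema(process.argv[2])

// Run the standard introspection query and print the result to stdout.
graphql({ schema, source: getIntrospectionQuery() }).then((res) => {
  console.log(JSON.stringify(res))
})
```
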
diff --git a/graphql/e2e/custom_logic/docker-compose.yml b/graphql/e2e/custom_logic/docker-compose.yml
index 57aad77b2b6..7ad420456fd 100644
--- a/graphql/e2e/custom_logic/docker-compose.yml
+++ b/graphql/e2e/custom_logic/docker-compose.yml
@@ -8,37 +8,39 @@ services:
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft="idx=1;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --logtostderr -v=2 --raft="idx=1;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
zero1:
image: dgraph/dgraph:local
working_dir: /data/zero1
labels:
cluster: test
ports:
- - 5080
- - 6080
+ - 5080
+ - 6080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft="idx=1;" --my=zero1:5080 --replicas=1 --logtostderr
- -v=2 --bindall
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --raft="idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
mock:
build:
context: ./cmd
labels:
cluster: test
ports:
- - 8888
+ - 8888
volumes: {}
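
Each single-line `command:` above became a multi-line plain scalar. YAML folds the continuation
lines with single spaces, so the container entrypoint still receives one flat command string and
none of the dgraph flags change. A sketch of the folding (assuming js-yaml; the COVERAGE_OUTPUT
placeholder is omitted for brevity):

```js
// Sketch only: a folded plain scalar joins its lines with single spaces,
// so the wrapped command equals the old one-line command.
const yaml = require("js-yaml")

const doc = `
command:
  /gobin/dgraph zero --telemetry "reports=false; sentry=false;"
  --raft="idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
`
console.log(yaml.load(doc).command)
// -> one line: /gobin/dgraph zero --telemetry "reports=false; sentry=false;" --raft="idx=1;" ...
```
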
diff --git a/graphql/e2e/directives/docker-compose.yml b/graphql/e2e/directives/docker-compose.yml
index 308c493a2bf..fb56b7f9550 100644
--- a/graphql/e2e/directives/docker-compose.yml
+++ b/graphql/e2e/directives/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10 --my=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ --my=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -30,11 +33,11 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --zero=zero1:5080 --expose_trace
- --profile_mode block --block_rate 10 --logtostderr -v=2 --my=alpha1:7080
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --graphql "lambda-url=http://lambda:8686/graphql-worker;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
+ --my=alpha1:7080 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --graphql
+ "lambda-url=http://lambda:8686/graphql-worker;" --trace "ratio=1.0;"
lambda:
image: dgraph/dgraph-lambda:latest
diff --git a/graphql/e2e/directives/script.js b/graphql/e2e/directives/script.js
index fd9116645d8..3dc27ef0d9e 100644
--- a/graphql/e2e/directives/script.js
+++ b/graphql/e2e/directives/script.js
@@ -1,71 +1,80 @@
-const authorBio = ({parent: {name, dob}}) => `My name is ${name} and I was born on ${dob}.`
-const characterBio = ({parent: {name}}) => `My name is ${name}.`
-const humanBio = ({parent: {name, totalCredits}}) => `My name is ${name}. I have ${totalCredits} credits.`
-const droidBio = ({parent: {name, primaryFunction}}) => `My name is ${name}. My primary function is ${primaryFunction}.`
+const authorBio = ({ parent: { name, dob } }) => `My name is ${name} and I was born on ${dob}.`
+const characterBio = ({ parent: { name } }) => `My name is ${name}.`
+const humanBio = ({ parent: { name, totalCredits } }) =>
+ `My name is ${name}. I have ${totalCredits} credits.`
+const droidBio = ({ parent: { name, primaryFunction } }) =>
+ `My name is ${name}. My primary function is ${primaryFunction}.`
const summary = () => `hi`
-const astronautBio = ({parent: {name, age, isActive}}) => `Name - ${name}, Age - ${age}, isActive - ${isActive}`
+const astronautBio = ({ parent: { name, age, isActive } }) =>
+ `Name - ${name}, Age - ${age}, isActive - ${isActive}`
-async function authorsByName({args, dql}) {
- const results = await dql.query(`query queryAuthor($name: string) {
+async function authorsByName({ args, dql }) {
+ const results = await dql.query(
+ `query queryAuthor($name: string) {
queryAuthor(func: type(test.dgraph.author)) @filter(eq(test.dgraph.author.name, $name)) {
name: test.dgraph.author.name
dob: test.dgraph.author.dob
reputation: test.dgraph.author.reputation
}
- }`, {"$name": args.name})
- return results.data.queryAuthor
+ }`,
+ { $name: args.name },
+ )
+ return results.data.queryAuthor
}
-async function newAuthor({args, graphql}) {
- // lets give every new author a reputation of 3 by default
- const results = await graphql(`mutation ($name: String!) {
- addAuthor(input: [{name: $name, reputation: 3.0 }]) {
- author {
- id
- reputation
- }
+async function newAuthor({ args, graphql }) {
+  // let's give every new author a reputation of 3 by default
+ const results = await graphql(
+ `
+ mutation ($name: String!) {
+ addAuthor(input: [{ name: $name, reputation: 3.0 }]) {
+ author {
+ id
+ reputation
+ }
}
- }`, {"name": args.name})
- return results.data.addAuthor.author[0].id
+ }
+ `,
+ { name: args.name },
+ )
+ return results.data.addAuthor.author[0].id
}
self.addGraphQLResolvers({
- "Author.bio": authorBio,
- "Character.bio": characterBio,
- "Human.bio": humanBio,
- "Droid.bio": droidBio,
- "Book.summary": summary,
- "Astronaut.bio": astronautBio,
- "Query.authorsByName": authorsByName,
- "Mutation.newAuthor": newAuthor
+ "Author.bio": authorBio,
+ "Character.bio": characterBio,
+ "Human.bio": humanBio,
+ "Droid.bio": droidBio,
+ "Book.summary": summary,
+ "Astronaut.bio": astronautBio,
+ "Query.authorsByName": authorsByName,
+ "Mutation.newAuthor": newAuthor,
})
-async function rank({parents}) {
- const idRepList = parents.map(function (parent) {
- return {id: parent.id, rep: parent.reputation}
- });
- const idRepMap = {};
- idRepList.sort((a, b) => a.rep > b.rep ? -1 : 1)
- .forEach((a, i) => idRepMap[a.id] = i + 1)
- return parents.map(p => idRepMap[p.id])
+async function rank({ parents }) {
+ const idRepList = parents.map(function (parent) {
+ return { id: parent.id, rep: parent.reputation }
+ })
+ const idRepMap = {}
+ idRepList.sort((a, b) => (a.rep > b.rep ? -1 : 1)).forEach((a, i) => (idRepMap[a.id] = i + 1))
+ return parents.map((p) => idRepMap[p.id])
}
self.addMultiParentGraphQLResolvers({
- "Author.rank": rank
+ "Author.rank": rank,
})
async function districtWebhook({ dql, graphql, authHeader, event }) {
- // forward the event to the changelog server running on the host machine
- await fetch(`http://172.17.0.1:8888/changelog`, {
- method: "POST",
- body: JSON.stringify(event)
- })
- // just return, nothing else to do with response
+ // forward the event to the changelog server running on the host machine
+ await fetch(`http://172.17.0.1:8888/changelog`, {
+ method: "POST",
+ body: JSON.stringify(event),
+ })
+ // just return, nothing else to do with response
}
self.addWebHookResolvers({
- "District.add": districtWebhook,
- "District.update": districtWebhook,
- "District.delete": districtWebhook,
+ "District.add": districtWebhook,
+ "District.update": districtWebhook,
+ "District.delete": districtWebhook,
})
-
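
The `rank` resolver reformatted above is a multi-parent resolver: it receives all parent Author
nodes in one batch and returns one rank per parent, ordered by descending reputation. A
self-contained check of that logic (the sample reputations are invented):

```js
// Standalone run of the rank logic from script.js; sample data is made up.
async function rank({ parents }) {
  const idRepList = parents.map(function (parent) {
    return { id: parent.id, rep: parent.reputation }
  })
  const idRepMap = {}
  idRepList.sort((a, b) => (a.rep > b.rep ? -1 : 1)).forEach((a, i) => (idRepMap[a.id] = i + 1))
  return parents.map((p) => idRepMap[p.id])
}

rank({
  parents: [
    { id: "0x1", reputation: 3.0 },
    { id: "0x2", reputation: 9.5 },
    { id: "0x3", reputation: 7.2 },
  ],
}).then(console.log) // [ 3, 1, 2 ]
```
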
diff --git a/graphql/e2e/multi_tenancy/docker-compose.yml b/graphql/e2e/multi_tenancy/docker-compose.yml
index 9d0bcfe6d10..9233752f196 100644
--- a/graphql/e2e/multi_tenancy/docker-compose.yml
+++ b/graphql/e2e/multi_tenancy/docker-compose.yml
@@ -8,80 +8,84 @@ services:
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- - type: bind
- source: ../../../ee/acl/hmac-secret
- target: /dgraph-acl/hmac-secret
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=1;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ - type: bind
+ source: ../../../ee/acl/hmac-secret
+ target: /dgraph-acl/hmac-secret
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=1;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
alpha2:
image: dgraph/dgraph:local
working_dir: /data/alpha2
depends_on:
- - alpha1
+ - alpha1
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- - type: bind
- source: ../../../ee/acl/hmac-secret
- target: /dgraph-acl/hmac-secret
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha2:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=2;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ - type: bind
+ source: ../../../ee/acl/hmac-secret
+ target: /dgraph-acl/hmac-secret
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha2:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=2;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
alpha3:
image: dgraph/dgraph:local
working_dir: /data/alpha3
depends_on:
- - alpha2
+ - alpha2
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- - type: bind
- source: ../../../ee/acl/hmac-secret
- target: /dgraph-acl/hmac-secret
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha3:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=3;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --acl "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ - type: bind
+ source: ../../../ee/acl/hmac-secret
+ target: /dgraph-acl/hmac-secret
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha3:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=3;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --acl
+ "secret-file=/dgraph-acl/hmac-secret; access-ttl=3000s;"
zero1:
image: dgraph/dgraph:local
working_dir: /data/zero1
labels:
cluster: test
ports:
- - 5080
- - 6080
+ - 5080
+ - 6080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr
- -v=2 --bindall
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft
+ "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
volumes: {}
diff --git a/graphql/e2e/normal/docker-compose.yml b/graphql/e2e/normal/docker-compose.yml
index 308c493a2bf..fb56b7f9550 100644
--- a/graphql/e2e/normal/docker-compose.yml
+++ b/graphql/e2e/normal/docker-compose.yml
@@ -14,7 +14,10 @@ services:
source: $GOPATH/bin
target: /gobin
read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10 --my=zero1:5080
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --logtostderr -v=2 --bindall --expose_trace --profile_mode block --block_rate 10
+ --my=zero1:5080
alpha1:
image: dgraph/dgraph:local
@@ -30,11 +33,11 @@ services:
labels:
cluster: test
service: alpha1
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --zero=zero1:5080 --expose_trace
- --profile_mode block --block_rate 10 --logtostderr -v=2 --my=alpha1:7080
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
- --graphql "lambda-url=http://lambda:8686/graphql-worker;"
- --trace "ratio=1.0;"
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --zero=zero1:5080 --expose_trace --profile_mode block --block_rate 10 --logtostderr -v=2
+ --my=alpha1:7080 --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;" --graphql
+ "lambda-url=http://lambda:8686/graphql-worker;" --trace "ratio=1.0;"
lambda:
image: dgraph/dgraph-lambda:latest
diff --git a/graphql/e2e/normal/script.js b/graphql/e2e/normal/script.js
index 1077c1b2a36..761b69b3116 100644
--- a/graphql/e2e/normal/script.js
+++ b/graphql/e2e/normal/script.js
@@ -1,57 +1,67 @@
-const authorBio = ({parent: {name, dob}}) => `My name is ${name} and I was born on ${dob}.`
-const characterBio = ({parent: {name}}) => `My name is ${name}.`
-const humanBio = ({parent: {name, totalCredits}}) => `My name is ${name}. I have ${totalCredits} credits.`
-const droidBio = ({parent: {name, primaryFunction}}) => `My name is ${name}. My primary function is ${primaryFunction}.`
+const authorBio = ({ parent: { name, dob } }) => `My name is ${name} and I was born on ${dob}.`
+const characterBio = ({ parent: { name } }) => `My name is ${name}.`
+const humanBio = ({ parent: { name, totalCredits } }) =>
+ `My name is ${name}. I have ${totalCredits} credits.`
+const droidBio = ({ parent: { name, primaryFunction } }) =>
+ `My name is ${name}. My primary function is ${primaryFunction}.`
const summary = () => `hi`
-const astronautBio = ({parent: {name, age, isActive}}) => `Name - ${name}, Age - ${age}, isActive - ${isActive}`
+const astronautBio = ({ parent: { name, age, isActive } }) =>
+ `Name - ${name}, Age - ${age}, isActive - ${isActive}`
-async function authorsByName({args, dql}) {
- const results = await dql.query(`query queryAuthor($name: string) {
+async function authorsByName({ args, dql }) {
+ const results = await dql.query(
+ `query queryAuthor($name: string) {
queryAuthor(func: type(Author)) @filter(eq(Author.name, $name)) {
name: Author.name
dob: Author.dob
reputation: Author.reputation
}
- }`, {"$name": args.name})
- return results.data.queryAuthor
+ }`,
+ { $name: args.name },
+ )
+ return results.data.queryAuthor
}
-async function newAuthor({args, graphql}) {
- // lets give every new author a reputation of 3 by default
- const results = await graphql(`mutation ($name: String!) {
- addAuthor(input: [{name: $name, reputation: 3.0 }]) {
- author {
- id
- reputation
- }
+async function newAuthor({ args, graphql }) {
+  // let's give every new author a reputation of 3 by default
+ const results = await graphql(
+ `
+ mutation ($name: String!) {
+ addAuthor(input: [{ name: $name, reputation: 3.0 }]) {
+ author {
+ id
+ reputation
+ }
}
- }`, {"name": args.name})
- return results.data.addAuthor.author[0].id
+ }
+ `,
+ { name: args.name },
+ )
+ return results.data.addAuthor.author[0].id
}
self.addGraphQLResolvers({
- "Author.bio": authorBio,
- "Character.bio": characterBio,
- "Human.bio": humanBio,
- "Droid.bio": droidBio,
- "Book.summary": summary,
- "Astronaut.bio": astronautBio,
- "Query.authorsByName": authorsByName,
- "Mutation.newAuthor": newAuthor
+ "Author.bio": authorBio,
+ "Character.bio": characterBio,
+ "Human.bio": humanBio,
+ "Droid.bio": droidBio,
+ "Book.summary": summary,
+ "Astronaut.bio": astronautBio,
+ "Query.authorsByName": authorsByName,
+ "Mutation.newAuthor": newAuthor,
})
-async function rank({parents}) {
- const idRepList = parents.map(function (parent) {
- return {id: parent.id, rep: parent.reputation}
- });
- const idRepMap = {};
- idRepList.sort((a, b) => a.rep > b.rep ? -1 : 1)
- .forEach((a, i) => idRepMap[a.id] = i + 1)
- return parents.map(p => idRepMap[p.id])
+async function rank({ parents }) {
+ const idRepList = parents.map(function (parent) {
+ return { id: parent.id, rep: parent.reputation }
+ })
+ const idRepMap = {}
+ idRepList.sort((a, b) => (a.rep > b.rep ? -1 : 1)).forEach((a, i) => (idRepMap[a.id] = i + 1))
+ return parents.map((p) => idRepMap[p.id])
}
self.addMultiParentGraphQLResolvers({
- "Author.rank": rank
+ "Author.rank": rank,
})
// TODO(GRAPHQL-1123): need to find a way to make it work on TeamCity machines.
@@ -59,17 +69,16 @@ self.addMultiParentGraphQLResolvers({
// work in teamcity machines, neither does `host.docker.internal` works there. So, we are
// skipping the related test for now.
async function districtWebhook({ dql, graphql, authHeader, event }) {
- // forward the event to the changelog server running on the host machine
- await fetch(`http://172.17.0.1:8888/changelog`, {
- method: "POST",
- body: JSON.stringify(event)
- })
- // just return, nothing else to do with response
+ // forward the event to the changelog server running on the host machine
+ await fetch(`http://172.17.0.1:8888/changelog`, {
+ method: "POST",
+ body: JSON.stringify(event),
+ })
+ // just return, nothing else to do with response
}
self.addWebHookResolvers({
- "District.add": districtWebhook,
- "District.update": districtWebhook,
- "District.delete": districtWebhook,
+ "District.add": districtWebhook,
+ "District.update": districtWebhook,
+ "District.delete": districtWebhook,
})
-
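
districtWebhook in both scripts POSTs each District add/update/delete event to a changelog endpoint
on the Docker bridge address (http://172.17.0.1:8888/changelog). A minimal sketch of such a
receiver using Node's built-in http module (the server the e2e tests actually run may differ):

```js
// Hypothetical changelog receiver for the District webhook events.
const http = require("http")

http
  .createServer((req, res) => {
    let body = ""
    req.on("data", (chunk) => (body += chunk))
    req.on("end", () => {
      if (req.method === "POST" && req.url === "/changelog") {
        console.log("event:", body) // JSON-serialized lambda event payload
      }
      res.end()
    })
  })
  .listen(8888)
```
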
diff --git a/graphql/e2e/schema/docker-compose.yml b/graphql/e2e/schema/docker-compose.yml
index 0349ecaa102..7eb4f0a2716 100644
--- a/graphql/e2e/schema/docker-compose.yml
+++ b/graphql/e2e/schema/docker-compose.yml
@@ -12,16 +12,17 @@ services:
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=1;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=1;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
alpha2:
image: dgraph/dgraph:local
working_dir: /data/alpha2
@@ -30,20 +31,21 @@ services:
soft: 1024
hard: 1024
depends_on:
- - alpha1
+ - alpha1
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha2:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=2;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha2:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=2;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
alpha3:
image: dgraph/dgraph:local
working_dir: /data/alpha3
@@ -52,33 +54,35 @@ services:
soft: 1024
hard: 1024
depends_on:
- - alpha2
+ - alpha2
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha3:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft "idx=3;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha3:7080 --zero=zero1:5080 --logtostderr -v=2 --raft "idx=3;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
zero1:
image: dgraph/dgraph:local
working_dir: /data/zero1
labels:
cluster: test
ports:
- - 5080
- - 6080
+ - 5080
+ - 6080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr
- -v=2 --bindall
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft
+ "idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
volumes: {}
diff --git a/graphql/e2e/subscription/docker-compose.yml b/graphql/e2e/subscription/docker-compose.yml
index 6fed036ff0d..d374d6cd958 100644
--- a/graphql/e2e/subscription/docker-compose.yml
+++ b/graphql/e2e/subscription/docker-compose.yml
@@ -8,65 +8,69 @@ services:
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha1:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft="idx=1;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha1:7080 --zero=zero1:5080 --logtostderr -v=2 --raft="idx=1;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
alpha2:
image: dgraph/dgraph:local
working_dir: /data/alpha2
depends_on:
- - alpha1
+ - alpha1
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha2:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft="idx=2;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha2:7080 --zero=zero1:5080 --logtostderr -v=2 --raft="idx=2;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
alpha3:
image: dgraph/dgraph:local
working_dir: /data/alpha3
depends_on:
- - alpha2
+ - alpha2
labels:
cluster: test
ports:
- - 8080
- - 9080
+ - 8080
+ - 9080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;" --my=alpha3:7080 --zero=zero1:5080
- --logtostderr -v=2 --raft="idx=3;"
- --security "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} alpha --telemetry "reports=false; sentry=false;"
+ --my=alpha3:7080 --zero=zero1:5080 --logtostderr -v=2 --raft="idx=3;" --security
+ "whitelist=10.0.0.0/8,172.16.0.0/12,192.168.0.0/16;"
zero1:
image: dgraph/dgraph:local
working_dir: /data/zero1
labels:
cluster: test
ports:
- - 580
- - 6080
+      - 5080
+ - 6080
volumes:
- - type: bind
- source: $GOPATH/bin
- target: /gobin
- read_only: true
- command: /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;" --raft="idx=1;" --my=zero1:5080 --replicas=1 --logtostderr
- -v=2 --bindall
+ - type: bind
+ source: $GOPATH/bin
+ target: /gobin
+ read_only: true
+ command:
+ /gobin/dgraph ${COVERAGE_OUTPUT} zero --telemetry "reports=false; sentry=false;"
+ --raft="idx=1;" --my=zero1:5080 --replicas=1 --logtostderr -v=2 --bindall
volumes: {}
diff --git a/graphql/resolve/add_mutation_test.yaml b/graphql/resolve/add_mutation_test.yaml
index 7c5202db9a6..1f91c6b4844 100644
--- a/graphql/resolve/add_mutation_test.yaml
+++ b/graphql/resolve/add_mutation_test.yaml
@@ -1,5 +1,4 @@
--
- name: "Add mutation geo field - Point type"
+- name: Add mutation geo field - Point type
gqlmutation: |
mutation addHotel($hotel: AddHotelInput!) {
addHotel(input: [$hotel]) {
@@ -19,7 +18,8 @@
}
}
qnametouid: |
- explanation: "Add mutation should convert the Point type mutation to corresponding Dgraph JSON mutation"
+ explanation:
+ Add mutation should convert the Point type mutation to corresponding Dgraph JSON mutation
dgmutations:
- setjson: |
{ "uid":"_:Hotel_1",
@@ -31,8 +31,7 @@
}
}
--
- name: "Add mutation geo field - Polygon type"
+- name: Add mutation geo field - Polygon type
gqlmutation: |
mutation addHotel($hotel: AddHotelInput!) {
addHotel(input: [$hotel]) {
@@ -79,7 +78,8 @@
}
}
}
- explanation: "Add mutation should convert the Polygon type mutation to corresponding Dgraph JSON mutation"
+ explanation:
+ Add mutation should convert the Polygon type mutation to corresponding Dgraph JSON mutation
dgmutations:
- setjson: |
{
@@ -92,8 +92,7 @@
}
}
--
- name: "Add mutation geo field - MultiPolygon type"
+- name: Add mutation geo field - MultiPolygon type
gqlmutation: |
mutation addHotel($hotel: AddHotelInput!) {
addHotel(input: [$hotel]) {
@@ -168,7 +167,8 @@
}
}
}
- explanation: "Add mutation should convert the MultiPolygon type mutation to corresponding Dgraph JSON mutation"
+ explanation:
+ Add mutation should convert the MultiPolygon type mutation to corresponding Dgraph JSON mutation
dgmutations:
- setjson: |
{
@@ -181,8 +181,7 @@
}
}
--
- name: "Add mutation with variables"
+- name: Add mutation with variables
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -198,8 +197,8 @@
"posts": []
}
}
- explanation: "A uid and type should get injected and all data transformed to
- underlying Dgraph edge names"
+ explanation:
+ A uid and type should get injected and all data transformed to underlying Dgraph edge names
dgmutations:
- setjson: |
{ "uid":"_:Author_1",
@@ -209,8 +208,7 @@
"Author.posts":[]
}
--
- name: "Add deep mutation with variables"
+- name: Add deep mutation with variables
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -249,8 +247,9 @@
}]
}
}
- explanation: "A uid and type should get injected and all data transformed to
- underlying Dgraph edge names. Some PostSecrets are present and are not created."
+ explanation:
+ A uid and type should get injected and all data transformed to underlying Dgraph edge names.
+ Some PostSecrets are present and are not created
dgquery: |-
query {
PostSecret_1(func: eq(PostSecret.title, "ps1")) {
@@ -416,8 +415,7 @@
"uid":"_:Author_9"
}
--
- name: "Add mutation for predicates with special characters having @dgraph directive."
+- name: Add mutation for predicates with special characters having @dgraph directive
gqlmutation: |
mutation {
addMessage(input : [{content : "content1", author: "author1"}]) {
@@ -436,8 +434,7 @@
"post":"content1"
}
--
- name: "Add multiple mutation with variables"
+- name: Add multiple mutation with variables
gqlmutation: |
mutation addAuthor($auth: [AddAuthorInput!]!) {
addAuthor(input: $auth) {
@@ -456,8 +453,8 @@
}
]
}
- explanation: "A uid and type should get injected and all data transformed to
- underlying Dgraph edge names"
+ explanation:
+ A uid and type should get injected and all data transformed to underlying Dgraph edge names
dgmutations:
- setjson: |
{ "uid":"_:Author_1",
@@ -470,8 +467,7 @@
"Author.name":"Different Author"
}
--
- name: "Add Mutation with object at root instead of an array"
+- name: Add Mutation with object at root instead of an array
gqlmutation: |
mutation addAuthor {
addAuthor(input: { name: "A.N. Author"}) {
@@ -480,8 +476,8 @@
}
}
}
- explanation: "The input being an object should also work because of the input coercion rules
- for input objects."
+ explanation:
+ The input being an object should also work because of the input coercion rules for input objects
dgmutations:
- setjson: |
{ "uid":"_:Author_1",
@@ -489,9 +485,7 @@
"Author.name":"A.N. Author"
}
-
--
- name: "Add Mutation with embedded value"
+- name: Add Mutation with embedded value
gqlmutation: |
mutation addAuthor {
addAuthor(input: [{ name: "A.N. Author", posts: []}]) {
@@ -500,8 +494,9 @@
}
}
}
- explanation: "The input should be used for the mutation, with a uid and type getting
- injected and all data transformed to underlying Dgraph edge names"
+ explanation:
+ The input should be used for the mutation, with a uid and type getting injected and all data
+ transformed to underlying Dgraph edge names
dgmutations:
- setjson: |
{ "uid":"_:Author_1",
@@ -510,8 +505,7 @@
"Author.posts":[]
}
--
- name: "Add Mutation with Password field"
+- name: Add Mutation with Password field
gqlmutation: |
mutation addUser($name: String!, $pwd: String!) {
addUser(input: [{ name: $name, pwd: $pwd}]) {
@@ -522,8 +516,9 @@
}
gqlvariables: |
{ "name": "A.N. Author", "pwd": "Password" }
- explanation: "The input and variables should be used for the mutation, with a uid and type
- getting injected and all data transformed to underlying Dgraph edge names"
+ explanation:
+ The input and variables should be used for the mutation, with a uid and type getting injected
+ and all data transformed to underlying Dgraph edge names
dgquery: |-
query {
User_1(func: eq(User.name, "A.N. Author")) {
@@ -540,8 +535,7 @@
"User.pwd":"Password"
}
--
- name: "Add Multiple Mutations with embedded value"
+- name: Add Multiple Mutations with embedded value
gqlmutation: |
mutation addAuthor {
addAuthor(input: [{ name: "A.N. Author", posts: []},
@@ -551,8 +545,9 @@
}
}
}
- explanation: "The input should be used for the mutation, with a uid and type getting
- injected and all data transformed to underlying Dgraph edge names"
+ explanation:
+ The input should be used for the mutation, with a uid and type getting injected and all data
+ transformed to underlying Dgraph edge names
dgmutations:
- setjson: |
{
@@ -569,8 +564,7 @@
"Author.posts":[]
}
--
- name: "Add mutation with reference"
+- name: Add mutation with reference
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -586,8 +580,7 @@
"posts": []
}
}
- explanation: "The reference to country should get transformed to 'uid' for the
- Dgraph JSON mutation"
+ explanation: The reference to country should get transformed to 'uid' for the Dgraph JSON mutation
dgquery: |-
query {
Country_1(func: uid(0x123)) {
@@ -612,8 +605,7 @@
"Author.posts":[]
}
--
- name: "Add mutation with missing reference"
+- name: Add mutation with missing reference
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -629,7 +621,7 @@
"posts": []
}
}
- explanation: "This should throw an error as 0x123 is not a valid Country node"
+ explanation: This should throw an error as 0x123 is not a valid Country node
dgquery: |-
query {
Country_1(func: uid(0x123)) {
@@ -637,13 +629,9 @@
dgraph.type
}
}
- error2:
- {
- "message": "failed to rewrite mutation payload because ID \"0x123\" isn't a Country"
- }
+  error2: { "message": "failed to rewrite mutation payload because ID \"0x123\" isn't a Country" }
--
- name: "Add mutation with invalid reference"
+- name: Add mutation with invalid reference
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -659,13 +647,14 @@
"posts": []
}
}
- explanation: "A reference must be a valid UID"
+ explanation: A reference must be a valid UID
error:
- { "message":
- "failed to rewrite mutation payload because ID argument (HI!) was not able to be parsed" }
+ {
+ "message":
+        "failed to rewrite mutation payload because ID argument (HI!) was not able to be parsed",
+ }
--
- name: "Add mutation with inverse reference"
+- name: Add mutation with inverse reference
gqlmutation: |
mutation addPost($post: AddPostInput!) {
addPost(input: [$post]) {
@@ -681,8 +670,7 @@
"author": { "id": "0x2" }
}
}
- explanation: "The reference to the author node should be transformed to include
- a new 'posts' edge."
+ explanation: The reference to the author node should be transformed to include a new 'posts' edge
dgquery: |-
query {
Author_1(func: uid(0x2)) {
@@ -706,8 +694,7 @@
}
}
--
- name: "Add mutation for a type that implements an interface"
+- name: Add mutation for a type that implements an interface
gqlmutation: |
mutation addHuman($human: AddHumanInput!) {
addHuman(input: [$human]) {
@@ -726,7 +713,7 @@
"ename": "employee no. 1"
}
}
- explanation: "The mutation should get rewritten with correct edges from the interface."
+ explanation: The mutation should get rewritten with correct edges from the interface
dgmutations:
- setjson: |
{ "uid" : "_:Human_1",
@@ -737,8 +724,7 @@
"dgraph.type": ["Human", "Character", "Employee"]
}
--
- name: "Add mutation using xid code 1"
+- name: Add mutation using xid code 1
gqlmutation: |
mutation addState($input: AddStateInput!) {
addState(input: [$input]) {
@@ -755,7 +741,7 @@
"country": { "id": "0x12" }
}
}
- explanation: "The add mutation should get rewritten into a Dgraph upsert mutation"
+ explanation: The add mutation should get rewritten into a Dgraph upsert mutation
dgquery: |-
query {
State_1(func: eq(State.code, "nsw")) {
@@ -783,9 +769,8 @@
}
}
--
- name: "Add mutation using xid code 2"
- explanation: "Error thrown as node with code nsw exists."
+- name: Add mutation using xid code 2
+ explanation: Error thrown as node with code nsw exists
gqlmutation: |
mutation addState($input: AddStateInput!) {
addState(input: [$input], upsert: false) {
@@ -820,12 +805,13 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id nsw already exists for field code inside type State"
+ "message":
+        "failed to rewrite mutation payload because id nsw already exists for field code inside
+        type State",
}
--
- name: "Multiple Upsert Mutation 1"
- explanation: "As both states exist, the countries of the states are updated"
+- name: Multiple Upsert Mutation 1
+ explanation: As both states exist, the countries of the states are updated
gqlmutation: |
mutation addState($input: [AddStateInput!]!) {
addState(input: $input, upsert: true) {
@@ -936,8 +922,7 @@
]
cond: "@if(gt(len(State_3), 0))"
--
- name: "Upsert Mutation with multiple xids where both existence queries result exist"
+- name: Upsert Mutation with multiple xids where both existence query results exist
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -988,10 +973,10 @@
}
cond: "@if(gt(len(Book_2), 0))"
--
- name: "Upsert Mutation with multiple xids where only one of existence queries result exist"
- explanation: "Book1 does not exist but Book2 exists. As Book2 exists, this is an upsert.
- Even though, Book1 does not exist, the mutation should not update ISBN as it is also an XID."
+- name: Upsert Mutation with multiple xids where only one of the existence query results exists
+ explanation:
+    Book1 does not exist but Book2 exists. As Book2 exists, this is an upsert. Even though Book1
+    does not exist, the mutation should not update ISBN as it is also an XID.
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -1042,10 +1027,9 @@
}
cond: "@if(gt(len(Book_2), 0))"
--
- name: "Multiple Upsert Mutation 2"
- explanation: "The first state exists and is updated. Second is created. Country
- is also created in second"
+- name: Multiple Upsert Mutation 2
+ explanation:
+    The first state exists and is updated. The second is created. A Country is also created in the
+    second
gqlmutation: |
mutation addState($input: [AddStateInput!]!) {
addState(input: $input, upsert: true) {
@@ -1134,10 +1118,11 @@
}
}
--
- name: "Add mutation on implementation type which have inherited @id field with interface argument -1"
- explanation: "This mutation will generate three existence queries two for xid - refID (one for interface and one
- for implementing type) and one for xid - name"
+- name:
+    Add mutation on implementation type which has an inherited @id field with interface argument -1
+ explanation:
+    This mutation will generate three existence queries: two for the xid refID (one for the
+    interface and one for the implementing type) and one for the xid name
gqlmutation: |
mutation addLibraryMember($input: AddLibraryMemberInput!) {
addLibraryMember(input: [$input], upsert: false) {
@@ -1190,10 +1175,11 @@
"uid": "_:LibraryMember_2"
}
--
- name: "Add mutation on implementation type which have inherited @id field with interface argument -2"
- explanation: "Node with refID:101 already exist in other implementing type of interface, mutation not allowed
- in this case and we will return error"
+- name:
+    Add mutation on implementation type which has an inherited @id field with interface argument -2
+ explanation:
+    A node with refID:101 already exists in another implementing type of the interface; the
+    mutation is not allowed in this case and we will return an error
gqlmutation: |
mutation addLibraryMember($input: AddLibraryMemberInput!) {
addLibraryMember(input: [$input], upsert: false) {
@@ -1235,12 +1221,13 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 101 already exists for field refID
- in some other implementing type of interface Member"
+ "message":
+        "failed to rewrite mutation payload because id 101 already exists for field refID in some
+        other implementing type of interface Member",
}
--
- name: "Add mutation on implementation type which have inherited @id field with interface argument -3"
+- name:
+    Add mutation on implementation type which has an inherited @id field with interface argument -3
explanation: "Node with refID:101 already exist in same mutated type, returns error "
gqlmutation: |
mutation addLibraryMember($input: AddLibraryMemberInput!) {
@@ -1283,13 +1270,16 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 101 already exists for field
- refID inside type LibraryMember"
+ "message":
+        "failed to rewrite mutation payload because id 101 already exists for field refID inside
+        type LibraryMember",
}
--
- name: "Add upsert mutation on implementation type which have inherited @id field with interface argument -1"
- explanation: "node with @id field doesn't exist in any of the implementing type, we will add the node"
+- name:
+    Add upsert mutation on implementation type which has an inherited @id field with interface
+    argument -1
+  explanation:
+    node with @id field doesn't exist in any of the implementing types, so we add the node
gqlmutation: |
mutation addLibraryMember($input: AddLibraryMemberInput!) {
addLibraryMember(input: [$input], upsert: true) {
@@ -1348,9 +1338,10 @@
"uid": "uid(LibraryMember_1)"
}
cond: "@if(gt(len(LibraryMember_1), 0))"
--
- name: "Add upsert mutation on implementation type which have inherited @id field with interface argument -2"
- explanation: "node with @id field already exist in one of the implementing type, returns error"
+- name:
+    Add upsert mutation on implementation type which has an inherited @id field with interface
+    argument -2
+  explanation: node with @id field already exists in one of the implementing types, returns error
gqlmutation: |
mutation addLibraryMember($input: AddLibraryMemberInput!) {
addLibraryMember(input: [$input], upsert: true) {
@@ -1392,14 +1383,15 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 101 already exists for
- field refID in some other implementing type of interface Member"
+ "message":
+        "failed to rewrite mutation payload because id 101 already exists for field refID in some
+        other implementing type of interface Member",
}
--
- name: "Add mutation with nested object which have inherited @id field with interface argument -1"
- explanation: "There is no node with refID 101 of interface type or it's implementation type,hence will wii add
- nested object and link that to parent object"
+- name: Add mutation with nested object which has an inherited @id field with interface argument -1
+  explanation:
+    There is no node with refID 101 of the interface type or its implementing types, hence we will
+    add the nested object and link it to the parent object
gqlmutation: |
mutation addLibraryManager($input: AddLibraryManagerInput!) {
addLibraryManager(input: [$input], upsert: false) {
@@ -1470,9 +1462,10 @@
"uid": "_:LibraryManager_1"
}
--
- name: "Add mutation with nested object which have inherited @id field with interface argument -2"
- explanation: "node with refID 101 already exist in one of the implementing type other than mutated type,returns error"
+- name: Add mutation with nested object which has an inherited @id field with interface argument -2
+  explanation:
+    node with refID 101 already exists in an implementing type other than the mutated type, returns
+    error
gqlmutation: |
mutation addLibraryManager($input: AddLibraryManagerInput!) {
addLibraryManager(input: [$input], upsert: false) {
@@ -1523,13 +1516,13 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 101 already exists for field
- refID in some other implementing type of interface Member"
+ "message":
+        "failed to rewrite mutation payload because id 101 already exists for field refID in some
+        other implementing type of interface Member",
}
--
- name: "Add mutation with nested object which have inherited @id field with interface argument -3"
- explanation: "node with refID 101 already exist for mutated type,link child node to parent"
+- name: Add mutation with nested object which has an inherited @id field with interface argument -3
+  explanation: node with refID 101 already exists for the mutated type, link child node to parent
gqlmutation: |
mutation addLibraryManager($input: AddLibraryManagerInput!) {
addLibraryManager(input: [$input], upsert: false) {
@@ -1594,10 +1587,13 @@
"uid": "_:LibraryManager_1"
}
--
- name: "Add mutation on implementation type which have inherited @id fields with interface argument from multiple interfaces"
- explanation: "This mutation will generate six existence queries, 2 existence queries for each of the inherited @id fields
- with interface arg and one for each @id field,none of the existence query return uid,so we successfully add the object in this case"
+- name:
+    Add mutation on implementation type which has inherited @id fields with interface argument from
+    multiple interfaces
+  explanation:
+    This mutation will generate six existence queries: two for each of the inherited @id fields
+    with an interface arg and one for each @id field. None of the existence queries returns a uid,
+    so we successfully add the object in this case
gqlmutation: |
mutation addSportsMember($input: AddSportsMemberInput!) {
addSportsMember(input: [$input], upsert: false) {
@@ -1667,8 +1663,7 @@
"uid": "_:SportsMember_6"
}
--
- name: "Add mutation using code on type which also has an ID field"
+- name: Add mutation using code on type which also has an ID field
gqlmutation: |
mutation addEditor($input: AddEditorInput!) {
addEditor(input: [$input]) {
@@ -1684,7 +1679,7 @@
"name": "A.N. Editor"
}
}
- explanation: "The add mutation should get rewritten into a Dgraph upsert mutation"
+ explanation: The add mutation should get rewritten into a Dgraph upsert mutation
dgquery: |-
query {
Editor_1(func: eq(Editor.code, "editor")) {
@@ -1701,8 +1696,7 @@
"Editor.code": "editor"
}
--
- name: "Deep add mutation"
+- name: Deep add mutation
gqlmutation: |
mutation addAuthor($author: AddAuthorInput!) {
addAuthor(input: [$author]) {
@@ -1742,8 +1736,7 @@
]
}
--
- name: "Deep add multiple mutation"
+- name: Deep add multiple mutation
gqlmutation: |
mutation addAuthor($author: [AddAuthorInput!]!) {
addAuthor(input: $author) {
@@ -1809,8 +1802,7 @@
]
}
--
- name: "Deep add with existing"
+- name: Deep add with existing
gqlmutation: |
mutation addAuthor($author: AddAuthorInput!) {
addAuthor(input: [$author]) {
@@ -1886,8 +1878,7 @@
}
]
--
- name: "Deep add multiple with existing"
+- name: Deep add multiple with existing
gqlmutation: |
mutation addAuthor($author: [AddAuthorInput!]!) {
addAuthor(input: $author) {
@@ -2023,8 +2014,7 @@
}
]
--
- name: "Deep add with two existing"
+- name: Deep add with two existing
gqlmutation: |
mutation addAuthor($author: AddAuthorInput!) {
addAuthor(input: [$author]) {
@@ -2107,8 +2097,7 @@
}
]
--
- name: "Deep add with null"
+- name: Deep add with null
gqlmutation: |
mutation addAuthor($author: AddAuthorInput!) {
addAuthor(input: [$author]) {
@@ -2149,8 +2138,7 @@
]
}
--
- name: "Add three deep"
+- name: Add three deep
gqlmutation: |
mutation addAuthor($author: AddAuthorInput!) {
addAuthor(input: [$author]) {
@@ -2201,8 +2189,7 @@
]
}
--
- name: "Add mutation with deep xid choices 1"
+- name: Add mutation with deep xid choices 1
gqlmutation: |
mutation addCountry($input: AddCountryInput!) {
addCountry(input: [$input]) {
@@ -2221,7 +2208,7 @@
} ]
}
}
- explanation: "No nodes exist. Both nodes are created."
+ explanation: No nodes exist. Both nodes are created.
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -2250,8 +2237,7 @@
"uid":"_:Country_2"
}
--
- name: "Add mutation with deep xid choices 2"
+- name: Add mutation with deep xid choices 2
gqlmutation: |
mutation addCountry($input: AddCountryInput!) {
addCountry(input: [$input]) {
@@ -2270,7 +2256,8 @@
} ]
}
}
- explanation: "The state exists. It is linked to the new Country. Its link to old country is deleted."
+ explanation:
+    The state exists. It is linked to the new Country. Its link to the old country is deleted.
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -2318,8 +2305,7 @@
}
]
--
- name: "Add mutation with deep xid that must be reference 1"
+- name: Add mutation with deep xid that must be reference 1
gqlmutation: |
mutation addCountry($input: AddCountryInput!) {
addCountry(input: [$input]) {
@@ -2337,8 +2323,9 @@
} ]
}
}
- explanation: "The add mutation has only one option because the state isn't a valid create
- because it's missing required field name"
+ explanation:
+    The add mutation has only one option because the state isn't a valid create: it's missing the
+    required field name
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -2381,8 +2368,7 @@
}
]
--
- name: "Add mutation with deep xid that must be reference 2"
+- name: Add mutation with deep xid that must be reference 2
gqlmutation: |
mutation addCountry($input: AddCountryInput!) {
addCountry(input: [$input]) {
@@ -2400,7 +2386,7 @@
} ]
}
}
- explanation: "Error is thrown as State with code dg does not exist"
+ explanation: Error is thrown as State with code dg does not exist
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -2410,12 +2396,12 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because type State requires a value for field name, but no value present"
+ "message":
+ "failed to rewrite mutation payload because type State requires a value for field name, but
+ no value present",
}
-
--
- name: "deprecated fields can be mutated"
+- name: deprecated fields can be mutated
gqlmutation: |
mutation addCategory($cat: AddCategoryInput!) {
addCategory(input: [$cat]) {
@@ -2439,8 +2425,7 @@
"Category.iAmDeprecated": "but I can be written to"
}
--
- name: "Add mutation with reverse predicate"
+- name: Add mutation with reverse predicate
gqlmutation: |
mutation addMovieDirector($dir: AddMovieDirectorInput!) {
addMovieDirector(input: [$dir]) {
@@ -2455,7 +2440,7 @@
"directed": [{ "id": "0x2" }]
}
}
- explanation: "Movie node exists and is not created"
+ explanation: Movie node exists and is not created
dgquery: |-
query {
Movie_1(func: uid(0x2)) {
@@ -2477,7 +2462,7 @@
}]
}
-- name: "Top Level Duplicate XIDs with same object Test"
+- name: Top Level Duplicate XIDs with same object Test
gqlmutation: |
mutation addState($input: [AddStateInput!]!) {
addState(input: $input) {
@@ -2494,12 +2479,13 @@
{"name": "State1", "code": "S1"}
]
}
- explanation: "When duplicate XIDs are given as input at top level, but the object structure is
- same, it should return error."
+ explanation:
+ When duplicate XIDs are given as input at the top level, but the object structure is the same,
+ it should return an error.
error:
message: "failed to rewrite mutation payload because duplicate XID found: S1"
-- name: "Top Level Duplicate XIDs with different object Test"
+- name: Top Level Duplicate XIDs with different object Test
gqlmutation: |
mutation addState($input: [AddStateInput!]!) {
addState(input: $input) {
@@ -2516,12 +2502,13 @@
{"name": "State2", "code": "S1"}
]
}
- explanation: "When duplicate XIDs are given as input at top level, but the object structure is
- different, it should still return error."
+ explanation:
+ When duplicate XIDs are given as input at the top level, but the object structure is different,
+ it should still return an error.
error:
message: "failed to rewrite mutation payload because duplicate XID found: S1"
-- name: "Deep Mutation Duplicate XIDs with same object Test"
+- name: Deep Mutation Duplicate XIDs with same object Test
gqlmutation: |
mutation addCity($input: [AddCityInput!]!) {
addCity(input: $input) {
@@ -2551,8 +2538,9 @@
}
]
}
- explanation: "When duplicate XIDs are given as input to deep mutation but the object structure
- is same or contains just xid, it should not return error."
+ explanation:
+ When duplicate XIDs are given as input to a deep mutation but the object structure is the same
+ or contains just the xid, it should not return an error.
dgquery: |-
query {
District_1(func: eq(District.code, "D1")) {
@@ -2613,7 +2601,7 @@
"uid":"_:City_4"
}
-- name: "Deep Mutation Duplicate XIDs with same object with @hasInverse Test"
+- name: Deep Mutation Duplicate XIDs with same object with @hasInverse Test
gqlmutation: |
mutation addCountry($input: [AddCountryInput!]!) {
addCountry(input: $input) {
@@ -2652,15 +2640,16 @@
}
]
}
- explanation: "When duplicate XIDs are given as input to deep mutation and the object structure
- is same and the containing object has @hasInverse on its xid object field, but the xid object
- does not have the @hasInverse field of List type, it should return error."
+ explanation:
+ When duplicate XIDs are given as input to a deep mutation and the object structure is the same
+ and the containing object has @hasInverse on its xid object field, but the xid object does not
+ have the @hasInverse field of List type, it should return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: S1
failed to rewrite mutation payload because duplicate XID found: S2
-- name: "Deep Mutation Duplicate XIDs with different object Test"
+- name: Deep Mutation Duplicate XIDs with different object Test
gqlmutation: |
mutation addStudent($input: [AddStudentInput!]!) {
addStudent(input: $input) {
@@ -2701,14 +2690,15 @@
}
]
}
- explanation: "When duplicate XIDs are given as input to deep mutation but the object structure
- is different, it should return error."
+ explanation:
+ When duplicate XIDs are given as input to a deep mutation but the object structure is different,
+ it should return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: T1
failed to rewrite mutation payload because duplicate XID found: T1
-- name: "Circular Duplicate XIDs in single mutation"
+- name: Circular Duplicate XIDs in single mutation
gqlmutation: |
mutation addStudent($input: [AddStudentInput!]!) {
addStudent(input: $input) {
@@ -2735,13 +2725,13 @@
}
]
}
- explanation: "When duplicate XIDs are given as input circularly in a single mutation, it
- should return error."
+ explanation:
+ When duplicate XIDs are given as input circularly in a single mutation, it should return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: S1
-- name: "Duplicate XIDs in single mutation for Interface"
+- name: Duplicate XIDs in single mutation for Interface
gqlmutation: |
mutation addStudent($input: [AddStudentInput!]!) {
addStudent(input: $input) {
@@ -2772,8 +2762,9 @@
}
]
}
- explanation: "When duplicate XIDs are given as input for an Interface in a single mutation, it
- should return error."
+ explanation:
+ When duplicate XIDs are given as input for an Interface in a single mutation, it should return
+ an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: S1
@@ -2804,7 +2795,7 @@
#
# There's three cases to consider: add by ID, add by XID, deep add
-- name: "Additional Deletes - Add connects to existing node by ID"
+- name: Additional Deletes - Add connects to existing node by ID
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -2864,8 +2855,8 @@
}
]
-- name: "Additional Deletes - Add connects to existing node by XID"
- explanation: "One of the states exists. Country attached to that state is deleted."
+- name: Additional Deletes - Add connects to existing node by XID
+ explanation: One of the states exists. The Country attached to that state is deleted.
gqlmutation: |
mutation addCountry($inp: AddCountryInput!) {
addCountry(input: [$inp]) {
@@ -2943,8 +2934,8 @@
}
]
-- name: "Deep XID 4 level deep 1"
- explanation: "No nodes exist. All nodes are created."
+- name: Deep XID 4 level deep 1
+ explanation: No nodes exist. All nodes are created.
gqlmutation: |
mutation addStudent($student: AddStudentInput!) {
addStudent(input: [$student]) {
@@ -3039,8 +3030,8 @@
"uid":"_:Student_1"
}
-- name: "Deep XID 4 level deep 2"
- explanation: "Teacher T1 also teaches the newly added student at top level, S0."
+- name: Deep XID 4 level deep 2
+ explanation: Teacher T1 also teaches the newly added student at top level, S0.
gqlmutation: |
mutation addStudent($student: AddStudentInput!) {
addStudent(input: [$student]) {
@@ -3147,8 +3138,8 @@
"uid":"_:Student_1"
}
-- name: "Deep XID Add top level hasInverse 1"
- explanation: "No nodes exists. All are created."
+- name: Deep XID Add top level hasInverse 1
+ explanation: No nodes exist. All are created.
gqlmutation: |
mutation addStudent($student: AddStudentInput!) {
addStudent(input: [$student]) {
@@ -3224,8 +3215,8 @@
"uid":"_:Student_1"
}
-- name: "Deep XID Add top level hasInverse 2"
- explanation: "Teacher T0 exists and is linked to Student S0"
+- name: Deep XID Add top level hasInverse 2
+ explanation: Teacher T0 exists and is linked to Student S0
gqlmutation: |
mutation addStudent($student: AddStudentInput!) {
addStudent(input: [$student]) {
@@ -3289,8 +3280,8 @@
"uid":"_:Student_1"
}
-- name: "Deep XID Add top level hasInverse 3"
- explanation: "Student S1 exists and is linked to Teacher T0."
+- name: Deep XID Add top level hasInverse 3
+ explanation: Student S1 exists and is linked to Teacher T0.
gqlmutation: |
mutation addStudent($student: AddStudentInput!) {
addStudent(input: [$student]) {
@@ -3366,9 +3357,8 @@
"uid":"_:Student_1"
}
-
-- name: "Deep XID Add lower level hasInvsere 1"
- explanation: "None of the nodes exists. All of them are created."
+- name: Deep XID Add lower level hasInverse 1
+ explanation: None of the nodes exist. All of them are created.
gqlmutation: |
mutation addLab($lab: AddLabInput!) {
addLab(input: [$lab]) {
@@ -3432,8 +3422,8 @@
"uid":"_:Lab_1"
}
-- name: "Deep XID Add lower level hasInvsere 2"
- explanation: "computer exists. Computer node is linked to Lab."
+- name: Deep XID Add lower level hasInverse 2
+ explanation: Computer exists. The Computer node is linked to Lab.
gqlmutation: |
mutation addLab($lab: AddLabInput!) {
addLab(input: [$lab]) {
@@ -3487,8 +3477,8 @@
"uid":"_:Lab_1"
}
-- name: "Deep XID Add lower level hasInvsere 3"
- explanation: "Computer Owner exists and is linked to computer."
+- name: Deep XID Add lower level hasInverse 3
+ explanation: Computer Owner exists and is linked to computer.
gqlmutation: |
mutation addLab($lab: AddLabInput!) {
addLab(input: [$lab]) {
@@ -3569,7 +3559,7 @@
"uid": "uid(Computer_4)"
}]
-- name: "Deep mutation alternate id xid"
+- name: Deep mutation alternate id xid
gqlmutation: |
mutation addAuthor($city: AddCityInput!) {
addCity(input: [$city]) {
@@ -3637,7 +3627,7 @@
"uid":"_:City_2"
}
-- name: "Deep mutation alternate id xid with existing XID"
+- name: Deep mutation alternate id xid with existing XID
gqlmutation: |
mutation addAuthor($city: AddCityInput!) {
addCity(input: [$city]) {
@@ -3697,8 +3687,7 @@
"uid":"_:City_2"
}
-
-- name: "Additional Deletes - deep mutation"
+- name: Additional Deletes - deep mutation
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -3852,8 +3841,8 @@
"uid":"_:Post1_4"
}
-- name: "Deep mutation three level xid with existing XIDs 1"
- explanation: "reply1 and comment1 exists and is not created"
+- name: Deep mutation three level xid with existing XIDs 1
+ explanation: reply1 and comment1 exist and are not created
gqlmutation: |
mutation($auth: [AddPost1Input!]!) {
addPost1(input: $auth) {
@@ -3952,8 +3941,9 @@
"uid":"_:Post1_4"
}
-- name: "Deep mutation three level xid with existing XIDs 2"
- explanation: "comment2 and comment1 exists. reply1 does not exist. reply1 is not created as its parent exists."
+- name: Deep mutation three level xid with existing XIDs 2
+ explanation:
+ comment2 and comment1 exist. reply1 does not exist. reply1 is not created as its parent exists.
gqlmutation: |
mutation($auth: [AddPost1Input!]!) {
addPost1(input: $auth) {
@@ -4044,8 +4034,7 @@
"uid":"_:Post1_4"
}
--
- name: "Add mutation error on @id field for empty value"
+- name: Add mutation error on @id field for empty value
gqlmutation: |
mutation addState($input: AddStateInput!) {
addState(input: [$input]) {
@@ -4062,12 +4051,15 @@
"country": { "id": "0x12" }
}
}
- explanation: "The add mutation should not be allowed since value of @id field is empty."
+ explanation: The add mutation should not be allowed since value of @id field is empty.
error:
- { "message": "failed to rewrite mutation payload because encountered an empty value for @id field `State.code`" }
+ {
+ "message":
+ failed to rewrite mutation payload because encountered an empty value for @id field
+ `State.code`,
+ }
--
- name: "Add mutation error on @id field for empty value (Nested)"
+- name: Add mutation error on @id field for empty value (Nested)
gqlmutation: |
mutation addCountry($input: AddCountryInput!) {
addCountry(input: [$input]) {
@@ -4086,12 +4078,15 @@
} ]
}
}
- explanation: "The add mutation should not be allowed since value of @id field is empty."
+ explanation: The add mutation should not be allowed since value of @id field is empty.
error:
- { "message": "failed to rewrite mutation payload because encountered an empty value for @id field `State.code`" }
+ {
+ "message":
+ failed to rewrite mutation payload because encountered an empty value for @id field
+ `State.code`,
+ }
--
- name: "Add mutation for person with @hasInverse"
+- name: Add mutation for person with @hasInverse
gqlmutation: |
mutation($input: [AddPersonInput!]!) {
addPerson(input: $input) {
@@ -4147,8 +4142,7 @@
"uid": "_:Person_1"
}
--
- name: "Add mutation with union"
+- name: Add mutation with union
gqlmutation: |
mutation($input: [AddHomeInput!]!) {
addHome(input: $input) {
@@ -4214,8 +4208,7 @@
"uid": "_:Home_2"
}
--
- name: "Add mutation with union - invalid input"
+- name: Add mutation with union - invalid input
gqlmutation: |
mutation($input: [AddHomeInput!]!) {
addHome(input: $input) {
@@ -4243,14 +4236,13 @@
}
]
}
- explanation: "The add mutation should not be allowed since the union input is invalid"
+ explanation: The add mutation should not be allowed since the union input is invalid
error:
message: |-
failed to rewrite mutation payload because value for field `favouriteMember` in type `Home` must have exactly one child, found 0 children
failed to rewrite mutation payload because value for field `members` in type `Home` index `0` must have exactly one child, found 2 children
--
- name: "Add type with multiple Xid fields"
+- name: Add type with multiple Xid fields
gqlmutation: |
mutation($input: [AddBookInput!]!) {
addBook(input: $input) {
@@ -4310,8 +4302,9 @@
"uid": "_:Book_2"
}
--
- name: "Add mutation with multiple Xid fields shouldn't give error if xidName+xidVal is equal for two different xid fields in a type"
+- name:
+ Add mutation with multiple Xid fields shouldn't give an error if xidName+xidVal is equal for
+ two different xid fields in a type
gqlmutation: |
mutation($input: [AddABCInput!]!) {
addABC(input: $input) {
@@ -4342,8 +4335,9 @@
dgraph.type
}
}
- explanation: "We should generate different variables as ABC_1 and ABC_2 if xidName+xidValue is same as in above case
- i.e. ab+cd and abc+d both equals to abcd"
+ explanation:
+ We should generate different variables as ABC_1 and ABC_2 if xidName+xidValue is the same, as in
+ the above case, i.e. ab+cd and abc+d both equal abcd
dgmutations:
- setjson: |
{
@@ -4355,8 +4349,9 @@
"uid":"_:ABC_2"
}
--
- name: "Add mutation with multiple Xid fields shouldn't give error if xidName+xidVal is equal for two different xid fields in different objects"
+- name:
+ Add mutation with multiple Xid fields shouldn't give an error if xidName+xidVal is equal for
+ two different xid fields in different objects
gqlmutation: |
mutation($input: [AddABCInput!]!) {
addABC(input: $input) {
@@ -4399,8 +4394,9 @@
dgraph.type
}
}
- explanation: "We should generate different variables as ABC_1 and ABC_4 if xidName+xidValue is same in two different objects as in above case
- i.e. ab+cd and abc+d both equals to abcd"
+ explanation:
+ We should generate different variables as ABC_1 and ABC_4 if xidName+xidValue is the same in two
+ different objects, as in the above case, i.e. ab+cd and abc+d both equal abcd
dgmutations:
- setjson: |
{
@@ -4421,8 +4417,9 @@
"uid":"_:ABC_4"
}
--
- name: "Add mutation with multiple Xid fields shouldn't give error if typeName+xidName+xidVal is equal for two different xid fields in different types"
+- name:
+ Add mutation with multiple Xid fields shouldn't give an error if typeName+xidName+xidVal is
+ equal for two different xid fields in different types
gqlmutation: |
mutation($input: [AddABCInput!]!) {
addABC(input: $input) {
@@ -4469,8 +4466,10 @@
dgraph.type
}
}
- explanation: "We should generate different variables as ABC_1 and AB_3, or ABC_2 and AB_4 if typename+xidName+xidValue is same in two different types as in above case
- i.e. ABC+ab+cd and AB+Cabc+d both equals to ABCabcd"
+ explanation:
+ We should generate different variables as ABC_1 and AB_3, or ABC_2 and AB_4 if
+ typename+xidName+xidValue is the same in two different types, as in the above case, i.e.
+ ABC+ab+cd and AB+Cabc+d both equal ABCabcd
dgmutations:
- setjson: |
{
@@ -4486,9 +4485,7 @@
"uid": "_:ABC_2"
}
-
--
- name: "Add type with multiple Xid fields at deep level"
+- name: Add type with multiple Xid fields at deep level
gqlmutation: |
mutation($input: [AddauthorInput!]!) {
addauthor(input: $input) {
@@ -4548,10 +4545,12 @@
]
}
--
- name: "Add mutation for type Person1 with multiple xids referencing same node as closeFriends and friends, closeFriends refer friends with xid id"
- explanation: "The mutation adds same node as friends and closeFriends. It should
- work irrespective of the order in which the node is referenced."
+- name:
+ Add mutation for type Person1 with multiple xids referencing same node as closeFriends and
+ friends, closeFriends refer friends with xid id
+ explanation:
+ The mutation adds the same node as friends and closeFriends. It should work irrespective of the
+ order in which the node is referenced.
gqlmutation: |
mutation($input: [AddPerson1Input!]!) {
addPerson1(input: $input) {
@@ -4640,8 +4639,9 @@
"uid": "_:Person1_2"
}
--
- name: "Add type with multiple Xids fields at deep level when deep node already exist for all existence queries"
+- name:
+ Add type with multiple Xid fields at deep level when deep node already exists for all existence
+ queries
gqlmutation: |
mutation($input: [AddauthorInput!]!) {
addauthor(input: $input) {
@@ -4685,11 +4685,13 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because multiple nodes found for given xid values,
- updation not possible"
+ "message":
+ "failed to rewrite mutation payload because multiple nodes found for given xid values,
+ updation not possible",
}
--
- name: "Add type with multiple Xids fields at deep level when deep node already exist for one existence query"
+- name:
+ Add type with multiple Xid fields at deep level when deep node already exists for one existence
+ query
gqlmutation: |
mutation($input: [AddauthorInput!]!) {
addauthor(input: $input) {
@@ -4765,10 +4767,12 @@
}
]
--
- name: "Add mutation for type Person1 with multiple xids referencing same node as closeFriends and friends, friends refer closeFriends with xid name "
- explanation: "The mutation adds same node as friends and closeFriends. It should
- work irrespective of the order in which the node is referenced."
+- name:
+ "Add mutation for type Person1 with multiple xids referencing same node as closeFriends and
+ friends, friends refer closeFriends with xid name "
+ explanation:
+ The mutation adds the same node as friends and closeFriends. It should work irrespective of the
+ order in which the node is referenced.
gqlmutation: |
mutation($input: [AddPerson1Input!]!) {
addPerson1(input: $input) {
@@ -4857,7 +4861,7 @@
"uid": "_:Person1_2"
}
-- name: "Reference to inverse field should be ignored and not throw an error"
+- name: Reference to inverse field should be ignored and not throw an error
gqlmutation: |
mutation addDistrict($input: [AddDistrictInput!]!) {
addDistrict(input: $input) {
@@ -4885,9 +4889,10 @@
}
]
}
- explanation: "As district is inverse of city. There is no need to supply district to
- the city. In case it is supplied, it is simply ignored. The city is linked to D1 and
- district with code non-existing is ignored. Not even its existence query is generated."
+ explanation:
+ As district is the inverse of city, there is no need to supply district to the city. If it is
+ supplied, it is simply ignored. The city is linked to D1, and the district with code
+ non-existing is ignored. Not even its existence query is generated.
dgquery: |-
query {
District_1(func: eq(District.code, "D1")) {
@@ -4937,7 +4942,7 @@
"uid":"_:District_2"
}
-- name: "Reference to inverse field should be ignored and not throw an error 2"
+- name: Reference to inverse field should be ignored and not throw an error 2
gqlmutation: |
mutation addFoo($input: [AddFooInput!]!) {
addFoo(input: $input) {
@@ -4962,8 +4967,9 @@
}
]
}
- explanation: "As foo is inverse of bar. There is no need to supply bar to
- foo. In case it is supplied, it is simply ignored."
+ explanation:
+ As foo is the inverse of bar, there is no need to supply bar to foo. If it is supplied, it is
+ simply ignored.
dgquery: |-
query {
Foo_1(func: eq(Foo.id, "123")) {
@@ -5015,8 +5021,8 @@
"uid":"_:Foo_3"
}
--
- name: "Add mutation for Friend, Friend1 should not generated same variable name for existence queries"
+- name:
+ Add mutation for Friend, Friend1 should not generate the same variable name for existence queries
gqlmutation: |
mutation($input: [AddFriend1Input!]!) {
addFriend1(input: $input) {
@@ -5099,72 +5105,71 @@
}
dgmutations:
- setjson: |
- {
- "Friend1.friends":
- [
- {
- "Friend.id":"Friend1",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_2"
- },
- {
- "Friend.id":"Friend2",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_3"
- },
- {
- "Friend.id":"Friend3",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_4"
- },
- {
- "Friend.id":"Friend4",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_5"
- },
- {
- "Friend.id":"Friend5",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_6"
- },
- {
- "Friend.id":"Friend6",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_7"
- },
- {
- "Friend.id":"Friend7",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_8"
- },
- {
- "Friend.id":"Friend8",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_9"
- },
- {
- "Friend.id":"Friend9",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_10"
- },
- {
- "Friend.id":"Friend10",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_11"
- },
- {
- "Friend.id":"Friend11",
- "dgraph.type":["Friend"],
- "uid":"_:Friend_12"
- }
- ],
- "Friend1.id":"Main Friend",
- "dgraph.type":["Friend1"],
- "uid":"_:Friend1_1"
- }
+ {
+ "Friend1.friends":
+ [
+ {
+ "Friend.id":"Friend1",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_2"
+ },
+ {
+ "Friend.id":"Friend2",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_3"
+ },
+ {
+ "Friend.id":"Friend3",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_4"
+ },
+ {
+ "Friend.id":"Friend4",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_5"
+ },
+ {
+ "Friend.id":"Friend5",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_6"
+ },
+ {
+ "Friend.id":"Friend6",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_7"
+ },
+ {
+ "Friend.id":"Friend7",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_8"
+ },
+ {
+ "Friend.id":"Friend8",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_9"
+ },
+ {
+ "Friend.id":"Friend9",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_10"
+ },
+ {
+ "Friend.id":"Friend10",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_11"
+ },
+ {
+ "Friend.id":"Friend11",
+ "dgraph.type":["Friend"],
+ "uid":"_:Friend_12"
+ }
+ ],
+ "Friend1.id":"Main Friend",
+ "dgraph.type":["Friend1"],
+ "uid":"_:Friend1_1"
+ }
--
- name: "Add mutation with language tag fields"
+- name: Add mutation with language tag fields
gqlmutation: |
mutation {
addPerson(input: { name: "Alice", nameHi: "ऐलिस",nameZh: "爱丽丝"}) {
@@ -5184,11 +5189,10 @@
"uid": "_:Person_1"
}
--
- name: "2-level add mutation with nullable @id fields "
- explaination: "bookId in Book and PenName in author are @id and nullable field,
- we can skip them while doing add mutation. Nested object author doesn't exist, so we
- add it and link it to book"
+- name: "2-level add mutation with nullable @id fields "
+ explaination:
+ bookId in Book and penName in author are nullable @id fields, so we can skip them while doing
+ an add mutation. The nested object author doesn't exist, so we add it and link it to the book
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: false) {
@@ -5251,12 +5255,12 @@
"uid": "_:Book_2"
}
--
- name: "2- level add mutation with upsert and nullable @id fields "
- explaination: "bookId in @id,penName in author are nullable @id fields and we can skip them.
- title,ISBN in Book are @id fields,so also added in set Json, because @id fields will also be updated by upserts.
- Both book and author already exist so we just link new author to book and delete old reference from book to author,
- if there is any"
+- name: "2- level add mutation with upsert and nullable @id fields "
+ explaination:
+ bookId in Book and penName in author are nullable @id fields, so we can skip them. title and
+ ISBN in Book are @id fields, so they are also added to the set JSON, because @id fields are also
+ updated by upserts. Both book and author already exist, so we just link the new author to the
+ book and delete the old reference from book to author, if there is any
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -5335,9 +5339,9 @@
}]
cond: "@if(gt(len(Book_2), 0))"
--
- name: "add mutation with upsert gives error when multiple nodes are found for existence queries"
- explaination: "Two different books exist for title and Sapiens @id fields, We can't do upsert mutation "
+- name: Add mutation with upsert gives an error when multiple nodes are found for existence queries
+ explaination:
+ "Two different books exist for title and Sapiens @id fields, We can't do upsert mutation "
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -5374,15 +5378,17 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because multiple nodes found
- for given xid values, updation not possible"
+ "message":
+ "failed to rewrite mutation payload because multiple nodes found for given xid values,
+ updation not possible",
}
--
- name: "add mutation with upsert at nested level gives error when multiple nodes are found
- for existence queries"
- explaination: "Two different author exist for penName and authorId @id fields inside author,
- We can't link author to both books "
+- name:
+ Add mutation with upsert at nested level gives an error when multiple nodes are found for
+ existence queries
+ explaination:
+ "Two different author exist for penName and authorId @id fields inside author, We can't link
+ author to both books "
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -5431,14 +5437,15 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because multiple nodes
- found for given xid values, updation not possible"
+ "message":
+ "failed to rewrite mutation payload because multiple nodes found for given xid values,
+ updation not possible",
}
--
- name: "No xid present for add mutation with upsert"
- explaination: "If none of the xid field is given in upsert mutation then there will be no existence queries,
- and it will behave as simple add mutation,i.e. create new node with all the given fields"
+- name: No xid present for add mutation with upsert
+ explaination:
+ If none of the xid fields is given in an upsert mutation, there will be no existence queries,
+ and it will behave as a simple add mutation, i.e. create a new node with all the given fields
gqlmutation: |
mutation addBook($input: [AddBookInput!]!) {
addBook(input: $input, upsert: true) {
@@ -5465,10 +5472,10 @@
"uid":"_:Book_1"
}
--
- name: "Non-nullable xid should be present in add Mutation for nested field"
- explaination: "non-nullable @id field id in comment1 type not provided. As no reference is
- provided for comment, we treat it as new node, and return error for missing xid."
+- name: Non-nullable xid should be present in add mutation for nested field
+ explaination:
+ The non-nullable @id field id in the comment1 type is not provided. As no reference is provided
+ for the comment, we treat it as a new node and return an error for the missing xid.
gqlmutation: |
mutation addPost1($input: [AddPost1Input!]!) {
addPost1(input: $input, upsert: false) {
@@ -5496,13 +5503,9 @@
dgraph.type
}
}
- error2:
- {
- "message": "failed to rewrite mutation payload because field id cannot be empty"
- }
-
--
- name: "Add mutation with @default directive"
+ error2: { "message": failed to rewrite mutation payload because field id cannot be empty }
+
+- name: Add mutation with @default directive
gqlmutation: |
mutation($input: [AddBookingInput!]!) {
addBooking(input: $input) {
@@ -5521,7 +5524,8 @@
}
]
}
- explanation: "As booking has @default fields and is being added, these should be set to the default add value"
+ explanation:
+ As booking has @default fields and is being added, these should be set to the default add value
dgmutations:
- setjson: |
{
@@ -5539,8 +5543,7 @@
"uid":"_:Booking_1"
}
--
- name: "Add mutation with @default directive uses provided values"
+- name: Add mutation with @default directive uses provided values
gqlmutation: |
mutation($input: [AddBookingInput!]!) {
addBooking(input: $input) {
@@ -5565,7 +5568,9 @@
}
]
}
- explanation: "Fields with @default(add) should use input values if provided (note that count is still using default)"
+ explanation:
+ Fields with @default(add) should use input values if provided (note that count is still using
+ default)
dgmutations:
- setjson: |
{
@@ -5583,9 +5588,10 @@
"uid":"_:Booking_1"
}
--
- name: "Upsert mutation with @default directives where only one of the nodes exists"
- explanation: "Booking1 should only have updated timestamp as it exists, Booking2 should have created and updated timestamps"
+- name: Upsert mutation with @default directives where only one of the nodes exists
+ explanation:
+ Booking1 should only have an updated timestamp as it already exists; Booking2 should have both
+ created and updated timestamps
gqlmutation: |
mutation addBookingXID($input: [AddBookingXIDInput!]!) {
addBookingXID(input: $input, upsert: true) {
@@ -5658,4 +5664,3 @@
"BookingXID"
]
}
-
diff --git a/graphql/resolve/auth_add_test.yaml b/graphql/resolve/auth_add_test.yaml
index 0c116e93e9f..d9dc0de5a4a 100644
--- a/graphql/resolve/auth_add_test.yaml
+++ b/graphql/resolve/auth_add_test.yaml
@@ -1,4 +1,4 @@
-- name: "Add one node"
+- name: Add one node
gqlquery: |
mutation addUserSecret($secret: AddUserSecretInput!) {
addUserSecret(input: [$secret]) {
@@ -8,7 +8,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "secret":
{ "aSecret": "it is",
@@ -30,7 +30,7 @@
"UserSecret": [ { "uid": "0x123" }]
}
-- name: "Add multiple nodes"
+- name: Add multiple nodes
gqlquery: |
mutation addUserSecret($secrets: [AddUserSecretInput!]!) {
addUserSecret(input: $secrets) {
@@ -40,7 +40,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "secrets":
[
@@ -66,7 +66,7 @@
"UserSecret": [ { "uid": "0x123" }, { "uid": "0x456" } ]
}
-- name: "Add one node that fails auth"
+- name: Add one node that fails auth
gqlquery: |
mutation addUserSecret($secret: AddUserSecretInput!) {
addUserSecret(input: [$secret]) {
@@ -76,7 +76,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{
"secret":
@@ -101,10 +101,9 @@
{
"UserSecret": [ ]
}
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
-- name: "Add multiple nodes that fails auth"
+- name: Add multiple nodes that fails auth
gqlquery: |
mutation addUserSecret($secrets: [AddUserSecretInput!]!) {
addUserSecret(input: $secrets) {
@@ -114,7 +113,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "secrets":
[
@@ -139,10 +138,9 @@
{
"UserSecret": [ { "uid": "0x123" }]
}
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
-- name: "Add multiple nodes of different types"
+- name: Add multiple nodes of different types
gqlquery: |
mutation addColumn($col: AddColumnInput!) {
addColumn(input: [$col]) {
@@ -152,7 +150,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col":
{ "inProject": { "projID": "0x123" },
@@ -209,7 +207,7 @@
"Ticket": [ { "uid": "0x789" } ]
}
-- name: "Add multiple nodes of different types that fails auth"
+- name: Add multiple nodes of different types that fails auth
gqlquery: |
mutation addColumn($col: AddColumnInput!) {
addColumn(input: [$col]) {
@@ -219,7 +217,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col":
{
@@ -275,10 +273,9 @@
{
"Ticket": [ { "uid": "0x789" } ]
}
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
-- name: "Add multiples of multiple nodes of different types"
+- name: Add multiples of multiple nodes of different types
gqlquery: |
mutation addColumn($col1: AddColumnInput!, $col2: AddColumnInput!) {
addColumn(input: [$col1, $col2]) {
@@ -288,7 +285,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col1":
{ "inProject": { "projID": "0x123" },
@@ -352,7 +349,7 @@
"Ticket": [ { "uid": "0x789" }, { "uid": "0x799" } ]
}
-- name: "Add multiples of multiple nodes of different types that fails auth"
+- name: Add multiples of multiple nodes of different types that fails auth
gqlquery: |
mutation addColumn($col1: AddColumnInput!, $col2: AddColumnInput!) {
addColumn(input: [$col1, $col2]) {
@@ -362,7 +359,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col1":
{ "inProject": { "projID": "0x123" },
@@ -425,15 +422,14 @@
"Column": [ { "uid": "0x456" } ],
"Ticket": [ { "uid": "0x789" }, { "uid": "0x799" } ]
}
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
# See comments about additional deletes in add_mutation_test.yaml.
# Because of those additional deletes, for example, when we add a column and
# link it to an existing ticket, we remove that ticket from the column it was
# attached to ... so we need authorization to update that column as well
# as to add the new column.
-- name: "Add with auth on additional delete"
+- name: Add with auth on additional delete
gqlquery: |
mutation addColumn($col: AddColumnInput!) {
addColumn(input: [$col]) {
@@ -443,7 +439,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col":
{ "inProject": { "projID": "0x123" },
@@ -514,7 +510,7 @@
"Column": [ { "uid": "0x456" } ]
}
-- name: "Add with auth on additional delete that fails"
+- name: Add with auth on additional delete that fails
gqlquery: |
mutation addColumn($col: AddColumnInput!) {
addColumn(input: [$col]) {
@@ -524,7 +520,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "col":
{ "inProject": { "projID": "0x123" },
@@ -593,10 +589,9 @@
{
"Column": [ { "uid": "0x456" } ]
}
- error:
- { "message": "couldn't rewrite query for mutation addColumn because authorization failed" }
+ error: { "message": couldn't rewrite query for mutation addColumn because authorization failed }
-- name: "Add with deep auth on additional delete"
+- name: Add with deep auth on additional delete
gqlquery: |
mutation addProject($proj: AddProjectInput!) {
addProject(input: [$proj]) {
@@ -606,7 +601,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{
"proj": {
@@ -687,7 +682,7 @@
"Project": [ { "uid": "0x123" } ]
}
-- name: "Add with deep auth on additional delete that fails"
+- name: Add with deep auth on additional delete that fails
gqlquery: |
mutation addProject($proj: AddProjectInput!) {
addProject(input: [$proj]) {
@@ -697,7 +692,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{
"proj": {
@@ -776,10 +771,9 @@
"Column": [ { "uid": "0x456" } ],
"Project": [ { "uid": "0x123" } ]
}
- error:
- { "message": "couldn't rewrite query for mutation addProject because authorization failed" }
+ error: { "message": couldn't rewrite query for mutation addProject because authorization failed }
-- name: "Add with top level RBAC false."
+- name: Add with top level RBAC false
gqlquery: |
mutation addLog($log: AddLogInput!) {
addLog(input: [$log]) {
@@ -789,7 +783,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "log":
{
@@ -800,11 +794,9 @@
}
uids: |
{ "Log_1": "0x123" }
- error:
- { "message": "mutation failed because authorization failed"}
+ error: { "message": mutation failed because authorization failed }
-
-- name: "Add with top level RBAC true."
+- name: Add with top level RBAC true
gqlquery: |
mutation addLog($log: AddLogInput!) {
addLog(input: [$log]) {
@@ -814,8 +806,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "log":
{
@@ -830,7 +822,7 @@
}
skipauth: true
-- name: "Add with top level OR RBAC true."
+- name: Add with top level OR RBAC true
gqlquery: |
mutation addProject($proj: AddProjectInput!) {
addProject(input: [$proj]) {
@@ -840,8 +832,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{
"proj": {
@@ -855,7 +847,7 @@
}
skipauth: true
-- name: "Add with top level OR RBAC false."
+- name: Add with top level OR RBAC false
gqlquery: |
mutation addProject($proj: AddProjectInput!) {
addProject(input: [$proj]) {
@@ -865,8 +857,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{
"proj": {
@@ -895,7 +887,7 @@
"Project": [ { "uid": "0x123" } ]
}
-- name: "Add with top level And RBAC true."
+- name: Add with top level And RBAC true
gqlquery: |
mutation addIssue($issue: AddIssueInput!) {
addIssue(input: [$issue]) {
@@ -905,8 +897,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "issue":
{ "msg": "log123",
@@ -946,7 +938,7 @@
"Issue": [ { "uid": "0x789" }]
}
-- name: "Add with top level And RBAC false."
+- name: Add with top level And RBAC false
gqlquery: |
mutation addIssue($issue: AddIssueInput!) {
addIssue(input: [$issue]) {
@@ -956,8 +948,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "issue":
{ "msg": "log123",
@@ -992,10 +984,9 @@
Issue.owner : Issue.owner @filter(eq(User.username, "user1"))
}
}
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
-- name: "Add with top level not RBAC false."
+- name: Add with top level not RBAC false
gqlquery: |
mutation addComplexLog($log: AddComplexLogInput!) {
addComplexLog(input: [$log]) {
@@ -1005,8 +996,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "log":
{ "logs": "log123",
@@ -1017,10 +1008,9 @@
{
"ComplexLog_1": "0x123"
}
- error:
- { "message": "mutation failed because authorization failed"}
+ error: { "message": mutation failed because authorization failed }
-- name: "Add with top level not RBAC true."
+- name: Add with top level not RBAC true
gqlquery: |
mutation addComplexLog($log: AddComplexLogInput!) {
addComplexLog(input: [$log]) {
@@ -1030,8 +1020,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "log":
{ "logs": "log123",
@@ -1044,7 +1034,7 @@
}
skipauth: true
-- name: "Adding nodes for a Type that inherits Auth rules from an interfaces successfully."
+- name: Adding nodes for a Type that inherits Auth rules from an interface successfully
gqlquery: |
mutation addQuestion($question: [AddQuestionInput!]!) {
addQuestion(input: $question) {
@@ -1058,7 +1048,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
ANS: "true"
variables: |
{ "question":
@@ -1097,7 +1087,7 @@
"Question": [ {"uid": "0x123"}]
}
-- name: "Adding node for a Type that inherits auth rules from an interface fails."
+- name: Adding node for a Type that inherits auth rules from an interface fails
gqlquery: |
mutation addQuestion($question: [AddQuestionInput!]!) {
addQuestion(input: $question) {
@@ -1111,7 +1101,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
ANS: "true"
variables: |
{ "question":
@@ -1149,10 +1139,9 @@
{
"Question": [ ], "Author": [ { "uid" : "0x456"} ]
}
- error:
- { "message": "mutation failed because authorization failed"}
+ error: { "message": mutation failed because authorization failed }
-- name: "Add type with having RBAC rule on interface successfully"
+- name: Add type with having RBAC rule on interface successfully
gqlquery: |
mutation addFbPost($post: [AddFbPostInput!]!){
addFbPost(input: $post){
@@ -1165,8 +1154,8 @@
}
}
jwtvar:
- USER: "user1"
- ROLE: "ADMIN"
+ USER: user1
+ ROLE: ADMIN
variables: |
{ "post":
[{
@@ -1200,7 +1189,7 @@
"FbPost": [ {"uid": "0x123"}]
}
-- name: "Add type with Having RBAC rule on interface failed"
+- name: Add type with Having RBAC rule on interface failed
gqlquery: |
mutation addFbPost($post: [AddFbPostInput!]!){
addFbPost(input: $post){
@@ -1213,8 +1202,8 @@
}
}
jwtvar:
- USER: "user1"
- ROLE: "USER"
+ USER: user1
+ ROLE: USER
variables: |
{ "post":
[{
@@ -1230,10 +1219,9 @@
"FbPost_1": "0x123",
"Author_1": "0x456"
}
- error:
- {"message" : "mutation failed because authorization failed"}
+ error: { "message": mutation failed because authorization failed }
-- name: "Upsert Add Mutation with RBAC true"
+- name: Upsert Add Mutation with RBAC true
gqlquery: |
mutation addTweets($tweet: AddTweetsInput!) {
addTweets(input: [$tweet], upsert: true) {
@@ -1243,7 +1231,7 @@
}
}
jwtvar:
- USER: "foo"
+ USER: foo
variables: |
{ "tweet":
{ "id": "existing ID",
@@ -1271,7 +1259,7 @@
Tweets_2 as var(func: uid(0x123)) @filter(type(Tweets))
}
-- name: "Upsert Add Mutation with RBAC false"
+- name: Upsert Add Mutation with RBAC false
gqlquery: |
mutation addTweets($tweet: AddTweetsInput!) {
addTweets(input: [$tweet], upsert: true) {
@@ -1281,7 +1269,7 @@
}
}
jwtvar:
- USER: "not foo"
+ USER: not foo
variables: |
{ "tweet":
{ "id": "existing ID",
@@ -1305,9 +1293,10 @@
Tweets_1 as addTweets()
}
-- name: "Upsert with Deep Auth"
- explanation: "As state already exists, update auth rules of State are applied.
- As Country does not exist, add auth rules of Country are applied."
+- name: Upsert with Deep Auth
+ explanation:
+ As the state already exists, the update auth rules of State are applied. As the Country does
+ not exist, the add auth rules of Country are applied.
gqlquery: |
mutation addState($state: AddStateInput!) {
addState(input: [$state], upsert: true) {
@@ -1317,7 +1306,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "state":
{
diff --git a/graphql/resolve/auth_closed_by_default_add_test.yaml b/graphql/resolve/auth_closed_by_default_add_test.yaml
index 0e063f6199d..3a3e08d4ae6 100644
--- a/graphql/resolve/auth_closed_by_default_add_test.yaml
+++ b/graphql/resolve/auth_closed_by_default_add_test.yaml
@@ -1,4 +1,4 @@
-- name: "Query with missing jwt token - Type with Auth"
+- name: Query with missing jwt token - Type with Auth
gqlquery: |
mutation addComplexLog($log: AddComplexLogInput!) {
addComplexLog(input: [$log]) {
@@ -8,8 +8,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "log":
{ "logs": "log123",
@@ -20,9 +20,13 @@
uids: |
{ "ComplexLog1": "0x123" }
error:
- { "message": "mutation failed because authorization failed because a valid JWT is required but was not provided"}
+ {
+ "message":
+ mutation failed because authorization failed because a valid JWT is required but was not
+ provided,
+ }
-- name: "Query with missing jwt token - Type without Auth"
+- name: Query with missing jwt token - Type without Auth
gqlquery: |
mutation addTodo($todo: AddTodoInput!) {
addTodo(input: [$todo]) {
@@ -40,4 +44,8 @@
}
}
error:
- { "message": "mutation failed because authorization failed because a valid JWT is required but was not provided"}
\ No newline at end of file
+ {
+ "message":
+ mutation failed because authorization failed because a valid JWT is required but was not
+ provided,
+ }
diff --git a/graphql/resolve/auth_closed_by_default_delete_test.yaml b/graphql/resolve/auth_closed_by_default_delete_test.yaml
index 0bce8951bcb..ed36a57643f 100644
--- a/graphql/resolve/auth_closed_by_default_delete_test.yaml
+++ b/graphql/resolve/auth_closed_by_default_delete_test.yaml
@@ -1,4 +1,4 @@
-- name: "Delete with top level not RBAC true - type with auth"
+- name: Delete with top level not RBAC true - type with auth
gqlquery: |
mutation ($ids: [ID!]) {
deleteRole(filter: {id: $ids}) {
@@ -17,15 +17,14 @@
"uid": "uid(x)"
}]
dgquery: |-
- query {
- x as deleteRole(func: type(Role)) {
- uid
- }
- }
- error:
- { "message": "a valid JWT is required but was not provided" }
+ query {
+ x as deleteRole(func: type(Role)) {
+ uid
+ }
+ }
+ error: { "message": a valid JWT is required but was not provided }
-- name: "Delete with top level not RBAC true. - type with auth"
+- name: Delete with top level not RBAC true. - type with auth
gqlquery: |
mutation ($ids: [ID!]) {
deleteTodo(filter: {id: $ids}) {
@@ -44,10 +43,9 @@
"uid": "uid(x)"
}]
dgquery: |-
- query {
- x as deletetodo(func: type(Todo)) {
- uid
- }
- }
- error:
- { "message": "a valid JWT is required but was not provided" }
\ No newline at end of file
+ query {
+ x as deletetodo(func: type(Todo)) {
+ uid
+ }
+ }
+ error: { "message": a valid JWT is required but was not provided }
diff --git a/graphql/resolve/auth_closed_by_default_query_test.yaml b/graphql/resolve/auth_closed_by_default_query_test.yaml
index c0a6fc02b97..e94883506ed 100644
--- a/graphql/resolve/auth_closed_by_default_query_test.yaml
+++ b/graphql/resolve/auth_closed_by_default_query_test.yaml
@@ -1,4 +1,4 @@
-- name: "Query with missing jwt token - type with auth directive"
+- name: Query with missing jwt token - type with auth directive
gqlquery: |
query {
queryTodo {
@@ -7,10 +7,9 @@
text
}
}
- error:
- { "message": "a valid JWT is required but was not provided"}
+ error: { "message": a valid JWT is required but was not provided }
-- name: "Query with missing jwt token - type without auth directive"
+- name: Query with missing jwt token - type without auth directive
gqlquery: |
query {
queryTodo {
@@ -19,5 +18,4 @@
text
}
}
- error:
- { "message": "a valid JWT is required but was not provided" }
+ error: { "message": a valid JWT is required but was not provided }
diff --git a/graphql/resolve/auth_closed_by_default_update_test.yaml b/graphql/resolve/auth_closed_by_default_update_test.yaml
index 52fa150805d..838e5629f8d 100644
--- a/graphql/resolve/auth_closed_by_default_update_test.yaml
+++ b/graphql/resolve/auth_closed_by_default_update_test.yaml
@@ -1,4 +1,4 @@
-- name: "Update with top level And RBAC false - type with Auth "
+- name: Update with top level And RBAC false - type with Auth
gqlquery: |
mutation updateIssue($issue: UpdateIssueInput!) {
updateIssue(input: $issue) {
@@ -21,9 +21,12 @@
x as updateIssue()
}
error:
- { "message": "couldn't rewrite mutation updateIssue because a valid JWT is required but was not provided" }
+ {
+ "message":
+ couldn't rewrite mutation updateIssue because a valid JWT is required but was not provided,
+ }
-- name: "Update with top level And RBAC false - type without auth"
+- name: Update with top level And RBAC false - type without auth
gqlquery: |
mutation updateTodo($todo: UpdateTodoInput!) {
updateTodo(input: $todo) {
@@ -46,4 +49,7 @@
x as updateTodo()
}
error:
- { "message": "couldn't rewrite mutation updateTodo because a valid JWT is required but was not provided" }
\ No newline at end of file
+ {
+ "message":
+ couldn't rewrite mutation updateTodo because a valid JWT is required but was not provided,
+ }
diff --git a/graphql/resolve/auth_delete_test.yaml b/graphql/resolve/auth_delete_test.yaml
index 66d7b97f361..2028b6d4b65 100644
--- a/graphql/resolve/auth_delete_test.yaml
+++ b/graphql/resolve/auth_delete_test.yaml
@@ -1,4 +1,4 @@
-- name: "Delete with auth"
+- name: Delete with auth
gqlquery: |
mutation deleteUserSecret($filter: UserSecretFilter!) {
deleteUserSecret(filter: $filter) {
@@ -6,7 +6,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "filter": { "aSecret": { "anyofterms": "auth is applied" } } }
dgmutations:
@@ -24,7 +24,7 @@
UserSecret_Auth2 as var(func: uid(UserSecret_1)) @filter(eq(UserSecret.ownedBy, "user1")) @cascade
}
-- name: "Delete with inverse field and RBAC true"
+- name: Delete with inverse field and RBAC true
gqlquery: |
mutation {
deleteTweets(
@@ -37,8 +37,8 @@
}
}
jwtvar:
- USER: "foo"
- ROLE: "admin"
+ USER: foo
+ ROLE: admin
dgmutations:
- deletejson: |
[
@@ -70,7 +70,7 @@
Tweets_4 as var(func: uid(x))
}
-- name: "Delete with inverse field and RBAC false"
+- name: Delete with inverse field and RBAC false
gqlquery: |
mutation {
deleteTweets(
@@ -83,7 +83,7 @@
}
}
jwtvar:
- ROLE: "admin"
+ ROLE: admin
dgmutations:
- deletejson: |
[
@@ -104,7 +104,7 @@
Tweets_2 as var(func: uid(x))
}
-- name: "Delete with deep auth"
+- name: Delete with deep auth
gqlquery: |
mutation deleteTicket($filter: TicketFilter!) {
deleteTicket(filter: $filter) {
@@ -112,7 +112,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "filter": { "title": { "anyofterms": "auth is applied" } } }
dgmutations:
@@ -148,8 +148,7 @@
}
}
-
-- name: "Delete with deep query"
+- name: Delete with deep query
gqlquery: |
mutation deleteTicket($filter: TicketFilter!) {
deleteTicket(filter: $filter) {
@@ -171,7 +170,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "filter": { "title": { "anyofterms": "auth is applied" } } }
dgmutations:
@@ -279,7 +278,7 @@
}
}
-- name: "Delete with top level RBAC true."
+- name: Delete with top level RBAC true
gqlquery: |
mutation($projs: [ID!]) {
deleteProject (filter: { projID: $projs}) {
@@ -291,8 +290,8 @@
"projs" : ["0x01", "0x02"]
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -313,7 +312,7 @@
}
}
-- name: "Delete with top level RBAC false."
+- name: Delete with top level RBAC false
gqlquery: |
mutation deleteLog($filter: LogFilter!) {
deleteLog(filter: $filter) {
@@ -331,8 +330,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -348,7 +347,7 @@
DeleteLogPayload.log()
}
-- name: "multiple rule in delete mutation"
+- name: Multiple rules in delete mutation
gqlquery: |
mutation deleteUser($filter: UserFilter!) {
deleteUser(filter: $filter) {
@@ -362,7 +361,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgmutations:
- deletejson: |
[
@@ -389,7 +388,7 @@
User_Auth3 as var(func: uid(User_1)) @filter(eq(User.isPublic, true)) @cascade
}
-- name: "Filtering by ID"
+- name: Filtering by ID
gqlquery: |
mutation deleteRegion($filter: RegionFilter!) {
deleteRegion(filter: $filter) {
@@ -397,7 +396,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "filter":
{
@@ -419,7 +418,7 @@
Region_Auth2 as var(func: uid(Region_1)) @filter(eq(Region.global, true)) @cascade
}
-- name: "Delete with top level RBAC false."
+- name: Delete with top level RBAC false
gqlquery: |
mutation deleteLog($filter: LogFilter!) {
deleteLog(filter: $filter) {
@@ -433,8 +432,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -445,7 +444,7 @@
x as deleteLog()
}
-- name: "Delete with top level RBAC true."
+- name: Delete with top level RBAC true
gqlquery: |
mutation deleteLog($filter: LogFilter!) {
deleteLog(filter: $filter) {
@@ -463,8 +462,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -492,7 +491,7 @@
Log_3 as var(func: uid(x))
}
-- name: "Delete with top level OR RBAC true."
+- name: Delete with top level OR RBAC true
gqlquery: |
mutation($ids: [ID!]) {
deleteComplexLog (filter: { id: $ids}) {
@@ -504,8 +503,8 @@
"ids" : ["0x01", "0x02"]
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{ "uid": "uid(x)" }]
@@ -518,7 +517,7 @@
ComplexLog_1 as var(func: uid(0x1, 0x2)) @filter(type(ComplexLog))
}
-- name: "Delete with top level OR RBAC false."
+- name: Delete with top level OR RBAC false
gqlquery: |
mutation($ids: [ID!]) {
deleteComplexLog (filter: { id: $ids}) {
@@ -530,7 +529,7 @@
"ids" : ["0x01", "0x02"]
}
jwtvar:
- USER: "user1"
+ USER: user1
dgmutations:
- deletejson: |
[{ "uid": "uid(x)" }]
@@ -544,7 +543,7 @@
ComplexLog_Auth2 as var(func: uid(ComplexLog_1)) @filter(eq(ComplexLog.visible, true)) @cascade
}
-- name: "Delete with top level AND RBAC true."
+- name: Delete with top level AND RBAC true
gqlquery: |
mutation ($ids: [ID!]) {
deleteIssue(filter: {id: $ids}) {
@@ -556,8 +555,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -575,7 +574,7 @@
}
}
-- name: "Delete with top level AND RBAC false."
+- name: Delete with top level AND RBAC false
gqlquery: |
mutation ($ids: [ID!]) {
deleteIssue(filter: {id: $ids}) {
@@ -587,8 +586,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -599,7 +598,7 @@
x as deleteIssue()
}
-- name: "Delete with top level not RBAC false."
+- name: Delete with top level not RBAC false
gqlquery: |
mutation ($ids: [ID!]) {
deleteRole(filter: {id: $ids}) {
@@ -611,8 +610,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -623,7 +622,7 @@
x as deleteRole()
}
-- name: "Delete with top level not RBAC true."
+- name: Delete with top level not RBAC true
gqlquery: |
mutation ($ids: [ID!]) {
deleteRole(filter: {id: $ids}) {
@@ -635,8 +634,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -649,7 +648,9 @@
}
}
-- name: "Deleting interface having its own auth rules and implementing types also have auth rules and some of the rules of implementing types are not satisfied"
+- name:
+ Deleting interface having its own auth rules and implementing types also have auth rules and
+ some of the rules of implementing types are not satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deletePost(filter: {id: $ids}) {
@@ -661,8 +662,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- USER: "user1"
- ANS: "true"
+ USER: user1
+ ANS: "true"
dgmutations:
- deletejson: |
[{
@@ -700,7 +701,9 @@
}
}
-- name: "Deleting interface having its own auth rules and implementing types also have auth rules and the rules of interface are not satisfied"
+- name:
+ Deleting interface having its own auth rules and implementing types also have auth rules and the
+ rules of interface are not satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deletePost(filter: {id: $ids}) {
@@ -708,7 +711,7 @@
}
}
jwtvar:
- ROLE: "ADMIN"
+ ROLE: ADMIN
AND: "true"
variables: |
{
@@ -724,7 +727,9 @@
x as deletePost()
}
-- name: "Deleting interface having no own auth rules but some implementing types have auth rules and they are not satisfied."
+- name:
+ Deleting interface having no own auth rules but some implementing types have auth rules and they
+ are not satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deleteA(filter: {id: $ids}) {
@@ -750,7 +755,7 @@
B_2 as var(func: type(B))
}
-- name: "Delete Type Having Graph Traversal Auth Rules on Interface."
+- name: Delete Type Having Graph Traversal Auth Rules on Interface
gqlquery: |
mutation ($ids: [ID!]) {
deleteQuestion(filter: {id: $ids}) {
@@ -762,7 +767,7 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- USER: "user1"
+ USER: user1
ANS: "true"
dgmutations:
- deletejson: |
@@ -793,7 +798,7 @@
}
}
-- name: "Delete Type Having Graph Traversal Auth Rules on Interface and those are not satisfied."
+- name: Delete Type Having Graph Traversal Auth Rules on Interface and those are not satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deleteQuestion(filter: {id: $ids}) {
@@ -816,7 +821,7 @@
x as deleteQuestion()
}
-- name: "Delete type having RBAC Auth Rules on interface and those are not satisfied."
+- name: Delete type having RBAC Auth Rules on interface and those are not satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deleteFbPost(filter: {id: $ids}) {
@@ -828,8 +833,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -840,7 +845,7 @@
x as deleteFbPost()
}
-- name: "Delete type having RBAC Auth Rules on interface and all are satisfied."
+- name: Delete type having RBAC Auth Rules on interface and all are satisfied
gqlquery: |
mutation ($ids: [ID!]) {
deleteFbPost(filter: {id: $ids}) {
@@ -852,8 +857,8 @@
"ids": ["0x1", "0x2"]
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgmutations:
- deletejson: |
[{
@@ -879,4 +884,3 @@
}
}
}
-
diff --git a/graphql/resolve/auth_query_test.yaml b/graphql/resolve/auth_query_test.yaml
index a0083d44266..381bacf98f6 100644
--- a/graphql/resolve/auth_query_test.yaml
+++ b/graphql/resolve/auth_query_test.yaml
@@ -1,4 +1,4 @@
-- name: "Deep RBAC rule - All level true"
+- name: Deep RBAC rule - All level true
gqlquery: |
query {
queryContact {
@@ -46,7 +46,7 @@
TaskOccurrence_Auth5 as var(func: uid(TaskOccurrence_4)) @filter(eq(TaskOccurrence.role, "ADMINISTRATOR")) @cascade
}
-- name: "Deep RBAC rule - Level 0 false"
+- name: Deep RBAC rule - Level 0 false
gqlquery: |
query {
queryContact {
@@ -71,7 +71,7 @@
queryContact()
}
-- name: "Deep RBAC rule - Level 1 false"
+- name: Deep RBAC rule - Level 1 false
gqlquery: |
query {
queryContact {
@@ -101,7 +101,7 @@
Contact_6 as var(func: type(Contact))
}
-- name: "Deep RBAC rule with cascade - Level 1 false"
+- name: Deep RBAC rule with cascade - Level 1 false
gqlquery: |
query {
queryContact @cascade {
@@ -150,7 +150,7 @@
AdminTask_6 as var(func: uid())
}
-- name: "Deep RBAC rule - Level 2 false"
+- name: Deep RBAC rule - Level 2 false
gqlquery: |
query {
queryContact {
@@ -188,7 +188,7 @@
AdminTask_1 as var(func: uid(AdminTask_2))
}
-- name: "Deep RBAC rule - Level 1 type without auth."
+- name: Deep RBAC rule - Level 1 type without auth
gqlquery: |
query {
queryContact {
@@ -236,7 +236,7 @@
TaskOccurrence_Auth5 as var(func: uid(TaskOccurrence_4)) @filter(eq(TaskOccurrence.role, "ADMINISTRATOR")) @cascade
}
-- name: "Auth query with @dgraph pred."
+- name: Auth query with @dgraph pred
gqlquery: |
query {
queryStudent {
@@ -244,8 +244,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryStudent(func: uid(StudentRoot)) {
@@ -257,7 +257,7 @@
Student_Auth2 as var(func: uid(Student_1)) @filter(eq(IOw80vnV, "user1")) @cascade
}
-- name: "Auth query with @dgraph pred (Test RBAC)."
+- name: Auth query with @dgraph pred (Test RBAC)
gqlquery: |
query {
queryStudent {
@@ -265,14 +265,14 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryStudent()
}
-- name: "Auth with deep get query."
+- name: Auth with deep get query
gqlquery: |
query {
getProject(projID: "0x123") {
@@ -284,7 +284,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
getProject(func: uid(ProjectRoot)) @filter(type(Project)) {
@@ -323,7 +323,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUserSecret(func: uid(UserSecretRoot)) {
@@ -335,7 +335,7 @@
UserSecret_Auth2 as var(func: uid(UserSecret_1)) @filter(eq(UserSecret.ownedBy, "user1")) @cascade
}
-- name: "Auth with Aggregate Root Query"
+- name: Auth with Aggregate Root Query
gqlquery: |
query {
aggregateUserSecret {
@@ -345,7 +345,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
aggregateUserSecret() {
@@ -371,7 +371,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
getUserSecret(func: uid(UserSecretRoot)) @filter(type(UserSecret)) {
@@ -392,7 +392,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUserSecret(func: uid(UserSecretRoot)) {
@@ -404,7 +404,7 @@
UserSecret_Auth2 as var(func: uid(UserSecret_1)) @filter(eq(UserSecret.ownedBy, "user1")) @cascade
}
-- name: "Deep RBAC rules true"
+- name: Deep RBAC rules true
gqlquery: |
query {
queryUser {
@@ -414,8 +414,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -435,7 +435,7 @@
}
}
-- name: "Deep RBAC rules false"
+- name: Deep RBAC rules false
gqlquery: |
query {
queryUser {
@@ -446,8 +446,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -458,7 +458,7 @@
User_3 as var(func: type(User))
}
-- name: "Auth with top level AND rbac true"
+- name: Auth with top level AND rbac true
gqlquery: |
query {
queryIssue {
@@ -466,8 +466,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryIssue(func: uid(IssueRoot)) {
@@ -481,7 +481,7 @@
}
}
-- name: "Auth with complex rbac rules, true"
+- name: Auth with complex rbac rules, true
gqlquery: |
query {
queryComplexLog {
@@ -489,8 +489,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryComplexLog(func: uid(ComplexLogRoot)) {
@@ -501,7 +501,7 @@
ComplexLog_1 as var(func: type(ComplexLog))
}
-- name: "Auth with complex rbac rules, false"
+- name: Auth with complex rbac rules, false
gqlquery: |
query {
queryComplexLog {
@@ -509,14 +509,14 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryComplexLog()
}
-- name: "Auth with top level rbac true"
+- name: Auth with top level rbac true
gqlquery: |
query {
queryLog {
@@ -524,8 +524,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryLog(func: uid(LogRoot)) {
@@ -536,7 +536,7 @@
Log_1 as var(func: type(Log))
}
-- name: "Auth with top level rbac false"
+- name: Auth with top level rbac false
gqlquery: |
query {
queryLog {
@@ -544,14 +544,14 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryLog()
}
-- name: "Auth with top level AND rbac false"
+- name: Auth with top level AND rbac false
gqlquery: |
query {
queryIssue {
@@ -559,14 +559,14 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryIssue()
}
-- name: "Aggregate Query on Auth with top level AND rbac false"
+- name: Aggregate Query on Auth with top level AND rbac false
gqlquery: |
query {
aggregateIssue {
@@ -575,14 +575,14 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
aggregateIssue()
}
-- name: "Auth with top level OR rbac true"
+- name: Auth with top level OR rbac true
gqlquery: |
query {
queryProject {
@@ -590,8 +590,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryProject(func: uid(ProjectRoot)) {
@@ -602,7 +602,7 @@
Project_1 as var(func: type(Project))
}
-- name: "Aggregate on Auth with top level OR rbac true"
+- name: Aggregate on Auth with top level OR rbac true
gqlquery: |
query {
aggregateProject {
@@ -612,8 +612,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
aggregateProject() {
@@ -630,7 +630,7 @@
Project_1 as var(func: type(Project))
}
-- name: "Query with missing jwt variables"
+- name: Query with missing jwt variables
gqlquery: |
query {
queryGroup {
@@ -638,7 +638,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryGroup(func: uid(GroupRoot)) {
@@ -651,7 +651,7 @@
}
}
-- name: "Auth with top level OR rbac false"
+- name: Auth with top level OR rbac false
gqlquery: |
query {
queryProject {
@@ -659,8 +659,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryProject(func: uid(ProjectRoot)) {
@@ -685,7 +685,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUserSecret(func: uid(UserSecretRoot), orderasc: UserSecret.aSecret) {
@@ -706,7 +706,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryTicket(func: uid(TicketRoot)) {
@@ -738,7 +738,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -778,7 +778,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -806,7 +806,7 @@
}
}
-- name: "Auth deep query - 0 level"
+- name: Auth deep query - 0 level
gqlquery: |
query {
queryMovie(filter: { content: { eq: "A. N. Author" } }, order: { asc: content }, first: 10, offset: 10) {
@@ -814,7 +814,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryMovie(func: uid(MovieRoot), orderasc: Movie.content) {
@@ -834,7 +834,7 @@
}
}
-- name: "Auth deep query - 1 level"
+- name: Auth deep query - 1 level
gqlquery: |
query {
queryMovie(filter: { content: { eq: "MovieXYZ" } }, order: { asc: content }, first: 10, offset: 10) @cascade {
@@ -846,7 +846,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryMovie(func: uid(MovieRoot), orderasc: Movie.content, first: 10, offset: 10) @cascade {
@@ -875,7 +875,7 @@
Region_1 as var(func: uid(Region_2))
}
-- name: "Auth deep query - 3 level"
+- name: Auth deep query - 3 level
gqlquery: |
query {
queryMovie(filter: { content: { eq: "MovieXYZ" } }, order: { asc: content }, first: 10, offset: 10) {
@@ -896,7 +896,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryMovie(func: uid(MovieRoot), orderasc: Movie.content) {
@@ -945,7 +945,7 @@
UserSecret_Auth7 as var(func: uid(UserSecret_6)) @filter(eq(UserSecret.ownedBy, "user1")) @cascade
}
-- name: "Auth deep query with @cascade at all the levels - 3 level"
+- name: Auth deep query with @cascade at all the levels - 3 level
gqlquery: |
query {
queryMovie(filter: { content: { eq: "MovieXYZ" } }, order: { asc: content }, first: 10, offset: 10) @cascade {
@@ -966,7 +966,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryMovie(func: uid(MovieRoot), orderasc: Movie.content, first: 10, offset: 10) @cascade {
@@ -1015,7 +1015,7 @@
UserSecret_Auth7 as var(func: uid(UserSecret_6)) @filter(eq(UserSecret.ownedBy, "user1")) @cascade
}
-- name: "Auth with complex filter"
+- name: Auth with complex filter
gqlquery: |
query {
queryMovie {
@@ -1023,7 +1023,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryMovie(func: uid(MovieRoot)) {
@@ -1043,7 +1043,7 @@
}
}
-- name: "Aggregate Query with complex auth filter"
+- name: Aggregate Query with complex auth filter
gqlquery: |
query {
aggregateMovie {
@@ -1052,7 +1052,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
aggregateMovie() {
@@ -1076,7 +1076,7 @@
}
}
-- name: "Query with missing variable - top level"
+- name: Query with missing variable - top level
gqlquery: |
query {
queryUserSecret {
@@ -1088,7 +1088,7 @@
queryUserSecret()
}
-- name: "Query with null variable - top level"
+- name: Query with null variable - top level
gqlquery: |
query {
queryUserSecret {
@@ -1102,7 +1102,7 @@
queryUserSecret()
}
-- name: "Get with top level RBAC false"
+- name: Get with top level RBAC false
gqlquery: |
query {
getLog(id: "0x123") {
@@ -1110,13 +1110,13 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
getLog()
}
-- name: "Query with missing variable - deep query"
+- name: Query with missing variable - deep query
gqlquery: |
query {
queryUser {
@@ -1137,7 +1137,7 @@
User_3 as var(func: type(User))
}
-- name: "Query with null variable - deep query"
+- name: Query with null variable - deep query
gqlquery: |
query {
queryUser {
@@ -1160,7 +1160,7 @@
User_3 as var(func: type(User))
}
-- name: "Query with missing variable - partial jwt token"
+- name: Query with missing variable - partial jwt token
gqlquery: |
query {
queryProject {
@@ -1168,7 +1168,7 @@
}
}
jwtvar:
- ROLE: "ADMIN"
+ ROLE: ADMIN
dgquery: |-
query {
queryProject(func: uid(ProjectRoot)) {
@@ -1179,7 +1179,7 @@
Project_1 as var(func: type(Project))
}
-- name: "Query with missing jwt token - type without auth directive"
+- name: Query with missing jwt token - type without auth directive
gqlquery: |
query {
queryRole {
@@ -1194,7 +1194,7 @@
}
}
-- name: "Query with missing jwt token - type with auth directive"
+- name: Query with missing jwt token - type with auth directive
gqlquery: |
query {
queryMovie {
@@ -1215,7 +1215,7 @@
}
}
-- name: "Query with missing jwt token - type with empty auth directive"
+- name: Query with missing jwt token - type with empty auth directive
gqlquery: |
query {
queryReview {
@@ -1230,7 +1230,7 @@
}
}
-- name: "Aggregate Fields at child with Auth deep filter and field filter"
+- name: Aggregate Fields at child with Auth deep filter and field filter
gqlquery: |
query {
queryUser {
@@ -1242,7 +1242,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -1272,7 +1272,7 @@
}
}
-- name: "Multiple Aggregate queries at child level and other queries with Auth deep filter"
+- name: Multiple Aggregate queries at child level and other queries with Auth deep filter
gqlquery: |
query {
queryUser {
@@ -1289,8 +1289,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -1348,7 +1348,7 @@
}
}
-- name: "Aggregate at child with RBAC rules true"
+- name: Aggregate at child with RBAC rules true
gqlquery: |
query {
queryUser {
@@ -1359,8 +1359,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -1383,7 +1383,7 @@
}
}
-- name: "Aggregate Fields with Deep RBAC rules false"
+- name: Aggregate Fields with Deep RBAC rules false
gqlquery: |
query {
queryUser {
@@ -1395,8 +1395,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
queryUser(func: uid(UserRoot)) {
@@ -1407,7 +1407,7 @@
User_1 as var(func: type(User))
}
-- name: "Type should apply Interface's query rules and along with its own auth rules"
+- name: Type should apply Interface's query rules along with its own auth rules
gqlquery: |
query {
queryQuestion {
@@ -1417,7 +1417,7 @@
}
jwtvar:
ANS: "true"
- USER: "Random"
+ USER: Random
dgquery: |-
query {
queryQuestion(func: uid(QuestionRoot)) {
@@ -1437,7 +1437,7 @@
}
}
-- name: "Type should apply only Interface's query auth rules"
+- name: Type should apply only Interface's query auth rules
gqlquery: |
query {
queryAnswer {
@@ -1446,7 +1446,7 @@
}
}
jwtvar:
- USER: "Random"
+ USER: Random
dgquery: |-
query {
queryAnswer(func: uid(AnswerRoot)) {
@@ -1463,7 +1463,7 @@
}
}
-- name: "Type should apply query auth rules from all the interfaces that it implements."
+- name: Type should apply query auth rules from all the interfaces that it implements
gqlquery: |
query {
queryFbPost {
@@ -1472,8 +1472,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "Random"
+ ROLE: ADMIN
+ USER: Random
dgquery: |-
query {
queryFbPost(func: uid(FbPostRoot)) {
@@ -1490,7 +1490,7 @@
}
}
-- name: "Type which inherits auth rules from interfaces returns no results when auth rules fail"
+- name: Type which inherits auth rules from interfaces returns no results when auth rules fail
gqlquery: |
query {
queryFbPost {
@@ -1499,14 +1499,14 @@
}
}
jwtvar:
- ROLE: "REGULAR"
- USER: "Random"
+ ROLE: REGULAR
+ USER: Random
dgquery: |-
query {
queryFbPost()
}
-- name: "Auth rules of All the implementing types should Apply to the interface also"
+- name: Auth rules of all the implementing types should apply to the interface also
gqlquery: |
query {
queryPost {
@@ -1514,9 +1514,9 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- ANS: "true"
- USER: "Random"
+ ROLE: ADMIN
+ ANS: "true"
+ USER: Random
dgquery: |-
query {
queryPost(func: uid(PostRoot)) {
@@ -1552,7 +1552,7 @@
}
}
-- name: "Filters on query Interface should work correctly"
+- name: Filters on query Interface should work correctly
gqlquery: |
query {
queryPost(filter: {text: {eq: "A Post"}}, order: { desc: text}, first: 10, offset: 5 ) {
@@ -1560,9 +1560,9 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- ANS: "true"
- USER: "Random"
+ ROLE: ADMIN
+ ANS: "true"
+ USER: Random
dgquery: |-
query {
queryPost(func: uid(PostRoot), orderdesc: Post.text) {
@@ -1598,7 +1598,7 @@
}
}
-- name: "Query interface should return empty if the auth rule of interface is not satisfied"
+- name: Query interface should return empty if the auth rule of interface is not satisfied
gqlquery: |
query {
queryPost {
@@ -1606,14 +1606,14 @@
}
}
jwtvar:
- ROLE: "ADMIN"
+ ROLE: ADMIN
ANS: "true"
dgquery: |-
query {
queryPost()
}
-- name: "Query interface should return partial types if the auth rule of interface is not satisfied"
+- name: Query interface should return partial types if the auth rule of interface is not satisfied
gqlquery: |
query {
queryPost {
@@ -1621,7 +1621,7 @@
}
}
jwtvar:
- USER: "Random"
+ USER: Random
ANS: "true"
dgquery: |-
query {
@@ -1651,7 +1651,7 @@
}
}
-- name: "Get Query interface having Auth Rules apply Auth filters of types also"
+- name: Get Query interface having Auth Rules apply Auth filters of types also
gqlquery: |
query {
getPost(id: "0x1") {
@@ -1659,7 +1659,7 @@
}
}
jwtvar:
- USER: "Random"
+ USER: Random
ANS: "true"
dgquery: |-
query {
@@ -1689,7 +1689,8 @@
}
}
-- name: "Get Query interface having Auth Rules should return empty if the Auth rules are not satisfied"
+- name:
+ Get Query interface having Auth Rules should return empty if the Auth rules are not satisfied
gqlquery: |
query {
getPost(id: "0x1") {
@@ -1702,7 +1703,9 @@
getPost()
}
-- name: "Query interface having no Auth Rules should apply auth rules on implementing types that are satisfied"
+- name:
+ Query interface having no Auth Rules should apply auth rules on implementing types that are
+ satisfied
gqlquery: |
query {
queryA {
@@ -1727,7 +1730,9 @@
}
}
-- name: "Query interface having no Auth Rules but some type have Auth rules and those are not satified are excluded (for eg: type C )"
+- name:
+ "Query interface having no Auth Rules but some type have Auth rules and those are not satified
+ are excluded (for eg: type C )"
gqlquery: |
query {
queryA {
@@ -1747,8 +1752,7 @@
B_2 as var(func: type(B))
}
--
- name: "Password Query with no rule applied for password"
+- name: Password Query with no rule applied for password
gqlquery: |
query {
checkUserPassword(username: "user", password: "Password") {
@@ -1766,8 +1770,7 @@
}
}
--
- name: "Password Query with RBAC rule true"
+- name: Password Query with RBAC rule true
gqlquery: |
query {
checkLogPassword(id: "0x123", pwd: "something") {
@@ -1777,7 +1780,7 @@
}
}
jwtvar:
- ROLE: "Admin"
+ ROLE: Admin
dgquery: |-
query {
checkLogPassword(func: uid(LogRoot)) @filter((eq(val(pwd), 1) AND type(Log))) {
@@ -1792,8 +1795,7 @@
}
}
--
- name: "Password Query with RBAC rule false"
+- name: Password Query with RBAC rule false
gqlquery: |
query {
checkLogPassword(id: "0x123", pwd: "something") {
@@ -1802,14 +1804,13 @@
}
}
jwtvar:
- ROLE: "User"
+ ROLE: User
dgquery: |-
query {
checkLogPassword()
}
--
- name: "Password Query with auth rules"
+- name: Password Query with auth rules
gqlquery: |
query {
checkProjectPassword(projID: "0x123", pwd: "something") {
@@ -1822,7 +1823,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
dgquery: |-
query {
checkProjectPassword(func: uid(ProjectRoot)) @filter((eq(val(pwd), 1) AND type(Project))) {
@@ -1856,7 +1857,9 @@
}
}
-- name: "Type with password query should apply Interface's password rules and along with its own auth rules"
+- name:
+ Type with password query should apply Interface's password rules along with its own auth
+ rules
gqlquery: |
query {
checkQuestionPassword(id: "0x123", pwd: "something") {
@@ -1865,9 +1868,9 @@
}
}
jwtvar:
- ROLE: "Admin"
+ ROLE: Admin
ANS: "true"
- USER: "ADMIN"
+ USER: ADMIN
dgquery: |-
query {
checkQuestionPassword(func: uid(QuestionRoot)) @filter((eq(val(pwd), 1) AND type(Question))) {
@@ -1885,7 +1888,8 @@
}
}
-- name: "Type which inherits password auth rules from interfaces returns no results when auth rules fail"
+- name:
+ Type which inherits password auth rules from interfaces returns no results when auth rules fail
gqlquery: |
query {
checkQuestionPassword(id: "0x123", pwd: "something") {
@@ -1894,15 +1898,15 @@
}
}
jwtvar:
- ROLE: "NotAdmin"
+ ROLE: NotAdmin
ANS: "true"
- USER: "ADMIN"
+ USER: ADMIN
dgquery: |-
query {
checkQuestionPassword()
}
-- name: "Password Auth rules of All the implementing types should Apply to the interface also"
+- name: Password Auth rules of all the implementing types should apply to the interface also
gqlquery: |
query {
checkPostPassword(id: "0x123", pwd: "something") {
@@ -1910,9 +1914,9 @@
}
}
jwtvar:
- ROLE: "Admin"
+ ROLE: Admin
ANS: "true"
- USER: "ADMIN"
+ USER: ADMIN
dgquery: |-
query {
checkPostPassword(func: uid(PostRoot)) @filter((eq(val(pwd), 1) AND type(Post))) {
@@ -1939,7 +1943,7 @@
}
}
-- name: "Entities query with query auth rules"
+- name: Entities query with query auth rules
gqlquery: |
query {
_entities(representations: [{__typename: "Mission", id: "0x1"}{__typename: "Mission", id: "0x2"}, {__typename: "Mission", id: "0x3"}]) {
@@ -1951,7 +1955,7 @@
}
}
jwtvar:
- USER: "user"
+ USER: user
dgquery: |-
query {
_entities(func: uid(_EntityRoot), orderasc: Mission.id) {
@@ -1967,7 +1971,7 @@
Mission.id : Mission.id
}
}
-- name: "Entities query with top level RBAC rule true and level 1 query auth rule"
+- name: Entities query with top level RBAC rule true and level 1 query auth rule
gqlquery: |
query {
_entities(representations: [{__typename: "Astronaut", id: "0x1"},{__typename: "Astronaut", id: "0x2"},{__typename: "Astronaut", id: "0x3"}]) {
@@ -1979,8 +1983,8 @@
}
}
jwtvar:
- ROLE: "admin"
- USER: "user"
+ ROLE: admin
+ USER: user
dgquery: |-
query {
_entities(func: uid(_EntityRoot), orderasc: Astronaut.id) {
@@ -2002,7 +2006,7 @@
}
}
-- name: "Entities query with RBAC rule false"
+- name: Entities query with RBAC rule false
gqlquery: |
query {
_entities(representations: [{__typename: "Astronaut", id: "0x1"},{__typename: "Astronaut", id: "0x2"},{__typename: "Astronaut", id: "0x3"}]) {
@@ -2014,13 +2018,13 @@
}
}
jwtvar:
- ROLE: "user"
+ ROLE: user
dgquery: |-
query {
_entities()
}
-- name: "Entities query with top RBAC rules true and missing JWT variable for level 1 query auth rule"
+- name: Entities query with top RBAC rules true and missing JWT variable for level 1 query auth rule
gqlquery: |
query {
_entities(representations: [{__typename: "Astronaut", id: "0x1"},{__typename: "Astronaut", id: "0x2"},{__typename: "Astronaut", id: "0x3"}]) {
@@ -2032,7 +2036,7 @@
}
}
jwtvar:
- ROLE: "admin"
+ ROLE: admin
dgquery: |-
query {
_entities(func: uid(_EntityRoot), orderasc: Astronaut.id) {
@@ -2043,8 +2047,7 @@
Astronaut_3 as var(func: eq(Astronaut.id, "0x1", "0x2", "0x3")) @filter(type(Astronaut))
}
--
- name: "Query interface should return all the nodes of a type if rbac rules of type are true"
+- name: Query interface should return all the nodes of a type if rbac rules of type are true
gqlquery: |
query {
queryVehicle{
@@ -2052,8 +2055,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user"
+ ROLE: ADMIN
+ USER: user
dgquery: |-
query {
queryVehicle(func: uid(VehicleRoot)) {
@@ -2066,8 +2069,7 @@
Car_2 as var(func: type(Car))
}
--
- name: "Fragments in auth queries with user defined cascade"
+- name: Fragments in auth queries with user defined cascade
gqlquery: |
query {
queryHome {
@@ -2101,8 +2103,7 @@
}
}
--
- name: "Query auth rules with filter on field with ID type"
+- name: Query auth rules with filter on field with ID type
gqlquery: |
query{
queryPerson{
@@ -2125,8 +2126,7 @@
}
}
--
- name: "Query auth rules with filter on field with ID type, single JWT ID param"
+- name: Query auth rules with filter on field with ID type, single JWT ID param
gqlquery: |
query{
queryPerson{
diff --git a/graphql/resolve/auth_tests.yaml b/graphql/resolve/auth_tests.yaml
index 69170db2286..a3eeef55c2e 100644
--- a/graphql/resolve/auth_tests.yaml
+++ b/graphql/resolve/auth_tests.yaml
@@ -1,5 +1,4 @@
--
- name: "Type Authorization OR (dgquery, dgquery) filter at root node, rbac rules false"
+- name: Type Authorization OR (dgquery, dgquery) filter at root node, rbac rules false
gqlquery: |
query {
queryUser(filter: {username: {eq: "user1"}}, order: {asc: username}) {
@@ -8,14 +7,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization OR (dgquery, rbac) filter at root node, rbac rules false"
+- name: Type Authorization OR (dgquery, rbac) filter at root node, rbac rules false
gqlquery: |
query {
getProject(projID: "0x1") {
@@ -23,15 +21,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
-
--
- name: "Type Authorization AND (dgquery, rbac) filter at root node, rbac rules false"
+- name: Type Authorization AND (dgquery, rbac) filter at root node, rbac rules false
gqlquery: |
query {
getIssue(id: "0x1") {
@@ -39,14 +35,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization AND (dgquery, dgquery) filter at root node, rbac rules false"
+- name: Type Authorization AND (dgquery, dgquery) filter at root node, rbac rules false
gqlquery: |
query {
getMovie(id: "0x1") {
@@ -54,14 +49,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization filter at root node, rbac rules false"
+- name: Type Authorization filter at root node, rbac rules false
gqlquery: |
query {
getTicket(id: "0x1") {
@@ -69,14 +63,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization RBAC filter at root node, rbac rules false"
+- name: Type Authorization RBAC filter at root node, rbac rules false
gqlquery: |
query {
getLog(id: "0x1") {
@@ -84,15 +77,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
-
--
- name: "Type Authorization filter at deep node, rbac rules false"
+- name: Type Authorization filter at deep node, rbac rules false
gqlquery: |
query {
queryProject(filter: {name: {eq: "Project1"}}, order: {asc: name}) {
@@ -107,14 +98,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Field authorization filters at root node, rbac rules false"
+- name: Field authorization filters at root node, rbac rules false
gqlquery: |
query {
getUser(username: "user1") {
@@ -124,14 +114,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Field authorization filters at deep node, rbac rules false"
+- name: Field authorization filters at deep node, rbac rules false
gqlquery: |
query {
getProject(projID: "0x1") {
@@ -147,14 +136,13 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization OR (dgquery, rbac) filter at root node, rbac rules true"
+- name: Type Authorization OR (dgquery, rbac) filter at root node, rbac rules true
gqlquery: |
query {
getProject(projID: "0x1") {
@@ -162,15 +150,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
-
--
- name: "Type Authorization AND (dgquery, rbac) filter at root node, rbac rules true"
+- name: Type Authorization AND (dgquery, rbac) filter at root node, rbac rules true
gqlquery: |
query {
getIssue(id: "0x1") {
@@ -178,14 +164,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization AND filter at root node, rbac rules true"
+- name: Type Authorization AND filter at root node, rbac rules true
gqlquery: |
query {
getMovie(id: "0x1") {
@@ -193,14 +178,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization filter at root node, rbac rules true"
+- name: Type Authorization filter at root node, rbac rules true
gqlquery: |
query {
getTicket(id: "0x1") {
@@ -208,14 +192,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
--
- name: "Type Authorization RBAC filter at root node, rbac rules true"
+- name: Type Authorization RBAC filter at root node, rbac rules true
gqlquery: |
query {
getLog(id: "0x1") {
@@ -223,15 +206,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
-
--
- name: "Type Authorization filter at deep node, rbac rules true"
+- name: Type Authorization filter at deep node, rbac rules true
gqlquery: |
query {
queryProject(filter: {name: {eq: "Project1"}}, order: {asc: name}) {
@@ -246,14 +227,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
--
- name: "Field authorization filters at root node, rbac rules true"
+- name: Field authorization filters at root node, rbac rules true
gqlquery: |
query {
getUser(username: "user1") {
@@ -263,14 +243,13 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
--
- name: "Field authorization filters at deep node, rbac rules true"
+- name: Field authorization filters at deep node, rbac rules true
gqlquery: |
query {
getProject(projID: "0x1") {
@@ -286,9 +265,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
dgquery: |-
query {
}
-
diff --git a/graphql/resolve/auth_update_test.yaml b/graphql/resolve/auth_update_test.yaml
index 385d9a886ac..8b4ea54aac5 100644
--- a/graphql/resolve/auth_update_test.yaml
+++ b/graphql/resolve/auth_update_test.yaml
@@ -1,4 +1,4 @@
-- name: "Update one node that creates nothing"
+- name: Update one node that creates nothing
gqlquery: |
mutation updateUserSecret($upd: UpdateUserSecretInput!) {
updateUserSecret(input: $upd) {
@@ -8,7 +8,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{ "filter": { "id": [ "0x123" ] },
@@ -27,7 +27,7 @@
uids: |
{ }
-- name: "Update a node that does a deep add"
+- name: Update a node that does a deep add
gqlquery: |
mutation updateColumn($upd: UpdateColumnInput!) {
updateColumn(input: $upd) {
@@ -37,7 +37,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -88,8 +88,7 @@
"Ticket": [ { "uid": "0x789" } ]
}
-
-- name: "Update a node that does a deep add and fails auth"
+- name: Update a node that does a deep add and fails auth
gqlquery: |
mutation updateColumn($upd: UpdateColumnInput!) {
updateColumn(input: $upd) {
@@ -99,7 +98,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -147,14 +146,13 @@
}
authjson: |
{ }
- error:
- { "message": "mutation failed because authorization failed" }
+ error: { "message": mutation failed because authorization failed }
# See comments about additional deletes in update_mutation_test.yaml.
# Because of those additional deletes, for example, when we update a column and
# link it to an existing ticket, we might remove that ticket from the column it was
# attached to ... so we need authorization to update that column as well.
-- name: "update with auth on additional delete (updt list edge)"
+- name: update with auth on additional delete (updt list edge)
gqlquery: |
mutation updateColumn($upd: UpdateColumnInput!) {
updateColumn(input: $upd) {
@@ -164,7 +162,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -225,7 +223,7 @@
"Column_5.auth": [ { "uid": "0x456" } ]
}
-- name: "update with auth on additional delete that fails (updt list edge)"
+- name: update with auth on additional delete that fails (updt list edge)
gqlquery: |
mutation updateColumn($upd: UpdateColumnInput!) {
updateColumn(input: $upd) {
@@ -235,7 +233,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -300,9 +298,9 @@
authjson: |
{ }
error:
- { "message": "couldn't rewrite query for mutation updateColumn because authorization failed" }
+ { "message": couldn't rewrite query for mutation updateColumn because authorization failed }
-- name: "update with auth on additional delete (updt single edge)"
+- name: update with auth on additional delete (updt single edge)
gqlquery: |
mutation updateTicket($upd: UpdateTicketInput!) {
updateTicket(input: $upd) {
@@ -312,7 +310,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -374,7 +372,7 @@
"Column_5.auth": [ { "uid": "0x499" } ]
}
-- name: "update with auth on additional delete that fails (updt single edge)"
+- name: update with auth on additional delete that fails (updt single edge)
gqlquery: |
mutation updateTicket($upd: UpdateTicketInput!) {
updateTicket(input: $upd) {
@@ -384,7 +382,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
variables: |
{ "upd":
{
@@ -445,9 +443,9 @@
"Column_5": [ { "uid": "0x499" } ]
}
error:
- { "message": "couldn't rewrite query for mutation updateTicket because authorization failed" }
+ { "message": couldn't rewrite query for mutation updateTicket because authorization failed }
-- name: "Update with top level RBAC false."
+- name: Update with top level RBAC false
gqlquery: |
mutation updateLog($log: UpdateLogInput!) {
updateLog(input: $log) {
@@ -457,8 +455,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "log":
{
@@ -474,7 +472,7 @@
x as updateLog()
}
-- name: "Update with top level RBAC true."
+- name: Update with top level RBAC true
gqlquery: |
mutation updateLog($log: UpdateLogInput!) {
updateLog(input: $log) {
@@ -484,8 +482,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "log":
{
@@ -505,7 +503,7 @@
Log_1 as var(func: uid(0x123)) @filter(type(Log))
}
-- name: "Update with top level OR RBAC false."
+- name: Update with top level OR RBAC false
gqlquery: |
mutation updateProject($proj: UpdateProjectInput!) {
updateProject(input: $proj) {
@@ -515,8 +513,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "proj":
{
@@ -540,7 +538,7 @@
}
}
-- name: "Update with top level OR RBAC true."
+- name: Update with top level OR RBAC true
gqlquery: |
mutation updateProject($proj: UpdateProjectInput!) {
updateProject(input: $proj) {
@@ -550,8 +548,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "proj":
{
@@ -570,7 +568,7 @@
Project_1 as var(func: uid(0x123)) @filter(type(Project))
}
-- name: "Update with top level And RBAC true."
+- name: Update with top level And RBAC true
gqlquery: |
mutation updateIssue($issue: UpdateIssueInput!) {
updateIssue(input: $issue) {
@@ -580,8 +578,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "issue":
{
@@ -603,7 +601,7 @@
}
}
-- name: "Update with top level And RBAC false."
+- name: Update with top level And RBAC false
gqlquery: |
mutation updateIssue($issue: UpdateIssueInput!) {
updateIssue(input: $issue) {
@@ -613,8 +611,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "issue":
{
@@ -629,7 +627,7 @@
x as updateIssue()
}
-- name: "Update with top level not RBAC true."
+- name: Update with top level not RBAC true
gqlquery: |
mutation updateComplexLog($log: UpdateComplexLogInput!) {
updateComplexLog(input: $log) {
@@ -639,8 +637,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{ "log":
{
@@ -659,7 +657,7 @@
ComplexLog_1 as var(func: uid(0x123)) @filter(type(ComplexLog))
}
-- name: "Update with top level not RBAC false."
+- name: Update with top level not RBAC false
gqlquery: |
mutation updateComplexLog($log: UpdateComplexLogInput!) {
updateComplexLog(input: $log) {
@@ -669,8 +667,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{ "log":
{
@@ -685,7 +683,7 @@
x as updateComplexLog()
}
-- name: "Update Type inheriting Graph Traversal Auth Rules from Interface."
+- name: Update Type inheriting Graph Traversal Auth Rules from Interface
gqlquery: |
mutation updateQuestion($question: UpdateQuestionInput!) {
updateQuestion(input: $question) {
@@ -695,7 +693,7 @@
}
}
jwtvar:
- USER: "user1"
+ USER: user1
ANS: "true"
variables: |
{
@@ -725,7 +723,7 @@
}
}
-- name: "Update Type inheriting Graph Traversal Auth Rules on Interface failed."
+- name: Update Type inheriting Graph Traversal Auth Rules on Interface failed
gqlquery: |
mutation updateQuestion($question: UpdateQuestionInput!) {
updateQuestion(input: $question) {
@@ -749,7 +747,7 @@
x as updateQuestion()
}
-- name: "Update Type inheriting RBAC Auth Rules from Interface."
+- name: Update Type inheriting RBAC Auth Rules from Interface
gqlquery: |
mutation updateFbPost($post: UpdateFbPostInput!) {
updateFbPost(input: $post) {
@@ -759,8 +757,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{
"post":
@@ -786,7 +784,7 @@
}
}
-- name: "Update Type inheriting RBAC Auth Rules from Interface failed"
+- name: Update Type inheriting RBAC Auth Rules from Interface failed
gqlquery: |
mutation updateFbPost($post: UpdateFbPostInput!) {
updateFbPost(input: $post) {
@@ -796,8 +794,8 @@
}
}
jwtvar:
- ROLE: "USER"
- USER: "user1"
+ ROLE: USER
+ USER: user1
variables: |
{
"post":
@@ -813,7 +811,9 @@
x as updateFbPost()
}
-- name: "Updating interface having its own auth rules and implementing types also have auth rules and all are satisfied"
+- name:
+ Updating interface having its own auth rules and implementing types also have auth rules and all
+ are satisfied
gqlquery: |
mutation updatePost($post: UpdatePostInput!) {
updatePost(input: $post) {
@@ -823,8 +823,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
ANS: "true"
variables: |
{
@@ -869,7 +869,9 @@
}
}
-- name: "Updating interface having its own auth rules and implementing types also have auth rules and some of the rules of implementing types are not satisfied"
+- name:
+ Updating interface having its own auth rules and implementing types also have auth rules and
+ some of the rules of implementing types are not satisfied
gqlquery: |
mutation updatePost($post: UpdatePostInput!) {
updatePost(input: $post) {
@@ -879,8 +881,8 @@
}
}
jwtvar:
- ROLE: "ADMIN"
- USER: "user1"
+ ROLE: ADMIN
+ USER: user1
variables: |
{
"post":
@@ -914,7 +916,9 @@
}
}
-- name: "Updating interface having its own auth rules and implementing types also have auth rules and the rules of interface are not satisfied"
+- name:
+ Updating interface having its own auth rules and implementing types also have auth rules and the
+ rules of interface are not satisfied
gqlquery: |
mutation updatePost($post: UpdatePostInput!) {
updatePost(input: $post) {
@@ -924,7 +928,7 @@
}
}
jwtvar:
- ROLE: "ADMIN"
+ ROLE: ADMIN
ANS: "true"
variables: |
{
@@ -941,7 +945,9 @@
x as updatePost()
}
-- name: "Updating interface having no own auth rules but some implementing types have auth rules and they are not satisfied."
+- name:
+ Updating interface having no own auth rules but some implementing types have auth rules and they
+ are not satisfied
gqlquery: |
mutation updateA($inp: UpdateAInput!) {
updateA(input: $inp) {
@@ -951,7 +957,7 @@
}
}
jwtvar:
- ROLE: "ADMIN"
+ ROLE: ADMIN
ANS: "true"
variables: |
{
diff --git a/graphql/resolve/custom_mutation_test.yaml b/graphql/resolve/custom_mutation_test.yaml
index 25388aff27a..fadfb8d6ea2 100644
--- a/graphql/resolve/custom_mutation_test.yaml
+++ b/graphql/resolve/custom_mutation_test.yaml
@@ -1,4 +1,4 @@
-- name: "custom POST mutation creating movies gets body filled from variables"
+- name: custom POST mutation creating movies gets body filled from variables
gqlquery: |
mutation createMovies($movs: [MovieInput!]) {
createMyFavouriteMovies(input: $movs) {
@@ -43,7 +43,7 @@
{ "name": "Mov2" }
]
}
- headers: { "X-App-Token": ["val"], "Auth0-Token": ["tok"], "Content-type": ["application/json"] }
+ headers: { "X-App-Token": [val], "Auth0-Token": [tok], "Content-type": [application/json] }
resolvedresponse: |
{
"createMyFavouriteMovies": [
@@ -65,7 +65,7 @@
]
}
-- name: "custom PATCH mutation updating movies gets url & body filled from variables"
+- name: custom PATCH mutation updating movies gets url & body filled from variables
gqlquery: |
mutation updateMovies($id: ID!, $mov: MovieInput!) {
updateMyFavouriteMovie(id: $id, input: $mov) {
@@ -105,7 +105,7 @@
"director": [ { "name": "Dir1" } ]
}
}
- headers: { "Content-type": ["application/json"] }
+ headers: { "Content-type": [application/json] }
resolvedresponse: |
{
"updateMyFavouriteMovie": {
@@ -120,7 +120,7 @@
}
}
-- name: "custom DELETE mutation deleting movie, gets url filled from variables"
+- name: custom DELETE mutation deleting movie, gets url filled from variables
gqlquery: |
mutation deleteMovie($id: ID!) {
deleteMyFavouriteMovie(id: $id) {
@@ -149,7 +149,7 @@
}
url: http://myapi.com/favMovies/0x01
method: DELETE
- headers: { "Content-type": ["application/json"] }
+ headers: { "Content-type": [application/json] }
resolvedresponse: |
{
"deleteMyFavouriteMovie": {
diff --git a/graphql/resolve/custom_query_test.yaml b/graphql/resolve/custom_query_test.yaml
index dc3b092c0b4..45884375077 100644
--- a/graphql/resolve/custom_query_test.yaml
+++ b/graphql/resolve/custom_query_test.yaml
@@ -1,5 +1,4 @@
--
- name: "custom GET query returning users"
+- name: custom GET query returning users
gqlquery: |
query {
myFavoriteMovies(id: "0x1", name: "Michael", num: null) {
@@ -30,7 +29,7 @@
]
url: http://myapi.com/favMovies/0x1?name=Michael&num=
method: GET
- headers: { "Content-type": ["application/json"] }
+ headers: { "Content-type": [application/json] }
resolvedresponse: |
{
"myFavoriteMovies": [
@@ -52,8 +51,7 @@
]
}
--
- name: "custom POST query gets body filled from variables"
+- name: custom POST query gets body filled from variables
gqlquery: |
query movies($id: ID!) {
myFavoriteMoviesPart2(id: $id, name: "Michael", num: 10) {
@@ -86,7 +84,7 @@
url: http://myapi.com/favMovies/0x9?name=Michael&num=10
method: POST
body: '{ "id": "0x9", "name": "Michael", "director": { "number": 10 }}'
- headers: { "X-App-Token": ["val"], "Auth0-Token": ["tok"], "Content-type": ["application/json"] }
+ headers: { "X-App-Token": [val], "Auth0-Token": [tok], "Content-type": [application/json] }
resolvedresponse: |
{
"myFavoriteMoviesPart2": [
diff --git a/graphql/resolve/delete_mutation_test.yaml b/graphql/resolve/delete_mutation_test.yaml
index 36064ed1667..3e82d526c57 100644
--- a/graphql/resolve/delete_mutation_test.yaml
+++ b/graphql/resolve/delete_mutation_test.yaml
@@ -1,5 +1,4 @@
--
- name: "Only id filter"
+- name: Only id filter
gqlmutation: |
mutation deleteAuthor($filter: AuthorFilter!) {
deleteAuthor(filter: $filter) {
@@ -10,7 +9,7 @@
{ "filter":
{ "id": ["0x1", "0x2"] }
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -28,8 +27,7 @@
}
}
--
- name: "Delete with deep query in result"
+- name: Delete with deep query in result
gqlmutation: |
mutation deleteAuthor($filter: AuthorFilter!) {
deleteAuthor(filter: $filter) {
@@ -53,7 +51,7 @@
{ "filter":
{ "id": ["0x1", "0x2"] }
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -89,8 +87,7 @@
}
}
--
- name: "Multiple filters including id"
+- name: Multiple filters including id
gqlmutation: |
mutation deleteAuthor($filter: AuthorFilter!) {
deleteAuthor(filter: $filter) {
@@ -104,7 +101,7 @@
"name": { "eq": "A.N. Author" }
}
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -122,8 +119,7 @@
}
}
--
- name: "Multiple non-id filters"
+- name: Multiple non-id filters
gqlmutation: |
mutation deleteAuthor($filter: AuthorFilter!) {
deleteAuthor(filter: $filter) {
@@ -137,7 +133,7 @@
"dob": { "eq": "2000-01-01" }
}
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -155,8 +151,7 @@
}
}
--
- name: "With list inverse"
+- name: With list inverse
gqlmutation: |
mutation deleteState($filter: StateFilter!) {
deleteState(filter: $filter) {
@@ -167,7 +162,7 @@
{ "filter":
{ "code": { "eq": "abc" } }
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -185,8 +180,7 @@
}
}
--
- name: "With multiple inverses"
+- name: With multiple inverses
gqlmutation: |
mutation deletePost($filter: PostFilter!) {
deletePost(filter: $filter) {
@@ -197,7 +191,7 @@
{ "filter":
{ "postID": ["0x1", "0x2"] }
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -220,8 +214,7 @@
}
}
--
- name: "Delete mutation on a type with a field with reverse predicate"
+- name: Delete mutation on a type with a field with reverse predicate
gqlmutation: |
mutation deleteMovie($filter: MovieFilter!) {
deleteMovie(filter: $filter) {
@@ -232,7 +225,7 @@
{ "filter":
{ "id": ["0x1", "0x2"] }
}
- explanation: "The correct mutation and query should be built using variable and filters."
+ explanation: The correct mutation and query should be built using variables and filters.
dgmutations:
- deletejson: |
[
@@ -249,8 +242,7 @@
MovieDirector_2 as ~directed.movies
}
}
--
- name: "Deleting an interface with just a field with @id directive"
+- name: Deleting an interface with just a field with @id directive
gqlmutation: |
mutation{
deleteA(filter:{name:{eq: "xyz"}}){
@@ -278,8 +270,7 @@
- deletejson: |
[{ "uid": "uid(x)"}]
--
- name: "delete with multiple id's"
+- name: delete with multiple ids
gqlmutation: |
mutation deleteBook($filter: BookFilter!) {
deleteBook(filter: $filter) {
@@ -324,4 +315,4 @@
uid
author_2 as Book.author
}
- }
\ No newline at end of file
+ }
diff --git a/graphql/resolve/mutation_query_test.yaml b/graphql/resolve/mutation_query_test.yaml
index 8ced23604f4..366495b2982 100644
--- a/graphql/resolve/mutation_query_test.yaml
+++ b/graphql/resolve/mutation_query_test.yaml
@@ -1,6 +1,5 @@
ADD_UPDATE_MUTATION:
- -
- name: "single level"
+ - name: single level
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -18,8 +17,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "alias is ignored in query rewriting"
+ - name: alias is ignored in query rewriting
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -44,8 +42,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "selection set in result"
+ - name: selection set in result
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -63,8 +60,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "deep"
+ - name: deep
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -89,8 +85,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "can do deep filter"
+ - name: can do deep filter
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -122,8 +117,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "can work with skip and filter"
+ - name: can work with skip and filter
gqlquery: |
mutation ($skip: Boolean!, $include: Boolean!) {
ADD_UPDATE_MUTATION {
@@ -152,8 +146,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "cascade directive on mutation payload"
+ - name: cascade directive on mutation payload
gqlquery: |
mutation {
ADD_UPDATE_MUTATION @cascade {
@@ -181,8 +174,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "cascade directive on mutation query field"
+ - name: cascade directive on mutation query field
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -210,8 +202,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "cascade directive inside mutation query"
+ - name: cascade directive inside mutation query
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -239,8 +230,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "parameterized cascade directive on mutation payload"
+ - name: parameterized cascade directive on mutation payload
gqlquery: |
mutation {
ADD_UPDATE_MUTATION @cascade(fields:["post","numUids"]) {
@@ -268,8 +258,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "parametrized cascade directive on mutation query field"
+ - name: parameterized cascade directive on mutation query field
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -297,8 +286,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "parameterized cascade directive inside mutation query"
+ - name: parameterized cascade directive inside mutation query
gqlquery: |
mutation {
ADD_UPDATE_MUTATION {
@@ -326,8 +314,7 @@ ADD_UPDATE_MUTATION:
}
}
- -
- name: "parameterized cascade directive at multiple levels "
+ - name: parameterized cascade directive at multiple levels
gqlquery: |
mutation {
ADD_UPDATE_MUTATION @cascade(fields:["post"]) {
@@ -355,8 +342,7 @@ ADD_UPDATE_MUTATION:
}
}
UPDATE_MUTATION:
- -
- name: "filter update result"
+ - name: filter update result
gqlquery: |
mutation {
UPDATE_MUTATION {
@@ -373,8 +359,7 @@ UPDATE_MUTATION:
Post.title : Post.title
}
}
- -
- name: "order update result"
+ - name: order update result
gqlquery: |
mutation {
UPDATE_MUTATION {
@@ -392,8 +377,7 @@ UPDATE_MUTATION:
}
}
- -
- name: "order and pagination update result"
+ - name: order and pagination update result
gqlquery: |
mutation {
UPDATE_MUTATION {
diff --git a/graphql/resolve/query_test.yaml b/graphql/resolve/query_test.yaml
index ab15599d020..a9bae8e59b8 100644
--- a/graphql/resolve/query_test.yaml
+++ b/graphql/resolve/query_test.yaml
@@ -1,4 +1,4 @@
-- name: "eq filter with null value get translated into NOT(has) filter"
+- name: eq filter with null value gets translated into NOT(has) filter
gqlquery: |
query {
queryState(filter: {code: {eq: null}}) {
@@ -15,7 +15,7 @@
}
}
-- name: "le filter with null value doesn't get translated"
+- name: le filter with null value doesn't get translated
gqlquery: |
query {
queryCountry(filter: {name: {le: null}}) {
@@ -30,7 +30,7 @@
}
}
-- name: "in filter on string type"
+- name: in filter on string type
gqlquery: |
query {
queryState(filter: {code: {in: ["abc", "def", "ghi"]}}) {
@@ -47,7 +47,7 @@
}
}
-- name: "in filter on float type"
+- name: in filter on float type
gqlquery: |
query {
queryAuthor(filter: {reputation: {in: [10.3, 12.6, 13.6]}}) {
@@ -64,7 +64,7 @@
}
}
-- name: "in filter on datetime type"
+- name: in filter on datetime type
gqlquery: |
query {
queryAuthor(filter: {dob: {in: ["2001-01-01", "2002-02-01"]}}) {
@@ -81,7 +81,7 @@
}
}
-- name: "in filter on int type"
+- name: in filter on int type
gqlquery: |
query {
queryPost(filter: {numLikes: {in: [10, 15, 100]}}) {
@@ -95,7 +95,8 @@
dgraph.uid : uid
}
}
-- name: "in filter on field which is of enum type"
+
+- name: in filter on field which is of enum type
gqlquery: |
query{
queryVerification(filter: {prevStatus: {in: [ACTIVE, DEACTIVATED]}}){
@@ -112,7 +113,7 @@
}
}
-- name: "in filter on field which is a List of enum type"
+- name: in filter on field which is a List of enum type
gqlquery: |
query{
queryVerification(filter: {status: {in: [ACTIVE, DEACTIVATED]}}){
@@ -129,7 +130,7 @@
}
}
-- name: "eq filter on field which is a List of enum type"
+- name: eq filter on field which is a List of enum type
gqlquery: |
query{
queryVerification(filter: {status: {eq: ACTIVE}}){
@@ -146,7 +147,7 @@
}
}
-- name: "le filter on field which is a List of enum type"
+- name: le filter on field which is a List of enum type
gqlquery: |
query{
queryVerification(filter: {status: {le: INACTIVE}}){
@@ -162,7 +163,7 @@
dgraph.uid : uid
}
}
-- name: "Point query near filter"
+- name: Point query near filter
gqlquery: |
query {
queryHotel(filter: { location: { near: { distance: 33.33, coordinate: { latitude: 11.11, longitude: 22.22} } } }) {
@@ -182,7 +183,7 @@
}
}
-- name: "Point query within filter"
+- name: Point query within filter
gqlquery: |
query {
queryHotel(filter: { location: { within: { polygon: { coordinates: [ { points: [{ latitude: 11.11, longitude: 22.22}, { latitude: 15.15, longitude: 16.16} , { latitude: 20.20, longitude: 21.21} ]}, { points: [{ latitude: 11.18, longitude: 22.28}, { latitude: 15.18, longitude: 16.18} , { latitude: 20.28, longitude: 21.28}]} ] } } } }) {
@@ -202,7 +203,7 @@
}
}
-- name: "Polygon query near filter"
+- name: Polygon query near filter
gqlquery: |
query {
queryHotel(filter: { area: { near: { distance: 33.33, coordinate: { latitude: 11.11, longitude: 22.22} } } }) {
@@ -226,7 +227,7 @@
}
}
-- name: "Polygon query within filter"
+- name: Polygon query within filter
gqlquery: |
query {
queryHotel(filter: { area: { within: { polygon: { coordinates: [ { points: [{ latitude: 11.11, longitude: 22.22}, { latitude: 15.15, longitude: 16.16} , { latitude: 20.20, longitude: 21.21} ]}, { points: [{ latitude: 11.18, longitude: 22.28}, { latitude: 15.18, longitude: 16.18} , { latitude: 20.28, longitude: 21.28}]} ] } } } }) {
@@ -250,7 +251,7 @@
}
}
-- name: "Polygon query contains polygon filter"
+- name: Polygon query contains polygon filter
gqlquery: |
query {
queryHotel(filter: { area: { contains: { polygon: { coordinates: [ { points: [{ latitude: 11.11, longitude: 22.22}, { latitude: 15.15, longitude: 16.16} , { latitude: 20.20, longitude: 21.21} ]}, { points: [{ latitude: 11.18, longitude: 22.28}, { latitude: 15.18, longitude: 16.18} , { latitude: 20.28, longitude: 21.28}]} ] } } } }) {
@@ -274,7 +275,7 @@
}
}
-- name: "Polygon query contains point filter"
+- name: Polygon query contains point filter
gqlquery: |
query {
queryHotel(filter: { area: { contains: { point: { latitude: 11.11, longitude: 22.22}} } }) {
@@ -298,7 +299,7 @@
}
}
-- name: "Polygon query intersect polygon filter"
+- name: Polygon query intersect polygon filter
gqlquery: |
query {
queryHotel(filter: {
@@ -352,7 +353,7 @@
}
}
-- name: "Polygon query intersect multi-polygon filter"
+- name: Polygon query intersect multi-polygon filter
gqlquery: |
query {
queryHotel(filter: {
@@ -432,7 +433,7 @@
}
}
-- name: "MultiPolygon query near filter"
+- name: MultiPolygon query near filter
gqlquery: |
query {
queryHotel(filter: { branches: { near: { distance: 33.33, coordinate: { latitude: 11.11, longitude: 22.22} } } }) {
@@ -458,7 +459,7 @@
}
}
-- name: "MultiPolygon query within filter"
+- name: MultiPolygon query within filter
gqlquery: |
query {
queryHotel(filter: { branches: { within: { polygon: { coordinates: [ { points: [{ latitude: 11.11, longitude: 22.22}, { latitude: 15.15, longitude: 16.16} , { latitude: 20.20, longitude: 21.21} ]}, { points: [{ latitude: 11.18, longitude: 22.28}, { latitude: 15.18, longitude: 16.18} , { latitude: 20.28, longitude: 21.28}]} ] } } } }) {
@@ -484,7 +485,7 @@
}
}
-- name: "MultiPolygon query contains polygon filter"
+- name: MultiPolygon query contains polygon filter
gqlquery: |
query {
queryHotel(filter: { branches: { contains: { polygon: { coordinates: [ { points: [{ latitude: 11.11, longitude: 22.22}, { latitude: 15.15, longitude: 16.16} , { latitude: 20.20, longitude: 21.21} ]}, { points: [{ latitude: 11.18, longitude: 22.28}, { latitude: 15.18, longitude: 16.18} , { latitude: 20.28, longitude: 21.28}]} ] } } } }) {
@@ -510,7 +511,7 @@
}
}
-- name: "MultiPolygon query contains point filter"
+- name: MultiPolygon query contains point filter
gqlquery: |
query {
queryHotel(filter: { branches: { contains: { point: { latitude: 11.11, longitude: 22.22}} } }) {
@@ -536,7 +537,7 @@
}
}
-- name: "MultiPolygon query intersect polygon filter"
+- name: MultiPolygon query intersect polygon filter
gqlquery: |
query {
queryHotel(filter: {
@@ -592,7 +593,7 @@
}
}
-- name: "MultiPolygon query intersect multi-polygon filter"
+- name: MultiPolygon query intersect multi-polygon filter
gqlquery: |
query {
queryHotel(filter: {
@@ -674,7 +675,7 @@
}
}
-- name: "ID query"
+- name: ID query
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -689,7 +690,7 @@
}
}
-- name: "Alias isn't ignored in query rewriting - get"
+- name: Alias isn't ignored in query rewriting - get
gqlquery: |
query {
author : getAuthor(id: "0x1") {
@@ -711,7 +712,7 @@
}
}
-- name: "Alias isn't ignored in query rewriting - query"
+- name: Alias isn't ignored in query rewriting - query
gqlquery: |
query {
author : queryAuthor {
@@ -733,7 +734,7 @@
}
}
-- name: "ID field gets transformed to uid"
+- name: ID field gets transformed to uid
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -749,7 +750,7 @@
}
}
-- name: "ID query with depth"
+- name: ID query with depth
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -773,7 +774,7 @@
}
}
-- name: "ID query deep"
+- name: ID query deep
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -805,7 +806,7 @@
}
}
-- name: "Query with no args is query for everything of that type"
+- name: Query with no args is a query for everything of that type
gqlquery: |
query {
queryAuthor {
@@ -820,8 +821,7 @@
}
}
-
-- name: "eq Filter gets rewritten as root func"
+- name: eq Filter gets rewritten as root func
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }) {
@@ -851,7 +851,7 @@
}
}
-- name: "Query with has Filter"
+- name: Query with has Filter
gqlquery: |
query {
queryTeacher(filter: {has: subject}) {
@@ -866,7 +866,7 @@
}
}
-- name: "has Filter with not"
+- name: has Filter with not
gqlquery: |
query {
queryTeacher(filter: { not : {has: subject } }) {
@@ -881,7 +881,7 @@
}
}
-- name: "has Filter with and"
+- name: has Filter with and
gqlquery: |
query {
queryTeacher(filter: {has: subject, and: {has: teaches } } ) {
@@ -896,7 +896,7 @@
}
}
-- name: "has Filter on list of fields"
+- name: has Filter on list of fields
gqlquery: |
query {
queryTeacher(filter: {has: [subject, teaches ] } ) {
@@ -910,7 +910,7 @@
dgraph.uid : uid
}
}
-- name: "Query Has Filter on type which has neither ID field nor any search argument"
+- name: Query Has Filter on type which has neither ID field nor any search argument
gqlquery: |
query {
queryNode(filter: {has: name}){
@@ -924,7 +924,7 @@
dgraph.uid : uid
}
}
-- name: "Filters in same input object implies AND"
+- name: Filters in same input object imply AND
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, dob: { le: "2001-01-01" }, reputation: { gt: 2.5 } } ) {
@@ -939,7 +939,7 @@
}
}
-- name: "Filter with nested 'and'"
+- name: Filter with nested 'and'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, and: { dob: { le: "2001-01-01" }, and: { reputation: { gt: 2.5 } } } } ) {
@@ -954,7 +954,7 @@
}
}
-- name: "has Filter with nested 'and'"
+- name: has Filter with nested 'and'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, and: { dob: { le: "2001-01-01" }, and: { has: country } } } ) {
@@ -969,7 +969,7 @@
}
}
-- name: "Filter with 'or'"
+- name: Filter with 'or'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, or: { dob: { le: "2001-01-01" } } } ) {
@@ -984,7 +984,7 @@
}
}
-- name: "Filter with 'or' array"
+- name: Filter with 'or' array
gqlquery: |
query {
queryAuthor(filter: { or: [ { name: { eq: "A. N. Author" } }, { dob: { le: "2001-01-01" } }] } ) {
@@ -999,7 +999,7 @@
}
}
-- name: "Filter with 'or' object"
+- name: Filter with 'or' object
gqlquery: |
query {
queryAuthor(filter: { or: { name: { eq: "A. N. Author" } }} ) {
@@ -1014,8 +1014,7 @@
}
}
-
-- name: "Filter with implied and as well as 'or'"
+- name: Filter with implied and as well as 'or'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, reputation: { gt: 2.5 }, or: { dob: { le: "2001-01-01" } } } ) {
@@ -1030,7 +1029,7 @@
}
}
-- name: "Filter with implied and nested in 'or'"
+- name: Filter with implied and nested in 'or'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, or: { reputation: { gt: 2.5 }, dob: { le: "2001-01-01" } } } ) {
@@ -1045,7 +1044,7 @@
}
}
-- name: "Filter nested 'or'"
+- name: Filter nested 'or'
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, or: { reputation: { gt: 2.5 }, or: { dob: { le: "2001-01-01" } } } } ) {
@@ -1060,7 +1059,7 @@
}
}
-- name: "Filter with 'not"
+- name: Filter with 'not'
gqlquery: |
query {
queryAuthor(filter: { not: { reputation: { gt: 2.5 } } } ) {
@@ -1075,7 +1074,7 @@
}
}
-- name: "Filter with first"
+- name: Filter with first
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, first: 10) {
@@ -1090,7 +1089,7 @@
}
}
-- name: "Filter with first and offset"
+- name: Filter with first and offset
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, first: 10, offset: 10) {
@@ -1105,7 +1104,7 @@
}
}
-- name: "Filter with order asc"
+- name: Filter with order asc
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, order: { asc: reputation }) {
@@ -1120,7 +1119,7 @@
}
}
-- name: "Filter with order desc"
+- name: Filter with order desc
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, order: { desc: reputation }) {
@@ -1135,8 +1134,7 @@
}
}
-
-- name: "Filter with nested order"
+- name: Filter with nested order
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, order: { desc: reputation, then: { asc: dob } }) {
@@ -1151,7 +1149,7 @@
}
}
-- name: "Filter with order, first and offset"
+- name: Filter with order, first and offset
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } }, order: { desc: reputation }, first: 10, offset: 10) {
@@ -1166,7 +1164,7 @@
}
}
-- name: "Deep filter"
+- name: Deep filter
gqlquery: |
query {
queryAuthor {
@@ -1188,8 +1186,7 @@
}
}
-
-- name: "Deep filter with has filter"
+- name: Deep filter with has filter
gqlquery: |
query {
queryAuthor {
@@ -1210,7 +1207,8 @@
dgraph.uid : uid
}
}
-- name: "Deep filter with has filter on list of fields"
+
+- name: Deep filter with has filter on list of fields
gqlquery: |
query {
queryAuthor {
@@ -1232,7 +1230,7 @@
}
}
-- name: "Deep filter with has and other filters"
+- name: Deep filter with has and other filters
gqlquery: |
query {
queryAuthor {
@@ -1253,7 +1251,8 @@
dgraph.uid : uid
}
}
-- name: "Deep filter with first"
+
+- name: Deep filter with first
gqlquery: |
query {
queryAuthor {
@@ -1275,7 +1274,7 @@
}
}
-- name: "Deep filter with order, first and offset"
+- name: Deep filter with order, first and offset
gqlquery: |
query {
queryAuthor {
@@ -1297,7 +1296,7 @@
}
}
-- name: "Deep filter with multiple order, first and offset"
+- name: Deep filter with multiple order, first and offset
gqlquery: |
query {
queryAuthor {
@@ -1319,7 +1318,7 @@
}
}
-- name: "Float with large exponentiation"
+- name: Float with large exponentiation
gqlquery: |
query {
queryAuthor(filter:{ reputation: { gt: 123456789.113 } }) {
@@ -1334,7 +1333,7 @@
}
}
-- name: "All Float filters work"
+- name: All Float filters work
gqlquery: |
query {
queryAuthor(filter: { reputation: { gt: 1.1 }, or: { reputation: { ge: 1.1 }, or: { reputation: { lt: 1.1 }, or: { reputation: { le: 1.1 }, or: { reputation: { eq: 1.1 } } } } } } ) {
@@ -1349,7 +1348,7 @@
}
}
-- name: "All DateTime filters work"
+- name: All DateTime filters work
gqlquery: |
query {
queryAuthor(filter: { dob: { gt: "2000-01-01" }, or: { dob: { ge: "2000-01-01" }, or: { dob: { lt: "2000-01-01" }, or: { dob: { le: "2000-01-01" }, or: { dob: { eq: "2000-01-01" } } } } } } ) {
@@ -1364,7 +1363,7 @@
}
}
-- name: "All Int filters work"
+- name: All Int filters work
gqlquery: |
query {
queryPost(filter: { numLikes: { gt: 10 }, or: { numLikes: { ge: 10 }, or: { numLikes: { lt: 10 }, or: { numLikes: { le: 10 }, or: { numLikes: { eq: 10 } } } } } } ) {
@@ -1379,7 +1378,7 @@
}
}
-- name: "All String hash filters work"
+- name: All String hash filters work
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" } } ) {
@@ -1394,7 +1393,7 @@
}
}
-- name: "All String exact filters work"
+- name: All String exact filters work
gqlquery: |
query {
queryCountry(filter: { name: { gt: "AAA" }, or: { name: { ge: "AAA" }, or: { name: { lt: "AAA" }, or: { name: { le: "AAA" }, or: { name: { eq: "AAA" } } } } } } ) {
@@ -1409,7 +1408,7 @@
}
}
-- name: "All String exact filters work with an array for OR"
+- name: All String exact filters work with an array for OR
gqlquery: |
query {
queryCountry(filter: { name: { gt: "AAA" }, or: [{ name: { ge: "AAA" }}, { name: { lt: "AAA" }}, { name: { le: "AAA" }}, { name: { eq: "AAA" } }] }) {
@@ -1424,7 +1423,7 @@
}
}
-- name: "All String exact filters work with an array for AND"
+- name: All String exact filters work with an array for AND
gqlquery: |
query {
queryCountry(filter: { name: { gt: "AAA" }, and: [{ name: { ge: "AAA" }}, { name: { lt: "AAA" }}, { name: { le: "AAA" }}, { name: { eq: "AAA" } }] }) {
@@ -1439,8 +1438,7 @@
}
}
-
-- name: "Represent (A OR B) AND (C OR D)"
+- name: Represent (A OR B) AND (C OR D)
gqlquery: |
query {
queryCountry(filter: { and: [{ name: { gt: "AAA" }, or: { name: { lt: "XXX" }}}, { name: { gt : "CCC" }, or: { name: { lt: "MMM" }}}] }) {
@@ -1455,7 +1453,7 @@
}
}
-- name: "All String term filters work"
+- name: All String term filters work
gqlquery: |
query {
queryPost(filter: { title: { anyofterms: "GraphQL"}, or: { title: { allofterms: "GraphQL" } } } ) {
@@ -1470,8 +1468,7 @@
}
}
-
-- name: "All String fulltext filters work"
+- name: All String fulltext filters work
gqlquery: |
query {
queryPost(filter: { text: { anyoftext: "GraphQL"}, or: { text: { alloftext: "GraphQL" } } } ) {
@@ -1486,7 +1483,7 @@
}
}
-- name: "All String regexp filters work"
+- name: All String regexp filters work
gqlquery: |
query {
queryCountry(filter: { name: { regexp: "/.*ust.*/" }}) {
@@ -1501,7 +1498,7 @@
}
}
-- name: "Aggregate Query"
+- name: Aggregate Query
gqlquery: |
query {
aggregateCountry(filter: { name: { regexp: "/.*ust.*/" }}) {
@@ -1527,7 +1524,7 @@
}
}
-- name: "Skip directive"
+- name: Skip directive
gqlquery: |
query ($skipTrue: Boolean!, $skipFalse: Boolean!) {
getAuthor(id: "0x1") {
@@ -1551,7 +1548,7 @@
}
}
-- name: "Include directive"
+- name: Include directive
gqlquery: |
query ($includeTrue: Boolean!, $includeFalse: Boolean!) {
queryAuthor {
@@ -1575,7 +1572,7 @@
}
}
-- name: "Include only fields for which skip is !false or include is true"
+- name: Include only fields for which skip is !false or include is true
variables:
includeFalse: false
includeTrue: true
@@ -1610,7 +1607,7 @@
}
}
-- name: "Cascade directive on get query"
+- name: Cascade directive on get query
gqlquery: |
query {
getAuthor(id: "0x1") @cascade {
@@ -1632,7 +1629,7 @@
}
}
-- name: "Cascade directive on filter query"
+- name: Cascade directive on filter query
gqlquery: |
query {
queryAuthor @cascade {
@@ -1654,7 +1651,7 @@
}
}
-- name: "Cascade directive on query field"
+- name: Cascade directive on query field
gqlquery: |
query {
queryAuthor {
@@ -1676,7 +1673,7 @@
}
}
-- name: "Cascade directive on root query and query field"
+- name: Cascade directive on root query and query field
gqlquery: |
query {
queryAuthor @cascade {
@@ -1698,7 +1695,7 @@
}
}
-- name: "Parameterized Cascade directive on filter query"
+- name: Parameterized Cascade directive on filter query
gqlquery: |
query {
queryAuthor @cascade(fields:["dob"]) {
@@ -1722,7 +1719,7 @@
}
}
-- name: "Parameterized Cascade directive on get query"
+- name: Parameterized Cascade directive on get query
gqlquery: |
query {
getAuthor(id: "0x1") @cascade(fields:["dob"]) {
@@ -1746,7 +1743,7 @@
}
}
-- name: "Parameterized Cascade directive on query field"
+- name: Parameterized Cascade directive on query field
gqlquery: |
query {
queryAuthor {
@@ -1770,7 +1767,7 @@
}
}
-- name: "Parameterized Cascade directive on root and query field"
+- name: Parameterized Cascade directive on root and query field
gqlquery: |
query {
queryAuthor @cascade(fields:["dob"]) {
@@ -1796,7 +1793,7 @@
}
}
-- name: "Parameterized Cascade directive with multiple parameters on root and query field"
+- name: Parameterized Cascade directive with multiple parameters on root and query field
gqlquery: |
query {
queryAuthor @cascade(fields:["dob","reputation","id"]) {
@@ -1822,7 +1819,8 @@
}
}
-- name: "Parameterized Cascade directive with argument at outer level which is not present in inner level "
+- name:
+ Parameterized Cascade directive with argument at outer level which is not present in inner level
gqlquery: |
query {
queryAuthor @cascade(fields:["dob"]) {
@@ -1848,7 +1846,7 @@
}
}
-- name: "parameterized cascade with interface implementation Human"
+- name: parameterized cascade with interface implementation Human
gqlquery: |
query {
queryHuman @cascade(fields:["id","name","ename","dob"]) {
@@ -1870,7 +1868,7 @@
}
}
-- name: "parameterized cascade with interface Character"
+- name: parameterized cascade with interface Character
gqlquery: |
query {
queryCharacter @cascade(fields:["id","name"]) {
@@ -1887,7 +1885,7 @@
}
}
-- name: "Parameterized Cascade directive on root and nested field using variables"
+- name: Parameterized Cascade directive on root and nested field using variables
gqlquery: |
query($fieldsRoot:[String],$fieldsDeep:[String]) {
queryAuthor @cascade(fields: $fieldsRoot) {
@@ -1923,7 +1921,7 @@
}
}
-- name: "getHuman which implements an interface"
+- name: getHuman which implements an interface
gqlquery: |
query {
getHuman(id: "0x1") {
@@ -1945,7 +1943,7 @@
}
}
-- name: "queryHuman which implements an interface"
+- name: queryHuman which implements an interface
gqlquery: |
query {
queryHuman {
@@ -1967,7 +1965,7 @@
}
}
-- name: "Get Query on interface whose implementation contains Auth rules."
+- name: Get Query on interface whose implementation contains Auth rules
gqlquery: |
query {
getX(id: "0x1") {
@@ -1980,7 +1978,7 @@
getX()
}
-- name: "Query on interface whose implementation contains Auth rules."
+- name: Query on interface whose implementation contains Auth rules
gqlquery: |
query {
queryX {
@@ -1993,7 +1991,7 @@
queryX()
}
-- name: "filter with order for type which implements an interface"
+- name: filter with order for type which implements an interface
gqlquery: |
query {
queryHuman (filter: { name: { anyofterms: "GraphQL" } }, order: { asc: ename }) {
@@ -2013,7 +2011,7 @@
}
}
-- name: "queryCharacter with fragment for human"
+- name: queryCharacter with fragment for human
gqlquery: |
query {
queryCharacter {
@@ -2036,7 +2034,7 @@
}
}
-- name: "queryCharacter with fragment on multiple types"
+- name: queryCharacter with fragment on multiple types
gqlquery: |
query {
queryCharacter {
@@ -2063,7 +2061,9 @@
}
}
-- name: "fragment on interface implemented by type which implements multiple interfaces in query on some other interface"
+- name:
+ fragment on interface implemented by type which implements multiple interfaces in query on some
+ other interface
gqlquery: |
query {
queryCharacter {
@@ -2088,7 +2088,7 @@
}
}
-- name: "Filter with id uses uid func at root."
+- name: Filter with id uses uid func at root
gqlquery: |
query {
queryAuthor(filter: { id: ["0x1", "0x2"], and: { name: { eq: "A. N. Author" } }}) {
@@ -2103,7 +2103,7 @@
}
}
-- name: "Between filter"
+- name: Between filter
gqlquery: |
query {
queryPost(filter: { numLikes: { between : { min :10, max: 20 }}}) {
@@ -2120,7 +2120,7 @@
}
}
-- name: "deep Between filter"
+- name: deep Between filter
gqlquery: |
query{
queryAuthor(filter: {reputation: {between: {min:6.0, max: 7.2}}}){
@@ -2146,7 +2146,7 @@
}
}
-- name: "Filter with id inside and argument doesn't use uid func at root."
+- name: Filter with id inside and argument doesn't use uid func at root
gqlquery: |
query {
queryAuthor(filter: { name: { eq: "A. N. Author" }, and: { id: ["0x1", "0x2"] }}) {
@@ -2161,7 +2161,7 @@
}
}
-- name: "Filter with id and not translates correctly.."
+- name: Filter with id and not translates correctly
gqlquery: |
query {
queryAuthor(filter: { not: { id: ["0x1", "0x2"] }}) {
@@ -2176,7 +2176,7 @@
}
}
-- name: "Deep filter with id"
+- name: Deep filter with id
gqlquery: |
query {
queryAuthor {
@@ -2198,7 +2198,7 @@
}
}
-- name: "Deep filter with id in not key"
+- name: Deep filter with id in not key
gqlquery: |
query {
queryAuthor {
@@ -2220,7 +2220,7 @@
}
}
-- name: "Pagination and Order at root node with UID."
+- name: Pagination and Order at root node with UID
gqlquery: |
query {
queryAuthor(filter: { id: ["0x1", "0x2"] }, order: {asc: name}, first: 0, offset: 1 ) {
@@ -2235,7 +2235,7 @@
}
}
-- name: "Order at root node with UID."
+- name: Order at root node with UID
gqlquery: |
query {
queryAuthor(filter: { id: ["0x1", "0x2"] }, order: {asc: name}) {
@@ -2250,7 +2250,7 @@
}
}
-- name: "Order at root node without UID."
+- name: Order at root node without UID
gqlquery: |
query {
queryAuthor(order: {asc: name}) {
@@ -2265,7 +2265,7 @@
}
}
-- name: "Order and Pagination at root node without UID."
+- name: Order and Pagination at root node without UID
gqlquery: |
query {
queryAuthor(order: {asc: name}, first: 2, offset: 3) {
@@ -2280,8 +2280,7 @@
}
}
-
-- name: "Filter with no valid id construct the right query with type func at root."
+- name: Filter with no valid id constructs the right query with type func at root
gqlquery: |
query {
queryAuthor(filter: { id: ["alice", "bob"], and: { name: { eq: "A. N. Author" } }}) {
@@ -2296,7 +2295,7 @@
}
}
-- name: "Filter with id only includes valid id in dgquery."
+- name: Filter with id only includes valid id in dgquery
gqlquery: |
query {
queryAuthor(filter: { id: ["0x1", "bob"], and: { name: { eq: "A. N. Author" } }}) {
@@ -2311,7 +2310,7 @@
}
}
-- name: "Get editor without supplying anything"
+- name: Get editor without supplying anything
gqlquery: |
query {
getEditor {
@@ -2326,7 +2325,7 @@
}
}
-- name: "Get editor using code"
+- name: Get editor using code
gqlquery: |
query {
getEditor(code: "tolstoy") {
@@ -2341,7 +2340,7 @@
}
}
-- name: "Get editor using both code and id"
+- name: Get editor using both code and id
gqlquery: |
query {
getEditor(code: "tolstoy", id: "0x1") {
@@ -2356,7 +2355,7 @@
}
}
-- name: "Get with XID where no ID in type"
+- name: Get with XID where no ID in type
gqlquery: |
query {
getState(code: "NSW") {
@@ -2371,7 +2370,7 @@
}
}
-- name: "Query editor using code"
+- name: Query editor using code
gqlquery: |
query {
queryEditor(filter: { code: { eq: "editor" }, and: { name: { eq: "A. N. Editor" }}}) {
@@ -2386,7 +2385,7 @@
}
}
-- name: "Query editor using code and uid"
+- name: Query editor using code and uid
gqlquery: |
query {
queryEditor(filter: { id: ["0x1"], and: { code: { eq: "editor"}}}) {
@@ -2401,7 +2400,7 @@
}
}
-- name: "Query along reverse edge is converted appropriately"
+- name: Query along reverse edge is converted appropriately
gqlquery: |
query {
queryMovie {
@@ -2423,7 +2422,7 @@
}
}
-- name: "deprecated fields can be queried"
+- name: deprecated fields can be queried
gqlquery: |
query {
queryCategory {
@@ -2439,7 +2438,7 @@
}
}
-- name: "Password query"
+- name: Password query
gqlquery: |
query {
checkUserPassword(name: "user1", pwd: "Password") {
@@ -2457,7 +2456,7 @@
}
}
-- name: "Password query with alias"
+- name: Password query with alias
gqlquery: |
query {
verify : checkUserPassword(name: "user1", pwd: "Password") {
@@ -2475,7 +2474,7 @@
}
}
-- name: "Rewrite without custom fields"
+- name: Rewrite without custom fields
gqlquery: |
query {
getComment(id: "0x1") {
@@ -2499,7 +2498,7 @@
}
}
-- name: "Include fields needed by custom directive"
+- name: Include fields needed by custom directive
gqlquery: |
query {
getComment(id: "0x1") {
@@ -2517,7 +2516,7 @@
Comment.url : Comment.url
}
}
-- name: "Rewrite without custom fields deep"
+- name: Rewrite without custom fields deep
gqlquery: |-
query {
getPost(postID: "0x1") {
@@ -2548,7 +2547,7 @@
}
}
}
-- name: "Include fields needed by custom directive deep"
+- name: Include fields needed by custom directive deep
gqlquery: |-
query {
getPost(postID: "0x1") {
@@ -2577,7 +2576,7 @@
}
}
}
-- name: "getType by id should work"
+- name: getType by id should work
gqlquery: |-
query {
getTweets(id: "1286891968727982081") {
@@ -2594,7 +2593,9 @@
}
}
-- name: "querying a inbuiltType field multiple times with different aliases adds it multiple times in rewriting"
+- name:
+ querying an inbuiltType field multiple times with different aliases adds it multiple times in
+ rewriting
gqlquery: |-
query {
queryThingOne {
@@ -2616,7 +2617,9 @@
}
}
-- name: "querying an Enum type field multiple times with different aliases adds it multiple times in rewriting"
+- name:
+ querying an Enum type field multiple times with different aliases adds it multiple times in
+ rewriting
gqlquery: |-
query {
queryPost {
@@ -2634,7 +2637,9 @@
dgraph.uid : uid
}
}
-- name: "querying a non-inbuiltType field multiple times with different aliases should reflect in rewriting"
+- name:
+ querying a non-inbuiltType field multiple times with different aliases should reflect in
+ rewriting
gqlquery: |-
query {
queryAuthor {
@@ -2667,7 +2672,7 @@
}
}
-- name: "querying field multiple times with different aliases and same filters"
+- name: querying field multiple times with different aliases and same filters
gqlquery: |-
query {
queryAuthor {
@@ -2699,7 +2704,7 @@
dgraph.uid : uid
}
}
-- name: "Query with Same Alias"
+- name: Query with Same Alias
gqlquery: |-
query {
queryAuthor {
@@ -2726,7 +2731,7 @@
dgraph.uid : uid
}
}
-- name: "Aggregate Query with multiple aliases"
+- name: Aggregate Query with multiple aliases
gqlquery: |
query{
queryAuthor{
@@ -2751,7 +2756,7 @@
}
}
-- name: "query with fragments inside interface"
+- name: query with fragments inside interface
gqlquery: |-
query {
queryThing {
@@ -2790,7 +2795,7 @@
}
}
-- name: "query only __typename in fragments inside interface"
+- name: query only __typename in fragments inside interface
gqlquery: |-
query {
queryThing {
@@ -2810,7 +2815,7 @@
}
}
-- name: "query only __typename in fragment inside object"
+- name: query only __typename in fragment inside object
gqlquery: |-
query {
queryThingOne {
@@ -2826,7 +2831,7 @@
}
}
-- name: "query union field - with fragment on interface implemented by member-types"
+- name: query union field - with fragment on interface implemented by member-types
gqlquery: |-
query {
queryHome {
@@ -2865,7 +2870,7 @@
}
}
-- name: "query union field - with repeated field in member-types"
+- name: query union field - with repeated field in member-types
gqlquery: |-
query {
queryHome {
@@ -2894,7 +2899,7 @@
}
}
-- name: "query union field - with arguments on union field"
+- name: query union field - with arguments on union field
gqlquery: |-
query {
queryHome {
@@ -2928,7 +2933,7 @@
}
}
-- name: "query union field - memberTypes is empty list"
+- name: query union field - memberTypes is empty list
gqlquery: |-
query {
queryHome {
@@ -2952,7 +2957,7 @@
queryHome(func: type(Home))
}
-- name: "query union field - memberTypes isn't specified"
+- name: query union field - memberTypes isn't specified
gqlquery: |-
query {
queryHome {
@@ -2978,7 +2983,7 @@
}
}
-- name: "query union field - memberTypes contains all the types"
+- name: query union field - memberTypes contains all the types
gqlquery: |-
query {
queryHome {
@@ -3005,7 +3010,7 @@
}
}
-- name: "Count query at child level"
+- name: Count query at child level
gqlquery: |
query {
queryCountry {
@@ -3024,7 +3029,7 @@
}
}
-- name: "Aggregate query at child level with filter and multiple aggregate fields"
+- name: Aggregate query at child level with filter and multiple aggregate fields
gqlquery: |
query {
queryCountry {
@@ -3068,7 +3073,7 @@
}
}
-- name: "Count query at child level with filter"
+- name: Count query at child level with filter
gqlquery: |
query {
queryCountry {
@@ -3094,7 +3099,7 @@
}
}
-- name: "Deep child level get query with count"
+- name: Deep child level get query with count
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -3118,7 +3123,7 @@
}
}
-- name: "Aggregate Query with Sum and Avg"
+- name: Aggregate Query with Sum and Avg
gqlquery: |
query {
aggregateTweets() {
@@ -3144,7 +3149,7 @@
}
}
-- name: "query using single ID in filter"
+- name: query using single ID in filter
gqlquery: |
query {
queryAuthor(filter:{id: "0x1"}) {
@@ -3159,7 +3164,7 @@
}
}
-- name: "entities query for extended type having @key field of ID type"
+- name: entities query for extended type having @key field of ID type
gqlquery: |
query {
_entities(representations: [{__typename: "Astronaut", id: "0x1" },{__typename: "Astronaut", id: "0x2" }]) {
@@ -3182,7 +3187,7 @@
}
}
-- name: "entities query for extended type having @key field of string type with @id directive"
+- name: entities query for extended type having @key field of string type with @id directive
gqlquery: |
query {
_entities(representations: [{__typename: "SpaceShip", id: "0x1" },{__typename: "SpaceShip", id: "0x2" }]) {
@@ -3205,7 +3210,7 @@
}
}
-- name: "get query with multiple @id and an ID field"
+- name: get query with multiple @id and an ID field
gqlquery: |
query {
getBook(id: "0x1", title: "GraphQL", ISBN: "001HB") {
@@ -3255,8 +3260,7 @@
}
}
--
- name: "Query fields linked to reverse predicates in Dgraph"
+- name: Query fields linked to reverse predicates in Dgraph
gqlquery: |
query {
queryLinkX(filter:{f9:{eq: "Alice"}}) {
@@ -3303,8 +3307,7 @@
}
}
--
- name: "query language tag fields with filter and order"
+- name: query language tag fields with filter and order
gqlquery: |
query {
queryPerson(filter:{or:[{name:{eq:"Alice"}},{nameHi:{eq:"ऐलिस"}},{nameZh:{eq:"爱丽丝"}},{name_Untag_AnyLang:{eq:"Alice"}}]}, order: { asc: nameHi })
@@ -3330,7 +3333,7 @@
}
}
-- name: "get query on interface with @id field having interface argument set"
+- name: get query on interface with @id field having interface argument set
gqlquery: |
query {
getMember(refID: "101") {
@@ -3353,7 +3356,7 @@
dgraph.uid : uid
}
}
-- name: "query similar_to"
+- name: query similar_to
gqlquery: |
query {
querySimilarProductByEmbedding(by: productVector, topK: 1, vector: [0.1, 0.2, 0.3, 0.4, 0.5]) {
@@ -3377,7 +3380,8 @@
Product.vector_distance : val(distance)
}
}
-- name: "query vector using uid"
+
+- name: query vector using uid
gqlquery: |
query {
querySimilarProductById(by: productVector, topK: 3, id: "0x1") {
@@ -3408,7 +3412,7 @@
}
}
-- name: "query vector by id with cosine distance"
+- name: query vector by id with cosine distance
gqlquery: |
query {
querySimilarProjectCosineById(by: description_v, topK: 3, id: "0x1") {
@@ -3439,7 +3443,7 @@
}
}
-- name: "query similar_to with cosine distance"
+- name: query similar_to with cosine distance
gqlquery: |
query {
querySimilarProjectCosineByEmbedding(by: description_v, topK: 1, vector: [0.1, 0.2, 0.3, 0.4, 0.5]) {
@@ -3463,7 +3467,7 @@
ProjectCosine.vector_distance : val(distance)
}
}
-- name: "query vector by id with dot product distance"
+- name: query vector by id with dot product distance
gqlquery: |
query {
querySimilarProjectDotProductById(by: description_v, topK: 3, id: "0x1") {
@@ -3494,7 +3498,7 @@
}
}
-- name: "query similar_to with dot product distance"
+- name: query similar_to with dot product distance
gqlquery: |
query {
querySimilarProjectDotProductByEmbedding(by: description_v, topK: 1, vector: [0.1, 0.2, 0.3, 0.4, 0.5]) {
@@ -3517,4 +3521,4 @@
dgraph.uid : uid
ProjectDotProduct.vector_distance : val(distance)
}
- }
\ No newline at end of file
+ }
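
The cases above are fixtures for a table-driven rewriting test: each entry pairs a GraphQL query with the DQL it is expected to compile to. Below is a minimal sketch of how such a YAML file could be consumed from Go, assuming entries carry `name`, `gqlquery`, and `dgquery` keys; the struct and the `rewriteToDQL` stand-in are hypothetical illustrations, not the resolver package's real harness.

```go
package resolve

import (
	"os"
	"testing"

	"gopkg.in/yaml.v2"
)

// queryCase mirrors the YAML entries above; the field names are assumptions
// based on this diff, not the package's actual test types.
type queryCase struct {
	Name     string `yaml:"name"`
	GQLQuery string `yaml:"gqlquery"`
	DGQuery  string `yaml:"dgquery"`
}

// rewriteToDQL is a hypothetical stand-in for the real GraphQL->DQL rewriter.
func rewriteToDQL(gql string) string { return gql }

func TestQueryRewriting(t *testing.T) {
	b, err := os.ReadFile("query_test.yaml")
	if err != nil {
		t.Fatal(err)
	}

	var cases []queryCase
	if err := yaml.Unmarshal(b, &cases); err != nil {
		t.Fatal(err)
	}

	for _, tc := range cases {
		// Each YAML name becomes a subtest, so it shows up verbatim in
		// `go test` output.
		t.Run(tc.Name, func(t *testing.T) {
			if got := rewriteToDQL(tc.GQLQuery); got != tc.DGQuery {
				t.Errorf("rewriting mismatch\n got:\n%s\nwant:\n%s", got, tc.DGQuery)
			}
		})
	}
}
```

This also shows why unquoting the `name:` values throughout the file is behavior-neutral: the YAML scalar is the same string either way.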
diff --git a/graphql/resolve/resolver_error_test.yaml b/graphql/resolve/resolver_error_test.yaml
index ff9d79fda34..59a5d1032d6 100644
--- a/graphql/resolve/resolver_error_test.yaml
+++ b/graphql/resolve/resolver_error_test.yaml
@@ -1,56 +1,59 @@
--
- name: "Strip Dgraph result list for non-list query result"
+- name: Strip Dgraph result list for non-list query result
gqlquery: |
query {
getAuthor(id: "0x1") {
name
}
}
- explanation: "Dgraph always returns a query result as a list. That needs to be
- fixed for queries with non-list result types."
+ explanation:
+ Dgraph always returns a query result as a list. That needs to be fixed for queries with
+ non-list result types.
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author" } }
-
--
- name: "Empty query result becomes null"
+
+- name: Empty query result becomes null
gqlquery: |
query {
getAuthor(id: "0x1") {
name
}
}
- explanation: "If Dgraph finds no results for a query, and the GraphQL
- type is nullable, we should set the result to null."
+ explanation:
+ If Dgraph finds no results for a query, and the GraphQL type is nullable, we should set the
+ result to null.
response: |
{ }
expected: |
{ "getAuthor": null }
--
- name: "Root level handled correctly if just uid when non-nullable missing"
+- name: Root level handled correctly if just uid when non-nullable missing
gqlquery: |
query {
getAuthor(id: "0x1") {
name
}
}
- explanation: "GraphQL error propagation causes an error on a non-nullable field
- (like name: String!) to propagate to the parent object."
+ explanation:
+ "GraphQL error propagation causes an error on a non-nullable field (like name: String!) to
+ propagate to the parent object."
response: |
{ "getAuthor": [ { "uid": "0x1" } ] }
expected: |
{ "getAuthor": null }
errors:
- [ {
- "message": "Non-nullable field 'name' (type String!) was not present in
- result from Dgraph. GraphQL error propagation triggered." ,
- "path": [ "getAuthor", "name" ],
- "locations": [ { "line": 3, "column": 5 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'name' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, name],
+ "locations": [{ "line": 3, "column": 5 }],
+ },
+ ]
--
- name: "Multiple nullable query results becomes nulls (with alias)"
+- name: Multiple nullable query results becomes nulls (with alias)
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -60,15 +63,15 @@
name
}
}
- explanation: "If Dgraph finds no results for a query, and the GraphQL
- type is nullable, we should set the result to null."
+ explanation:
+ If Dgraph finds no results for a query, and the GraphQL type is nullable, we should set the
+ result to null.
response: |
{ }
expected: |
{ "getAuthor": null, "auth": null }
--
- name: "Multiple query results with a nullable becomes null"
+- name: Multiple query results with a nullable becomes null
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -78,15 +81,13 @@
text
}
}
- explanation: "Even if some queries result in null, we should return all the
- results we got."
+ explanation: Even if some queries result in null, we should return all the results we got
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author" }, "post": null }
--
- name: "Missing nullable field becomes null"
+- name: Missing nullable field becomes null
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -94,30 +95,30 @@
dob
}
}
- explanation: "When a field that's nullable (like dob: DateTime) is missing
- in the Dgraph result, it should be added as null to the GraphQL result."
+ explanation:
+ "When a field that's nullable (like dob: DateTime) is missing in the Dgraph result, it should be
+ added as null to the GraphQL result."
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "dob": null } }
--
- name: "Root level handled correctly if just uid when nullable missing"
+- name: Root level handled correctly if just uid when nullable missing
gqlquery: |
query {
getAuthor(id: "0x1") {
dob
}
}
- explanation: "GraphQL error propagation causes an error on a non-nullable field
- (like name: String!) to propagate to the parent object."
+ explanation:
+ "GraphQL error propagation causes an error on a non-nullable field (like name: String!) to
+ propagate to the parent object."
response: |
{ "getAuthor": [ { "uid": "0x1" } ] }
expected: |
{ "getAuthor": { "dob": null } }
--
- name: "Missing nullable field becomes null (aliased)"
+- name: Missing nullable field becomes null (aliased)
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -125,15 +126,15 @@
birthday : dob
}
}
- explanation: "When a field that's nullable (like dob: DateTime) is missing
- in the Dgraph result, it should be added as null to the GraphQL result."
+ explanation:
+ "When a field that's nullable (like dob: DateTime) is missing in the Dgraph result, it should be
+ added as null to the GraphQL result."
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "birthday": null } }
--
- name: "Missing nullable becomes null (deep)"
+- name: Missing nullable becomes null (deep)
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -144,8 +145,9 @@
}
}
}
- explanation: "When a field that's nullable (like text: String) is missing
- in the Dgraph result, it should be added as null to the GraphQL result."
+ explanation:
+ "When a field that's nullable (like text: String) is missing in the Dgraph result, it should be
+ added as null to the GraphQL result."
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -158,8 +160,7 @@
"postsRequired": [ { "title": "A Title", "text": null } ] }
}
--
- name: "Missing required list becomes []"
+- name: Missing required list becomes []
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -169,15 +170,15 @@
}
}
}
- explanation: "When a field of any list type is missing in the result,
- it should be added as an empty list [], not null"
+ explanation:
+ When a field of any list type is missing in the result, it should be added as an empty list [],
+ not null
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "postsRequired": [ ] } }
--
- name: "Missing nullable list becomes []"
+- name: Missing nullable list becomes []
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -187,15 +188,15 @@
}
}
}
- explanation: "When a field of any list type is missing in the result,
- it should be added as an empty list [], not null"
+ explanation:
+ When a field of any list type is missing in the result, it should be added as an empty list [],
+ not null
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "postsNullable": [ ] } }
--
- name: "Missing list becomes [] (aliased)"
+- name: Missing list becomes [] (aliased)
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -205,15 +206,15 @@
}
}
}
- explanation: "When a field of any list type is missing in the result,
- it should be added as an empty list [], not null"
+ explanation:
+ When a field of any list type is missing in the result, it should be added as an empty list [],
+ not null
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "posts": [ ] } }
--
- name: "Multiple missing lists become [] (with alias)"
+- name: Multiple missing lists become [] (with alias)
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -226,23 +227,24 @@
}
}
}
- explanation: "When a field of any list type is missing in the result,
- it should be added as an empty list [], not null"
+ explanation:
+ When a field of any list type is missing in the result, it should be added as an empty list [],
+ not null
response: |
{ "getAuthor": [ { "uid": "0x1", "name": "A.N. Author" } ] }
expected: |
{ "getAuthor": { "name": "A.N. Author", "posts": [ ], "postsNullable": [ ] } }
--
- name: "Sensible error when expecting single but multiple items returned"
+- name: Sensible error when expecting single but multiple items returned
gqlquery: |
query {
getAuthor(id: "0x1") {
name
}
}
- explanation: "When a query result is of a non-list type, we really should only
- get one item in the Dgraph result."
+ explanation:
+ When a query result is of a non-list type, we really should only get one item in the Dgraph
+ result
response: |
{ "getAuthor": [
{ "uid": "0x1", "name": "A.N. Author" },
@@ -251,32 +253,39 @@
expected: |
{ "getAuthor": null }
errors:
- [ { "message": "A list was returned, but GraphQL was expecting just one item. This indicates
- an internal error - probably a mismatch between the GraphQL and Dgraph/remote schemas. The value
- was resolved as null (which may trigger GraphQL error propagation) and as much other data as
+ [
+ {
+ "message":
+ "A list was returned, but GraphQL was expecting just one item. This indicates an internal
+ error - probably a mismatch between the GraphQL and Dgraph/remote schemas. The value was
+ resolved as null (which may trigger GraphQL error propagation) and as much other data as
possible returned.",
- "locations": [ { "column":3, "line":2 } ],
- "path": ["getAuthor"] } ]
+ "locations": [{ "column": 3, "line": 2 }],
+ "path": [getAuthor],
+ },
+ ]
--
- name: "Sensible error when un-processable Dgraph result"
+- name: Sensible error when un-processable Dgraph result
gqlquery: |
query {
getAuthor(id: "0x1") {
name
}
}
- explanation: "Shouldn't happen"
+ explanation: Shouldn't happen
response: |
{ something is wrong }
expected: |
{ "getAuthor": null }
errors:
- [ { "message": "invalid character 's' looking for beginning of object key string" ,
- "locations": [ { "column":3, "line":2 } ] } ]
+ [
+ {
+ "message": invalid character 's' looking for beginning of object key string,
+ "locations": [{ "column": 3, "line": 2 }],
+ },
+ ]
--
- name: "Error gets propagated to nullable parent if missing non-nullable field"
+- name: Error gets propagated to nullable parent if missing non-nullable field
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -284,21 +293,25 @@
dob
}
}
- explanation: "GraphQL error propagation causes an error on a non-nullable field
- (like name: String!) to propagate to the parent object."
+ explanation:
+ "GraphQL error propagation causes an error on a non-nullable field (like name: String!) to
+ propagate to the parent object."
response: |
{ "getAuthor": [ { "uid": "0x1", "dob": "2000-01-01" } ] }
expected: |
{ "getAuthor": null }
errors:
- [ {
- "message": "Non-nullable field 'name' (type String!) was not present in
- result from Dgraph. GraphQL error propagation triggered." ,
- "path": [ "getAuthor", "name" ],
- "locations": [ { "line": 3, "column": 5 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'name' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, name],
+ "locations": [{ "line": 3, "column": 5 }],
+ },
+ ]
--
- name: "Error in [T!] list propagated as null list"
+- name: Error in [T!] list propagated as null list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -309,9 +322,9 @@
}
}
}
- explanation: "If a list has non-nullable elements and an element becomes null,
- here because title (String!) is missing, GraphQL error propagation
- says the list becomes null."
+ explanation:
+ If a list has non-nullable elements and an element becomes null, here because title (String!) is
+ missing, GraphQL error propagation says the list becomes null.
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -324,13 +337,17 @@
expected: |
{ "getAuthor": { "name": "A.N. Author", "postsElmntRequired": null } }
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsElmntRequired", 1, "title" ],
- "locations": [ { "line": 5, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsElmntRequired, 1, title],
+ "locations": [{ "line": 5, "column": 7 }],
+ },
+ ]
--
- name: "Only uid in [T!] list propagated as null list"
+- name: Only uid in [T!] list propagated as null list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -341,9 +358,9 @@
}
}
}
- explanation: "If a list has non-nullable elements and an element becomes null,
- here because title (String!) is missing, GraphQL error propagation
- says the list becomes null."
+ explanation:
+ If a list has non-nullable elements and an element becomes null, here because title (String!) is
+ missing, GraphQL error propagation says the list becomes null.
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -356,13 +373,17 @@
expected: |
{ "getAuthor": { "name": "A.N. Author", "postsElmntRequired": null } }
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsElmntRequired", 1, "title" ],
- "locations": [ { "line": 5, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsElmntRequired, 1, title],
+ "locations": [{ "line": 5, "column": 7 }],
+ },
+ ]
--
- name: "Error in [T] list propagated as null element in list"
+- name: Error in [T] list propagated as null element in list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -373,9 +394,10 @@
}
}
}
- explanation: "The schema asserts a Post's title as non nullable (title: String!),
- but allows nulls in an Author's postsNullable (postsNullable: [Post]). So a
- post in the result list that's missing a title gets squashed to null"
+ explanation:
+ "The schema asserts a Post's title as non nullable (title: String!), but allows nulls in an
+ Author's postsNullable (postsNullable: [Post]). So a post in the result list that's missing a
+ title gets squashed to null"
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -394,13 +416,17 @@
] }
}
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullable", 1, "title" ],
- "locations": [ { "line": 5, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullable, 1, title],
+ "locations": [{ "line": 5, "column": 7 }],
+ },
+ ]
--
- name: "Only uid in [T] list propagated as null element in list"
+- name: Only uid in [T] list propagated as null element in list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -410,9 +436,10 @@
}
}
}
- explanation: "The schema asserts a Post's title as non nullable (title: String!),
- but allows nulls in an Author's postsNullable (postsNullable: [Post]). So a
- post in the result list that's missing a title gets squashed to null"
+ explanation:
+ "The schema asserts a Post's title as non nullable (title: String!), but allows nulls in an
+ Author's postsNullable (postsNullable: [Post]). So a post in the result list that's missing a
+ title gets squashed to null"
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -431,13 +458,17 @@
] }
}
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullable", 0, "title" ],
- "locations": [ { "line": 5, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullable, 0, title],
+ "locations": [{ "line": 5, "column": 7 }],
+ },
+ ]
--
- name: "Many errors in [T] list propagated as null elements in list"
+- name: Many errors in [T] list propagated as null elements in list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -448,9 +479,10 @@
}
}
}
- explanation: "The schema asserts a Post's title as non nullable (title: String!),
- but allows nulls in an Author's postsNullable (postsNullable: [Post]). So any
- post in the result list that's missing a title gets squashed to null"
+ explanation:
+ "The schema asserts a Post's title as non nullable (title: String!), but allows nulls in an
+ Author's postsNullable (postsNullable: [Post]). So any post in the result list that's missing a
+ title gets squashed to null"
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -473,21 +505,31 @@
] }
}
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullable", 0, "title" ],
- "locations": [ { "line": 6, "column": 7 } ] },
- { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullable", 2, "title" ],
- "locations": [ { "line": 6, "column": 7 } ] },
- { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullable", 3, "title" ],
- "locations": [ { "line": 6, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullable, 0, title],
+ "locations": [{ "line": 6, "column": 7 }],
+ },
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullable, 2, title],
+ "locations": [{ "line": 6, "column": 7 }],
+ },
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullable, 3, title],
+ "locations": [{ "line": 6, "column": 7 }],
+ },
+ ]
--
- name: "Only uid on nullable field list gets inserted correctly"
+- name: Only uid on nullable field list gets inserted correctly
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -497,8 +539,9 @@
}
}
}
- explanation: "The schema asserts a Post's text as nullable (text: String),
- so if a query finds posts without any text, nulls should be inserted"
+ explanation:
+ "The schema asserts a Post's text as nullable (text: String), so if a query finds posts without
+ any text, nulls should be inserted"
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -521,8 +564,7 @@
] }
}
--
- name: "Error in [T]! list propagated as null element in list"
+- name: Error in [T]! list propagated as null element in list
gqlquery: |
query {
getAuthor(id: "0x1") {
@@ -533,9 +575,10 @@
}
}
}
- explanation: "The schema asserts a Post's title as non nullable (title: String!),
- but allows nulls in an Author's postsNullable (postsNullable: [Post]). So a
- post in the result list that's missing a title gets squashed to null"
+ explanation:
+ "The schema asserts a Post's title as non nullable (title: String!), but allows nulls in an
+ Author's postsNullable (postsNullable: [Post]). So a post in the result list that's missing a
+ title gets squashed to null"
response: |
{ "getAuthor": [
{ "uid": "0x1",
@@ -554,7 +597,12 @@
] }
}
errors:
- [ { "message": "Non-nullable field 'title' (type String!) was not present
- in result from Dgraph. GraphQL error propagation triggered.",
- "path": [ "getAuthor", "postsNullableListRequired", 0, "title" ],
- "locations": [ { "line": 5, "column": 7 } ] } ]
+ [
+ {
+ "message":
+ Non-nullable field 'title' (type String!) was not present in result from Dgraph. GraphQL
+ error propagation triggered.,
+ "path": [getAuthor, postsNullableListRequired, 0, title],
+ "locations": [{ "line": 5, "column": 7 }],
+ },
+ ]
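
Every error case in this file encodes the same GraphQL completion rule: a missing non-nullable field nulls out its parent object (propagating upward until a nullable ancestor absorbs it) and records an error whose `path` addresses the offending field, while missing nullable fields become `null` and missing lists become `[]`. A toy sketch of the propagation step, with illustrative names only:

```go
package main

import "fmt"

// gqlError matches the shape of the `errors` entries in this file.
type gqlError struct {
	Message string
	Path    []interface{}
}

// completeObject returns obj unchanged if every non-nullable field is
// present; otherwise it records an error and returns nil so the null
// propagates to the parent, as the test expectations above require.
func completeObject(obj map[string]interface{}, nonNullable []string,
	path []interface{}, errs *[]gqlError) map[string]interface{} {
	for _, f := range nonNullable {
		if _, ok := obj[f]; !ok {
			*errs = append(*errs, gqlError{
				Message: fmt.Sprintf("Non-nullable field '%s' was not present in "+
					"result from Dgraph. GraphQL error propagation triggered.", f),
				Path: append(append([]interface{}{}, path...), f),
			})
			return nil
		}
	}
	return obj
}

func main() {
	var errs []gqlError
	// "name" is declared String! but absent, so getAuthor collapses to null.
	author := completeObject(
		map[string]interface{}{"uid": "0x1"},
		[]string{"name"},
		[]interface{}{"getAuthor"},
		&errs,
	)
	fmt.Println(author) // <nil>
	fmt.Println(errs)   // one error with path [getAuthor name]
}
```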
diff --git a/graphql/resolve/update_mutation_test.yaml b/graphql/resolve/update_mutation_test.yaml
index bf434c04f7f..ef5155ebc94 100644
--- a/graphql/resolve/update_mutation_test.yaml
+++ b/graphql/resolve/update_mutation_test.yaml
@@ -1,5 +1,4 @@
--
- name: "Update set mutation on Geo - Point type"
+- name: Update set mutation on Geo - Point type
gqlmutation: |
mutation updateHotel($patch: UpdateHotelInput!) {
updateHotel(input: $patch) {
@@ -22,7 +21,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph set mutation"
+ explanation: The update patch should get rewritten into the Dgraph set mutation
dgquerysec: |-
query {
x as updateHotel(func: type(Hotel)) @filter(near(Hotel.location, [22.22,11.11], 33.33)) {
@@ -39,8 +38,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove mutation on Geo - Point type"
+- name: Update remove mutation on Geo - Point type
gqlmutation: |
mutation updateHotel($patch: UpdateHotelInput!) {
updateHotel(input: $patch) {
@@ -63,7 +61,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph delete mutation"
+ explanation: The update patch should get rewritten into the Dgraph delete mutation
dgquerysec: |-
query {
x as updateHotel(func: uid(0x123, 0x124)) @filter(type(Hotel)) {
@@ -80,9 +78,7 @@
}
cond: "@if(gt(len(x), 0))"
-
--
- name: "Update remove mutation on Geo - Polygon type"
+- name: Update remove mutation on Geo - Polygon type
gqlmutation: |
mutation updateHotel($patch: UpdateHotelInput!) {
updateHotel(input: $patch) {
@@ -133,7 +129,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph delete mutation"
+ explanation: The update patch should get rewritten into the Dgraph delete mutation
dgquerysec: |-
query {
x as updateHotel(func: uid(0x123, 0x124)) @filter(type(Hotel)) {
@@ -150,8 +146,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update set mutation on Geo - MultiPolygon type"
+- name: Update set mutation on Geo - MultiPolygon type
gqlmutation: |
mutation updateHotel($patch: UpdateHotelInput!) {
updateHotel(input: $patch) {
@@ -218,7 +213,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph set mutation"
+ explanation: The update patch should get rewritten into the Dgraph set mutation
dgquerysec: |-
query {
x as updateHotel(func: uid(0x123, 0x124)) @filter(type(Hotel)) {
@@ -235,8 +230,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update set mutation with variables"
+- name: Update set mutation with variables
gqlmutation: |
mutation updatePost($patch: UpdatePostInput!) {
updatePost(input: $patch) {
@@ -255,7 +249,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph set mutation"
+ explanation: The update patch should get rewritten into the Dgraph set mutation
dgquerysec: |-
query {
x as updatePost(func: uid(0x123, 0x124)) @filter(type(Post)) {
@@ -269,8 +263,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove mutation with variables and value"
+- name: Update remove mutation with variables and value
gqlmutation: |
mutation updatePost($patch: UpdatePostInput!) {
updatePost(input: $patch) {
@@ -289,7 +282,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph delete mutation"
+ explanation: The update patch should get rewritten into the Dgraph delete mutation
dgquerysec: |-
query {
x as updatePost(func: uid(0x123, 0x124)) @filter(type(Post)) {
@@ -303,8 +296,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update delete mutation with variables and null"
+- name: Update delete mutation with variables and null
gqlmutation: |
mutation updatePost($patch: UpdatePostInput!) {
updatePost(input: $patch) {
@@ -323,7 +315,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph mutation"
+ explanation: The update patch should get rewritten into the Dgraph mutation
dgquerysec: |-
query {
x as updatePost(func: uid(0x123, 0x124)) @filter(type(Post)) {
@@ -337,8 +329,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update mutation for a type that implements an interface"
+- name: Update mutation for a type that implements an interface
gqlmutation: |
mutation updateHuman($patch: UpdateHumanInput!) {
updateHuman(input: $patch) {
@@ -362,7 +353,7 @@
}
}
}
- explanation: "The mutation should get rewritten with correct edges from the interface."
+ explanation: The mutation should get rewritten with correct edges from the interface
dgquerysec: |-
query {
x as updateHuman(func: uid(0x123)) @filter(type(Human)) {
@@ -379,8 +370,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update mutation for an interface"
+- name: Update mutation for an interface
gqlmutation: |-
mutation {
updateCharacter(input: {filter: { id: ["0x123"] }, set: {name:"Bob"}}) {
@@ -390,7 +380,7 @@
}
}
}
- explanation: "The mutation should get rewritten with correct edges from the interface."
+ explanation: The mutation should get rewritten with correct edges from the interface
dgquerysec: |-
query {
x as updateCharacter(func: uid(0x123)) @filter(type(Character)) {
@@ -404,8 +394,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update mutation using filters"
+- name: Update mutation using filters
gqlmutation: |
mutation updatePost($patch: UpdatePostInput!) {
updatePost(input: $patch) {
@@ -424,7 +413,7 @@
}
}
}
- explanation: "The update patch should get rewritten into the Dgraph mutation"
+ explanation: The update patch should get rewritten into the Dgraph mutation
dgquerysec: |-
query {
x as updatePost(func: type(Post)) @filter(eq(Post.tags, "foo")) {
@@ -438,8 +427,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update mutation using code"
+- name: Update mutation using code
gqlmutation: |
mutation updateState($patch: UpdateStateInput!) {
updateState(input: $patch) {
@@ -458,7 +446,7 @@
}
}
}
- explanation: "The update mutation should get rewritten into a Dgraph upsert mutation"
+ explanation: The update mutation should get rewritten into a Dgraph upsert mutation
dgquerysec: |-
query {
x as updateState(func: type(State)) @filter(eq(State.code, "nsw")) {
@@ -472,9 +460,7 @@
}
cond: "@if(gt(len(x), 0))"
-
--
- name: "Update mutation using code on type which also has an ID field"
+- name: Update mutation using code on type which also has an ID field
gqlmutation: |
mutation updateEditor($patch: UpdateEditorInput!) {
updateEditor(input: $patch) {
@@ -494,7 +480,7 @@
}
}
}
- explanation: "The update mutation should get rewritten into a Dgraph upsert mutation"
+ explanation: The update mutation should get rewritten into a Dgraph upsert mutation
dgquerysec: |-
query {
x as updateEditor(func: uid(0x1, 0x2)) @filter((eq(Editor.code, "editor") AND type(Editor))) {
@@ -508,9 +494,7 @@
}
cond: "@if(gt(len(x), 0))"
-
--
- name: "Update add reference"
+- name: Update add reference
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -568,8 +552,7 @@
]
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove without XID or ID"
+- name: Update remove without XID or ID
gqlmutation: |
mutation updateComputer($patch: UpdateComputerInput!) {
updateComputer(input: $patch) {
@@ -590,13 +573,10 @@
}
}
}
- explanation: "Remove requires an XID or ID"
- error2:
- { "message":
- "failed to rewrite mutation payload because field name cannot be empty" }
+ explanation: Remove requires an XID or ID
+ error2: { "message": failed to rewrite mutation payload because field name cannot be empty }
--
- name: "Update remove with XID"
+- name: Update remove with XID
gqlmutation: |
mutation updateComputer($patch: UpdateComputerInput!) {
updateComputer(input: $patch) {
@@ -648,8 +628,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove with ID"
+- name: Update remove with ID
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -698,8 +677,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove reference"
+- name: Update remove reference
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -747,8 +725,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove reference without id or xid"
+- name: Update remove reference without id or xid
gqlmutation: |
mutation updateWorkflow($patch: UpdateWorkflowInput!) {
updateWorkflow(input: $patch) {
@@ -774,8 +751,7 @@
message: |-
failed to rewrite mutation payload because id is not provided
--
- name: "Update add and remove together"
+- name: Update add and remove together
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -851,8 +827,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Deep updates don't alter linked objects"
+- name: Deep updates don't alter linked objects
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -875,7 +850,7 @@
}
}
}
- explanation: "updateAuthor doesn't update posts except where references are removed"
+ explanation: updateAuthor doesn't update posts except where references are removed
dgquery: |-
query {
Post_1(func: uid(0x456)) {
@@ -915,8 +890,7 @@
]
cond: "@if(gt(len(x), 0))"
--
- name: "Deep update"
+- name: Deep update
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -937,7 +911,7 @@
}
}
}
- explanation: "The update creates a new country"
+ explanation: The update creates a new country
dgquerysec: |-
query {
x as updateAuthor(func: uid(0x123)) @filter(type(Author)) {
@@ -955,8 +929,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Deep xid create options 1"
+- name: Deep xid create options 1
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -981,7 +954,7 @@
}
}
}
- explanation: "The update creates a new state"
+ explanation: The update creates a new state
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -1017,8 +990,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Deep xid create options 2"
+- name: Deep xid create options 2
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -1043,7 +1015,7 @@
}
}
}
- explanation: "The update links to existing state"
+ explanation: The update links to existing state
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -1088,9 +1060,7 @@
]
cond: "@if(gt(len(x), 0))"
-
--
- name: "Deep xid link only"
+- name: Deep xid link only
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -1114,7 +1084,7 @@
}
}
}
- explanation: "The update must link to the existing state"
+ explanation: The update must link to the existing state
dgquery: |-
query {
State_1(func: eq(State.code, "dg")) {
@@ -1159,8 +1129,7 @@
]
cond: "@if(gt(len(x), 0))"
--
- name: "update two single edges"
+- name: update two single edges
gqlmutation: |
mutation updateOwner($patch: UpdateOwnerInput!) {
updateOwner(input: $patch) {
@@ -1226,8 +1195,7 @@
]
cond: "@if(gt(len(x), 0))"
--
- name: "Update add reference doesn't add reverse edge"
+- name: Update add reference doesn't add reverse edge
gqlmutation: |
mutation updateMovieDirector($patch: UpdateMovieDirectorInput!) {
updateMovieDirector(input: $patch) {
@@ -1274,8 +1242,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update remove reference doesn't try to remove reverse edge."
+- name: Update remove reference doesn't try to remove reverse edge
gqlmutation: |
mutation updateMovieDirector($patch: UpdateMovieDirectorInput!) {
updateMovieDirector(input: $patch) {
@@ -1322,7 +1289,7 @@
}
cond: "@if(gt(len(x), 0))"
-- name: "Deep Mutation Duplicate XIDs with same object Test"
+- name: Deep Mutation Duplicate XIDs with same object Test
gqlmutation: |
mutation updateStudent($input: UpdateStudentInput!) {
updateStudent(input: $input) {
@@ -1351,8 +1318,9 @@
}
}
}
- explanation: "When duplicate XIDs are given as input to deep mutation but the object structure
- is same, it should not return error."
+ explanation:
+ When duplicate XIDs are given as input to deep mutation but the object structure is the same,
+ it should not return an error
dgquery: |-
query {
Teacher_1(func: eq(People.xid, "T1")) {
@@ -1386,7 +1354,7 @@
}
cond: "@if(gt(len(x), 0))"
-- name: "Deep Mutation Duplicate XIDs with same object with @hasInverse Test"
+- name: Deep Mutation Duplicate XIDs with same object with @hasInverse Test
gqlmutation: |
mutation updateCountry($input: UpdateCountryInput!) {
updateCountry(input: $input) {
@@ -1415,14 +1383,15 @@
}
}
}
- explanation: "When duplicate XIDs are given as input to deep mutation and the object structure
- is same and the containing object has @hasInverse on its xid object field, but the xid object
- does not have the @hasInverse field of List type, it should return error."
+ explanation:
+    When duplicate XIDs are given as input to a deep mutation and the object structure is the
+    same, and the containing object has @hasInverse on its xid object field but the xid object
+    does not have the @hasInverse field of List type, it should return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: S1
-- name: "Deep Mutation Duplicate XIDs with different object Test"
+- name: Deep Mutation Duplicate XIDs with different object Test
gqlmutation: |
mutation updateStudent($input: UpdateStudentInput!) {
updateStudent(input: $input) {
@@ -1451,13 +1420,14 @@
}
}
}
- explanation: "When duplicate XIDs are given as input to deep mutation but the object structure
- is different, it should return error."
+ explanation:
+    When duplicate XIDs are given as input to a deep mutation but the object structure is
+    different, it should return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: T1
-- name: "Duplicate XIDs in single mutation for Interface"
+- name: Duplicate XIDs in single mutation for Interface
gqlmutation: |
mutation updateStudent($input: UpdateStudentInput!) {
updateStudent(input: $input) {
@@ -1492,13 +1462,13 @@
}
}
}
- explanation: "When duplicate XIDs are given as input for an Interface in a single mutation, it
- should return error."
+ explanation:
+    When duplicate XIDs are given as input for an Interface in a single mutation, it should
+    return an error.
error:
message: |-
failed to rewrite mutation payload because duplicate XID found: T1
-
# Additional Deletes
#
# If we have
@@ -1538,7 +1508,7 @@
# edge is in the updated node, not the reference node)
# * as per case two, but with the singular edge in the updated node.
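 #
 # A minimal sketch of the mutation pair case one produces (hypothetical
 # author/post payloads; the deletejson shape is an assumption here, since the
 # hunks below truncate the mutation bodies): linking post 0x456 to a new
 # author must also delete the edge from the post's old author.
 #
 #   dgmutations:
 #     - setjson: |
 #         {
 #           "uid": "uid(x)",
 #           "Author.posts": [{ "uid": "0x456", "Post.author": { "uid": "uid(x)" } }]
 #         }
 #       deletejson: |
 #         [{ "uid": "uid(Author_4)", "Author.posts": [{ "uid": "0x456" }] }]
 #       cond: "@if(gt(len(x), 0))"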
-- name: "Additional Deletes - Update references existing node by ID (update list edge)"
+- name: Additional Deletes - Update references existing node by ID (update list edge)
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -1598,7 +1568,7 @@
]
cond: "@if(gt(len(x), 0))"
-- name: "Additional Deletes - Update references existing node by ID (update single edge)"
+- name: Additional Deletes - Update references existing node by ID (update single edge)
gqlmutation: |
mutation updatePost($patch: UpdatePostInput!) {
updatePost(input: $patch) {
@@ -1656,7 +1626,7 @@
]
cond: "@if(gt(len(x), 0))"
-- name: "Additional Deletes - Update references existing node by XID (update list edge)"
+- name: Additional Deletes - Update references existing node by XID (update list edge)
gqlmutation: |
mutation updateCountry($patch: UpdateCountryInput!) {
updateCountry(input: $patch) {
@@ -1705,7 +1675,7 @@
}
cond: "@if(gt(len(x), 0))"
-- name: "Update mutation error on @id field for empty value"
+- name: Update mutation error on @id field for empty value
gqlmutation: |
mutation updateCountry($patch: UpdateCountryInput!) {
updateCountry(input: $patch) {
@@ -1725,11 +1695,15 @@
}
}
}
- explanation: "The update mutation should not be allowed since value of @id field is empty."
+  explanation: The update mutation should not be allowed since the value of the @id field is empty
error:
- { "message": "failed to rewrite mutation payload because encountered an empty value for @id field `State.code`" }
+ {
+ "message":
+        "failed to rewrite mutation payload because encountered an empty value for @id field
+        `State.code`",
+ }
-- name: "Additional Deletes - Update references existing node by XID (update single edge)"
+- name: Additional Deletes - Update references existing node by XID (update single edge)
gqlmutation: |
mutation updateComputerOwner($patch: UpdateComputerOwnerInput!) {
updateComputerOwner(input: $patch) {
@@ -1771,8 +1745,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Add mutation with union"
+- name: Add mutation with union
gqlmutation: |
mutation($patch: UpdateHomeInput!) {
updateHome(input: $patch) {
@@ -1865,8 +1838,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update set mutation with multiple Id's"
+- name: Update set mutation with multiple Id's
gqlmutation: |
mutation update($patch: UpdateBookInput!) {
updateBook(input: $patch) {
@@ -1912,8 +1884,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "delete json shouldn't be generated for empty remove"
+- name: delete json shouldn't be generated for empty remove
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -1946,8 +1917,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "set json shouldn't be generated for empty set"
+- name: set json shouldn't be generated for empty set
gqlmutation: |
mutation updateAuthor($patch: UpdateAuthorInput!) {
updateAuthor(input: $patch) {
@@ -1980,11 +1950,11 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Updating @id field when given values for @id fields doesn't exists"
- explaination: "We are giving two @id fields title and ISBN in set part of update mutation,
- and will generate two existence queries for both of them. As none of the @id field is present,we
- update the values successfully "
+- name: Updating @id field when given values for @id fields don't exist
+  explanation:
+    We are giving two @id fields, title and ISBN, in the set part of the update mutation, and will
+    generate two existence queries for both of them. As neither @id value is already present, we
+    update the values successfully
gqlmutation: |
mutation update($patch: UpdateBookInput!) {
updateBook(input: $patch) {
@@ -2044,11 +2014,11 @@
"Book.title": "History of Humans"
}
cond: "@if(gt(len(x), 0))"
--
- name: "Updating @id field when given value for @id fields exist in some node"
- explaination: "We are giving two @id fields title and ISBN in set part of update mutation,
- and will generate two existence queries for both of them.As we already have node with title
- Sapiens, we will return error in this case "
+- name: Updating @id field when given value for @id field exists in some node
+  explanation:
+    We are giving two @id fields, title and ISBN, in the set part of the update mutation, and will
+    generate two existence queries for both of them. As we already have a node with title Sapiens,
+    we will return an error in this case
gqlmutation: |
mutation update($patch: UpdateBookInput!) {
updateBook(input: $patch) {
@@ -2099,16 +2069,18 @@
"Book_2": "0x123"
}
error2:
- { "message":
- "failed to rewrite mutation payload because id History of Humans already exists for field title inside type Book"
+ {
+ "message":
+        "failed to rewrite mutation payload because id History of Humans already exists for field
+        title inside type Book",
}
--
- name: "skipping nullable @id values while Updating link to non-existent nested object"
- explaination: "when we update link to nested field, we check if that node already exists or not,
- In this case nested object doesn't exists and update mutation create it and link it to root object.
- while creating nested object it skip @id nullable fields which don't exists in nested object, in this case
- it skips commentId in nested type Comment1"
+- name: skipping nullable @id values while updating link to non-existent nested object
+  explanation:
+    when we update a link to a nested field, we check whether that node already exists. In this
+    case the nested object doesn't exist, so the update mutation creates it and links it to the
+    root object. While creating the nested object it skips nullable @id fields which don't exist
+    in the nested object; in this case it skips commentId in the nested type Comment1
gqlmutation: |
mutation update($patch: UpdatePost1Input!) {
updatePost1(input: $patch) {
@@ -2171,12 +2143,12 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Updating link to nested field require all the non-null id's to be present in nested field"
- explaination: "when we update link to nested field then we check if that already exist or not,
- In this case since @id field is not present in nested field, so we assume it to be a new node.
- update mutation tries to create it but failed because non-nullable id field is required to add new
- node."
+- name: Updating link to nested field requires all the non-null ids to be present in nested field
+  explanation:
+    when we update a link to a nested field, we check whether it already exists. In this case,
+    since the @id field is not present in the nested field, we assume it to be a new node. The
+    update mutation tries to create it but fails because a non-nullable id field is required to
+    add a new node.
gqlmutation: |
mutation update($patch: UpdatePost1Input!) {
updatePost1(input: $patch) {
@@ -2209,16 +2181,13 @@
dgraph.type
}
}
- error2:
- { "message":
- "failed to rewrite mutation payload because field id cannot be empty"
- }
+ error2: { "message": failed to rewrite mutation payload because field id cannot be empty }
--
- name: "Updating inherited @id field with interface arg -1 "
- explaination: "For this case we will generate one more existence query for inherited @id field refID which have
- interface arg set. No node with given refID exist in same or other implementing type of interface so we will
- successfully update node in this case"
+- name: "Updating inherited @id field with interface arg -1 "
+ explaination:
+ For this case we will generate one more existence query for inherited @id field refID which have
+ interface arg set. No node with given refID exist in same or other implementing type of
+ interface so we will successfully update node in this case
gqlmutation: |
mutation update($patch: UpdateLibraryMemberInput!) {
updateLibraryMember(input: $patch) {
@@ -2265,19 +2234,20 @@
}
}
dgmutations:
- - setjson: |
+ - setjson: |
{
"LibraryMember.readHours":"3d2hr",
"Member.name":"Alice",
"Member.refID":"102",
"uid":"uid(x)"
}
- cond: "@if(gt(len(x), 0))"
+ cond: "@if(gt(len(x), 0))"
--
- name: "Updating inherited @id field with interface arg -2 "
- explaination: "For this case we will generate one more existence query for inherited @id field refID.
- There already exist node with refID in other implementing type of interface so we will generate error for this case"
+- name: "Updating inherited @id field with interface arg -2 "
+ explaination:
+ For this case we will generate one more existence query for inherited @id field refID. There
+ already exist node with refID in other implementing type of interface so we will generate error
+ for this case
gqlmutation: |
mutation update($patch: UpdateLibraryMemberInput!) {
updateLibraryMember(input: $patch) {
@@ -2322,16 +2292,17 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 102 already exists for field refID
- in some other implementing type of interface Member"
+ "message":
+        "failed to rewrite mutation payload because id 102 already exists for field refID in some
+        other implementing type of interface Member",
}
--
- name: "Updating link to nested object inheriting @id field with interface argument-1"
- explaination: "If nested object have inherited @id field which have interface argument set, and that
- field already exist in some other implementing type than we returns error.In below mutation manages
- is of type LibraryMember but node with given refID already exist in some other
- type than than LibraryMember"
+- name: Updating link to nested object inheriting @id field with interface argument-1
+  explanation:
+    If the nested object has an inherited @id field with the interface argument set, and that
+    field already exists in some other implementing type, then we return an error. In the mutation
+    below, manages is of type LibraryMember but a node with the given refID already exists in some
+    type other than LibraryMember
gqlmutation: |
mutation update($patch: UpdateLibraryManagerInput!) {
updateLibraryManager(input: $patch) {
@@ -2377,14 +2348,15 @@
}
error2:
{
- "message": "failed to rewrite mutation payload because id 101 already exists for field refID
- in some other implementing type of interface Member"
+ "message":
+        "failed to rewrite mutation payload because id 101 already exists for field refID in some
+        other implementing type of interface Member",
}
--
- name: "Updating link to nested object inheriting @id field with interface argument-2"
- explaination: "In below mutation manages is of type LibraryMember and node of type LibraryMember already
- existed with given refID, so we link that correctly"
+- name: Updating link to nested object inheriting @id field with interface argument-2"
+ explaination:
+ In below mutation manages is of type LibraryMember and node of type LibraryMember already
+ existed with given refID, so we link that correctly
gqlmutation: |
mutation update($patch: UpdateLibraryManagerInput!) {
updateLibraryManager(input: $patch) {
@@ -2436,20 +2408,19 @@
}
}
dgmutations:
- - setjson: |
- {
- "LibraryManager.manages": [
- {
- "uid": "0x123"
- }
- ],
- "LibraryManager.name": "Bob",
- "uid": "uid(x)"
- }
- cond: "@if(gt(len(x), 0))"
+ - setjson: |
+ {
+ "LibraryManager.manages": [
+ {
+ "uid": "0x123"
+ }
+ ],
+ "LibraryManager.name": "Bob",
+ "uid": "uid(x)"
+ }
+ cond: "@if(gt(len(x), 0))"
--
- name: "Update with @default directive"
+- name: Update with @default directive
gqlmutation: |
mutation updateBooking($patch: UpdateBookingInput!) {
updateBooking(input: $patch) {
@@ -2468,7 +2439,9 @@
}
}
}
- explanation: "The update patch should include default values on the fields with the @default(update:) directive"
+ explanation:
+ The update patch should include default values on the fields with the @default(update:)
+ directive
dgquerysec: |-
query {
x as updateBooking(func: uid(0x123, 0x124)) @filter(type(Booking)) {
@@ -2488,8 +2461,7 @@
}
cond: "@if(gt(len(x), 0))"
--
- name: "Update with @default directive uses provided values"
+- name: Update with @default directive uses provided values
gqlmutation: |
mutation updateBooking($patch: UpdateBookingInput!) {
updateBooking(input: $patch) {
@@ -2513,7 +2485,9 @@
}
}
}
- explanation: "Fields with @default(update) should use input values if provided (note that count is still using default)"
+ explanation:
+ Fields with @default(update) should use input values if provided (note that count is still using
+ default)
dgquerysec: |-
query {
x as updateBooking(func: uid(0x123, 0x124)) @filter(type(Booking)) {
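
For orientation, each entry in update_mutation_test.yaml pairs a GraphQL mutation with the DQL
upsert the rewriter is expected to emit. A minimal sketch of that shape, reusing the Booking type
from the cases above (the uid and the trimmed selection set are illustrative, not copied from the
fixture):

- name: Example update rewrite (illustrative only)
  gqlmutation: |
    mutation updateBooking($patch: UpdateBookingInput!) {
      updateBooking(input: $patch) {
        booking {
          id
        }
      }
    }
  explanation: The rewriter emits an existence query plus a JSON mutation guarded by a condition
  dgquerysec: |-
    query {
      x as updateBooking(func: uid(0x123)) @filter(type(Booking)) {
        uid
      }
    }
  dgmutations:
    - setjson: |
        {
          "uid": "uid(x)"
        }
      cond: "@if(gt(len(x), 0))"
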
diff --git a/graphql/resolve/validate_mutation_test.yaml b/graphql/resolve/validate_mutation_test.yaml
index ee5a88b2ca4..4f3ad928da6 100644
--- a/graphql/resolve/validate_mutation_test.yaml
+++ b/graphql/resolve/validate_mutation_test.yaml
@@ -1,5 +1,4 @@
--
- name: "Add mutation with object instead of an array"
+- name: Add mutation with object instead of an array
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: $auth) {
@@ -15,16 +14,16 @@
"posts": []
}
}
- explanation: "Add mutation expects an array instead of an object"
+ explanation: Add mutation expects an array instead of an object
validationerror:
- { "message":
- "input:2: Variable type provided AddAuthorInput! is incompatible with expected type
- [AddAuthorInput!]!\ninput:2: Variable \"$auth\" of type \"AddAuthorInput!\" used in
- position expecting type \"[AddAuthorInput!]!\".\n" }
-
+ {
+ "message":
+ "input:2: Variable type provided AddAuthorInput! is incompatible with expected type
+ [AddAuthorInput!]!\ninput:2: Variable \"$auth\" of type \"AddAuthorInput!\" used in position
+ expecting type \"[AddAuthorInput!]!\".\n",
+ }
--
- name: "Add mutation with invalid object"
+- name: Add mutation with invalid object
gqlmutation: |
mutation addAuthor($auth: AddAuthorInput!) {
addAuthor(input: [$auth]) {
@@ -38,14 +37,10 @@
{ "posts": []
}
}
- explanation: "Name is a required field here and all the elements provided
- should have one"
- validationerror:
- { "message":
- "input: variable.auth.name must be defined" }
+ explanation: Name is a required field here and all the elements provided should have one
+ validationerror: { "message": "input: variable.auth.name must be defined" }
--
- name: "Add multiple mutation with invalid object"
+- name: Add multiple mutation with invalid object
gqlmutation: |
mutation addAuthor($auth: [AddAuthorInput!]!) {
addAuthor(input: $auth) {
@@ -62,8 +57,5 @@
{ "posts": []
}]
}
- explanation: "Name is a required field and all the elements provided
- should have one"
- validationerror:
- { "message":
- "input: variable.auth[1].name must be defined" }
+ explanation: Name is a required field and all the elements provided should have one
+ validationerror: { "message": "input: variable.auth[1].name must be defined" }
diff --git a/graphql/schema/auth_schemas_test.yaml b/graphql/schema/auth_schemas_test.yaml
index 1a7195938e2..26057e12204 100644
--- a/graphql/schema/auth_schemas_test.yaml
+++ b/graphql/schema/auth_schemas_test.yaml
@@ -1,6 +1,5 @@
invalid_schemas:
-
- - name: "GraphQL parsing errors should be reported"
+ - name: GraphQL parsing errors should be reported
input: |
type X @auth(
query: { rule: "query { " }
@@ -8,12 +7,15 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: failed to parse GraphQL rule
- [reason : Expected Name, found ]"}
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: failed to parse GraphQL rule [reason : Expected Name, found ]",
+ },
+ ]
- - name: "GraphQL validation errors should be reported"
+ - name: GraphQL validation errors should be reported
input: |
type X @auth(
query: {rule: "query { queryX(filter: { userRle: { eq: \"ADMIN\" } }) { __typename } }"}
@@ -21,11 +23,14 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: failed to validate GraphQL rule
- [reason : Field \"userRle\" is not defined by type XFilter.
- Did you mean userRole or username?]"}
- ]
+ errlist:
+ [
+ {
+ "message":
+ 'Type X: @auth: failed to validate GraphQL rule [reason : Field "userRle" is not defined
+ by type XFilter. Did you mean userRole or username?]',
+ },
+ ]
- name: "Invalid RBAC rule: in filter not array variable 1"
input: |
@@ -35,10 +40,14 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- { "message": "Type X: @auth: `in` operator has invalid value `xyz@dgraph.io`.
- Value should be an array." }
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: `in` operator has invalid value `xyz@dgraph.io`. Value should be an
+ array.",
+ },
+ ]
- name: "Invalid RBAC rule: in filter not array variable 2"
input: |
@@ -48,10 +57,13 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- { "message": "Type X: @auth: `in` operator has invalid value `true`.
- Value should be an array."}
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: `in` operator has invalid value `true`. Value should be an array.",
+ },
+ ]
- name: "Invalid RBAC rule: nil as the value"
input: |
@@ -61,9 +73,7 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- { "message": "Type X: @auth: `$USER` is not a valid GraphQL variable." }
- ]
+ errlist: [{ "message": "Type X: @auth: `$USER` is not a valid GraphQL variable." }]
- name: "Invalid RBAC rule: null as the value"
input: |
@@ -73,9 +83,13 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- { "message": "Type X: @auth: `eq` operator has invalid value. null values aren't supported." }
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: `eq` operator has invalid value. null values aren't supported.",
+ },
+ ]
- name: "Invalid RBAC rule: regexp filter not string variable"
input: |
@@ -85,12 +99,16 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- { "message": "Type X: @auth: `regexp` operator has invalid value `12345`.
- Value should be of type String." }
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: `regexp` operator has invalid value `12345`. Value should be of type
+ String.",
+ },
+ ]
- - name: "RBAC rule invalid variable"
+ - name: RBAC rule invalid variable
input: |
type X @auth(
query: {rule: "{ X_MyApp_Role : { eq : \"ADMIN\"}}"
@@ -99,9 +117,9 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [{"message": "Type X: @auth: `X_MyApp_Role` is not a valid GraphQL variable."}]
+ errlist: [{ "message": "Type X: @auth: `X_MyApp_Role` is not a valid GraphQL variable." }]
- - name: "RBAC rule invalid operator"
+ - name: RBAC rule invalid operator
input: |
type X @auth(
query: {rule: "{ $X_MyApp_Role : { xyz : \"ADMIN\"}}"
@@ -110,9 +128,9 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [{"message": "Type X: @auth: `xyz` operator is not supported."}]
+ errlist: [{ "message": "Type X: @auth: `xyz` operator is not supported." }]
- - name: "Invalid RBAC rule"
+ - name: Invalid RBAC rule
input: |
type X @auth(
query: {rule: "{ \"ADMIN\" }"
@@ -121,9 +139,9 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [{"message": "Type X: @auth: `{ \"ADMIN\" }` is not a valid rule."}]
+ errlist: [{ "message": 'Type X: @auth: `{ "ADMIN" }` is not a valid rule.' }]
- - name: "Empty rule"
+ - name: Empty rule
input: |
type X @auth(
query: { rule: ""
@@ -132,12 +150,15 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: a rule should be exactly one query,
- found 0 GraphQL operations"}
- ]
+ errlist:
+ [
+ {
+ "message":
+ "Type X: @auth: a rule should be exactly one query, found 0 GraphQL operations",
+ },
+ ]
- - name: "Invalid auth syntax"
+ - name: Invalid auth syntax
input: |
type X @auth(
query: { xyz: ""
@@ -146,12 +167,10 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: there should be only one of \"and\", \"or\",
- \"not\" and \"rule\""}
- ]
+ errlist:
+ [{ "message": 'Type X: @auth: there should be only one of "and", "or", "not" and "rule"' }]
- - name: "Single or rule"
+ - name: Single or rule
input: |
type X @auth(
query: {
@@ -167,9 +186,9 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [{"message": "Type X: @auth: 'OR' should contain at least two rules"}]
+ errlist: [{ "message": "Type X: @auth: 'OR' should contain at least two rules" }]
- - name: "Multiple logical operation at same level"
+ - name: Multiple logical operation at same level
input: |
type X @auth(
query: {
@@ -204,12 +223,10 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: there should be only one of \"and\", \"or\",
- \"not\" and \"rule\""}
- ]
+ errlist:
+ [{ "message": 'Type X: @auth: there should be only one of "and", "or", "not" and "rule"' }]
- - name: "Same logical operation at same level"
+ - name: Same logical operation at same level
input: |
type X @auth(
query: {
@@ -244,12 +261,10 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: there should be only one of \"and\", \"or\",
- \"not\" and \"rule\""}
- ]
+ errlist:
+ [{ "message": 'Type X: @auth: there should be only one of "and", "or", "not" and "rule"' }]
- - name: "Rules with null value"
+ - name: Rules with null value
input: |
type X @auth(
query: { and: [ null, null ] }
@@ -257,16 +272,20 @@ invalid_schemas:
username: String! @id
userRole: String @search(by: [hash])
}
- errlist: [
- {"message": "Type X: @auth: no arguments - there should be only one of \"and\", \"or\",
- \"not\" and \"rule\""},
- {"message": "Type X: @auth: no arguments - there should be only one of \"and\", \"or\",
- \"not\" and \"rule\""}
- ]
+ errlist:
+ [
+ {
+ "message":
+ 'Type X: @auth: no arguments - there should be only one of "and", "or", "not" and "rule"',
+ },
+ {
+ "message":
+ 'Type X: @auth: no arguments - there should be only one of "and", "or", "not" and "rule"',
+ },
+ ]
valid_schemas:
-
- - name: "GraphQL Should Parse"
+ - name: GraphQL Should Parse
input: |
type X @auth(
query: {rule: """
@@ -281,7 +300,7 @@ valid_schemas:
userRole: String @search(by: [hash])
}
- - name: "GraphQL auth RBAC rule"
+ - name: GraphQL auth RBAC rule
input: |
type X @auth(
query: { rule: "{ $X_MyApp_Role: { eq: \"ADMIN\" }}"
@@ -291,7 +310,7 @@ valid_schemas:
userRole: String @search(by: [hash])
}
- - name: "GraphQL With Variable Should Parse"
+ - name: GraphQL With Variable Should Parse
input: |
type X @auth(
query: { rule: """
@@ -306,7 +325,7 @@ valid_schemas:
userRole: String @search(by: [hash])
}
- - name: "Complex GraphQL Should Parse"
+ - name: Complex GraphQL Should Parse
input: |
type Proj @auth(
update: { rule: """
@@ -331,7 +350,7 @@ valid_schemas:
username: String! @id
}
- - name: "Rule using logical `or` operation"
+ - name: Rule using logical `or` operation
input: |
type X @auth(
query: {
@@ -354,7 +373,7 @@ valid_schemas:
userRole: String @search(by: [hash])
}
- - name: "Rule using logical `and` operation"
+ - name: Rule using logical `and` operation
input: |
type X @auth(
query: {
@@ -377,7 +396,7 @@ valid_schemas:
userRole: String @search(by: [hash])
}
- - name: "Rule using logical `not` operation"
+ - name: Rule using logical `not` operation
input: |
type X @auth(
query: { not: { rule: """
diff --git a/graphql/schema/custom_http_config_test.yaml b/graphql/schema/custom_http_config_test.yaml
index 67b6a1fc085..bf19f784314 100644
--- a/graphql/schema/custom_http_config_test.yaml
+++ b/graphql/schema/custom_http_config_test.yaml
@@ -1,6 +1,5 @@
--
- name: "custom query"
- type: "query"
+- name: custom query
+ type: query
gqlschema: |
type Country @remote {
code: String
@@ -45,9 +44,8 @@
remotevariables: |-
{ "id": "0x1" }
--
- name: "custom query with arguments on fields"
- type: "query"
+- name: custom query with arguments on fields
+ type: query
gqlschema: |
input ExactFilter {
eq: String
@@ -110,9 +108,8 @@
remotevariables: |-
{ "id": "0x1" }
--
- name: "custom mutation with arguments on field"
- type: "mutation"
+- name: custom mutation with arguments on field
+ type: mutation
gqlschema: |
input ExactFilter {
eq: String
@@ -252,9 +249,8 @@
}
}
--
- name: "custom field single mode"
- type: "field"
+- name: custom field single mode
+ type: field
gqlschema: |
type User {
id: ID!
@@ -289,9 +285,8 @@
inputvariables: |-
{ "id": "0x2", "age": 10 }
--
- name: "custom field batch mode"
- type: "field"
+- name: custom field batch mode
+ type: field
gqlschema: |
type User {
id: ID!
@@ -330,4 +325,4 @@
remotequery: |-
query($random: [UserInput]) { userName(random: $random)}
inputvariables: |-
- {"random": [{ "id": "0x2", "age": "10" },{ "id": "0x3", "age": "20"}]}
\ No newline at end of file
+ {"random": [{ "id": "0x2", "age": "10" },{ "id": "0x3", "age": "20"}]}
diff --git a/graphql/schema/dgraph_schemagen_test.yml b/graphql/schema/dgraph_schemagen_test.yml
index 408171ce7d3..cac70c84aca 100644
--- a/graphql/schema/dgraph_schemagen_test.yml
+++ b/graphql/schema/dgraph_schemagen_test.yml
@@ -1,5 +1,5 @@
schemas:
- - name: "Object data type"
+ - name: Object data type
input: |
type A {
id: ID!
@@ -19,7 +19,7 @@ schemas:
}
P.q: uid .
- - name: "Scalar list"
+ - name: Scalar list
input: |
type X {
id: ID!
@@ -31,7 +31,7 @@ schemas:
}
X.names: [string] .
- - name: "Password type"
+ - name: Password type
input: |
type X @secret(field: "pwd"){
id: ID!
@@ -45,8 +45,7 @@ schemas:
X.names: [string] .
X.pwd: password .
-
- - name: "Object list"
+ - name: Object list
input: |
type X {
p: [P!]!
@@ -65,7 +64,7 @@ schemas:
}
P.name: string .
- - name: "Scalar types"
+ - name: Scalar types
input: |
type X {
p: Int
@@ -107,7 +106,7 @@ schemas:
X.v: int .
X.vList: [int] .
- - name: "enum - always gets an index"
+ - name: enum - always gets an index
input: |
type X {
e: E
@@ -122,8 +121,7 @@ schemas:
X.e: string @index(hash) .
X.f: [string] @index(hash) .
-
- - name: "Search indexes are correct"
+ - name: Search indexes are correct
input: |
type X {
i1: Int @search
@@ -231,7 +229,7 @@ schemas:
X.e6: string @index(hash, trigram) .
X.e7: string @index(exact, trigram) .
- - name: "interface and types interact properly"
+ - name: interface and types interact properly
input: |
interface A {
id: ID!
@@ -259,7 +257,7 @@ schemas:
}
C.dob: dateTime .
- - name: "interface using other interface generate type in dgraph"
+  - name: interface using other interface generates type in dgraph
input: |
interface A {
id: ID!
@@ -301,7 +299,7 @@ schemas:
D.link: uid .
D.correct: bool .
- - name: "Schema with union"
+ - name: Schema with union
input: |
interface W {
f1: ID!
@@ -349,7 +347,7 @@ schemas:
data: [uid] .
V.f6: uid .
- - name: "Schema with @dgraph directive."
+ - name: Schema with @dgraph directive
input: |
type A @dgraph(type: "dgraph.type.A") {
id: ID!
@@ -436,7 +434,9 @@ schemas:
dgraph.pList: [int] .
f: float .
- - name: "Schema with multiple language tags, indexes on language tag fields got merged on language untagged field"
+ - name:
+      Schema with multiple language tags, indexes on language tag fields get merged on the
+      language untagged field
input: |
interface Node {
f1: String
@@ -474,7 +474,7 @@ schemas:
Person.address: string @index(fulltext) @lang .
Person.profession: string @lang .
- - name: "Field with @id directive but no search directive gets hash index."
+ - name: Field with @id directive but no search directive gets hash index
input: |
interface A {
id: String! @id
@@ -493,7 +493,7 @@ schemas:
}
B.correct: bool @index(bool) .
- - name: "Field with @id directive gets hash index."
+ - name: Field with @id directive gets hash index
input: |
interface A {
id: String! @id @search(by: [trigram])
@@ -512,7 +512,7 @@ schemas:
}
B.correct: bool @index(bool) .
- - name: "Field with @id directive and a hash arg in search directive generates correct schema."
+ - name: Field with @id directive and a hash arg in search directive generates correct schema
input: |
interface A {
id: String! @id @search(by: ["hash", "term"])
@@ -531,7 +531,7 @@ schemas:
}
B.correct: bool @index(bool) .
- - name: "Field with @id directive and a exact arg in search directive generates correct schema."
+  - name: Field with @id directive and an exact arg in search directive generates correct schema
input: |
interface A {
id: String! @id @search(by: [exact])
@@ -550,7 +550,7 @@ schemas:
}
B.correct: bool @index(bool) .
- - name: "Field with reverse predicate in dgraph directive adds @reverse to predicate."
+ - name: Field with reverse predicate in dgraph directive adds @reverse to predicate
input: |
type Movie {
director: [Person] @dgraph(pred: "~directed.movies")
@@ -566,7 +566,7 @@ schemas:
}
directed.movies: [uid] @reverse .
- - name: "Field with reverse predicate in dgraph directive where actual predicate comes first."
+ - name: Field with reverse predicate in dgraph directive where actual predicate comes first
input: |
type Person {
directed: [Movie] @dgraph(pred: "directed.movies")
@@ -582,7 +582,7 @@ schemas:
type Movie {
}
- - name: "deprecated fields get included in Dgraph schema"
+ - name: deprecated fields get included in Dgraph schema
input: |
type A {
id: ID!
@@ -597,7 +597,7 @@ schemas:
A.p: string .
A.q: string .
- - name: "remote types shouldn't be part of Dgraph schema"
+ - name: remote types shouldn't be part of Dgraph schema
input: |
type B {
id: ID!
@@ -649,8 +649,8 @@ schemas:
}
C.name: string .
- - name: "fields with same @dgraph(pred: ...) and different @search(by: [...]) have indexes
- combined"
+ - name:
+ "fields with same @dgraph(pred: ...) and different @search(by: [...]) have indexes combined"
input: |
type A {
p: String @dgraph(pred: "name") @search(by: ["exact", "term"])
@@ -687,7 +687,7 @@ schemas:
post: string @index(exact, term) .
<公司>: string @index(exact, term) .
- - name: "custom query and mutation shouldn't be part of Dgraph schema"
+ - name: custom query and mutation shouldn't be part of Dgraph schema
input: |
type User @remote {
id: ID!
@@ -708,7 +708,7 @@ schemas:
})
}
- - name: "custom field shouldn't be part of dgraph schema"
+ - name: custom field shouldn't be part of dgraph schema
input: |
type User {
id: ID!
@@ -725,7 +725,7 @@ schemas:
}
User.name: string .
- - name: "Geo field in schema."
+ - name: Geo field in schema
input: |
type Hotel {
id: ID!
@@ -764,7 +764,7 @@ schemas:
Hotel.branches2: geo .
Hotel.branches3: geo @index(geo) .
- - name: "Int field with @id Directive"
+ - name: Int field with @id Directive
input: |
type T {
id : Int! @id
@@ -778,7 +778,7 @@ schemas:
T.id: int @index(int) @upsert .
T.value: string .
- - name: "Int64 field with @id Directive"
+ - name: Int64 field with @id Directive
input: |
type T {
id : Int64! @id
@@ -792,7 +792,7 @@ schemas:
T.id: int @index(int) @upsert .
T.value: string .
- - name: "type extension having @external field of ID type which is @key"
+ - name: type extension having @external field of ID type which is @key
input: |
extend type Product @key(fields: "id") {
id: ID! @external
@@ -810,7 +810,7 @@ schemas:
Product.name: string .
Product.reviews: [string] .
- - name: "type extension having @external field of non ID type which is @key"
+ - name: type extension having @external field of non ID type which is @key
input: |
extend type Product @key(fields: "name") {
id: ID! @external
@@ -825,7 +825,7 @@ schemas:
Product.name: string @index(hash) @upsert .
Product.reviews: [string] .
- - name: "A full valid federation schema"
+ - name: A full valid federation schema
input: |
type Review {
body: String
@@ -873,7 +873,7 @@ schemas:
User.username: string .
User.reviews: [uid] .
- - name: "nothing is added in dgraph schema with lambdaOnMutate"
+ - name: nothing is added in dgraph schema with lambdaOnMutate
input: |
type T @lambdaOnMutate(add: true, update: true, delete: true) {
id : ID!
@@ -884,4 +884,3 @@ schemas:
T.value
}
T.value: string .
-
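
dgraph_schemagen_test.yml maps an input GraphQL schema to the Dgraph schema that should be
generated for it. A minimal sketch of one mapping, matching the object-field cases above (the
output key name is assumed, since only the generated lines are visible in these hunks):

- name: Example object field mapping (illustrative only)
  input: |
    type A {
      id: ID!
      p: P
    }
    type P {
      id: ID!
      name: String
    }
  output: |
    type A {
      A.p
    }
    A.p: uid .
    type P {
      P.name
    }
    P.name: string .
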
diff --git a/graphql/schema/gqlschema_test.yml b/graphql/schema/gqlschema_test.yml
index a82500b4426..19fc92afa61 100644
--- a/graphql/schema/gqlschema_test.yml
+++ b/graphql/schema/gqlschema_test.yml
@@ -1,6 +1,5 @@
invalid_schemas:
- -
- name: "More than 1 id field"
+ - name: More than 1 id field
input: |
type P {
id1: ID!
@@ -12,7 +11,7 @@ invalid_schemas:
{"message":"Type P; is invalid, a type must have atleast one field that is not of ID! type and doesn't have @custom/@lambda directive.", "locations":[{"line":1, "column":6}]}
]
- - name: "Geo field with invalid argument in @search."
+ - name: Geo field with invalid argument in @search
input: |
type Hotel {
id: ID!
@@ -24,7 +23,7 @@ invalid_schemas:
]
-
- name: "UID as a field name"
+ name: UID as a field name
input: |
type P {
uid: String
@@ -34,7 +33,7 @@ invalid_schemas:
]
-
- name: "Query, Mutation in initial schema"
+ name: Query, Mutation in initial schema
input: |
type Query {
getAuthor(id: ID): Author!
@@ -43,16 +42,16 @@ invalid_schemas:
getAuthor(id: ID): Author!
}
errlist: [
- {"message":"GraphQL Query and Mutation types are only allowed to have fields
+ {"message": GraphQL Query and Mutation types are only allowed to have fields
with @custom/@lambda directive. Other fields are built automatically for you. Found Query getAuthor
- without @custom/@lambda.", "locations":[{"line":1, "column":6}]},
- {"message":"GraphQL Query and Mutation types are only allowed to have fields with
+ without @custom/@lambda., "locations":[{"line":1, "column":6}]},
+ {"message":GraphQL Query and Mutation types are only allowed to have fields with
@custom/@lambda directive. Other fields are built automatically for you. Found Mutation getAuthor
- without @custom/@lambda.", "locations":[{"line":4, "column":6}]},
+ without @custom/@lambda., "locations":[{"line":4, "column":6}]},
]
-
- name: "No ID list of any kind"
+ name: No ID list of any kind
input: |
type A {
f: [ID]
@@ -64,7 +63,7 @@ invalid_schemas:
-
- name: "No nested list of any kind"
+ name: No nested list of any kind
input: |
type A {
f: [[String]]
@@ -74,7 +73,7 @@ invalid_schemas:
]
-
- name: "Enum indexes clash trigram and regexp"
+ name: Enum indexes clash trigram and regexp
input: |
type T {
f: E @search(by: ["trigram", "regexp"])
@@ -88,7 +87,7 @@ invalid_schemas:
]
-
- name: "Enum indexes clash hash and exact"
+ name: Enum indexes clash hash and exact
input: |
type T {
f: E @search(by: ["hash", "exact"])
@@ -101,7 +100,7 @@ invalid_schemas:
]
-
- name: "HNSW index options malformed"
+ name: HNSW index options malformed
input: |
type T {
f: [Float!] @embedding @search(by: ["hnsw(metric:dotproduct)"])
@@ -112,17 +111,17 @@ invalid_schemas:
]
-
- name: "Reference type that is not in input schema"
+ name: Reference type that is not in input schema
input: |
type T {
f: Author
}
errlist: [
- {"message": "Undefined type Author.", "locations": [{"line": 2, "column": 8}]}
+ {"message": Undefined type Author., "locations": [{"line": 2, "column": 8}]}
]
-
- name: "Unsupported definitions in initial schema"
+ name: Unsupported definitions in initial schema
input: |
scalar Int
interface P {
@@ -133,30 +132,30 @@ invalid_schemas:
x: X!
}
errlist: [
- {"message":"You can't add scalar definitions. Only type, interface, union, input and enums are allowed in initial schema.", "locations":[{"line":1, "column":8}]}
+ {"message":"You can't add scalar definitions. Only type, interface, union, input and enums are allowed in initial schema.", "locations":[{"line":1, "column":8}]}
]
-
- name: "union members can't be non-object types - Interface"
+ name: union members can't be non-object types - Interface
input: |
interface I {
f: String
}
union U = I
errlist: [
- {"message":"UNION type \"I\" must be OBJECT.", "locations":[{"line":4, "column":7}]}
+ {"message": UNION type \"I\" must be OBJECT., "locations":[{"line":4, "column":7}]}
]
-
- name: "union members can't be non-object types - Scalar"
+ name: union members can't be non-object types - Scalar
input: |
union U = String
errlist: [
- {"message":"UNION type \"String\" must be OBJECT.", "locations":[{"line":1, "column":7}]}
+ {"message": UNION type \"String\" must be OBJECT., "locations":[{"line":1, "column":7}]}
]
-
- name: "union members can't be non-object types - Enum"
+ name: union members can't be non-object types - Enum
input: |
enum E {
E1
@@ -164,18 +163,18 @@ invalid_schemas:
}
union U = E
errlist: [
- {"message":"UNION type \"E\" must be OBJECT.", "locations":[{"line":5, "column":7}]}
+ {"message": UNION type \"E\" must be OBJECT., "locations":[{"line":5, "column":7}]}
]
-
- name: "union members can't be non-object types - Input Object"
+ name: union members can't be non-object types - Input Object
input: |
input I {
f: String
}
union U = I
errlist: [
- {"message":"UNION type \"I\" must be OBJECT.", "locations":[{"line":4, "column":7}]}
+ {"message": UNION type \"I\" must be OBJECT., "locations":[{"line":4, "column":7}]}
]
-
@@ -193,7 +192,7 @@ invalid_schemas:
]
-
- name: "union can't be used with @withSubscription"
+ name: union can't be used with @withSubscription
input: |
type X {
f1: String
@@ -207,7 +206,7 @@ invalid_schemas:
]
-
- name: "union can't be used with @secret"
+ name: union can't be used with @secret
input: |
type X {
f1: String
@@ -221,7 +220,7 @@ invalid_schemas:
]
-
- name: "union can't be used with @auth"
+ name: union can't be used with @auth
input: |
type X {
f1: String
@@ -235,7 +234,7 @@ invalid_schemas:
]
-
- name: "union can't be used with @hasInverse, @search, @id"
+ name: union can't be used with @hasInverse, @search, @id
input: |
type X {
f1: String
@@ -254,7 +253,7 @@ invalid_schemas:
]
-
- name: "Typename is reserved word"
+ name: Typename is reserved word
input: |
type String {
id: ID!
@@ -281,7 +280,7 @@ invalid_schemas:
{"message":"uid is a reserved word, so you can't declare a type with this name. Pick a different name for the type.", "locations":[{"line":14, "column":6}]},
]
- - name: "Point is reserved word"
+ - name: Point is reserved word
input: |
type Point {
id: ID!
@@ -291,7 +290,7 @@ invalid_schemas:
]
-
- name: "More than 1 errors"
+      name: More than 1 error
input: |
type X {
i1: ID!
@@ -306,7 +305,7 @@ invalid_schemas:
]
-
- name: "Non linking inverse directive with correct field type"
+ name: Non linking inverse directive with correct field type
input: |
type Post {
author: Author! @hasInverse(field: "posts")
@@ -320,7 +319,7 @@ invalid_schemas:
]
-
- name: "Multiple hasInverse to one field"
+ name: Multiple hasInverse to one field
input: |
type Post {
author: Author! @hasInverse(field: "posts")
@@ -335,7 +334,7 @@ invalid_schemas:
]
-
- name: "Non linking inverse directives"
+ name: Non linking inverse directives
input: |
type X {
f1: P @hasInverse(field: "f1")
@@ -350,7 +349,7 @@ invalid_schemas:
]
-
- name: "Inverse Directive on non object field"
+ name: Inverse Directive on non object field
input: |
type X {
f1: String @hasInverse(field: "f1")
@@ -360,7 +359,7 @@ invalid_schemas:
]
-
- name: "Inverse Directive doesn't have field argument"
+ name: Inverse Directive doesn't have field argument
input: |
type X {
f1: X @hasInverse
@@ -370,7 +369,7 @@ invalid_schemas:
]
-
- name: "hasInverse on non existing field"
+ name: hasInverse on non existing field
input: |
type X {
f1: [P!]! @hasInverse(field: "f2")
@@ -383,7 +382,7 @@ invalid_schemas:
]
-
- name: "ID can't have the @search directive"
+ name: ID can't have the @search directive
input: |
type X {
id: ID! @search
@@ -404,7 +403,7 @@ invalid_schemas:
]
-
- name: "Search will error on type that can't have the @search"
+ name: Search will error on type that can't have the @search
input: |
type X {
y: Y @search
@@ -419,7 +418,7 @@ invalid_schemas:
]
-
- name: "Search (with arg) will error that can't have the @search"
+ name: Search (with arg) will error that can't have the @search
input: |
type X {
y: Y @search(by: [term])
@@ -435,7 +434,7 @@ invalid_schemas:
]
-
- name: "Search with wrong arg with error on default search type"
+ name: Search with wrong arg with error on default search type
input: |
type X {
y: Int @search(by: [term])
@@ -448,7 +447,7 @@ invalid_schemas:
]
-
- name: "Search with wrong arg (int) with error on default search type (Int64)"
+ name: Search with wrong arg (int) with error on default search type (Int64)
input: |
type X {
y: Int64 @search(by: [int])
@@ -461,7 +460,7 @@ invalid_schemas:
]
-
- name: "Search with wrong arg with error on search type"
+ name: Search with wrong arg with error on search type
input: |
type X {
y: String @search(by: [day])
@@ -474,7 +473,7 @@ invalid_schemas:
]
-
- name: "Search with wrong arg for the index"
+ name: Search with wrong arg for the index
input: |
type X {
y: String @search(by: ["hash", "hour"])
@@ -487,7 +486,7 @@ invalid_schemas:
]
-
- name: "Search without []"
+ name: Search without []
input: |
type X {
y: String @search(by: "hash")
@@ -499,7 +498,7 @@ invalid_schemas:
]
-
- name: "Search doesn't allow hash and exact together"
+ name: Search doesn't allow hash and exact together
input: |
type X {
y: String @search(by: ["hash", "exact"])
@@ -511,7 +510,7 @@ invalid_schemas:
]
-
- name: "Search with multiple datetime index"
+ name: Search with multiple datetime index
input: |
type X {
y: DateTime @search(by: ["hour", "month"])
@@ -523,7 +522,7 @@ invalid_schemas:
]
-
- name: "Search doesn't allow trigram and regexp together"
+ name: Search doesn't allow trigram and regexp together
input: |
type X {
y: String @search(by: ["trigram", "regexp"])
@@ -535,7 +534,7 @@ invalid_schemas:
]
-
- name: "Search doesn't accept bogus args"
+ name: Search doesn't accept bogus args
input: |
type X {
y: String @search(by: ["bogus"])
@@ -547,7 +546,7 @@ invalid_schemas:
]
-
- name: "Type implements an interface which wasn't defined"
+ name: Type implements an interface which wasn't defined
input: |
type X implements Y {
y: String
@@ -558,7 +557,7 @@ invalid_schemas:
]
-
- name: "Type implements an interface with the field name repeated but different type"
+ name: Type implements an interface with the field name repeated but different type
input: |
interface Y {
id: ID
@@ -568,12 +567,12 @@ invalid_schemas:
y: String
}
errlist: [
- {"message": "For type X to implement interface Y the field id must have type ID",
+ {"message": For type X to implement interface Y the field id must have type ID,
"locations":[{"line":4, "column":6}]}
]
-
- name: "Type implements an interface with no field of its own"
+ name: Type implements an interface with no field of its own
input: |
interface Y {
id: ID
@@ -586,7 +585,7 @@ invalid_schemas:
]
-
- name: "Type implements from two interfaces where both have ID with different type"
+ name: Type implements from two interfaces where both have ID with different type
input: |
interface X {
id: ID!
@@ -598,12 +597,12 @@ invalid_schemas:
name: String
}
errlist: [
- {"message": "field id is of type ID in interface Y and is of type ID! in interface X",
+ {"message": field id is of type ID in interface Y and is of type ID! in interface X,
"locations":[{"line":7, "column":6}]}
]
-
- name: "List of Boolean is not allowed"
+ name: List of Boolean is not allowed
input: |
type X {
q: [Boolean]
@@ -614,7 +613,7 @@ invalid_schemas:
]
-
- name: "ID field can't have @dgraph directive"
+ name: ID field can't have @dgraph directive
input: |
type X {
id: ID @dgraph(pred: "X.id")
@@ -627,7 +626,7 @@ invalid_schemas:
]
-
- name: "Field with @id directive has wrong type"
+ name: Field with @id directive has wrong type
input: |
type X {
f1: [String] @id
@@ -649,7 +648,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with wrong argument produces an error"
+ name: Dgraph directive with wrong argument produces an error
input: |
type X {
f1: String! @dgraph(type: "f1")
@@ -661,7 +660,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with no argument on field produces an error"
+ name: Dgraph directive with no argument on field produces an error
input: |
type X {
f1: String! @dgraph
@@ -673,7 +672,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with wrong argument type on field produces an error"
+ name: Dgraph directive with wrong argument type on field produces an error
input: |
type X {
f1: String! @dgraph(pred: 2)
@@ -684,40 +683,39 @@ invalid_schemas:
]
-
- name: "Dgraph directive with wrong argument on type produces an error"
+ name: Dgraph directive with wrong argument on type produces an error
input: |
type X @dgraph(pred: "X") {
f1: String!
}
errlist: [
- {"message": "Type X; type argument for @dgraph directive should not be empty.",
+ {"message": Type X; type argument for @dgraph directive should not be empty.,
"locations":[{"line":1, "column":9}]}
]
- -
- name: "Dgraph directive with no argument on type produces an error"
+ - name: Dgraph directive with no argument on type produces an error
input: |
type X @dgraph {
f1: String!
}
errlist: [
- {"message": "Type X; type argument for @dgraph directive should not be empty.",
+ {"message": Type X; type argument for @dgraph directive should not be empty.,
"locations":[{"line":1, "column":9}]}
]
-
- name: "Dgraph directive with wrong argument type on type produces an error"
+ name: Dgraph directive with wrong argument type on type produces an error
input: |
type X @dgraph(type: 2) {
f1: String!
}
errlist: [
- {"message": "Type X; type argument for @dgraph directive should of type String.",
+ {"message": Type X; type argument for @dgraph directive should of type String.,
"locations":[{"line":1, "column":9}]}
]
-
- name: "Dgraph directive with reverse pred argument on scalar field produces an error"
+ name: Dgraph directive with reverse pred argument on scalar field produces an error
input: |
type X {
f1: String! @dgraph(pred:"~movie")
@@ -733,7 +731,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument on field without a corresponding reverse field is an error"
+ name: Dgraph directive with reverse pred argument on field without a corresponding reverse field is an error
input: |
type Y {
g1: String!
@@ -748,7 +746,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument along with hasInverse produces an error"
+ name: Dgraph directive with reverse pred argument along with hasInverse produces an error
input: |
type X {
f1: [Y] @dgraph(pred: "f1")
@@ -763,8 +761,8 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument along with hasInverse in forward direction
- produces an error"
+ name: Dgraph directive with reverse pred argument along with hasInverse in forward direction
+ produces an error
input: |
type X {
f1: [Y] @dgraph(pred: "f1") @hasInverse(field: "f1")
@@ -779,7 +777,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument matching with wrong type produces an error"
+ name: Dgraph directive with reverse pred argument matching with wrong type produces an error
input: |
type Z {
f1: String!
@@ -797,7 +795,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument matching with wrong type implementing an interface produces an error"
+ name: Dgraph directive with reverse pred argument matching with wrong type implementing an interface produces an error
input: |
type Z {
f1: String!
@@ -820,7 +818,7 @@ invalid_schemas:
]
-
- name: "Dgraph directive with reverse pred argument matching with wrong type implementing multiple interfaces produces an error"
+ name: Dgraph directive with reverse pred argument matching with wrong type implementing multiple interfaces produces an error
input: |
type Z {
f1: String!
@@ -847,7 +845,7 @@ invalid_schemas:
]
-
- name: "Field with a dgraph directive with reverse pred argument should be a list"
+ name: Field with a dgraph directive with reverse pred argument should be a list
input: |
type X {
f1: [Y] @dgraph(pred: "f1")
@@ -863,18 +861,18 @@ invalid_schemas:
-
- name: "Empty field in secret directive"
+ name: Empty field in secret directive
input: |
type X @secret(field:""){
f1: String!
}
errlist: [
- {"message": "Type X; Argument \"field\" of secret directive is empty",
+ {"message": Type X; Argument \"field\" of secret directive is empty,
"locations":[{"line":1, "column":6}]},
]
-
- name: "Multiple secret directive"
+ name: Multiple secret directive
input: |
type X @secret(field:"password") @secret(field: "psss"){
f1: String!
@@ -884,14 +882,14 @@ invalid_schemas:
"locations":[{"line":1, "column":6}]},
]
- - name: "Conflicting secret directive and field"
+ - name: Conflicting secret directive and field
input: |
type X @secret(field:"f1"){
f1: String!
}
errlist: [
- {"message": "Type X; has a secret directive and field of the same name f1",
- "locations":[{"line":1, "column":6}]},
+ {"message": Type X; has a secret directive and field of the same name f1,
+ "locations":[{"line":1, "column":6}]},
]
-
@@ -921,24 +919,25 @@ invalid_schemas:
f10: String @dgraph(pred: "pwd")
}
errlist: [
- {"message": "Type X; implements interfaces [V W], all of which have fields with @dgraph predicate: ff1. These fields must use different Dgraph predicates.",
- "locations":[{"line":10, "column":6}]},
- {"message": "Type X; Field f9: has the @dgraph directive, which conflicts with interface W; field f4, that this type implements. These fields must use different Dgraph predicates.",
- "locations":[{"line":14, "column":3}]},
- {"message": "Type Y; Field f2: has type Int, which is different to type W; field f2, which has the same @dgraph directive but type String. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
- "locations":[{"line":17, "column":3}]},
- {"message": "Type Y; Field f3: has type Float, which is different to type W; field f3, which has the same @dgraph directive but type [Float]. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
- "locations":[{"line":18, "column":3}]},
- {"message": "Type Y; Field f6: has type X, which is different to type X; field f6, which has the same @dgraph directive but type Y. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
- "locations":[{"line":19, "column":3}]},
- {"message": "Type Y; Field f7: doesn't have @id directive, which conflicts with type X; field f7, which has the same @dgraph directive along with @id directive. Both these fields must either use @id directive, or use different Dgraph predicates.",
- "locations":[{"line":20, "column":3}]},
- {"message": "Type Y; Field f8: has type Int, which is different to type X; field f8, which has the same @dgraph directive but type String. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
- "locations":[{"line":21, "column":3}]},
- {"message": "Type Y; Field f10: has the @dgraph predicate, but that conflicts with type W @secret directive on the same predicate. @secret predicates are stored encrypted and so the same predicate can't be used as a String.",
- "locations":[{"line":22, "column":3}]}]
+ {"message": "Type X; implements interfaces [V W], all of which have fields with @dgraph predicate: ff1. These fields must use different Dgraph predicates.",
+ "locations":[{"line":10, "column":6}]},
+ {"message": "Type X; Field f9: has the @dgraph directive, which conflicts with interface W; field f4, that this type implements. These fields must use different Dgraph predicates.",
+ "locations":[{"line":14, "column":3}]},
+ {"message": "Type Y; Field f2: has type Int, which is different to type W; field f2, which has the same @dgraph directive but type String. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
+ "locations":[{"line":17, "column":3}]},
+ {"message": "Type Y; Field f3: has type Float, which is different to type W; field f3, which has the same @dgraph directive but type [Float]. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
+ "locations":[{"line":18, "column":3}]},
+ {"message": "Type Y; Field f6: has type X, which is different to type X; field f6, which has the same @dgraph directive but type Y. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
+ "locations":[{"line":19, "column":3}]},
+ {"message": "Type Y; Field f7: doesn't have @id directive, which conflicts with type X; field f7, which has the same @dgraph directive along with @id directive. Both these fields must either use @id directive, or use different Dgraph predicates.",
+ "locations":[{"line":20, "column":3}]},
+ {"message": "Type Y; Field f8: has type Int, which is different to type X; field f8, which has the same @dgraph directive but type String. These fields must have either the same GraphQL types, or use different Dgraph predicates.",
+ "locations":[{"line":21, "column":3}]},
+ {"message": "Type Y; Field f10: has the @dgraph predicate, but that conflicts with type W @secret directive on the same predicate. @secret predicates are stored encrypted and so the same predicate can't be used as a String.",
+ "locations":[{"line":22, "column":3}]}
+ ]
- - name: "user-defined types can't have same name as the types generated for other user-defined types or any inbuilt types"
+ - name: user-defined types can't have same name as the types generated for other user-defined types or any inbuilt types
input: |
type Author {
id: ID!
@@ -956,9 +955,9 @@ invalid_schemas:
name: String
}
errlist: [
- {"message": "UpdateAuthorInput is a reserved word, so you can't declare a INPUT_OBJECT with this name. Pick a different name for the INPUT_OBJECT.", "locations":[{"line":5, "column":7}]},
- {"message": "URef is a reserved word, so you can't declare a INPUT_OBJECT with this name. Pick a different name for the INPUT_OBJECT.", "locations":[{"line":10, "column":7}]},
- {"message": "IntFilter is a reserved word, so you can't declare a OBJECT with this name. Pick a different name for the OBJECT.", "locations":[{"line":13, "column":6}]},
+ {"message": "UpdateAuthorInput is a reserved word, so you can't declare a INPUT_OBJECT with this name. Pick a different name for the INPUT_OBJECT.", "locations":[{"line":5, "column":7}]},
+ {"message": "URef is a reserved word, so you can't declare a INPUT_OBJECT with this name. Pick a different name for the INPUT_OBJECT.", "locations":[{"line":10, "column":7}]},
+ {"message": "IntFilter is a reserved word, so you can't declare a OBJECT with this name. Pick a different name for the OBJECT.", "locations":[{"line":13, "column":6}]},
]
- name: "@custom query can't have same name as the query generated for other types"
@@ -972,8 +971,8 @@ invalid_schemas:
getAuthor(id: ID): Author! @custom(http: {url: "http://blah.com", method: "GET"})
}
errlist: [
- {"message": "getAuthor is a reserved word, so you can't declare a query with this name. Pick a different name for the query.",
- "locations":[{"line":7, "column":3}]},
+ {"message": "getAuthor is a reserved word, so you can't declare a query with this name. Pick a different name for the query.",
+ "locations":[{"line":7, "column":3}]},
]
- name: "@custom mutation can't have same name as the mutation generated for other types"
@@ -987,8 +986,8 @@ invalid_schemas:
addAuthor(id: ID): Author! @custom(http: {url: "http://blah.com", method: "GET"})
}
errlist: [
- {"message": "addAuthor is a reserved word, so you can't declare a mutation with this name. Pick a different name for the mutation.",
- "locations":[{"line":7, "column":3}]},
+ {"message": "addAuthor is a reserved word, so you can't declare a mutation with this name. Pick a different name for the mutation.",
+ "locations":[{"line":7, "column":3}]},
]
- name: "@custom directive with extra arguments"
@@ -1002,8 +1001,8 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "blah.com", method: "GET"}, dql: "random")
}
errlist: [
- {"message": "Type Query; Field getAuthor1: has 2 arguments for @custom directive, it should contain exactly one of `http` or `dql` arguments.",
- "locations":[{"line":7, "column":32}]},
+ {"message": "Type Query; Field getAuthor1: has 2 arguments for @custom directive, it should contain exactly one of `http` or `dql` arguments.",
+ "locations":[{"line":7, "column":32}]},
]
- name: "@custom directive without http or dql argument"
@@ -1017,7 +1016,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(https: {url: "blah.com", method: "GET"})
}
errlist: [
- {"message": "https is not supported as an argument for custom directive.",
+ {"message": https is not supported as an argument for custom directive.,
"locations":[{"line":7, "column":32}]},
]
@@ -1033,8 +1032,8 @@ invalid_schemas:
dql: "{me(func: uid(0x1))}")
}
errlist: [
- {"message": "Type Query; Field getAuthor1: has 2 arguments for @custom directive, it should contain exactly one of `http` or `dql` arguments.",
- "locations":[{"line":7, "column":32}]},
+ {"message": "Type Query; Field getAuthor1: has 2 arguments for @custom directive, it should contain exactly one of `http` or `dql` arguments.",
+ "locations":[{"line":7, "column":32}]},
]
-
@@ -1052,9 +1051,9 @@ invalid_schemas:
""")
}
errlist: [
- {"message": "Type Author; Field name: @custom directive with `dql` can be used only on
- queries.",
- "locations": [{"line": 4,"column": 25}]}
+ {"message": "Type Author; Field name: @custom directive with `dql` can be used only on
+ queries.",
+ "locations": [{"line": 4,"column": 25}]}
]
-
@@ -1070,8 +1069,8 @@ invalid_schemas:
""")
}
errlist: [
- {"message": "Type Mutation; Field customMutation: @custom directive with `dql` can be used only on queries.",
- "locations": [{"line": 2,"column": 35}]}
+ {"message": "Type Mutation; Field customMutation: @custom directive with `dql` can be used only on queries.",
+ "locations": [{"line": 2,"column": 35}]}
]
-
@@ -1081,8 +1080,8 @@ invalid_schemas:
query1: String! @custom(dql: 5)
}
errlist: [
- {"message": "Type Query; Field query1: dql argument for @custom directive must be of type String.",
- "locations": [{"line": 2,"column": 27}]}
+ {"message": "Type Query; Field query1: dql argument for @custom directive must be of type String.",
+ "locations": [{"line": 2,"column": 27}]}
]
-
@@ -1092,8 +1091,8 @@ invalid_schemas:
query1: String! @custom(dql: " ")
}
errlist: [
- {"message": "Type Query; Field query1: dql argument for @custom directive must not be empty.",
- "locations": [{"line": 2,"column": 27}]}
+ {"message": "Type Query; Field query1: dql argument for @custom directive must not be empty.",
+ "locations": [{"line": 2,"column": 27}]}
]
-
@@ -1109,8 +1108,8 @@ invalid_schemas:
""")
}
errlist: [
- {"message": "Type Query; Field query1: Argument arg1: must be of a scalar type. @custom DQL queries accept only scalar arguments.",
- "locations": [{"line": 2,"column": 43}]}
+ {"message": "Type Query; Field query1: Argument arg1: must be of a scalar type. @custom DQL queries accept only scalar arguments.",
+ "locations": [{"line": 2,"column": 43}]}
]
-
@@ -1125,7 +1124,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "123", method: "GET"})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; url field inside @custom directive is invalid.",
+ {"message": Type Query; Field getAuthor1; url field inside @custom directive is invalid.,
"locations":[{"line":7, "column":52}]},
]
@@ -1141,7 +1140,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "http://google.com/$idm", method: "GET"})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; url path inside @custom directive uses an argument idm that is not defined.",
+ {"message": Type Query; Field getAuthor1; url path inside @custom directive uses an argument idm that is not defined.,
"locations":[{"line":7, "column":52}]},
]
@@ -1157,7 +1156,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "http://google.com/$id", method: "GET"})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; url path inside @custom directive uses an argument id that can be null.",
+ {"message": Type Query; Field getAuthor1; url path inside @custom directive uses an argument id that can be null.,
"locations":[{"line":7, "column":52}]},
]
@@ -1173,7 +1172,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "http://google.com?a=$idm", method: "GET"})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; url query inside @custom directive uses an argument idm that is not defined.",
+ {"message": Type Query; Field getAuthor1; url query inside @custom directive uses an argument idm that is not defined.,
"locations":[{"line":7, "column":52}]},
]
@@ -1189,7 +1188,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "http://google.com/", method: "GETS"})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; method field inside @custom directive can only be GET/POST/PUT/PATCH/DELETE.",
+ {"message": Type Query; Field getAuthor1; method field inside @custom directive can only be GET/POST/PUT/PATCH/DELETE.,
"locations":[{"line":7, "column":82}]},
]
@@ -1205,7 +1204,7 @@ invalid_schemas:
getAuthor1(id: ID): Author! @custom(http: {url: "http://google.com/", method: "GET", mode: SINGLE})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; mode field inside @custom directive can't be present on Query/Mutation.",
+ {"message": Type Query; Field getAuthor1; mode field inside @custom directive can't be present on Query/Mutation.,
"locations":[{"line":7, "column":94}]},
]
@@ -1223,7 +1222,7 @@ invalid_schemas:
author: Author! @custom(http: {url: "http://google.com/", method: "GET", mode: RANDOM})
}
errlist: [
- {"message": "Type Post; Field author; mode field inside @custom directive can only be SINGLE/BATCH.",
+ {"message": Type Post; Field author; mode field inside @custom directive can only be SINGLE/BATCH.,
"locations":[{"line":9, "column":82}]},
]
@@ -1346,7 +1345,7 @@ invalid_schemas:
})
}
errlist: [
- {"message": "Type Query; Field getAuthor1; body template inside @custom directive uses an argument idm that is not defined.",
+ {"message": Type Query; Field getAuthor1; body template inside @custom directive uses an argument idm that is not defined.,
"locations":[{"line":10, "column":12}]},
]
@@ -1653,7 +1652,7 @@ invalid_schemas:
}
errlist: [
{
- "message": "Type Author; Field id; custom directive not allowed on field of type ID! or field with @id directive.",
+ "message": Type Author; Field id; custom directive not allowed on field of type ID! or field with @id directive.,
"locations": [
{
"line": 2,
@@ -1694,7 +1693,7 @@ invalid_schemas:
bar: String
}
errlist: [
- {"message": "Type Author; Field name; custom directive not allowed on field of type ID! or field with @id directive.",
+ {"message": Type Author; Field name; custom directive not allowed on field of type ID! or field with @id directive.,
"locations":[{"line":3, "column":22}]},
]
@@ -2003,7 +2002,7 @@ invalid_schemas:
}
errlist: [
{
- "message": "Type Author; Field name; @custom directive is only allowed on fields where the type definition has a field with type ID! or a field with @id directive.",
+ "message": Type Author; Field name; @custom directive is only allowed on fields where the type definition has a field with type ID! or a field with @id directive.,
"locations": [
{
"line": 3,
@@ -2044,7 +2043,7 @@ invalid_schemas:
bar: String
}
errlist: [
- {"message": "Type Author; Field name; custom directive not allowed along with @search directive.",
+ {"message": Type Author; Field name; custom directive not allowed along with @search directive.,
"locations":[{"line":3, "column":26}]},
]
@@ -2061,7 +2060,7 @@ invalid_schemas:
bar: String
}
errlist: [
- {"message": "Type Author; Field name; custom directive not allowed along with @dgraph directive.",
+ {"message": Type Author; Field name; custom directive not allowed along with @dgraph directive.,
"locations":[{"line":3, "column":39}]},
]
@@ -2079,7 +2078,7 @@ invalid_schemas:
}
errlist: [
{
- "message": "Type Author; Field name; url path inside @custom directive uses a field fooz that is not defined.",
+ "message": Type Author; Field name; url path inside @custom directive uses a field fooz that is not defined.,
"locations": [
{
"line": 5,
@@ -2088,7 +2087,7 @@ invalid_schemas:
]
},
{
- "message": "Type Author; Field name; url path inside @custom directive uses a field bar that can be null.",
+ "message": Type Author; Field name; url path inside @custom directive uses a field bar that can be null.,
"locations": [
{
"line": 5,
@@ -2113,7 +2112,7 @@ invalid_schemas:
}
errlist: [
{
- "message": "Type Author; Field name; url path inside @custom directive uses a field foo that can be null.",
+ "message": Type Author; Field name; url path inside @custom directive uses a field foo that can be null.,
"locations": [
{
"line": 6,
@@ -2122,7 +2121,7 @@ invalid_schemas:
]
},
{
- "message": "Type Author; Field name; url path inside @custom directive uses a field bar that can be null.",
+ "message": Type Author; Field name; url path inside @custom directive uses a field bar that can be null.,
"locations": [
{
"line": 6,
@@ -2243,7 +2242,7 @@ invalid_schemas:
})
}
errlist: [
- {"message": "Type Author; Field name; url query inside @custom directive uses a field fooz that is not defined.",
+ {"message": Type Author; Field name; url query inside @custom directive uses a field fooz that is not defined.,
"locations":[{"line":5, "column":11}]},
]
@@ -2254,7 +2253,7 @@ invalid_schemas:
userRole: String @search(by: [hash])
}
errlist: [
- {"message": "Directive auth is not applicable on FIELD_DEFINITION.",
+ {"message": Directive auth is not applicable on FIELD_DEFINITION.,
"locations":[{"line":2, "column":26}]},
]
@@ -2276,7 +2275,7 @@ invalid_schemas:
})
}
errlist: [
- {"message": "Type Class; cannot have both @auth and @remote directive",
+ {"message": Type Class; cannot have both @auth and @remote directive,
"locations":[{"line":1, "column":6}]},
]
@@ -2298,7 +2297,7 @@ invalid_schemas:
})
}
errlist: [
- {"message": "Type Class; cannot have both @withSubscription and @remote directive",
+ {"message": Type Class; cannot have both @withSubscription and @remote directive,
"locations":[{"line":1, "column":6}]},
]
@@ -2411,7 +2410,7 @@ invalid_schemas:
]
-
- name: "invalid value for skip introspection"
+ name: invalid value for skip introspection
input: |
type Author {
id: ID!
@@ -2437,7 +2436,7 @@ invalid_schemas:
]
-
- name: "type can't just have ID! type field"
+ name: type can't just have ID! type field
input: |
type Author {
id: ID!
@@ -2449,7 +2448,7 @@ invalid_schemas:
]
-
- name: "types must have field which is not of ID! type and doesn't have @custom directive"
+ name: types must have a field which is not of ID! type and doesn't have @custom directive
input: |
type Author {
id: ID!
@@ -2465,7 +2464,7 @@ invalid_schemas:
"locations":[{"line":1, "column":6}]},
]
- - name: "There shoudnt be any reserved arguments on any field"
+ - name: There shouldn't be any reserved arguments on any field
input: |
type T {
f(first: Int): String
@@ -2473,9 +2472,8 @@ invalid_schemas:
errlist: [
{"message": "Type T; Field f: can't have first as an argument because it is a reserved argument.", "locations": [{"line": 2, "column": 3}]}]
- - name: "remote type with @custom directives on fields shouldn't be allowed."
- description: "Remote types are not resolved further currently, hence they shouldn't have
- fields with @custom directive on them."
+ - name: remote type with @custom directives on fields shouldn't be allowed
+ description: Remote types are not resolved further currently, hence they shouldn't have fields with @custom directive on them.
input: |
type User {
id: ID!
@@ -2492,14 +2490,13 @@ invalid_schemas:
})
}
errlist: [
- {"message": "Type School; field name; can't have @custom/@lambda directive as a @remote
- type can't have fields with @custom/@lambda directive.", "locations": [{"line":9, "column":3}]}
+ {"message": Type School; field name; can't have @custom/@lambda directive as a @remote
+ type can't have fields with @custom/@lambda directive., "locations": [{"line":9, "column":3}]}
]
-
- name: "a non-remote type can't have fields which are of remote type"
- description: "This is disallowed because we don't generate UserRef etc., so we can't
- allow adding/updating user from author."
+ name: a non-remote type can't have fields which are of remote type
+ description: This is disallowed because we don't generate UserRef etc., so we can't allow adding/updating user from author.
input: |
type User @remote {
id: ID!
@@ -2512,15 +2509,14 @@ invalid_schemas:
neighbour: [User!]
}
errlist: [
- {"message": "Type Author; field neighbour; is of a type that has @remote directive. Those
- would need to be resolved by a @custom/@lambda directive.",
+ {"message": Type Author; field neighbour; is of a type that has @remote directive. Those
+ would need to be resolved by a @custom/@lambda directive.,
"locations": [{"line":9, "column":3}]}
]
-
- name: "a remote type can't implement a non-remote interface"
- description: "Since we won't be adding/update the remote type, it wouldn't show up in
- getPerson, queryPerson etc., hence causing confusion."
+ name: a remote type can't implement a non-remote interface
+ description: Since we won't be adding/updating the remote type, it wouldn't show up in getPerson, queryPerson etc., hence causing confusion.
input: |
interface Person {
id: ID!
@@ -2530,15 +2526,14 @@ invalid_schemas:
age: Int!
}
errlist: [
- {"message": "Type User; with @remote directive implements interface Person; which doesn't have
- @remote directive.",
+ {"message": Type User; with @remote directive implements interface Person; which doesn't have
+ @remote directive.,
"locations": [{"line":5, "column":6}]}
]
-
- name: "non-remote type can't implement a remote type"
- description: "Dgraph schema generation and possibly the way we do field mapping would have to
- be re-worked to make this work correctly."
+ name: non-remote type can't implement a remote type
+ description: Dgraph schema generation and possibly the way we do field mapping would have to be re-worked to make this work correctly.
input: |
interface Person @remote {
id: ID!
@@ -2548,12 +2543,12 @@ invalid_schemas:
age: Int!
}
errlist: [
- {"message": "Type User; without @remote directive can't implement an interface Person; with
- have @remote directive.",
+ {"message": Type User; without @remote directive can't implement an interface Person; with
+ have @remote directive.,
"locations": [{"line":5, "column":6}]}
]
- - name: "ID field can't have @dgraph directive and @search directive"
+ - name: ID field can't have @dgraph directive and @search directive
input: |
type X {
id: ID @dgraph(pred: "X.id") @search
@@ -2604,7 +2599,7 @@ invalid_schemas:
]
-
- name: "as is reserved keyword - type Name"
+ name: as is reserved keyword - type name
input: |
type As {
id: ID!
@@ -2614,7 +2609,7 @@ invalid_schemas:
{ "message": "As is a reserved word, so you can't declare a type with this name. Pick a different name for the type.", "locations": [ { "line": 1, "column": 6 } ] },
]
- - name: "as is reserved keyword - field name"
+ - name: as is reserved keyword - field name
input: |
type X {
as: ID!
@@ -2624,17 +2619,17 @@ invalid_schemas:
{ "message": "Type X; Field as: as is a reserved keyword and you cannot declare a field with this name.", "locations": [ { "line": 2, "column": 3 } ] },
]
- - name: "as is reserved keyword - type name using @dgraph directive"
+ - name: as is reserved keyword - type name using @dgraph directive
input: |
type X @dgraph(type:"as") {
id: ID!
name: String
}
errlist: [
- { "message": "Type X; type argument 'as' for @dgraph directive is a reserved keyword.", "locations": [ { "line": 1, "column": 9 } ] },
+ { "message": Type X; type argument 'as' for @dgraph directive is a reserved keyword., "locations": [ { "line": 1, "column": 9 } ] },
]
- - name: "as is reserved keyword - field name using @dgraph directive"
+ - name: as is reserved keyword - field name using @dgraph directive
input: |
type X {
id: ID!
@@ -2644,7 +2639,7 @@ invalid_schemas:
{ "message": "Type X; Field name: pred argument 'as' for @dgraph directive is a reserved keyword.", "locations": [ { "line": 3, "column": 17 } ] },
]
- - name: "field type mismatched between implementation and interface"
+ - name: field type mismatched between implementation and interface
input: |
interface I1 {
name: String!
@@ -2653,10 +2648,10 @@ invalid_schemas:
name:String
}
errlist: [
- { "message": "For type I3 to implement interface I1 the field name must have type String!", "locations": [ { "line": 4, "column": 6 } ] },
+ { "message": For type I3 to implement interface I1 the field name must have type String!, "locations": [ { "line": 4, "column": 6 } ] },
]
- - name: "Type implements multiple interfaces with same field name"
+ - name: Type implements multiple interfaces with same field name
input: |
interface I1 {
name: String!
@@ -2668,7 +2663,7 @@ invalid_schemas:
name:String!
}
errlist: [
- { "message": "Field I3.name can only be defined once.", "locations": [ { "line": 2, "column": 5 } ] },
+ { "message": Field I3.name can only be defined once., "locations": [ { "line": 2, "column": 5 } ] },
]
- name: "@external directive can only be used on fields of Type Extension"
@@ -2689,10 +2684,10 @@ invalid_schemas:
reviews: String
}
errlist: [
- { "message": "Type Product; @key directive should not be defined more than once.", "locations": [ { "line": 1, "column": 34 } ] },
+ { "message": Type Product; @key directive should not be defined more than once., "locations": [ { "line": 1, "column": 34 } ] },
]
- - name: "Argument inside @key directive uses field not defined in the type"
+ - name: Argument inside @key directive uses field not defined in the type
input: |
type Product @key(fields: "username") {
id: ID!
@@ -2700,10 +2695,10 @@ invalid_schemas:
reviews: String
}
errlist: [
- { "message": "Type Product; @key directive uses a field username which is not defined inside the type.", "locations": [ { "line": 1, "column":19 } ] },
+ { "message": Type Product; @key directive uses a field username which is not defined inside the type., "locations": [ { "line": 1, "column":19 } ] },
]
- - name: "Argument inside @key directive must have ID field or field with @id directive"
+ - name: Argument inside @key directive must have ID field or field with @id directive
input: |
extend type Product @key(fields: "name") {
id: ID! @external
@@ -2727,7 +2722,7 @@ invalid_schemas:
review: String!
}
errlist: [
- {"message": "Type Product; Type Extension cannot be defined without @key directive", "locations": [ { "line": 13, "column": 12} ] },
+ {"message": Type Product; Type Extension cannot be defined without @key directive, "locations": [ { "line": 13, "column": 12} ] },
]
- name: "@remote directive with @key"
@@ -2743,10 +2738,10 @@ invalid_schemas:
review: String!
}
errlist: [
- {"message": "Type Product; @remote directive cannot be defined with @key directive", "locations": [ { "line": 181, "column": 12} ] },
+ {"message": Type Product; @remote directive cannot be defined with @key directive, "locations": [ { "line": 181, "column": 12} ] },
]
- - name: "directives defined on @external fields that are not @key."
+ - name: directives defined on @external fields that are not @key
input: |
extend type Product @key(fields: "id"){
id: ID! @external
@@ -2800,7 +2795,7 @@ invalid_schemas:
{"message": "Type Product: Field reviews: @requires directive can only be defined on fields in type extensions. i.e., the type must have `@extends` or use `extend` keyword.", "locations": [ { "line": 4, "column": 23} ] }
]
- - name: "argument inside @requires directive is not an @external field."
+ - name: argument inside @requires directive is not an @external field
input: |
extend type Product @key(fields: "id"){
id: ID! @external
@@ -2812,7 +2807,7 @@ invalid_schemas:
review: String!
}
errlist: [
- {"message": "Type Product; Field name must be @external.", "locations": [ { "line": 4, "column": 23} ] }
+ {"message": Type Product; Field name must be @external., "locations": [ { "line": 4, "column": 23} ] }
]
- name: "@provides directive used on field with type that does not have a @key."
@@ -2827,7 +2822,7 @@ invalid_schemas:
name: String
}
errlist: [
- {"message": "Type Product; Field reviews does not return a type that has a @key.", "locations": [ { "line": 4, "column": 23} ] }
+ {"message": Type Product; Field reviews does not return a type that has a @key., "locations": [ { "line": 4, "column": 23} ] }
]
- name: "@provides directive uses a field that is not defined in the extended type"
@@ -2909,10 +2904,10 @@ invalid_schemas:
})
}
errlist: [
- { "message": "Type TwitterUser; @lambdaOnMutate directive not allowed along with @remote directive.", "locations": [{"line": 1, "column": 27}]}
+ { "message": Type TwitterUser; @lambdaOnMutate directive not allowed along with @remote directive., "locations": [{"line": 1, "column": 27}]}
]
- - name: "language tag field can't contain more than on @"
+ - name: language tag field can't contain more than one @
input: |
type Person {
name: String!
@@ -2923,7 +2918,7 @@ invalid_schemas:
"locations": [ { "line": 3, "column": 19 } ] },
]
- - name: "language tag field should be of String type"
+ - name: language tag field should be of String type
input: |
type Person {
name: String!
@@ -2957,7 +2952,7 @@ invalid_schemas:
"locations": [ { "line": 3, "column": 56 } ] },
]
- - name: "unsupported `*` language tag in graphql"
+ - name: unsupported `*` language tag in graphql
input: |
type Person {
name: String!
@@ -3100,20 +3095,20 @@ invalid_schemas:
]
valid_schemas:
- - name: "Multiple fields with @id directive should be allowed"
+ - name: Multiple fields with @id directive should be allowed
input: |
type X {
f1: String! @id
f2: String! @id
}
- - name: "field with @id directive can have exact index"
+ - name: field with @id directive can have exact index
input: |
type X {
f1: String! @id @search(by:[exact])
}
- - name: "Type implements from two interfaces where both have ID"
+ - name: Type implements from two interfaces where both have ID
input: |
interface X {
id: ID
@@ -3125,7 +3120,7 @@ valid_schemas:
name: String
}
- - name: "Type implements an interface with the field definition repeated"
+ - name: Type implements an interface with the field definition repeated
input: |
interface Y {
id: ID
@@ -3137,7 +3132,7 @@ valid_schemas:
y: String
}
- - name: "schema with union"
+ - name: schema with union
input: |
interface W {
f1: ID!
@@ -3177,7 +3172,7 @@ valid_schemas:
}
-
- name: "hasInverse directive on singleton"
+ name: hasInverse directive on singleton
input: |
type X {
f1: Y @hasInverse(field: "f1")
@@ -3187,7 +3182,7 @@ valid_schemas:
}
-
- name: "hasInverse directive on list type 1"
+ name: hasInverse directive on list type 1
input: |
type X {
f1: [Y] @hasInverse(field: "f1")
@@ -3197,7 +3192,7 @@ valid_schemas:
}
-
- name: "hasInverse directive from list type"
+ name: hasInverse directive from list type
input: |
type Post {
postId: ID!
@@ -3208,7 +3203,7 @@ valid_schemas:
posts: [Post!]! @hasInverse(field: "author")
}
-
- name: "hasInverse directive to list type"
+ name: hasInverse directive to list type
input: |
type Post {
postId: ID!
@@ -3220,7 +3215,7 @@ valid_schemas:
}
-
- name: "hasInverse directive on list type 2"
+ name: hasInverse directive on list type 2
input: |
type X {
f1: [Y] @hasInverse(field: "f1")
@@ -3230,7 +3225,7 @@ valid_schemas:
}
-
- name: "Correct search types"
+ name: Correct search types
input: |
type X {
int1: Int @search
@@ -3271,7 +3266,7 @@ valid_schemas:
}
-
- name: "dgraph directive with correct reverse field works"
+ name: dgraph directive with correct reverse field works
input: |
type X {
id: ID!
@@ -3305,7 +3300,7 @@ valid_schemas:
}
-
- name: "initial schema with @custom directive"
+ name: initial schema with @custom directive
input: |
type Author {
id: ID!
@@ -3339,7 +3334,7 @@ valid_schemas:
method: "POST"})
}
- - name: "Schema with @custom directives on fields."
+ - name: Schema with @custom directives on fields
input: |
type Class @remote {
id: ID!
@@ -3363,7 +3358,7 @@ valid_schemas:
}
-
- name: "Schema with @custom directives on field where body requires field with @id directive."
+ name: Schema with @custom directives on field where body requires field with @id directive
input: |
type Class @remote {
id: ID!
@@ -3433,7 +3428,7 @@ valid_schemas:
}
-
- name: "remote type can use other types which are dgraph types"
+ name: remote type can use other types which are dgraph types
input: |
type User @remote {
id: ID!
@@ -3447,7 +3442,7 @@ valid_schemas:
}
-
- name: "remote type can implement a remote type"
+ name: remote type can implement a remote type
input: |
type Car {
id: ID!
@@ -3464,8 +3459,7 @@ valid_schemas:
-
- name: "a non-remote type can have fields which are of remote type if they have @custom
- directive"
+ name: a non-remote type can have fields which are of remote type if they have @custom directive
input: |
type User @remote {
id: ID!
@@ -3483,7 +3477,7 @@ valid_schemas:
}
-
- name: "dgraph directive with reverse edges should work with interfaces"
+ name: dgraph directive with reverse edges should work with interfaces
input: |
type Object {
id: ID!
@@ -3523,7 +3517,7 @@ valid_schemas:
addAuthor(id: ID): Author! @custom(http: {url: "http://blah.com", method: "GET"})
}
- - name: "UpdateAuthorInput is allowed if Author is of remote type"
+ - name: UpdateAuthorInput is allowed if Author is of remote type
input: |
type Author @remote {
id: ID!
@@ -3540,7 +3534,7 @@ valid_schemas:
name: String
}
- - name: "A valid federation schema"
+ - name: A valid federation schema
input: |
type Review {
body: String
@@ -3577,7 +3571,7 @@ valid_schemas:
questionText: String
}
- - name: "Same reverse dgraph predicate can be used by two different GraphQL fields"
+ - name: Same reverse dgraph predicate can be used by two different GraphQL fields
input: |
type X {
f1: [Y] @dgraph(pred: "~link")
@@ -3590,7 +3584,7 @@ valid_schemas:
f4: [X] @dgraph(pred: "link")
}
- - name: "valid schema with multiple language tag fields"
+ - name: valid schema with multiple language tag fields
input: |
interface Node {
f1: String
@@ -3610,7 +3604,7 @@ valid_schemas:
professionEn: String @dgraph(pred: "Person.profession@en")
}
- - name: "valid schema with @id directive having interface argument in interface"
+ - name: valid schema with @id directive having interface argument in interface
input: |
interface Member {
refID: String! @id(interface: true)
@@ -3642,4 +3636,4 @@ valid_schemas:
type LibraryManager {
name: String! @id
manages: [LibraryMember]
- }
\ No newline at end of file
+ }
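For orientation, a minimal sketch (illustrative only, not taken from the test data above) of the @dgraph predicate-conflict rule these cases exercise: two fields that share one Dgraph predicate must agree on their GraphQL type, or the schema is rejected:

  type W {
    f2: String @dgraph(pred: "f2") # predicate f2 holds String values
  }
  type Y {
    f2: Int @dgraph(pred: "f2") # rejected: same predicate, different GraphQL type
  }

Changing Y.f2 to String, or mapping it to a different predicate, satisfies the validator.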
diff --git a/graphql/schema/testdata/schemagen/input/password-type.graphql b/graphql/schema/testdata/schemagen/input/password-type.graphql
index 508ae881b1a..8e2b746cbf0 100644
--- a/graphql/schema/testdata/schemagen/input/password-type.graphql
+++ b/graphql/schema/testdata/schemagen/input/password-type.graphql
@@ -1,4 +1,4 @@
type Author @secret(field: "pwd") {
- name: String! @id
- token: String
+ name: String! @id
+ token: String
}
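A sketch of what the @secret field above buys (hedged: the query shown follows Dgraph's documented check<Type>Password pattern, and the name/pwd values are hypothetical): the pwd value is stored hashed, and a password-check query is generated rather than exposing the field:

  query {
    # verifies the stored hash; returns the Author only if pwd matches
    checkAuthorPassword(name: "alice", pwd: "secret123") {
      name
    }
  }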
diff --git a/graphql/schema/testdata/schemagen/input/searchables-references.graphql b/graphql/schema/testdata/schemagen/input/searchables-references.graphql
index d69dad886ce..2cdcf0ef15a 100644
--- a/graphql/schema/testdata/schemagen/input/searchables-references.graphql
+++ b/graphql/schema/testdata/schemagen/input/searchables-references.graphql
@@ -1,13 +1,13 @@
type Author {
- id: ID!
- name: String! @search(by: [hash])
- dob: DateTime # Have something not search
- posts: [Post] # This should have arguments added for a filter on Post
+ id: ID!
+ name: String! @search(by: [hash])
+ dob: DateTime # Have something not search
+ posts: [Post] # This should have arguments added for a filter on Post
}
type Post {
- postID: ID!
- title: String! @search(by: ["term", "fulltext"])
- text: String @search(by: ["fulltext", "term"])
- datePublished: DateTime # Have something not search
+ postID: ID!
+ title: String! @search(by: ["term", "fulltext"])
+ text: String @search(by: ["fulltext", "term"])
+ datePublished: DateTime # Have something not search
}
diff --git a/graphql/schema/testdata/schemagen/input/searchables.graphql b/graphql/schema/testdata/schemagen/input/searchables.graphql
index 3690268b843..54649df9c55 100644
--- a/graphql/schema/testdata/schemagen/input/searchables.graphql
+++ b/graphql/schema/testdata/schemagen/input/searchables.graphql
@@ -1,38 +1,38 @@
type Post {
- postID: ID!
- title: String! @search(by: [term])
- titleByEverything: String! @search(by: ["term", "fulltext", "trigram", "hash"])
- text: String @search(by: [fulltext])
+ postID: ID!
+ title: String! @search(by: [term])
+ titleByEverything: String! @search(by: ["term", "fulltext", "trigram", "hash"])
+ text: String @search(by: [fulltext])
- tags: [String] @search(by: [trigram])
- tagsHash: [String] @search(by: [hash])
- tagsExact: [String] @search(by: [exact])
+ tags: [String] @search(by: [trigram])
+ tagsHash: [String] @search(by: [hash])
+ tagsExact: [String] @search(by: [exact])
- publishByYear: DateTime @search(by: [year])
- publishByMonth: DateTime @search(by: [month])
- publishByDay: DateTime @search(by: [day])
- publishByHour: DateTime @search(by: [hour])
- publishTimestamp: Int64 @search
+ publishByYear: DateTime @search(by: [year])
+ publishByMonth: DateTime @search(by: [month])
+ publishByDay: DateTime @search(by: [day])
+ publishByHour: DateTime @search(by: [hour])
+ publishTimestamp: Int64 @search
- numViewers: Int64 @search(by: [int64])
- numLikes: Int @search
- score: Float @search
- isPublished: Boolean @search
+ numViewers: Int64 @search(by: [int64])
+ numLikes: Int @search
+ score: Float @search
+ isPublished: Boolean @search
- postType: PostType @search
- postTypeNonNull: PostType! @search
- postTypeList: [PostType] @search
- postTypeTrigram: PostType @search(by: [trigram])
- postTypeRegexp: PostType @search(by: [regexp])
- postTypeExact: [PostType] @search(by: [exact])
- postTypeHash: PostType @search(by: [hash])
- postTypeRegexpExact: PostType @search(by: ["exact", "regexp"])
- postTypeHashRegexp: PostType @search(by: ["hash", "regexp"])
- postTypeNone: PostType @search(by: [])
+ postType: PostType @search
+ postTypeNonNull: PostType! @search
+ postTypeList: [PostType] @search
+ postTypeTrigram: PostType @search(by: [trigram])
+ postTypeRegexp: PostType @search(by: [regexp])
+ postTypeExact: [PostType] @search(by: [exact])
+ postTypeHash: PostType @search(by: [hash])
+ postTypeRegexpExact: PostType @search(by: ["exact", "regexp"])
+ postTypeHashRegexp: PostType @search(by: ["hash", "regexp"])
+ postTypeNone: PostType @search(by: [])
}
enum PostType {
- Fact
- Question
- Opinion
+ Fact
+ Question
+ Opinion
}
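As a sketch of how these @search arguments surface in the generated API (the matching filter inputs, e.g. StringTermFilter, appear in the generated output later in this patch): a term-indexed field such as title becomes filterable by allofterms/anyofterms, so a query along these lines is accepted:

  query {
    queryPost(filter: { title: { anyofterms: "graphql dgraph" } }) {
      postID
      title
    }
  }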
diff --git a/graphql/schema/testdata/schemagen/input/single-type-with-enum.graphql b/graphql/schema/testdata/schemagen/input/single-type-with-enum.graphql
index 16a10daf72d..5ff4d9b3f2e 100644
--- a/graphql/schema/testdata/schemagen/input/single-type-with-enum.graphql
+++ b/graphql/schema/testdata/schemagen/input/single-type-with-enum.graphql
@@ -9,4 +9,4 @@ enum PostType {
Statement
Question
Answer
-}
\ No newline at end of file
+}
diff --git a/graphql/schema/testdata/schemagen/input/single-type.graphql b/graphql/schema/testdata/schemagen/input/single-type.graphql
index 7816c5016a9..714b30895fe 100644
--- a/graphql/schema/testdata/schemagen/input/single-type.graphql
+++ b/graphql/schema/testdata/schemagen/input/single-type.graphql
@@ -1,7 +1,7 @@
type Message {
- id: ID!
- content: String!
- author: String
- uniqueId: Int64
- datePosted: DateTime
+ id: ID!
+ content: String!
+ author: String
+ uniqueId: Int64
+ datePosted: DateTime
}
diff --git a/graphql/schema/testdata/schemagen/input/type-implements-multiple-interfaces.graphql b/graphql/schema/testdata/schemagen/input/type-implements-multiple-interfaces.graphql
index 91cd15ad088..2ab2ef40a70 100644
--- a/graphql/schema/testdata/schemagen/input/type-implements-multiple-interfaces.graphql
+++ b/graphql/schema/testdata/schemagen/input/type-implements-multiple-interfaces.graphql
@@ -1,14 +1,14 @@
interface Character {
- id: ID!
- name: String! @search(by: [exact])
- friends: [Character]
+ id: ID!
+ name: String! @search(by: [exact])
+ friends: [Character]
}
interface Employee {
- employeeId: String!
- title: String!
+ employeeId: String!
+ title: String!
}
type Human implements Character & Employee {
- totalCredits: Int
+ totalCredits: Int
}
diff --git a/graphql/schema/testdata/schemagen/input/type-with-arguments-on-field.graphql b/graphql/schema/testdata/schemagen/input/type-with-arguments-on-field.graphql
index d544aa76608..29ff7259196 100644
--- a/graphql/schema/testdata/schemagen/input/type-with-arguments-on-field.graphql
+++ b/graphql/schema/testdata/schemagen/input/type-with-arguments-on-field.graphql
@@ -1,10 +1,10 @@
interface Abstract {
- id: ID!
- name(random: Int!, size: String): String!
+ id: ID!
+ name(random: Int!, size: String): String!
}
type Message implements Abstract {
- content(pick: Int!, name: String): String!
- author: String
- datePosted: DateTime
+ content(pick: Int!, name: String): String!
+ author: String
+ datePosted: DateTime
}
diff --git a/graphql/schema/testdata/schemagen/input/type-with-custom-field-on-dgraph-type.graphql b/graphql/schema/testdata/schemagen/input/type-with-custom-field-on-dgraph-type.graphql
index e7aa5867c77..479626311e3 100644
--- a/graphql/schema/testdata/schemagen/input/type-with-custom-field-on-dgraph-type.graphql
+++ b/graphql/schema/testdata/schemagen/input/type-with-custom-field-on-dgraph-type.graphql
@@ -1,19 +1,12 @@
type Car {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type User {
- id: ID!
- name: String @custom(http: {
- url: "http://mock:8888/userNames",
- method: "GET",
- body: "{uid: $id}"
- })
- age: Int! @search
- cars: [Car] @custom(http: {
- url: "http://mock:8888/cars",
- method: "GET",
- body: "{uid: $id}"
- })
-}
\ No newline at end of file
+ id: ID!
+ name: String
+ @custom(http: { url: "http://mock:8888/userNames", method: "GET", body: "{uid: $id}" })
+ age: Int! @search
+ cars: [Car] @custom(http: { url: "http://mock:8888/cars", method: "GET", body: "{uid: $id}" })
+}
diff --git a/graphql/schema/testdata/schemagen/input/type-with-custom-fields-on-remote-type.graphql b/graphql/schema/testdata/schemagen/input/type-with-custom-fields-on-remote-type.graphql
index a09b70205e9..37dd774b759 100644
--- a/graphql/schema/testdata/schemagen/input/type-with-custom-fields-on-remote-type.graphql
+++ b/graphql/schema/testdata/schemagen/input/type-with-custom-fields-on-remote-type.graphql
@@ -1,19 +1,12 @@
type Car @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type User {
- id: ID!
- name: String @custom(http: {
- url: "http://mock:8888/userNames",
- method: "GET",
- body: "{uid: $id}"
- })
- age: Int! @search
- cars: [Car] @custom(http: {
- url: "http://mock:8888/cars",
- method: "GET",
- body: "{uid: $id}"
- })
-}
\ No newline at end of file
+ id: ID!
+ name: String
+ @custom(http: { url: "http://mock:8888/userNames", method: "GET", body: "{uid: $id}" })
+ age: Int! @search
+ cars: [Car] @custom(http: { url: "http://mock:8888/cars", method: "GET", body: "{uid: $id}" })
+}
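A sketch of what resolving these custom fields entails (illustrative; the mock URL comes from the schema above, and the uid shown is hypothetical): for each User returned, Dgraph fills name by calling the configured endpoint with that user's id substituted for $id in the body template:

  query {
    queryUser {
      id
      name # filled by GET http://mock:8888/userNames with body {uid: "0x4"}
    }
  }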
diff --git a/graphql/schema/testdata/schemagen/input/type-without-orderables.graphql b/graphql/schema/testdata/schemagen/input/type-without-orderables.graphql
index 773242e89f6..2f4037f6af5 100644
--- a/graphql/schema/testdata/schemagen/input/type-without-orderables.graphql
+++ b/graphql/schema/testdata/schemagen/input/type-without-orderables.graphql
@@ -1,6 +1,6 @@
type Data {
- id: ID!
- intList: [Int]
- stringList: [String]
- metaData: Data
-}
\ No newline at end of file
+ id: ID!
+ intList: [Int]
+ stringList: [String]
+ metaData: Data
+}
diff --git a/graphql/schema/testdata/schemagen/input/union.graphql b/graphql/schema/testdata/schemagen/input/union.graphql
index d18f80cd9e1..ef10b9da346 100644
--- a/graphql/schema/testdata/schemagen/input/union.graphql
+++ b/graphql/schema/testdata/schemagen/input/union.graphql
@@ -1,41 +1,38 @@
interface Character {
- id: ID!
- name: String! @search(by: [exact])
- friends: [Character]
- enemyOf: Resident
- appearsIn: [Episode!]! @search
+ id: ID!
+ name: String! @search(by: [exact])
+ friends: [Character]
+ enemyOf: Resident
+ appearsIn: [Episode!]! @search
}
type Human implements Character {
- starships: [Starship]
- totalCredits: Int
+ starships: [Starship]
+ totalCredits: Int
}
type Droid implements Character {
- primaryFunction: String
+ primaryFunction: String
}
enum Episode {
- NEWHOPE
- EMPIRE
- JEDI
+ NEWHOPE
+ EMPIRE
+ JEDI
}
type Starship {
- id: ID!
- name: String! @search(by: [term])
- length: Float
+ id: ID!
+ name: String! @search(by: [term])
+ length: Float
}
union Resident = Human | Droid | Starship
union Tool @remote = Droid | Starship
type Planet {
- id: ID!
- name: String!
- residents: [Resident!] @dgraph(pred: "residents")
- bestTool: Tool @custom(http: {
- url: "http://mock:8888/tool/$id"
- method: "GET"
- })
+ id: ID!
+ name: String!
+ residents: [Resident!] @dgraph(pred: "residents")
+ bestTool: Tool @custom(http: { url: "http://mock:8888/tool/$id", method: "GET" })
}
diff --git a/graphql/schema/testdata/schemagen/output/apollo-federation.graphql b/graphql/schema/testdata/schemagen/output/apollo-federation.graphql
index ff44a7b8041..32f175da43f 100644
--- a/graphql/schema/testdata/schemagen/output/apollo-federation.graphql
+++ b/graphql/schema/testdata/schemagen/output/apollo-federation.graphql
@@ -3,43 +3,45 @@
#######################
type Reviews @key(fields: "id") {
- id: ID!
- review: String!
- user(filter: UserFilter): User @provides(fields: "age")
+ id: ID!
+ review: String!
+ user(filter: UserFilter): User @provides(fields: "age")
}
type Student @key(fields: "id") {
- id: ID!
- name: String!
- age: Int!
+ id: ID!
+ name: String!
+ age: Int!
}
type School @key(fields: "id") {
- id: ID!
- students(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student] @provides(fields: "name")
- studentsAggregate(filter: StudentFilter): StudentAggregateResult
+ id: ID!
+ students(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
+ @provides(fields: "name")
+ studentsAggregate(filter: StudentFilter): StudentAggregateResult
}
type Country {
- code: String! @id
- name: String!
+ code: String! @id
+ name: String!
}
type Product @key(fields: "id") @extends {
- id: ID! @external
- name: String! @external
- price: Int @external
- weight: Int @external
- reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews] @requires(fields: "price weight")
- reviewsAggregate(filter: ReviewsFilter): ReviewsAggregateResult
+ id: ID! @external
+ name: String! @external
+ price: Int @external
+ weight: Int @external
+ reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ @requires(fields: "price weight")
+ reviewsAggregate(filter: ReviewsFilter): ReviewsAggregateResult
}
type User @key(fields: "name") @extends {
- id: ID! @external
- name: String! @id @external
- age: Int! @external
- reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
- reviewsAggregate(filter: ReviewsFilter): ReviewsAggregateResult
+ id: ID! @external
+ name: String! @id @external
+ age: Int! @external
+ reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ reviewsAggregate(filter: ReviewsFilter): ReviewsAggregateResult
}
#######################
@@ -58,162 +60,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -225,11 +227,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -238,77 +241,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -320,7 +324,7 @@ scalar _Any
scalar _FieldSet
type _Service {
- sdl: String
+ sdl: String
}
directive @external on FIELD_DEFINITION
@@ -334,143 +338,143 @@ directive @extends on OBJECT | INTERFACE
#######################
type AddCountryPayload {
- country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
- numUids: Int
+ country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
+ numUids: Int
}
type AddProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
type AddReviewsPayload {
- reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
- numUids: Int
+ reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ numUids: Int
}
type AddSchoolPayload {
- school(filter: SchoolFilter, first: Int, offset: Int): [School]
- numUids: Int
+ school(filter: SchoolFilter, first: Int, offset: Int): [School]
+ numUids: Int
}
type AddStudentPayload {
- student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
- numUids: Int
+ student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type CountryAggregateResult {
- count: Int
- codeMin: String
- codeMax: String
- nameMin: String
- nameMax: String
+ count: Int
+ codeMin: String
+ codeMax: String
+ nameMin: String
+ nameMax: String
}
type DeleteCountryPayload {
- country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
- msg: String
- numUids: Int
+ country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
+ msg: String
+ numUids: Int
}
type DeleteProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- msg: String
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ msg: String
+ numUids: Int
}
type DeleteReviewsPayload {
- reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
- msg: String
- numUids: Int
+ reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ msg: String
+ numUids: Int
}
type DeleteSchoolPayload {
- school(filter: SchoolFilter, first: Int, offset: Int): [School]
- msg: String
- numUids: Int
+ school(filter: SchoolFilter, first: Int, offset: Int): [School]
+ msg: String
+ numUids: Int
}
type DeleteStudentPayload {
- student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
- msg: String
- numUids: Int
+ student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type ProductAggregateResult {
- count: Int
- idMin: ID
- idMax: ID
+ count: Int
+ idMin: ID
+ idMax: ID
}
type ReviewsAggregateResult {
- count: Int
- reviewMin: String
- reviewMax: String
+ count: Int
+ reviewMin: String
+ reviewMax: String
}
type SchoolAggregateResult {
- count: Int
+ count: Int
}
type StudentAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- ageMin: Int
- ageMax: Int
- ageSum: Int
- ageAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ ageMin: Int
+ ageMax: Int
+ ageSum: Int
+ ageAvg: Float
}
type UpdateCountryPayload {
- country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
- numUids: Int
+ country(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
+ numUids: Int
}
type UpdateProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
type UpdateReviewsPayload {
- reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
- numUids: Int
+ reviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ numUids: Int
}
type UpdateSchoolPayload {
- school(filter: SchoolFilter, first: Int, offset: Int): [School]
- numUids: Int
+ school(filter: SchoolFilter, first: Int, offset: Int): [School]
+ numUids: Int
}
type UpdateStudentPayload {
- student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
- numUids: Int
+ student(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- ageMin: Int
- ageMax: Int
- ageSum: Int
- ageAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ ageMin: Int
+ ageMax: Int
+ ageSum: Int
+ ageAvg: Float
}
#######################
@@ -478,55 +482,55 @@ type UserAggregateResult {
#######################
enum CountryHasFilter {
- code
- name
+ code
+ name
}
enum CountryOrderable {
- code
- name
+ code
+ name
}
enum ProductHasFilter {
- reviews
+ reviews
}
enum ProductOrderable {
- id
+ id
}
enum ReviewsHasFilter {
- review
- user
+ review
+ user
}
enum ReviewsOrderable {
- review
+ review
}
enum SchoolHasFilter {
- students
+ students
}
enum StudentHasFilter {
- name
- age
+ name
+ age
}
enum StudentOrderable {
- name
- age
+ name
+ age
}
enum UserHasFilter {
- name
- age
- reviews
+ name
+ age
+ reviews
}
enum UserOrderable {
- name
- age
+ name
+ age
}
#######################
@@ -534,209 +538,209 @@ enum UserOrderable {
#######################
input AddCountryInput {
- code: String!
- name: String!
+ code: String!
+ name: String!
}
input AddProductInput {
- id: ID!
- reviews: [ReviewsRef]
+ id: ID!
+ reviews: [ReviewsRef]
}
input AddReviewsInput {
- review: String!
- user: UserRef
+ review: String!
+ user: UserRef
}
input AddSchoolInput {
- students: [StudentRef]
+ students: [StudentRef]
}
input AddStudentInput {
- name: String!
- age: Int!
+ name: String!
+ age: Int!
}
input AddUserInput {
- name: String!
- age: Int!
- reviews: [ReviewsRef]
+ name: String!
+ age: Int!
+ reviews: [ReviewsRef]
}
input CountryFilter {
- code: StringHashFilter
- has: [CountryHasFilter]
- and: [CountryFilter]
- or: [CountryFilter]
- not: CountryFilter
+ code: StringHashFilter
+ has: [CountryHasFilter]
+ and: [CountryFilter]
+ or: [CountryFilter]
+ not: CountryFilter
}
input CountryOrder {
- asc: CountryOrderable
- desc: CountryOrderable
- then: CountryOrder
+ asc: CountryOrderable
+ desc: CountryOrderable
+ then: CountryOrder
}
input CountryPatch {
- code: String
- name: String
+ code: String
+ name: String
}
input CountryRef {
- code: String
- name: String
+ code: String
+ name: String
}
input ProductFilter {
- id: [ID!]
- has: [ProductHasFilter]
- and: [ProductFilter]
- or: [ProductFilter]
- not: ProductFilter
+ id: [ID!]
+ has: [ProductHasFilter]
+ and: [ProductFilter]
+ or: [ProductFilter]
+ not: ProductFilter
}
input ProductOrder {
- asc: ProductOrderable
- desc: ProductOrderable
- then: ProductOrder
+ asc: ProductOrderable
+ desc: ProductOrderable
+ then: ProductOrder
}
input ProductPatch {
- reviews: [ReviewsRef]
+ reviews: [ReviewsRef]
}
input ProductRef {
- id: ID
- reviews: [ReviewsRef]
+ id: ID
+ reviews: [ReviewsRef]
}
input ReviewsFilter {
- id: [ID!]
- has: [ReviewsHasFilter]
- and: [ReviewsFilter]
- or: [ReviewsFilter]
- not: ReviewsFilter
+ id: [ID!]
+ has: [ReviewsHasFilter]
+ and: [ReviewsFilter]
+ or: [ReviewsFilter]
+ not: ReviewsFilter
}
input ReviewsOrder {
- asc: ReviewsOrderable
- desc: ReviewsOrderable
- then: ReviewsOrder
+ asc: ReviewsOrderable
+ desc: ReviewsOrderable
+ then: ReviewsOrder
}
input ReviewsPatch {
- review: String
- user: UserRef
+ review: String
+ user: UserRef
}
input ReviewsRef {
- id: ID
- review: String
- user: UserRef
+ id: ID
+ review: String
+ user: UserRef
}
input SchoolFilter {
- id: [ID!]
- has: [SchoolHasFilter]
- and: [SchoolFilter]
- or: [SchoolFilter]
- not: SchoolFilter
+ id: [ID!]
+ has: [SchoolHasFilter]
+ and: [SchoolFilter]
+ or: [SchoolFilter]
+ not: SchoolFilter
}
input SchoolPatch {
- students: [StudentRef]
+ students: [StudentRef]
}
input SchoolRef {
- id: ID
- students: [StudentRef]
+ id: ID
+ students: [StudentRef]
}
input StudentFilter {
- id: [ID!]
- has: [StudentHasFilter]
- and: [StudentFilter]
- or: [StudentFilter]
- not: StudentFilter
+ id: [ID!]
+ has: [StudentHasFilter]
+ and: [StudentFilter]
+ or: [StudentFilter]
+ not: StudentFilter
}
input StudentOrder {
- asc: StudentOrderable
- desc: StudentOrderable
- then: StudentOrder
+ asc: StudentOrderable
+ desc: StudentOrderable
+ then: StudentOrder
}
input StudentPatch {
- name: String
- age: Int
+ name: String
+ age: Int
}
input StudentRef {
- id: ID
- name: String
- age: Int
+ id: ID
+ name: String
+ age: Int
}
input UpdateCountryInput {
- filter: CountryFilter!
- set: CountryPatch
- remove: CountryPatch
+ filter: CountryFilter!
+ set: CountryPatch
+ remove: CountryPatch
}
input UpdateProductInput {
- filter: ProductFilter!
- set: ProductPatch
- remove: ProductPatch
+ filter: ProductFilter!
+ set: ProductPatch
+ remove: ProductPatch
}
input UpdateReviewsInput {
- filter: ReviewsFilter!
- set: ReviewsPatch
- remove: ReviewsPatch
+ filter: ReviewsFilter!
+ set: ReviewsPatch
+ remove: ReviewsPatch
}
input UpdateSchoolInput {
- filter: SchoolFilter!
- set: SchoolPatch
- remove: SchoolPatch
+ filter: SchoolFilter!
+ set: SchoolPatch
+ remove: SchoolPatch
}
input UpdateStudentInput {
- filter: StudentFilter!
- set: StudentPatch
- remove: StudentPatch
+ filter: StudentFilter!
+ set: StudentPatch
+ remove: StudentPatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- name: StringHashFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ name: StringHashFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- name: String
- age: Int
- reviews: [ReviewsRef]
+ name: String
+ age: Int
+ reviews: [ReviewsRef]
}
input UserRef {
- name: String
- age: Int
- reviews: [ReviewsRef]
+ name: String
+ age: Int
+ reviews: [ReviewsRef]
}
#######################
@@ -744,26 +748,26 @@ input UserRef {
#######################
type Query {
- _entities(representations: [_Any!]!): [_Entity]!
- _service: _Service!
- getReviews(id: ID!): Reviews
- queryReviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
- aggregateReviews(filter: ReviewsFilter): ReviewsAggregateResult
- getStudent(id: ID!): Student
- queryStudent(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
- aggregateStudent(filter: StudentFilter): StudentAggregateResult
- getSchool(id: ID!): School
- querySchool(filter: SchoolFilter, first: Int, offset: Int): [School]
- aggregateSchool(filter: SchoolFilter): SchoolAggregateResult
- getCountry(code: String!): Country
- queryCountry(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
- aggregateCountry(filter: CountryFilter): CountryAggregateResult
- getProduct(id: ID!): Product
- queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- aggregateProduct(filter: ProductFilter): ProductAggregateResult
- getUser(name: String!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ _entities(representations: [_Any!]!): [_Entity]!
+ _service: _Service!
+ getReviews(id: ID!): Reviews
+ queryReviews(filter: ReviewsFilter, order: ReviewsOrder, first: Int, offset: Int): [Reviews]
+ aggregateReviews(filter: ReviewsFilter): ReviewsAggregateResult
+ getStudent(id: ID!): Student
+ queryStudent(filter: StudentFilter, order: StudentOrder, first: Int, offset: Int): [Student]
+ aggregateStudent(filter: StudentFilter): StudentAggregateResult
+ getSchool(id: ID!): School
+ querySchool(filter: SchoolFilter, first: Int, offset: Int): [School]
+ aggregateSchool(filter: SchoolFilter): SchoolAggregateResult
+ getCountry(code: String!): Country
+ queryCountry(filter: CountryFilter, order: CountryOrder, first: Int, offset: Int): [Country]
+ aggregateCountry(filter: CountryFilter): CountryAggregateResult
+ getProduct(id: ID!): Product
+ queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ aggregateProduct(filter: ProductFilter): ProductAggregateResult
+ getUser(name: String!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -771,23 +775,22 @@ type Query {
#######################
type Mutation {
- addReviews(input: [AddReviewsInput!]!): AddReviewsPayload
- updateReviews(input: UpdateReviewsInput!): UpdateReviewsPayload
- deleteReviews(filter: ReviewsFilter!): DeleteReviewsPayload
- addStudent(input: [AddStudentInput!]!): AddStudentPayload
- updateStudent(input: UpdateStudentInput!): UpdateStudentPayload
- deleteStudent(filter: StudentFilter!): DeleteStudentPayload
- addSchool(input: [AddSchoolInput!]!): AddSchoolPayload
- updateSchool(input: UpdateSchoolInput!): UpdateSchoolPayload
- deleteSchool(filter: SchoolFilter!): DeleteSchoolPayload
- addCountry(input: [AddCountryInput!]!, upsert: Boolean): AddCountryPayload
- updateCountry(input: UpdateCountryInput!): UpdateCountryPayload
- deleteCountry(filter: CountryFilter!): DeleteCountryPayload
- addProduct(input: [AddProductInput!]!): AddProductPayload
- updateProduct(input: UpdateProductInput!): UpdateProductPayload
- deleteProduct(filter: ProductFilter!): DeleteProductPayload
- addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addReviews(input: [AddReviewsInput!]!): AddReviewsPayload
+ updateReviews(input: UpdateReviewsInput!): UpdateReviewsPayload
+ deleteReviews(filter: ReviewsFilter!): DeleteReviewsPayload
+ addStudent(input: [AddStudentInput!]!): AddStudentPayload
+ updateStudent(input: UpdateStudentInput!): UpdateStudentPayload
+ deleteStudent(filter: StudentFilter!): DeleteStudentPayload
+ addSchool(input: [AddSchoolInput!]!): AddSchoolPayload
+ updateSchool(input: UpdateSchoolInput!): UpdateSchoolPayload
+ deleteSchool(filter: SchoolFilter!): DeleteSchoolPayload
+ addCountry(input: [AddCountryInput!]!, upsert: Boolean): AddCountryPayload
+ updateCountry(input: UpdateCountryInput!): UpdateCountryPayload
+ deleteCountry(filter: CountryFilter!): DeleteCountryPayload
+ addProduct(input: [AddProductInput!]!): AddProductPayload
+ updateProduct(input: UpdateProductInput!): UpdateProductPayload
+ deleteProduct(filter: ProductFilter!): DeleteProductPayload
+ addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/auth-on-interfaces.graphql b/graphql/schema/testdata/schemagen/output/auth-on-interfaces.graphql
index 79edc9a60e5..93b04644cdb 100644
--- a/graphql/schema/testdata/schemagen/output/auth-on-interfaces.graphql
+++ b/graphql/schema/testdata/schemagen/output/auth-on-interfaces.graphql
@@ -3,25 +3,39 @@
#######################
type Author {
- id: ID!
- name: String! @search(by: [hash])
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post] @hasInverse(field: author)
- postsAggregate(filter: PostFilter): PostAggregateResult
-}
-
-interface Post @secret(field: "pwd") @auth(password: {rule:"{$ROLE: { eq: \"Admin\" } }"}, query: {rule:"query($TEXT: String!) { \n queryPost(filter: { text : {eq: $TEXT } } ) { \n id \n } \n}"}) {
- id: ID!
- text: String @search(by: [exact])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
-}
-
-type Question implements Post @auth(query: {rule:"query($ANS: Boolean!) { \n queryQuestion(filter: { answered: $ANS } ) { \n id \n } \n}"}) @secret(field: "pwd") {
- id: ID!
- text: String @search(by: [exact])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
- answered: Boolean @search
+ id: ID!
+ name: String! @search(by: [hash])
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ @hasInverse(field: author)
+ postsAggregate(filter: PostFilter): PostAggregateResult
+}
+
+interface Post
+ @secret(field: "pwd")
+ @auth(
+ password: { rule: "{$ROLE: { eq: \"Admin\" } }" }
+ query: {
+ rule: "query($TEXT: String!) { \n queryPost(filter: { text : {eq: $TEXT } } ) { \n id \n } \n}"
+ }
+ ) {
+ id: ID!
+ text: String @search(by: [exact])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+}
+
+type Question implements Post
+ @auth(
+ query: {
+ rule: "query($ANS: Boolean!) { \n queryQuestion(filter: { answered: $ANS } ) { \n id \n } \n}"
+ }
+ )
+ @secret(field: "pwd") {
+ id: ID!
+ text: String @search(by: [exact])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ answered: Boolean @search
}
#######################
@@ -40,162 +54,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -207,11 +221,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -220,77 +235,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -298,68 +314,68 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type DeleteQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- msg: String
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type QuestionAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type UpdateQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
#######################
@@ -367,35 +383,35 @@ type UpdateQuestionPayload {
#######################
enum AuthorHasFilter {
- name
- posts
+ name
+ posts
}
enum AuthorOrderable {
- name
+ name
}
enum PostHasFilter {
- text
- datePublished
- author
+ text
+ datePublished
+ author
}
enum PostOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum QuestionHasFilter {
- text
- datePublished
- author
- answered
+ text
+ datePublished
+ author
+ answered
}
enum QuestionOrderable {
- text
- datePublished
+ text
+ datePublished
}
#######################
@@ -403,121 +419,121 @@ enum QuestionOrderable {
#######################
input AddAuthorInput {
- name: String!
- posts: [PostRef]
+ name: String!
+ posts: [PostRef]
}
input AddQuestionInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- answered: Boolean
- pwd: String!
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ answered: Boolean
+ pwd: String!
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- posts: [PostRef]
+ name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ posts: [PostRef]
}
input PostFilter {
- id: [ID!]
- text: StringExactFilter
- datePublished: DateTimeFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ text: StringExactFilter
+ datePublished: DateTimeFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- pwd: String
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ pwd: String
}
input PostRef {
- id: ID!
+ id: ID!
}
input QuestionFilter {
- id: [ID!]
- text: StringExactFilter
- datePublished: DateTimeFilter
- answered: Boolean
- has: [QuestionHasFilter]
- and: [QuestionFilter]
- or: [QuestionFilter]
- not: QuestionFilter
+ id: [ID!]
+ text: StringExactFilter
+ datePublished: DateTimeFilter
+ answered: Boolean
+ has: [QuestionHasFilter]
+ and: [QuestionFilter]
+ or: [QuestionFilter]
+ not: QuestionFilter
}
input QuestionOrder {
- asc: QuestionOrderable
- desc: QuestionOrderable
- then: QuestionOrder
+ asc: QuestionOrderable
+ desc: QuestionOrderable
+ then: QuestionOrder
}
input QuestionPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
- pwd: String
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
+ pwd: String
}
input QuestionRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
- pwd: String
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
+ pwd: String
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
input UpdateQuestionInput {
- filter: QuestionFilter!
- set: QuestionPatch
- remove: QuestionPatch
+ filter: QuestionFilter!
+ set: QuestionPatch
+ remove: QuestionPatch
}
#######################
@@ -525,17 +541,17 @@ input UpdateQuestionInput {
#######################
type Query {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getPost(id: ID!): Post
- checkPostPassword(id: ID!, pwd: String!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getQuestion(id: ID!): Question
- checkQuestionPassword(id: ID!, pwd: String!): Question
- queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ checkPostPassword(id: ID!, pwd: String!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getQuestion(id: ID!): Question
+ checkQuestionPassword(id: ID!, pwd: String!): Question
+ queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
}
#######################
@@ -543,13 +559,12 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
- updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
- deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
+ updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
+ deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/authorization.graphql b/graphql/schema/testdata/schemagen/output/authorization.graphql
index 2dc4bf64f59..9cd4d757904 100644
--- a/graphql/schema/testdata/schemagen/output/authorization.graphql
+++ b/graphql/schema/testdata/schemagen/output/authorization.graphql
@@ -2,22 +2,49 @@
# Input Schema
#######################
-type Todo @secret(field: "pwd") @auth(password: {rule:"{$ROLE: { eq: \"Admin\" } }"}, query: {or:[{rule:"query($X_MyApp_User: String!) { \n queryTodo { \n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"},{rule:"query($X_MyApp_User: String!) { \n queryTodo {\n sharedWith (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"},{rule:"query { \n queryTodo(filter: { isPublic: true }) {\n id\n }\n}"}]}, add: {rule:"query($X_MyApp_User: String!) { \n queryTodo {\n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"}, update: {rule:"query($X_MyApp_User: String!) { \n queryTodo {\n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"}) {
- id: ID!
- title: String
- text: String
- isPublic: Boolean @search
- dateCompleted: String @search
- sharedWith(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- owner(filter: UserFilter): User @hasInverse(field: "todos")
- somethingPrivate: String
- sharedWithAggregate(filter: UserFilter): UserAggregateResult
-}
-
-type User @auth(update: {rule:"query($X_MyApp_User: String!) { \n queryUser(filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n}"}) {
- username: String! @id
- todos(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo] @hasInverse(field: owner)
- todosAggregate(filter: TodoFilter): TodoAggregateResult
+type Todo
+ @secret(field: "pwd")
+ @auth(
+ password: { rule: "{$ROLE: { eq: \"Admin\" } }" }
+ query: {
+ or: [
+ {
+ rule: "query($X_MyApp_User: String!) { \n queryTodo { \n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"
+ }
+ {
+ rule: "query($X_MyApp_User: String!) { \n queryTodo {\n sharedWith (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"
+ }
+ { rule: "query { \n queryTodo(filter: { isPublic: true }) {\n id\n }\n}" }
+ ]
+ }
+ add: {
+ rule: "query($X_MyApp_User: String!) { \n queryTodo {\n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"
+ }
+ update: {
+ rule: "query($X_MyApp_User: String!) { \n queryTodo {\n owner (filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n }\n}"
+ }
+ ) {
+ id: ID!
+ title: String
+ text: String
+ isPublic: Boolean @search
+ dateCompleted: String @search
+ sharedWith(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ owner(filter: UserFilter): User @hasInverse(field: "todos")
+ somethingPrivate: String
+ sharedWithAggregate(filter: UserFilter): UserAggregateResult
+}
+
+type User
+ @auth(
+ update: {
+ rule: "query($X_MyApp_User: String!) { \n queryUser(filter: { username: { eq: $X_MyApp_User }}) {\n username\n }\n}"
+ }
+ ) {
+ username: String! @id
+ todos(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
+ @hasInverse(field: owner)
+ todosAggregate(filter: TodoFilter): TodoAggregateResult
}
#######################
@@ -36,162 +63,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -203,11 +230,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -216,77 +244,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -294,53 +323,53 @@ input StringHashFilter {
#######################
type AddTodoPayload {
- todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
- numUids: Int
+ todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteTodoPayload {
- todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
- msg: String
- numUids: Int
+ todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type TodoAggregateResult {
- count: Int
- titleMin: String
- titleMax: String
- textMin: String
- textMax: String
- dateCompletedMin: String
- dateCompletedMax: String
- somethingPrivateMin: String
- somethingPrivateMax: String
+ count: Int
+ titleMin: String
+ titleMax: String
+ textMin: String
+ textMax: String
+ dateCompletedMin: String
+ dateCompletedMax: String
+ somethingPrivateMin: String
+ somethingPrivateMax: String
}
type UpdateTodoPayload {
- todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
- numUids: Int
+ todo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- usernameMin: String
- usernameMax: String
+ count: Int
+ usernameMin: String
+ usernameMax: String
}
#######################
@@ -348,29 +377,29 @@ type UserAggregateResult {
#######################
enum TodoHasFilter {
- title
- text
- isPublic
- dateCompleted
- sharedWith
- owner
- somethingPrivate
+ title
+ text
+ isPublic
+ dateCompleted
+ sharedWith
+ owner
+ somethingPrivate
}
enum TodoOrderable {
- title
- text
- dateCompleted
- somethingPrivate
+ title
+ text
+ dateCompleted
+ somethingPrivate
}
enum UserHasFilter {
- username
- todos
+ username
+ todos
}
enum UserOrderable {
- username
+ username
}
#######################
@@ -378,94 +407,94 @@ enum UserOrderable {
#######################
input AddTodoInput {
- title: String
- text: String
- isPublic: Boolean
- dateCompleted: String
- sharedWith: [UserRef]
- owner: UserRef
- somethingPrivate: String
- pwd: String!
+ title: String
+ text: String
+ isPublic: Boolean
+ dateCompleted: String
+ sharedWith: [UserRef]
+ owner: UserRef
+ somethingPrivate: String
+ pwd: String!
}
input AddUserInput {
- username: String!
- todos: [TodoRef]
+ username: String!
+ todos: [TodoRef]
}
input TodoFilter {
- id: [ID!]
- isPublic: Boolean
- dateCompleted: StringTermFilter
- has: [TodoHasFilter]
- and: [TodoFilter]
- or: [TodoFilter]
- not: TodoFilter
+ id: [ID!]
+ isPublic: Boolean
+ dateCompleted: StringTermFilter
+ has: [TodoHasFilter]
+ and: [TodoFilter]
+ or: [TodoFilter]
+ not: TodoFilter
}
input TodoOrder {
- asc: TodoOrderable
- desc: TodoOrderable
- then: TodoOrder
+ asc: TodoOrderable
+ desc: TodoOrderable
+ then: TodoOrder
}
input TodoPatch {
- title: String
- text: String
- isPublic: Boolean
- dateCompleted: String
- sharedWith: [UserRef]
- owner: UserRef
- somethingPrivate: String
- pwd: String
+ title: String
+ text: String
+ isPublic: Boolean
+ dateCompleted: String
+ sharedWith: [UserRef]
+ owner: UserRef
+ somethingPrivate: String
+ pwd: String
}
input TodoRef {
- id: ID
- title: String
- text: String
- isPublic: Boolean
- dateCompleted: String
- sharedWith: [UserRef]
- owner: UserRef
- somethingPrivate: String
- pwd: String
+ id: ID
+ title: String
+ text: String
+ isPublic: Boolean
+ dateCompleted: String
+ sharedWith: [UserRef]
+ owner: UserRef
+ somethingPrivate: String
+ pwd: String
}
input UpdateTodoInput {
- filter: TodoFilter!
- set: TodoPatch
- remove: TodoPatch
+ filter: TodoFilter!
+ set: TodoPatch
+ remove: TodoPatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- username: StringHashFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ username: StringHashFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- username: String
- todos: [TodoRef]
+ username: String
+ todos: [TodoRef]
}
input UserRef {
- username: String
- todos: [TodoRef]
+ username: String
+ todos: [TodoRef]
}
#######################
@@ -473,13 +502,13 @@ input UserRef {
#######################
type Query {
- getTodo(id: ID!): Todo
- checkTodoPassword(id: ID!, pwd: String!): Todo
- queryTodo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
- aggregateTodo(filter: TodoFilter): TodoAggregateResult
- getUser(username: String!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getTodo(id: ID!): Todo
+ checkTodoPassword(id: ID!, pwd: String!): Todo
+ queryTodo(filter: TodoFilter, order: TodoOrder, first: Int, offset: Int): [Todo]
+ aggregateTodo(filter: TodoFilter): TodoAggregateResult
+ getUser(username: String!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -487,11 +516,10 @@ type Query {
#######################
type Mutation {
- addTodo(input: [AddTodoInput!]!): AddTodoPayload
- updateTodo(input: UpdateTodoInput!): UpdateTodoPayload
- deleteTodo(filter: TodoFilter!): DeleteTodoPayload
- addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addTodo(input: [AddTodoInput!]!): AddTodoPayload
+ updateTodo(input: UpdateTodoInput!): UpdateTodoPayload
+ deleteTodo(filter: TodoFilter!): DeleteTodoPayload
+ addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/comments-and-descriptions.graphql b/graphql/schema/testdata/schemagen/output/comments-and-descriptions.graphql
index be37be2b30e..c735a4c9271 100644
--- a/graphql/schema/testdata/schemagen/output/comments-and-descriptions.graphql
+++ b/graphql/schema/testdata/schemagen/output/comments-and-descriptions.graphql
@@ -2,35 +2,53 @@
# Input Schema
#######################
-"""Desc"""
+"""
+Desc
+"""
interface I {
- """Desc"""
- s: String!
+ """
+ Desc
+ """
+ s: String!
}
-"""Desc"""
+"""
+Desc
+"""
type T implements I {
- s: String!
- id: ID!
- """Desc"""
- i: Int
+ s: String!
+ id: ID!
+ """
+ Desc
+ """
+ i: Int
}
-"""Desc"""
+"""
+Desc
+"""
enum AnEnum {
- AVal
- """Desc"""
- AnotherVal
+ AVal
+ """
+ Desc
+ """
+ AnotherVal
}
-"""Desc"""
+"""
+Desc
+"""
union A_Union = T
-"""Desc"""
+"""
+Desc
+"""
input AnInput {
- id: ID!
- """Desc"""
- i: Int
+ id: ID!
+ """
+ Desc
+ """
+ i: Int
}
#######################
@@ -49,162 +67,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -216,11 +234,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -229,77 +248,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -307,46 +327,46 @@ input StringHashFilter {
#######################
type AddTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ numUids: Int
}
type DeleteIPayload {
- i(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
- msg: String
- numUids: Int
+ i(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
+ msg: String
+ numUids: Int
}
type DeleteTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- msg: String
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ msg: String
+ numUids: Int
}
type IAggregateResult {
- count: Int
- sMin: String
- sMax: String
+ count: Int
+ sMin: String
+ sMax: String
}
type TAggregateResult {
- count: Int
- sMin: String
- sMax: String
- iMin: Int
- iMax: Int
- iSum: Int
- iAvg: Float
+ count: Int
+ sMin: String
+ sMax: String
+ iMin: Int
+ iMax: Int
+ iSum: Int
+ iAvg: Float
}
type UpdateIPayload {
- i(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
- numUids: Int
+ i(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
+ numUids: Int
}
type UpdateTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ numUids: Int
}
#######################
@@ -354,25 +374,25 @@ type UpdateTPayload {
#######################
enum A_UnionType {
- T
+ T
}
enum IHasFilter {
- s
+ s
}
enum IOrderable {
- s
+ s
}
enum THasFilter {
- s
- i
+ s
+ i
}
enum TOrderable {
- s
- i
+ s
+ i
}
#######################
@@ -380,75 +400,83 @@ enum TOrderable {
#######################
input A_UnionFilter {
- memberTypes: [A_UnionType!]
- tFilter: TFilter
+ memberTypes: [A_UnionType!]
+ tFilter: TFilter
}
input A_UnionRef {
- tRef: TRef
+ tRef: TRef
}
input AddTInput {
- s: String!
- """Desc"""
- i: Int
+ s: String!
+ """
+ Desc
+ """
+ i: Int
}
input IFilter {
- has: [IHasFilter]
- and: [IFilter]
- or: [IFilter]
- not: IFilter
+ has: [IHasFilter]
+ and: [IFilter]
+ or: [IFilter]
+ not: IFilter
}
input IOrder {
- asc: IOrderable
- desc: IOrderable
- then: IOrder
+ asc: IOrderable
+ desc: IOrderable
+ then: IOrder
}
input IPatch {
- """Desc"""
- s: String
+ """
+ Desc
+ """
+ s: String
}
input TFilter {
- id: [ID!]
- has: [THasFilter]
- and: [TFilter]
- or: [TFilter]
- not: TFilter
+ id: [ID!]
+ has: [THasFilter]
+ and: [TFilter]
+ or: [TFilter]
+ not: TFilter
}
input TOrder {
- asc: TOrderable
- desc: TOrderable
- then: TOrder
+ asc: TOrderable
+ desc: TOrderable
+ then: TOrder
}
input TPatch {
- s: String
- """Desc"""
- i: Int
+ s: String
+ """
+ Desc
+ """
+ i: Int
}
input TRef {
- id: ID
- s: String
- """Desc"""
- i: Int
+ id: ID
+ s: String
+ """
+ Desc
+ """
+ i: Int
}
input UpdateIInput {
- filter: IFilter!
- set: IPatch
- remove: IPatch
+ filter: IFilter!
+ set: IPatch
+ remove: IPatch
}
input UpdateTInput {
- filter: TFilter!
- set: TPatch
- remove: TPatch
+ filter: TFilter!
+ set: TPatch
+ remove: TPatch
}
#######################
@@ -456,11 +484,11 @@ input UpdateTInput {
#######################
type Query {
- queryI(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
- aggregateI(filter: IFilter): IAggregateResult
- getT(id: ID!): T
- queryT(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- aggregateT(filter: TFilter): TAggregateResult
+ queryI(filter: IFilter, order: IOrder, first: Int, offset: Int): [I]
+ aggregateI(filter: IFilter): IAggregateResult
+ getT(id: ID!): T
+ queryT(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ aggregateT(filter: TFilter): TAggregateResult
}
#######################
@@ -468,10 +496,9 @@ type Query {
#######################
type Mutation {
- updateI(input: UpdateIInput!): UpdateIPayload
- deleteI(filter: IFilter!): DeleteIPayload
- addT(input: [AddTInput!]!): AddTPayload
- updateT(input: UpdateTInput!): UpdateTPayload
- deleteT(filter: TFilter!): DeleteTPayload
+ updateI(input: UpdateIInput!): UpdateIPayload
+ deleteI(filter: IFilter!): DeleteIPayload
+ addT(input: [AddTInput!]!): AddTPayload
+ updateT(input: UpdateTInput!): UpdateTPayload
+ deleteT(filter: TFilter!): DeleteTPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/created-updated-directives.graphql b/graphql/schema/testdata/schemagen/output/created-updated-directives.graphql
index d7547094db3..bede3db1e9b 100644
--- a/graphql/schema/testdata/schemagen/output/created-updated-directives.graphql
+++ b/graphql/schema/testdata/schemagen/output/created-updated-directives.graphql
@@ -3,17 +3,17 @@
#######################
type Booking {
- id: ID!
- name: String!
- created: DateTime! @default(add: {value:"$now"})
- updated: DateTime! @default(add: {value:"$now"}, update: {value:"$now"})
+ id: ID!
+ name: String!
+ created: DateTime! @default(add: { value: "$now" })
+ updated: DateTime! @default(add: { value: "$now" }, update: { value: "$now" })
}
type BookingXID {
- id: String! @id
- name: String!
- created: DateTime! @default(add: {value:"$now"})
- updated: DateTime! @default(add: {value:"$now"}, update: {value:"$now"})
+ id: String! @id
+ name: String!
+ created: DateTime! @default(add: { value: "$now" })
+ updated: DateTime! @default(add: { value: "$now" }, update: { value: "$now" })
}
#######################
@@ -32,162 +32,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -199,11 +199,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -212,77 +213,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -290,57 +292,72 @@ input StringHashFilter {
#######################
type AddBookingPayload {
- booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
- numUids: Int
+ booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
+ numUids: Int
}
type AddBookingXIDPayload {
- bookingXID(filter: BookingXIDFilter, order: BookingXIDOrder, first: Int, offset: Int): [BookingXID]
- numUids: Int
+ bookingXID(
+ filter: BookingXIDFilter
+ order: BookingXIDOrder
+ first: Int
+ offset: Int
+ ): [BookingXID]
+ numUids: Int
}
type BookingAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- createdMin: DateTime
- createdMax: DateTime
- updatedMin: DateTime
- updatedMax: DateTime
+ count: Int
+ nameMin: String
+ nameMax: String
+ createdMin: DateTime
+ createdMax: DateTime
+ updatedMin: DateTime
+ updatedMax: DateTime
}
type BookingXIDAggregateResult {
- count: Int
- idMin: String
- idMax: String
- nameMin: String
- nameMax: String
- createdMin: DateTime
- createdMax: DateTime
- updatedMin: DateTime
- updatedMax: DateTime
+ count: Int
+ idMin: String
+ idMax: String
+ nameMin: String
+ nameMax: String
+ createdMin: DateTime
+ createdMax: DateTime
+ updatedMin: DateTime
+ updatedMax: DateTime
}
type DeleteBookingPayload {
- booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
- msg: String
- numUids: Int
+ booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
+ msg: String
+ numUids: Int
}
type DeleteBookingXIDPayload {
- bookingXID(filter: BookingXIDFilter, order: BookingXIDOrder, first: Int, offset: Int): [BookingXID]
- msg: String
- numUids: Int
+ bookingXID(
+ filter: BookingXIDFilter
+ order: BookingXIDOrder
+ first: Int
+ offset: Int
+ ): [BookingXID]
+ msg: String
+ numUids: Int
}
type UpdateBookingPayload {
- booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
- numUids: Int
+ booking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
+ numUids: Int
}
type UpdateBookingXIDPayload {
- bookingXID(filter: BookingXIDFilter, order: BookingXIDOrder, first: Int, offset: Int): [BookingXID]
- numUids: Int
+ bookingXID(
+ filter: BookingXIDFilter
+ order: BookingXIDOrder
+ first: Int
+ offset: Int
+ ): [BookingXID]
+ numUids: Int
}
#######################
@@ -348,29 +365,29 @@ type UpdateBookingXIDPayload {
#######################
enum BookingHasFilter {
- name
- created
- updated
+ name
+ created
+ updated
}
enum BookingOrderable {
- name
- created
- updated
+ name
+ created
+ updated
}
enum BookingXIDHasFilter {
- id
- name
- created
- updated
+ id
+ name
+ created
+ updated
}
enum BookingXIDOrderable {
- id
- name
- created
- updated
+ id
+ name
+ created
+ updated
}
#######################
@@ -378,83 +395,83 @@ enum BookingXIDOrderable {
#######################
input AddBookingInput {
- name: String!
- created: DateTime
- updated: DateTime
+ name: String!
+ created: DateTime
+ updated: DateTime
}
input AddBookingXIDInput {
- id: String!
- name: String!
- created: DateTime
- updated: DateTime
+ id: String!
+ name: String!
+ created: DateTime
+ updated: DateTime
}
input BookingFilter {
- id: [ID!]
- has: [BookingHasFilter]
- and: [BookingFilter]
- or: [BookingFilter]
- not: BookingFilter
+ id: [ID!]
+ has: [BookingHasFilter]
+ and: [BookingFilter]
+ or: [BookingFilter]
+ not: BookingFilter
}
input BookingOrder {
- asc: BookingOrderable
- desc: BookingOrderable
- then: BookingOrder
+ asc: BookingOrderable
+ desc: BookingOrderable
+ then: BookingOrder
}
input BookingPatch {
- name: String
- created: DateTime
- updated: DateTime
+ name: String
+ created: DateTime
+ updated: DateTime
}
input BookingRef {
- id: ID
- name: String
- created: DateTime
- updated: DateTime
+ id: ID
+ name: String
+ created: DateTime
+ updated: DateTime
}
input BookingXIDFilter {
- id: StringHashFilter
- has: [BookingXIDHasFilter]
- and: [BookingXIDFilter]
- or: [BookingXIDFilter]
- not: BookingXIDFilter
+ id: StringHashFilter
+ has: [BookingXIDHasFilter]
+ and: [BookingXIDFilter]
+ or: [BookingXIDFilter]
+ not: BookingXIDFilter
}
input BookingXIDOrder {
- asc: BookingXIDOrderable
- desc: BookingXIDOrderable
- then: BookingXIDOrder
+ asc: BookingXIDOrderable
+ desc: BookingXIDOrderable
+ then: BookingXIDOrder
}
input BookingXIDPatch {
- id: String
- name: String
- created: DateTime
- updated: DateTime
+ id: String
+ name: String
+ created: DateTime
+ updated: DateTime
}
input BookingXIDRef {
- id: String
- name: String
- created: DateTime
- updated: DateTime
+ id: String
+ name: String
+ created: DateTime
+ updated: DateTime
}
input UpdateBookingInput {
- filter: BookingFilter!
- set: BookingPatch
- remove: BookingPatch
+ filter: BookingFilter!
+ set: BookingPatch
+ remove: BookingPatch
}
input UpdateBookingXIDInput {
- filter: BookingXIDFilter!
- set: BookingXIDPatch
- remove: BookingXIDPatch
+ filter: BookingXIDFilter!
+ set: BookingXIDPatch
+ remove: BookingXIDPatch
}
#######################
@@ -462,12 +479,17 @@ input UpdateBookingXIDInput {
#######################
type Query {
- getBooking(id: ID!): Booking
- queryBooking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
- aggregateBooking(filter: BookingFilter): BookingAggregateResult
- getBookingXID(id: String!): BookingXID
- queryBookingXID(filter: BookingXIDFilter, order: BookingXIDOrder, first: Int, offset: Int): [BookingXID]
- aggregateBookingXID(filter: BookingXIDFilter): BookingXIDAggregateResult
+ getBooking(id: ID!): Booking
+ queryBooking(filter: BookingFilter, order: BookingOrder, first: Int, offset: Int): [Booking]
+ aggregateBooking(filter: BookingFilter): BookingAggregateResult
+ getBookingXID(id: String!): BookingXID
+ queryBookingXID(
+ filter: BookingXIDFilter
+ order: BookingXIDOrder
+ first: Int
+ offset: Int
+ ): [BookingXID]
+ aggregateBookingXID(filter: BookingXIDFilter): BookingXIDAggregateResult
}
#######################
@@ -475,11 +497,10 @@ type Query {
#######################
type Mutation {
- addBooking(input: [AddBookingInput!]!): AddBookingPayload
- updateBooking(input: UpdateBookingInput!): UpdateBookingPayload
- deleteBooking(filter: BookingFilter!): DeleteBookingPayload
- addBookingXID(input: [AddBookingXIDInput!]!, upsert: Boolean): AddBookingXIDPayload
- updateBookingXID(input: UpdateBookingXIDInput!): UpdateBookingXIDPayload
- deleteBookingXID(filter: BookingXIDFilter!): DeleteBookingXIDPayload
+ addBooking(input: [AddBookingInput!]!): AddBookingPayload
+ updateBooking(input: UpdateBookingInput!): UpdateBookingPayload
+ deleteBooking(filter: BookingFilter!): DeleteBookingPayload
+ addBookingXID(input: [AddBookingXIDInput!]!, upsert: Boolean): AddBookingXIDPayload
+ updateBookingXID(input: UpdateBookingXIDInput!): UpdateBookingXIDPayload
+ deleteBookingXID(filter: BookingXIDFilter!): DeleteBookingXIDPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/custom-dql-query-with-subscription.graphql b/graphql/schema/testdata/schemagen/output/custom-dql-query-with-subscription.graphql
index 881b4d5ab04..b22910f9157 100644
--- a/graphql/schema/testdata/schemagen/output/custom-dql-query-with-subscription.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-dql-query-with-subscription.graphql
@@ -3,22 +3,23 @@
#######################
type Tweets {
- id: ID!
- text: String! @search(by: [fulltext])
- author(filter: UserFilter): User @hasInverse(field: tweets)
- timestamp: DateTime! @search
+ id: ID!
+ text: String! @search(by: [fulltext])
+ author(filter: UserFilter): User @hasInverse(field: tweets)
+ timestamp: DateTime! @search
}
type User {
- screenName: String! @id
- followers: Int @search
- tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets] @hasInverse(field: author)
- tweetsAggregate(filter: TweetsFilter): TweetsAggregateResult
+ screenName: String! @id
+ followers: Int @search
+ tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
+ @hasInverse(field: author)
+ tweetsAggregate(filter: TweetsFilter): TweetsAggregateResult
}
type UserTweetCount @remote {
- screenName: String
- tweetCount: Int
+ screenName: String
+ tweetCount: Int
}
#######################
@@ -37,162 +38,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -204,11 +205,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -217,77 +219,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -295,53 +298,53 @@ input StringHashFilter {
#######################
type AddTweetsPayload {
- tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
- numUids: Int
+ tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteTweetsPayload {
- tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
- msg: String
- numUids: Int
+ tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type TweetsAggregateResult {
- count: Int
- textMin: String
- textMax: String
- timestampMin: DateTime
- timestampMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ timestampMin: DateTime
+ timestampMax: DateTime
}
type UpdateTweetsPayload {
- tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
- numUids: Int
+ tweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- screenNameMin: String
- screenNameMax: String
- followersMin: Int
- followersMax: Int
- followersSum: Int
- followersAvg: Float
+ count: Int
+ screenNameMin: String
+ screenNameMax: String
+ followersMin: Int
+ followersMax: Int
+ followersSum: Int
+ followersAvg: Float
}
#######################
@@ -349,25 +352,25 @@ type UserAggregateResult {
#######################
enum TweetsHasFilter {
- text
- author
- timestamp
+ text
+ author
+ timestamp
}
enum TweetsOrderable {
- text
- timestamp
+ text
+ timestamp
}
enum UserHasFilter {
- screenName
- followers
- tweets
+ screenName
+ followers
+ tweets
}
enum UserOrderable {
- screenName
- followers
+ screenName
+ followers
}
#######################
@@ -375,83 +378,83 @@ enum UserOrderable {
#######################
input AddTweetsInput {
- text: String!
- author: UserRef
- timestamp: DateTime!
+ text: String!
+ author: UserRef
+ timestamp: DateTime!
}
input AddUserInput {
- screenName: String!
- followers: Int
- tweets: [TweetsRef]
+ screenName: String!
+ followers: Int
+ tweets: [TweetsRef]
}
input TweetsFilter {
- id: [ID!]
- text: StringFullTextFilter
- timestamp: DateTimeFilter
- has: [TweetsHasFilter]
- and: [TweetsFilter]
- or: [TweetsFilter]
- not: TweetsFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ timestamp: DateTimeFilter
+ has: [TweetsHasFilter]
+ and: [TweetsFilter]
+ or: [TweetsFilter]
+ not: TweetsFilter
}
input TweetsOrder {
- asc: TweetsOrderable
- desc: TweetsOrderable
- then: TweetsOrder
+ asc: TweetsOrderable
+ desc: TweetsOrderable
+ then: TweetsOrder
}
input TweetsPatch {
- text: String
- author: UserRef
- timestamp: DateTime
+ text: String
+ author: UserRef
+ timestamp: DateTime
}
input TweetsRef {
- id: ID
- text: String
- author: UserRef
- timestamp: DateTime
+ id: ID
+ text: String
+ author: UserRef
+ timestamp: DateTime
}
input UpdateTweetsInput {
- filter: TweetsFilter!
- set: TweetsPatch
- remove: TweetsPatch
+ filter: TweetsFilter!
+ set: TweetsPatch
+ remove: TweetsPatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- screenName: StringHashFilter
- followers: IntFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ screenName: StringHashFilter
+ followers: IntFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- screenName: String
- followers: Int
- tweets: [TweetsRef]
+ screenName: String
+ followers: Int
+ tweets: [TweetsRef]
}
input UserRef {
- screenName: String
- followers: Int
- tweets: [TweetsRef]
+ screenName: String
+ followers: Int
+ tweets: [TweetsRef]
}
#######################
@@ -459,13 +462,17 @@ input UserRef {
#######################
type Query {
- queryUserTweetCounts: [UserTweetCount] @withSubscription @custom(dql: "query {\n queryUserTweetCounts(func: type(User)) {\n screenName: User.screenName\n tweetCount: count(User.tweets)\n }\n}")
- getTweets(id: ID!): Tweets
- queryTweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
- aggregateTweets(filter: TweetsFilter): TweetsAggregateResult
- getUser(screenName: String!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ queryUserTweetCounts: [UserTweetCount]
+ @withSubscription
+ @custom(
+ dql: "query {\n queryUserTweetCounts(func: type(User)) {\n screenName: User.screenName\n tweetCount: count(User.tweets)\n }\n}"
+ )
+ getTweets(id: ID!): Tweets
+ queryTweets(filter: TweetsFilter, order: TweetsOrder, first: Int, offset: Int): [Tweets]
+ aggregateTweets(filter: TweetsFilter): TweetsAggregateResult
+ getUser(screenName: String!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -473,12 +480,12 @@ type Query {
#######################
type Mutation {
- addTweets(input: [AddTweetsInput!]!): AddTweetsPayload
- updateTweets(input: UpdateTweetsInput!): UpdateTweetsPayload
- deleteTweets(filter: TweetsFilter!): DeleteTweetsPayload
- addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addTweets(input: [AddTweetsInput!]!): AddTweetsPayload
+ updateTweets(input: UpdateTweetsInput!): UpdateTweetsPayload
+ deleteTweets(filter: TweetsFilter!): DeleteTweetsPayload
+ addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
#######################
@@ -486,5 +493,9 @@ type Mutation {
#######################
type Subscription {
- queryUserTweetCounts: [UserTweetCount] @withSubscription @custom(dql: "query {\n queryUserTweetCounts(func: type(User)) {\n screenName: User.screenName\n tweetCount: count(User.tweets)\n }\n}")
+ queryUserTweetCounts: [UserTweetCount]
+ @withSubscription
+ @custom(
+ dql: "query {\n queryUserTweetCounts(func: type(User)) {\n screenName: User.screenName\n tweetCount: count(User.tweets)\n }\n}"
+ )
}
diff --git a/graphql/schema/testdata/schemagen/output/custom-mutation.graphql b/graphql/schema/testdata/schemagen/output/custom-mutation.graphql
index cd06b5dde7f..e456c5699a3 100644
--- a/graphql/schema/testdata/schemagen/output/custom-mutation.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-mutation.graphql
@@ -3,12 +3,12 @@
#######################
type User {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
input UpdateFavouriteUserInput {
- name: String!
+ name: String!
}
#######################
@@ -27,162 +27,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -194,11 +194,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -207,77 +208,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -285,25 +287,25 @@ input StringHashFilter {
#######################
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
#######################
@@ -311,11 +313,11 @@ type UserAggregateResult {
#######################
enum UserHasFilter {
- name
+ name
}
enum UserOrderable {
- name
+ name
}
#######################
@@ -323,36 +325,36 @@ enum UserOrderable {
#######################
input AddUserInput {
- name: String!
+ name: String!
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- name: String
+ name: String
}
input UserRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
#######################
@@ -360,9 +362,9 @@ input UserRef {
#######################
type Query {
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -370,9 +372,9 @@ type Query {
#######################
type Mutation {
- createMyFavouriteUsers(input: [UpdateFavouriteUserInput!]!): [User] @custom(http: {url:"http://my-api.com",method:"POST",body:"{ data: $input }"})
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ createMyFavouriteUsers(input: [UpdateFavouriteUserInput!]!): [User]
+ @custom(http: { url: "http://my-api.com", method: "POST", body: "{ data: $input }" })
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/custom-nested-types.graphql b/graphql/schema/testdata/schemagen/output/custom-nested-types.graphql
index 7f6026fba36..8d338393763 100644
--- a/graphql/schema/testdata/schemagen/output/custom-nested-types.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-nested-types.graphql
@@ -3,29 +3,29 @@
#######################
type Car @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
interface Person @remote {
- age: Int!
+ age: Int!
}
type User implements Person @remote {
- age: Int!
- id: ID!
- name: String!
- cars: [Car]
+ age: Int!
+ id: ID!
+ name: String!
+ cars: [Car]
}
input UserInput {
- name: String!
- age: Int!
- cars: [CarInput]
+ name: String!
+ age: Int!
+ cars: [CarInput]
}
input CarInput {
- name: String!
+ name: String!
}
#######################
@@ -44,162 +44,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -211,11 +211,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -224,77 +225,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -302,7 +304,7 @@ input StringHashFilter {
#######################
type Query {
- getMyFavoriteUsers(id: ID!): [User] @custom(http: {url:"http://my-api.com",method:"GET"})
+ getMyFavoriteUsers(id: ID!): [User] @custom(http: { url: "http://my-api.com", method: "GET" })
}
#######################
@@ -310,6 +312,6 @@ type Query {
#######################
type Mutation {
- createMyFavouriteUsers(input: [UserInput!]!): [User] @custom(http: {url:"http://my-api.com",method:"POST",body:"$input"})
+ createMyFavouriteUsers(input: [UserInput!]!): [User]
+ @custom(http: { url: "http://my-api.com", method: "POST", body: "$input" })
}
-
diff --git a/graphql/schema/testdata/schemagen/output/custom-query-mixed-types.graphql b/graphql/schema/testdata/schemagen/output/custom-query-mixed-types.graphql
index df50f1e6ef5..a6817a7b917 100644
--- a/graphql/schema/testdata/schemagen/output/custom-query-mixed-types.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-query-mixed-types.graphql
@@ -3,13 +3,13 @@
#######################
type User @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type Car {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
#######################
@@ -28,162 +28,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -195,11 +195,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -208,77 +209,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -286,25 +288,25 @@ input StringHashFilter {
#######################
type AddCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ numUids: Int
}
type CarAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- msg: String
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ msg: String
+ numUids: Int
}
type UpdateCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ numUids: Int
}
#######################
@@ -312,11 +314,11 @@ type UpdateCarPayload {
#######################
enum CarHasFilter {
- name
+ name
}
enum CarOrderable {
- name
+ name
}
#######################
@@ -324,36 +326,36 @@ enum CarOrderable {
#######################
input AddCarInput {
- name: String!
+ name: String!
}
input CarFilter {
- id: [ID!]
- has: [CarHasFilter]
- and: [CarFilter]
- or: [CarFilter]
- not: CarFilter
+ id: [ID!]
+ has: [CarHasFilter]
+ and: [CarFilter]
+ or: [CarFilter]
+ not: CarFilter
}
input CarOrder {
- asc: CarOrderable
- desc: CarOrderable
- then: CarOrder
+ asc: CarOrderable
+ desc: CarOrderable
+ then: CarOrder
}
input CarPatch {
- name: String
+ name: String
}
input CarRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input UpdateCarInput {
- filter: CarFilter!
- set: CarPatch
- remove: CarPatch
+ filter: CarFilter!
+ set: CarPatch
+ remove: CarPatch
}
#######################
@@ -361,10 +363,10 @@ input UpdateCarInput {
#######################
type Query {
- getMyFavoriteUsers(id: ID!): [User] @custom(http: {url:"http://my-api.com",method:"GET"})
- getCar(id: ID!): Car
- queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- aggregateCar(filter: CarFilter): CarAggregateResult
+ getMyFavoriteUsers(id: ID!): [User] @custom(http: { url: "http://my-api.com", method: "GET" })
+ getCar(id: ID!): Car
+ queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ aggregateCar(filter: CarFilter): CarAggregateResult
}
#######################
@@ -372,8 +374,7 @@ type Query {
#######################
type Mutation {
- addCar(input: [AddCarInput!]!): AddCarPayload
- updateCar(input: UpdateCarInput!): UpdateCarPayload
- deleteCar(filter: CarFilter!): DeleteCarPayload
+ addCar(input: [AddCarInput!]!): AddCarPayload
+ updateCar(input: UpdateCarInput!): UpdateCarPayload
+ deleteCar(filter: CarFilter!): DeleteCarPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/custom-query-not-dgraph-type.graphql b/graphql/schema/testdata/schemagen/output/custom-query-not-dgraph-type.graphql
index 6093f603aaa..9bc2378b146 100644
--- a/graphql/schema/testdata/schemagen/output/custom-query-not-dgraph-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-query-not-dgraph-type.graphql
@@ -3,12 +3,12 @@
#######################
type User @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
input UserInput {
- name: String!
+ name: String!
}
#######################
@@ -27,162 +27,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -194,11 +194,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -207,77 +208,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -285,7 +287,7 @@ input StringHashFilter {
#######################
type Query {
- getMyFavoriteUsers(id: ID!): [User] @custom(http: {url:"http://my-api.com",method:"GET"})
+ getMyFavoriteUsers(id: ID!): [User] @custom(http: { url: "http://my-api.com", method: "GET" })
}
#######################
@@ -293,6 +295,6 @@ type Query {
#######################
type Mutation {
- createMyFavouriteUsers(input: [UserInput!]!): [User] @custom(http: {url:"http://my-api.com",method:"POST",body:"{ data: $input }"})
+ createMyFavouriteUsers(input: [UserInput!]!): [User]
+ @custom(http: { url: "http://my-api.com", method: "POST", body: "{ data: $input }" })
}
-
diff --git a/graphql/schema/testdata/schemagen/output/custom-query-with-dgraph-type.graphql b/graphql/schema/testdata/schemagen/output/custom-query-with-dgraph-type.graphql
index 06213dddd25..99e65582e90 100644
--- a/graphql/schema/testdata/schemagen/output/custom-query-with-dgraph-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/custom-query-with-dgraph-type.graphql
@@ -3,8 +3,8 @@
#######################
type User {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
#######################
@@ -23,162 +23,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -190,11 +190,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -203,77 +204,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -281,25 +283,25 @@ input StringHashFilter {
#######################
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
#######################
@@ -307,11 +309,11 @@ type UserAggregateResult {
#######################
enum UserHasFilter {
- name
+ name
}
enum UserOrderable {
- name
+ name
}
#######################
@@ -319,36 +321,36 @@ enum UserOrderable {
#######################
input AddUserInput {
- name: String!
+ name: String!
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- name: String
+ name: String
}
input UserRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
#######################
@@ -356,10 +358,10 @@ input UserRef {
#######################
type Query {
- getMyFavoriteUsers(id: ID!): [User] @custom(http: {url:"http://my-api.com",method:"GET"})
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getMyFavoriteUsers(id: ID!): [User] @custom(http: { url: "http://my-api.com", method: "GET" })
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -367,8 +369,7 @@ type Query {
#######################
type Mutation {
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/deprecated.graphql b/graphql/schema/testdata/schemagen/output/deprecated.graphql
index cd3a425cc03..cd03147a304 100644
--- a/graphql/schema/testdata/schemagen/output/deprecated.graphql
+++ b/graphql/schema/testdata/schemagen/output/deprecated.graphql
@@ -3,8 +3,8 @@
#######################
type Atype {
- iamDeprecated: String @deprecated
- soAmI: String! @deprecated(reason: "because")
+ iamDeprecated: String @deprecated
+ soAmI: String! @deprecated(reason: "because")
}
#######################
@@ -23,162 +23,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -190,11 +190,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -203,77 +204,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -281,27 +283,27 @@ input StringHashFilter {
#######################
type AddAtypePayload {
- atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
- numUids: Int
+ atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
+ numUids: Int
}
type AtypeAggregateResult {
- count: Int
- iamDeprecatedMin: String
- iamDeprecatedMax: String
- soAmIMin: String
- soAmIMax: String
+ count: Int
+ iamDeprecatedMin: String
+ iamDeprecatedMax: String
+ soAmIMin: String
+ soAmIMax: String
}
type DeleteAtypePayload {
- atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
- msg: String
- numUids: Int
+ atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
+ msg: String
+ numUids: Int
}
type UpdateAtypePayload {
- atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
- numUids: Int
+ atype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
+ numUids: Int
}
#######################
@@ -309,13 +311,13 @@ type UpdateAtypePayload {
#######################
enum AtypeHasFilter {
- iamDeprecated
- soAmI
+ iamDeprecated
+ soAmI
}
enum AtypeOrderable {
- iamDeprecated
- soAmI
+ iamDeprecated
+ soAmI
}
#######################
@@ -323,37 +325,37 @@ enum AtypeOrderable {
#######################
input AddAtypeInput {
- iamDeprecated: String
- soAmI: String!
+ iamDeprecated: String
+ soAmI: String!
}
input AtypeFilter {
- has: [AtypeHasFilter]
- and: [AtypeFilter]
- or: [AtypeFilter]
- not: AtypeFilter
+ has: [AtypeHasFilter]
+ and: [AtypeFilter]
+ or: [AtypeFilter]
+ not: AtypeFilter
}
input AtypeOrder {
- asc: AtypeOrderable
- desc: AtypeOrderable
- then: AtypeOrder
+ asc: AtypeOrderable
+ desc: AtypeOrderable
+ then: AtypeOrder
}
input AtypePatch {
- iamDeprecated: String
- soAmI: String
+ iamDeprecated: String
+ soAmI: String
}
input AtypeRef {
- iamDeprecated: String
- soAmI: String
+ iamDeprecated: String
+ soAmI: String
}
input UpdateAtypeInput {
- filter: AtypeFilter!
- set: AtypePatch
- remove: AtypePatch
+ filter: AtypeFilter!
+ set: AtypePatch
+ remove: AtypePatch
}
#######################
@@ -361,8 +363,8 @@ input UpdateAtypeInput {
#######################
type Query {
- queryAtype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
- aggregateAtype(filter: AtypeFilter): AtypeAggregateResult
+ queryAtype(filter: AtypeFilter, order: AtypeOrder, first: Int, offset: Int): [Atype]
+ aggregateAtype(filter: AtypeFilter): AtypeAggregateResult
}
#######################
@@ -370,8 +372,7 @@ type Query {
#######################
type Mutation {
- addAtype(input: [AddAtypeInput!]!): AddAtypePayload
- updateAtype(input: UpdateAtypeInput!): UpdateAtypePayload
- deleteAtype(filter: AtypeFilter!): DeleteAtypePayload
+ addAtype(input: [AddAtypeInput!]!): AddAtypePayload
+ updateAtype(input: UpdateAtypeInput!): UpdateAtypePayload
+ deleteAtype(filter: AtypeFilter!): DeleteAtypePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-on-concrete-type-with-interfaces.graphql b/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-on-concrete-type-with-interfaces.graphql
index 395ddae515b..f472615167b 100644
--- a/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-on-concrete-type-with-interfaces.graphql
+++ b/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-on-concrete-type-with-interfaces.graphql
@@ -3,25 +3,28 @@
#######################
interface Movie {
- id: ID!
- name: String!
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director] @dgraph(pred: "directed.movies")
- directorAggregate(filter: DirectorFilter): DirectorAggregateResult
+ id: ID!
+ name: String!
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ @dgraph(pred: "directed.movies")
+ directorAggregate(filter: DirectorFilter): DirectorAggregateResult
}
type OscarMovie implements Movie {
- id: ID!
- name: String!
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director] @dgraph(pred: "directed.movies")
- year: Int!
- directorAggregate(filter: DirectorFilter): DirectorAggregateResult
+ id: ID!
+ name: String!
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ @dgraph(pred: "directed.movies")
+ year: Int!
+ directorAggregate(filter: DirectorFilter): DirectorAggregateResult
}
type Director {
- id: ID!
- name: String!
- directed(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie] @dgraph(pred: "~directed.movies")
- directedAggregate(filter: OscarMovieFilter): OscarMovieAggregateResult
+ id: ID!
+ name: String!
+ directed(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
+ @dgraph(pred: "~directed.movies")
+ directedAggregate(filter: OscarMovieFilter): OscarMovieAggregateResult
}
#######################
@@ -40,162 +43,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -207,11 +210,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -220,77 +224,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -298,68 +303,83 @@ input StringHashFilter {
#######################
type AddDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ numUids: Int
}
type AddOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ numUids: Int
}
type DeleteDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- msg: String
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ msg: String
+ numUids: Int
}
type DeleteMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- msg: String
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ msg: String
+ numUids: Int
}
type DeleteOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- msg: String
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ msg: String
+ numUids: Int
}
type DirectorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type MovieAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type OscarMovieAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- yearMin: Int
- yearMax: Int
- yearSum: Int
- yearAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ yearMin: Int
+ yearMax: Int
+ yearSum: Int
+ yearAvg: Float
}
type UpdateDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ numUids: Int
}
type UpdateMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ numUids: Int
}
type UpdateOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ numUids: Int
}
#######################
@@ -367,32 +387,32 @@ type UpdateOscarMoviePayload {
#######################
enum DirectorHasFilter {
- name
- directed
+ name
+ directed
}
enum DirectorOrderable {
- name
+ name
}
enum MovieHasFilter {
- name
- director
+ name
+ director
}
enum MovieOrderable {
- name
+ name
}
enum OscarMovieHasFilter {
- name
- director
- year
+ name
+ director
+ year
}
enum OscarMovieOrderable {
- name
- year
+ name
+ year
}
#######################
@@ -400,104 +420,104 @@ enum OscarMovieOrderable {
#######################
input AddDirectorInput {
- name: String!
+ name: String!
}
input AddOscarMovieInput {
- name: String!
- director: [DirectorRef]
- year: Int!
+ name: String!
+ director: [DirectorRef]
+ year: Int!
}
input DirectorFilter {
- id: [ID!]
- has: [DirectorHasFilter]
- and: [DirectorFilter]
- or: [DirectorFilter]
- not: DirectorFilter
+ id: [ID!]
+ has: [DirectorHasFilter]
+ and: [DirectorFilter]
+ or: [DirectorFilter]
+ not: DirectorFilter
}
input DirectorOrder {
- asc: DirectorOrderable
- desc: DirectorOrderable
- then: DirectorOrder
+ asc: DirectorOrderable
+ desc: DirectorOrderable
+ then: DirectorOrder
}
input DirectorPatch {
- name: String
+ name: String
}
input DirectorRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input MovieFilter {
- id: [ID!]
- has: [MovieHasFilter]
- and: [MovieFilter]
- or: [MovieFilter]
- not: MovieFilter
+ id: [ID!]
+ has: [MovieHasFilter]
+ and: [MovieFilter]
+ or: [MovieFilter]
+ not: MovieFilter
}
input MovieOrder {
- asc: MovieOrderable
- desc: MovieOrderable
- then: MovieOrder
+ asc: MovieOrderable
+ desc: MovieOrderable
+ then: MovieOrder
}
input MoviePatch {
- name: String
- director: [DirectorRef]
+ name: String
+ director: [DirectorRef]
}
input MovieRef {
- id: ID!
+ id: ID!
}
input OscarMovieFilter {
- id: [ID!]
- has: [OscarMovieHasFilter]
- and: [OscarMovieFilter]
- or: [OscarMovieFilter]
- not: OscarMovieFilter
+ id: [ID!]
+ has: [OscarMovieHasFilter]
+ and: [OscarMovieFilter]
+ or: [OscarMovieFilter]
+ not: OscarMovieFilter
}
input OscarMovieOrder {
- asc: OscarMovieOrderable
- desc: OscarMovieOrderable
- then: OscarMovieOrder
+ asc: OscarMovieOrderable
+ desc: OscarMovieOrderable
+ then: OscarMovieOrder
}
input OscarMoviePatch {
- name: String
- director: [DirectorRef]
- year: Int
+ name: String
+ director: [DirectorRef]
+ year: Int
}
input OscarMovieRef {
- id: ID
- name: String
- director: [DirectorRef]
- year: Int
+ id: ID
+ name: String
+ director: [DirectorRef]
+ year: Int
}
input UpdateDirectorInput {
- filter: DirectorFilter!
- set: DirectorPatch
- remove: DirectorPatch
+ filter: DirectorFilter!
+ set: DirectorPatch
+ remove: DirectorPatch
}
input UpdateMovieInput {
- filter: MovieFilter!
- set: MoviePatch
- remove: MoviePatch
+ filter: MovieFilter!
+ set: MoviePatch
+ remove: MoviePatch
}
input UpdateOscarMovieInput {
- filter: OscarMovieFilter!
- set: OscarMoviePatch
- remove: OscarMoviePatch
+ filter: OscarMovieFilter!
+ set: OscarMoviePatch
+ remove: OscarMoviePatch
}
#######################
@@ -505,15 +525,20 @@ input UpdateOscarMovieInput {
#######################
type Query {
- getMovie(id: ID!): Movie
- queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- aggregateMovie(filter: MovieFilter): MovieAggregateResult
- getOscarMovie(id: ID!): OscarMovie
- queryOscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- aggregateOscarMovie(filter: OscarMovieFilter): OscarMovieAggregateResult
- getDirector(id: ID!): Director
- queryDirector(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- aggregateDirector(filter: DirectorFilter): DirectorAggregateResult
+ getMovie(id: ID!): Movie
+ queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ aggregateMovie(filter: MovieFilter): MovieAggregateResult
+ getOscarMovie(id: ID!): OscarMovie
+ queryOscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ aggregateOscarMovie(filter: OscarMovieFilter): OscarMovieAggregateResult
+ getDirector(id: ID!): Director
+ queryDirector(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ aggregateDirector(filter: DirectorFilter): DirectorAggregateResult
}
#######################
@@ -521,13 +546,12 @@ type Query {
#######################
type Mutation {
- updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
- deleteMovie(filter: MovieFilter!): DeleteMoviePayload
- addOscarMovie(input: [AddOscarMovieInput!]!): AddOscarMoviePayload
- updateOscarMovie(input: UpdateOscarMovieInput!): UpdateOscarMoviePayload
- deleteOscarMovie(filter: OscarMovieFilter!): DeleteOscarMoviePayload
- addDirector(input: [AddDirectorInput!]!): AddDirectorPayload
- updateDirector(input: UpdateDirectorInput!): UpdateDirectorPayload
- deleteDirector(filter: DirectorFilter!): DeleteDirectorPayload
+ updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
+ deleteMovie(filter: MovieFilter!): DeleteMoviePayload
+ addOscarMovie(input: [AddOscarMovieInput!]!): AddOscarMoviePayload
+ updateOscarMovie(input: UpdateOscarMovieInput!): UpdateOscarMoviePayload
+ deleteOscarMovie(filter: OscarMovieFilter!): DeleteOscarMoviePayload
+ addDirector(input: [AddDirectorInput!]!): AddDirectorPayload
+ updateDirector(input: UpdateDirectorInput!): UpdateDirectorPayload
+ deleteDirector(filter: DirectorFilter!): DeleteDirectorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-with-interfaces.graphql b/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-with-interfaces.graphql
index 6cfe972414e..4567be7784e 100644
--- a/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-with-interfaces.graphql
+++ b/graphql/schema/testdata/schemagen/output/dgraph-reverse-directive-with-interfaces.graphql
@@ -3,25 +3,28 @@
#######################
interface Movie {
- id: ID!
- name: String!
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director] @dgraph(pred: "~directed.movies")
- directorAggregate(filter: DirectorFilter): DirectorAggregateResult
+ id: ID!
+ name: String!
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ @dgraph(pred: "~directed.movies")
+ directorAggregate(filter: DirectorFilter): DirectorAggregateResult
}
type OscarMovie implements Movie {
- id: ID!
- name: String!
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director] @dgraph(pred: "~directed.movies")
- year: Int!
- directorAggregate(filter: DirectorFilter): DirectorAggregateResult
+ id: ID!
+ name: String!
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ @dgraph(pred: "~directed.movies")
+ year: Int!
+ directorAggregate(filter: DirectorFilter): DirectorAggregateResult
}
type Director {
- id: ID!
- name: String!
- directed(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie] @dgraph(pred: "directed.movies")
- directedAggregate(filter: OscarMovieFilter): OscarMovieAggregateResult
+ id: ID!
+ name: String!
+ directed(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
+ @dgraph(pred: "directed.movies")
+ directedAggregate(filter: OscarMovieFilter): OscarMovieAggregateResult
}
#######################
@@ -40,162 +43,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -207,11 +210,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -220,77 +224,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -298,68 +303,83 @@ input StringHashFilter {
#######################
type AddDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ numUids: Int
}
type AddOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ numUids: Int
}
type DeleteDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- msg: String
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ msg: String
+ numUids: Int
}
type DeleteMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- msg: String
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ msg: String
+ numUids: Int
}
type DeleteOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- msg: String
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ msg: String
+ numUids: Int
}
type DirectorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type MovieAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type OscarMovieAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- yearMin: Int
- yearMax: Int
- yearSum: Int
- yearAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ yearMin: Int
+ yearMax: Int
+ yearSum: Int
+ yearAvg: Float
}
type UpdateDirectorPayload {
- director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- numUids: Int
+ director(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ numUids: Int
}
type UpdateMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ numUids: Int
}
type UpdateOscarMoviePayload {
- oscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- numUids: Int
+ oscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ numUids: Int
}
#######################
@@ -367,32 +387,32 @@ type UpdateOscarMoviePayload {
#######################
enum DirectorHasFilter {
- name
- directed
+ name
+ directed
}
enum DirectorOrderable {
- name
+ name
}
enum MovieHasFilter {
- name
- director
+ name
+ director
}
enum MovieOrderable {
- name
+ name
}
enum OscarMovieHasFilter {
- name
- director
- year
+ name
+ director
+ year
}
enum OscarMovieOrderable {
- name
- year
+ name
+ year
}
#######################
@@ -400,103 +420,103 @@ enum OscarMovieOrderable {
#######################
input AddDirectorInput {
- name: String!
- directed: [OscarMovieRef]
+ name: String!
+ directed: [OscarMovieRef]
}
input AddOscarMovieInput {
- name: String!
- year: Int!
+ name: String!
+ year: Int!
}
input DirectorFilter {
- id: [ID!]
- has: [DirectorHasFilter]
- and: [DirectorFilter]
- or: [DirectorFilter]
- not: DirectorFilter
+ id: [ID!]
+ has: [DirectorHasFilter]
+ and: [DirectorFilter]
+ or: [DirectorFilter]
+ not: DirectorFilter
}
input DirectorOrder {
- asc: DirectorOrderable
- desc: DirectorOrderable
- then: DirectorOrder
+ asc: DirectorOrderable
+ desc: DirectorOrderable
+ then: DirectorOrder
}
input DirectorPatch {
- name: String
- directed: [OscarMovieRef]
+ name: String
+ directed: [OscarMovieRef]
}
input DirectorRef {
- id: ID
- name: String
- directed: [OscarMovieRef]
+ id: ID
+ name: String
+ directed: [OscarMovieRef]
}
input MovieFilter {
- id: [ID!]
- has: [MovieHasFilter]
- and: [MovieFilter]
- or: [MovieFilter]
- not: MovieFilter
+ id: [ID!]
+ has: [MovieHasFilter]
+ and: [MovieFilter]
+ or: [MovieFilter]
+ not: MovieFilter
}
input MovieOrder {
- asc: MovieOrderable
- desc: MovieOrderable
- then: MovieOrder
+ asc: MovieOrderable
+ desc: MovieOrderable
+ then: MovieOrder
}
input MoviePatch {
- name: String
+ name: String
}
input MovieRef {
- id: ID!
+ id: ID!
}
input OscarMovieFilter {
- id: [ID!]
- has: [OscarMovieHasFilter]
- and: [OscarMovieFilter]
- or: [OscarMovieFilter]
- not: OscarMovieFilter
+ id: [ID!]
+ has: [OscarMovieHasFilter]
+ and: [OscarMovieFilter]
+ or: [OscarMovieFilter]
+ not: OscarMovieFilter
}
input OscarMovieOrder {
- asc: OscarMovieOrderable
- desc: OscarMovieOrderable
- then: OscarMovieOrder
+ asc: OscarMovieOrderable
+ desc: OscarMovieOrderable
+ then: OscarMovieOrder
}
input OscarMoviePatch {
- name: String
- year: Int
+ name: String
+ year: Int
}
input OscarMovieRef {
- id: ID
- name: String
- year: Int
+ id: ID
+ name: String
+ year: Int
}
input UpdateDirectorInput {
- filter: DirectorFilter!
- set: DirectorPatch
- remove: DirectorPatch
+ filter: DirectorFilter!
+ set: DirectorPatch
+ remove: DirectorPatch
}
input UpdateMovieInput {
- filter: MovieFilter!
- set: MoviePatch
- remove: MoviePatch
+ filter: MovieFilter!
+ set: MoviePatch
+ remove: MoviePatch
}
input UpdateOscarMovieInput {
- filter: OscarMovieFilter!
- set: OscarMoviePatch
- remove: OscarMoviePatch
+ filter: OscarMovieFilter!
+ set: OscarMoviePatch
+ remove: OscarMoviePatch
}
#######################
@@ -504,15 +524,20 @@ input UpdateOscarMovieInput {
#######################
type Query {
- getMovie(id: ID!): Movie
- queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- aggregateMovie(filter: MovieFilter): MovieAggregateResult
- getOscarMovie(id: ID!): OscarMovie
- queryOscarMovie(filter: OscarMovieFilter, order: OscarMovieOrder, first: Int, offset: Int): [OscarMovie]
- aggregateOscarMovie(filter: OscarMovieFilter): OscarMovieAggregateResult
- getDirector(id: ID!): Director
- queryDirector(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
- aggregateDirector(filter: DirectorFilter): DirectorAggregateResult
+ getMovie(id: ID!): Movie
+ queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ aggregateMovie(filter: MovieFilter): MovieAggregateResult
+ getOscarMovie(id: ID!): OscarMovie
+ queryOscarMovie(
+ filter: OscarMovieFilter
+ order: OscarMovieOrder
+ first: Int
+ offset: Int
+ ): [OscarMovie]
+ aggregateOscarMovie(filter: OscarMovieFilter): OscarMovieAggregateResult
+ getDirector(id: ID!): Director
+ queryDirector(filter: DirectorFilter, order: DirectorOrder, first: Int, offset: Int): [Director]
+ aggregateDirector(filter: DirectorFilter): DirectorAggregateResult
}
#######################
@@ -520,13 +545,12 @@ type Query {
#######################
type Mutation {
- updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
- deleteMovie(filter: MovieFilter!): DeleteMoviePayload
- addOscarMovie(input: [AddOscarMovieInput!]!): AddOscarMoviePayload
- updateOscarMovie(input: UpdateOscarMovieInput!): UpdateOscarMoviePayload
- deleteOscarMovie(filter: OscarMovieFilter!): DeleteOscarMoviePayload
- addDirector(input: [AddDirectorInput!]!): AddDirectorPayload
- updateDirector(input: UpdateDirectorInput!): UpdateDirectorPayload
- deleteDirector(filter: DirectorFilter!): DeleteDirectorPayload
+ updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
+ deleteMovie(filter: MovieFilter!): DeleteMoviePayload
+ addOscarMovie(input: [AddOscarMovieInput!]!): AddOscarMoviePayload
+ updateOscarMovie(input: UpdateOscarMovieInput!): UpdateOscarMoviePayload
+ deleteOscarMovie(filter: OscarMovieFilter!): DeleteOscarMoviePayload
+ addDirector(input: [AddDirectorInput!]!): AddDirectorPayload
+ updateDirector(input: UpdateDirectorInput!): UpdateDirectorPayload
+ deleteDirector(filter: DirectorFilter!): DeleteDirectorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/embedding-directive-with-similar-queries.graphql b/graphql/schema/testdata/schemagen/output/embedding-directive-with-similar-queries.graphql
index f0ecf0a1e0d..3623d4441fb 100644
--- a/graphql/schema/testdata/schemagen/output/embedding-directive-with-similar-queries.graphql
+++ b/graphql/schema/testdata/schemagen/output/embedding-directive-with-similar-queries.graphql
@@ -3,26 +3,31 @@
#######################
type Product {
- id: String! @id
- description: String
- title: String
- imageUrl: String
- product_vector: [Float!] @embedding @search(by: ["hnsw(metric: euclidean, exponent: 4)"])
- vector_distance: Float
+ id: String! @id
+ description: String
+ title: String
+ imageUrl: String
+ product_vector: [Float!] @embedding @search(by: ["hnsw(metric: euclidean, exponent: 4)"])
+ vector_distance: Float
}
type Purchase @lambdaOnMutate(add: true) {
- user(filter: UserFilter): User @hasInverse(field: "purchase_history")
- product(filter: ProductFilter): Product
- date: DateTime @search(by: [day])
+ user(filter: UserFilter): User @hasInverse(field: "purchase_history")
+ product(filter: ProductFilter): Product
+ date: DateTime @search(by: [day])
}
type User {
- email: String! @id
- purchase_history(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase] @hasInverse(field: user)
- user_vector: [Float!] @embedding @search(by: ["hnsw"])
- vector_distance: Float
- purchase_historyAggregate(filter: PurchaseFilter): PurchaseAggregateResult
+ email: String! @id
+ purchase_history(
+ filter: PurchaseFilter
+ order: PurchaseOrder
+ first: Int
+ offset: Int
+ ): [Purchase] @hasInverse(field: user)
+ user_vector: [Float!] @embedding @search(by: ["hnsw"])
+ vector_distance: Float
+ purchase_historyAggregate(filter: PurchaseFilter): PurchaseAggregateResult
}
#######################
@@ -41,162 +46,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -208,11 +213,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -221,77 +227,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -299,75 +306,75 @@ input StringHashFilter {
#######################
type AddProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
type AddPurchasePayload {
- purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
- numUids: Int
+ purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- msg: String
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ msg: String
+ numUids: Int
}
type DeletePurchasePayload {
- purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
- msg: String
- numUids: Int
+ purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type ProductAggregateResult {
- count: Int
- idMin: String
- idMax: String
- descriptionMin: String
- descriptionMax: String
- titleMin: String
- titleMax: String
- imageUrlMin: String
- imageUrlMax: String
+ count: Int
+ idMin: String
+ idMax: String
+ descriptionMin: String
+ descriptionMax: String
+ titleMin: String
+ titleMax: String
+ imageUrlMin: String
+ imageUrlMax: String
}
type PurchaseAggregateResult {
- count: Int
- dateMin: DateTime
- dateMax: DateTime
+ count: Int
+ dateMin: DateTime
+ dateMax: DateTime
}
type UpdateProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
type UpdatePurchasePayload {
- purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
- numUids: Int
+ purchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- emailMin: String
- emailMax: String
+ count: Int
+ emailMin: String
+ emailMax: String
}
#######################
@@ -375,48 +382,48 @@ type UserAggregateResult {
#######################
enum ProductEmbedding {
- product_vector
+ product_vector
}
enum ProductHasFilter {
- id
- description
- title
- imageUrl
- product_vector
- vector_distance
+ id
+ description
+ title
+ imageUrl
+ product_vector
+ vector_distance
}
enum ProductOrderable {
- id
- description
- title
- imageUrl
+ id
+ description
+ title
+ imageUrl
}
enum PurchaseHasFilter {
- user
- product
- date
+ user
+ product
+ date
}
enum PurchaseOrderable {
- date
+ date
}
enum UserEmbedding {
- user_vector
+ user_vector
}
enum UserHasFilter {
- email
- purchase_history
- user_vector
- vector_distance
+ email
+ purchase_history
+ user_vector
+ vector_distance
}
enum UserOrderable {
- email
+ email
}
#######################
@@ -424,123 +431,123 @@ enum UserOrderable {
#######################
input AddProductInput {
- id: String!
- description: String
- title: String
- imageUrl: String
- product_vector: [Float!]
+ id: String!
+ description: String
+ title: String
+ imageUrl: String
+ product_vector: [Float!]
}
input AddPurchaseInput {
- user: UserRef
- product: ProductRef
- date: DateTime
+ user: UserRef
+ product: ProductRef
+ date: DateTime
}
input AddUserInput {
- email: String!
- purchase_history: [PurchaseRef]
- user_vector: [Float!]
+ email: String!
+ purchase_history: [PurchaseRef]
+ user_vector: [Float!]
}
input ProductFilter {
- id: StringHashFilter
- has: [ProductHasFilter]
- and: [ProductFilter]
- or: [ProductFilter]
- not: ProductFilter
+ id: StringHashFilter
+ has: [ProductHasFilter]
+ and: [ProductFilter]
+ or: [ProductFilter]
+ not: ProductFilter
}
input ProductOrder {
- asc: ProductOrderable
- desc: ProductOrderable
- then: ProductOrder
+ asc: ProductOrderable
+ desc: ProductOrderable
+ then: ProductOrder
}
input ProductPatch {
- id: String
- description: String
- title: String
- imageUrl: String
- product_vector: [Float!]
+ id: String
+ description: String
+ title: String
+ imageUrl: String
+ product_vector: [Float!]
}
input ProductRef {
- id: String
- description: String
- title: String
- imageUrl: String
- product_vector: [Float!]
+ id: String
+ description: String
+ title: String
+ imageUrl: String
+ product_vector: [Float!]
}
input PurchaseFilter {
- date: DateTimeFilter
- has: [PurchaseHasFilter]
- and: [PurchaseFilter]
- or: [PurchaseFilter]
- not: PurchaseFilter
+ date: DateTimeFilter
+ has: [PurchaseHasFilter]
+ and: [PurchaseFilter]
+ or: [PurchaseFilter]
+ not: PurchaseFilter
}
input PurchaseOrder {
- asc: PurchaseOrderable
- desc: PurchaseOrderable
- then: PurchaseOrder
+ asc: PurchaseOrderable
+ desc: PurchaseOrderable
+ then: PurchaseOrder
}
input PurchasePatch {
- user: UserRef
- product: ProductRef
- date: DateTime
+ user: UserRef
+ product: ProductRef
+ date: DateTime
}
input PurchaseRef {
- user: UserRef
- product: ProductRef
- date: DateTime
+ user: UserRef
+ product: ProductRef
+ date: DateTime
}
input UpdateProductInput {
- filter: ProductFilter!
- set: ProductPatch
- remove: ProductPatch
+ filter: ProductFilter!
+ set: ProductPatch
+ remove: ProductPatch
}
input UpdatePurchaseInput {
- filter: PurchaseFilter!
- set: PurchasePatch
- remove: PurchasePatch
+ filter: PurchaseFilter!
+ set: PurchasePatch
+ remove: PurchasePatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- email: StringHashFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ email: StringHashFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- email: String
- purchase_history: [PurchaseRef]
- user_vector: [Float!]
+ email: String
+ purchase_history: [PurchaseRef]
+ user_vector: [Float!]
}
input UserRef {
- email: String
- purchase_history: [PurchaseRef]
- user_vector: [Float!]
+ email: String
+ purchase_history: [PurchaseRef]
+ user_vector: [Float!]
}
#######################
@@ -548,18 +555,33 @@ input UserRef {
#######################
type Query {
- getProduct(id: String!): Product
- querySimilarProductById(id: String!, by: ProductEmbedding!, topK: Int!, filter: ProductFilter): [Product]
- querySimilarProductByEmbedding(by: ProductEmbedding!, topK: Int!, vector: [Float!]!, filter: ProductFilter): [Product]
- queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- aggregateProduct(filter: ProductFilter): ProductAggregateResult
- queryPurchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
- aggregatePurchase(filter: PurchaseFilter): PurchaseAggregateResult
- getUser(email: String!): User
- querySimilarUserById(email: String!, by: UserEmbedding!, topK: Int!, filter: UserFilter): [User]
- querySimilarUserByEmbedding(by: UserEmbedding!, topK: Int!, vector: [Float!]!, filter: UserFilter): [User]
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getProduct(id: String!): Product
+ querySimilarProductById(
+ id: String!
+ by: ProductEmbedding!
+ topK: Int!
+ filter: ProductFilter
+ ): [Product]
+ querySimilarProductByEmbedding(
+ by: ProductEmbedding!
+ topK: Int!
+ vector: [Float!]!
+ filter: ProductFilter
+ ): [Product]
+ queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ aggregateProduct(filter: ProductFilter): ProductAggregateResult
+ queryPurchase(filter: PurchaseFilter, order: PurchaseOrder, first: Int, offset: Int): [Purchase]
+ aggregatePurchase(filter: PurchaseFilter): PurchaseAggregateResult
+ getUser(email: String!): User
+ querySimilarUserById(email: String!, by: UserEmbedding!, topK: Int!, filter: UserFilter): [User]
+ querySimilarUserByEmbedding(
+ by: UserEmbedding!
+ topK: Int!
+ vector: [Float!]!
+ filter: UserFilter
+ ): [User]
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -567,14 +589,13 @@ type Query {
#######################
type Mutation {
- addProduct(input: [AddProductInput!]!, upsert: Boolean): AddProductPayload
- updateProduct(input: UpdateProductInput!): UpdateProductPayload
- deleteProduct(filter: ProductFilter!): DeleteProductPayload
- addPurchase(input: [AddPurchaseInput!]!): AddPurchasePayload
- updatePurchase(input: UpdatePurchaseInput!): UpdatePurchasePayload
- deletePurchase(filter: PurchaseFilter!): DeletePurchasePayload
- addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addProduct(input: [AddProductInput!]!, upsert: Boolean): AddProductPayload
+ updateProduct(input: UpdateProductInput!): UpdateProductPayload
+ deleteProduct(filter: ProductFilter!): DeleteProductPayload
+ addPurchase(input: [AddPurchaseInput!]!): AddPurchasePayload
+ updatePurchase(input: UpdatePurchaseInput!): UpdatePurchasePayload
+ deletePurchase(filter: PurchaseFilter!): DeletePurchasePayload
+ addUser(input: [AddUserInput!]!, upsert: Boolean): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/field-with-id-directive.graphql b/graphql/schema/testdata/schemagen/output/field-with-id-directive.graphql
index 6a0a246aa85..b243dfcd5c1 100644
--- a/graphql/schema/testdata/schemagen/output/field-with-id-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/field-with-id-directive.graphql
@@ -3,22 +3,22 @@
#######################
type Post {
- postID: ID
- content: String!
- author(filter: AuthorFilter): Author!
- genre(filter: GenreFilter): Genre
+ postID: ID
+ content: String!
+ author(filter: AuthorFilter): Author!
+ genre(filter: GenreFilter): Genre
}
type Author {
- id: ID
- name: String! @id @search(by: [regexp])
- pen_name: String
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID
+ name: String! @id @search(by: [regexp])
+ pen_name: String
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
type Genre {
- name: String! @id @search(by: [exact])
+ name: String! @id @search(by: [exact])
}
#######################
@@ -37,162 +37,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -204,11 +204,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -217,77 +218,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -295,71 +297,71 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- pen_nameMin: String
- pen_nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ pen_nameMin: String
+ pen_nameMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeleteGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- msg: String
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type GenreAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type PostAggregateResult {
- count: Int
- contentMin: String
- contentMax: String
+ count: Int
+ contentMin: String
+ contentMax: String
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdateGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -367,32 +369,32 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- name
- pen_name
- posts
+ name
+ pen_name
+ posts
}
enum AuthorOrderable {
- name
- pen_name
+ name
+ pen_name
}
enum GenreHasFilter {
- name
+ name
}
enum GenreOrderable {
- name
+ name
}
enum PostHasFilter {
- content
- author
- genre
+ content
+ author
+ genre
}
enum PostOrderable {
- content
+ content
}
#######################
@@ -400,120 +402,120 @@ enum PostOrderable {
#######################
input AddAuthorInput {
- name: String!
- pen_name: String
- posts: [PostRef]
+ name: String!
+ pen_name: String
+ posts: [PostRef]
}
input AddGenreInput {
- name: String!
+ name: String!
}
input AddPostInput {
- content: String!
- author: AuthorRef!
- genre: GenreRef
+ content: String!
+ author: AuthorRef!
+ genre: GenreRef
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter_StringRegExpFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter_StringRegExpFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- pen_name: String
- posts: [PostRef]
+ name: String
+ pen_name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- pen_name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ pen_name: String
+ posts: [PostRef]
}
input GenreFilter {
- name: StringExactFilter
- has: [GenreHasFilter]
- and: [GenreFilter]
- or: [GenreFilter]
- not: GenreFilter
+ name: StringExactFilter
+ has: [GenreHasFilter]
+ and: [GenreFilter]
+ or: [GenreFilter]
+ not: GenreFilter
}
input GenreOrder {
- asc: GenreOrderable
- desc: GenreOrderable
- then: GenreOrder
+ asc: GenreOrderable
+ desc: GenreOrderable
+ then: GenreOrder
}
input GenrePatch {
- name: String
+ name: String
}
input GenreRef {
- name: String!
+ name: String!
}
input PostFilter {
- postID: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ postID: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- content: String
- author: AuthorRef
- genre: GenreRef
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input PostRef {
- postID: ID
- content: String
- author: AuthorRef
- genre: GenreRef
+ postID: ID
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input StringHashFilter_StringRegExpFilter {
- eq: String
- in: [String]
- regexp: String
+ eq: String
+ in: [String]
+ regexp: String
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdateGenreInput {
- filter: GenreFilter!
- set: GenrePatch
- remove: GenrePatch
+ filter: GenreFilter!
+ set: GenrePatch
+ remove: GenrePatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -521,15 +523,15 @@ input UpdatePostInput {
#######################
type Query {
- getPost(postID: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID, name: String): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getGenre(name: String!): Genre
- queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- aggregateGenre(filter: GenreFilter): GenreAggregateResult
+ getPost(postID: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID, name: String): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getGenre(name: String!): Genre
+ queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ aggregateGenre(filter: GenreFilter): GenreAggregateResult
}
#######################
@@ -537,14 +539,13 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- addGenre(input: [AddGenreInput!]!, upsert: Boolean): AddGenrePayload
- updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
- deleteGenre(filter: GenreFilter!): DeleteGenrePayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addGenre(input: [AddGenreInput!]!, upsert: Boolean): AddGenrePayload
+ updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
+ deleteGenre(filter: GenreFilter!): DeleteGenrePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/field-with-multiple-@id-fields.graphql b/graphql/schema/testdata/schemagen/output/field-with-multiple-@id-fields.graphql
index 71548212021..6517ca5ab13 100644
--- a/graphql/schema/testdata/schemagen/output/field-with-multiple-@id-fields.graphql
+++ b/graphql/schema/testdata/schemagen/output/field-with-multiple-@id-fields.graphql
@@ -3,22 +3,22 @@
#######################
type Post {
- postID: ID
- content: String!
- author(filter: AuthorFilter): Author!
- genre(filter: GenreFilter): Genre
+ postID: ID
+ content: String!
+ author(filter: AuthorFilter): Author!
+ genre(filter: GenreFilter): Genre
}
type Author {
- id: ID
- name: String! @id @search(by: [regexp])
- pen_name: String! @id
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID
+ name: String! @id @search(by: [regexp])
+ pen_name: String! @id
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
type Genre {
- name: String! @id
+ name: String! @id
}
#######################
@@ -37,162 +37,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -204,11 +204,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -217,77 +218,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -295,71 +297,71 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- pen_nameMin: String
- pen_nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ pen_nameMin: String
+ pen_nameMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeleteGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- msg: String
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type GenreAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type PostAggregateResult {
- count: Int
- contentMin: String
- contentMax: String
+ count: Int
+ contentMin: String
+ contentMax: String
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdateGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -367,32 +369,32 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- name
- pen_name
- posts
+ name
+ pen_name
+ posts
}
enum AuthorOrderable {
- name
- pen_name
+ name
+ pen_name
}
enum GenreHasFilter {
- name
+ name
}
enum GenreOrderable {
- name
+ name
}
enum PostHasFilter {
- content
- author
- genre
+ content
+ author
+ genre
}
enum PostOrderable {
- content
+ content
}
#######################
@@ -400,121 +402,121 @@ enum PostOrderable {
#######################
input AddAuthorInput {
- name: String!
- pen_name: String!
- posts: [PostRef]
+ name: String!
+ pen_name: String!
+ posts: [PostRef]
}
input AddGenreInput {
- name: String!
+ name: String!
}
input AddPostInput {
- content: String!
- author: AuthorRef!
- genre: GenreRef
+ content: String!
+ author: AuthorRef!
+ genre: GenreRef
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter_StringRegExpFilter
- pen_name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter_StringRegExpFilter
+ pen_name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- pen_name: String
- posts: [PostRef]
+ name: String
+ pen_name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- pen_name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ pen_name: String
+ posts: [PostRef]
}
input GenreFilter {
- name: StringHashFilter
- has: [GenreHasFilter]
- and: [GenreFilter]
- or: [GenreFilter]
- not: GenreFilter
+ name: StringHashFilter
+ has: [GenreHasFilter]
+ and: [GenreFilter]
+ or: [GenreFilter]
+ not: GenreFilter
}
input GenreOrder {
- asc: GenreOrderable
- desc: GenreOrderable
- then: GenreOrder
+ asc: GenreOrderable
+ desc: GenreOrderable
+ then: GenreOrder
}
input GenrePatch {
- name: String
+ name: String
}
input GenreRef {
- name: String!
+ name: String!
}
input PostFilter {
- postID: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ postID: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- content: String
- author: AuthorRef
- genre: GenreRef
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input PostRef {
- postID: ID
- content: String
- author: AuthorRef
- genre: GenreRef
+ postID: ID
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input StringHashFilter_StringRegExpFilter {
- eq: String
- in: [String]
- regexp: String
+ eq: String
+ in: [String]
+ regexp: String
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdateGenreInput {
- filter: GenreFilter!
- set: GenrePatch
- remove: GenrePatch
+ filter: GenreFilter!
+ set: GenrePatch
+ remove: GenrePatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -522,15 +524,15 @@ input UpdatePostInput {
#######################
type Query {
- getPost(postID: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID, name: String, pen_name: String): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getGenre(name: String!): Genre
- queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- aggregateGenre(filter: GenreFilter): GenreAggregateResult
+ getPost(postID: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID, name: String, pen_name: String): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getGenre(name: String!): Genre
+ queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ aggregateGenre(filter: GenreFilter): GenreAggregateResult
}
#######################
@@ -538,14 +540,13 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- addGenre(input: [AddGenreInput!]!, upsert: Boolean): AddGenrePayload
- updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
- deleteGenre(filter: GenreFilter!): DeleteGenrePayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addGenre(input: [AddGenreInput!]!, upsert: Boolean): AddGenrePayload
+ updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
+ deleteGenre(filter: GenreFilter!): DeleteGenrePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/field-with-reverse-predicate-in-dgraph-directive.graphql b/graphql/schema/testdata/schemagen/output/field-with-reverse-predicate-in-dgraph-directive.graphql
index 31b594c2a7e..1d31ebac1cf 100644
--- a/graphql/schema/testdata/schemagen/output/field-with-reverse-predicate-in-dgraph-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/field-with-reverse-predicate-in-dgraph-directive.graphql
@@ -3,17 +3,23 @@
#######################
type Movie {
- id: ID!
- name: String!
- director(filter: MovieDirectorFilter, order: MovieDirectorOrder, first: Int, offset: Int): [MovieDirector] @dgraph(pred: "~directed.movies")
- directorAggregate(filter: MovieDirectorFilter): MovieDirectorAggregateResult
+ id: ID!
+ name: String!
+ director(
+ filter: MovieDirectorFilter
+ order: MovieDirectorOrder
+ first: Int
+ offset: Int
+ ): [MovieDirector] @dgraph(pred: "~directed.movies")
+ directorAggregate(filter: MovieDirectorFilter): MovieDirectorAggregateResult
}
type MovieDirector {
- id: ID!
- name: String!
- directed(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie] @dgraph(pred: "directed.movies")
- directedAggregate(filter: MovieFilter): MovieAggregateResult
+ id: ID!
+ name: String!
+ directed(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ @dgraph(pred: "directed.movies")
+ directedAggregate(filter: MovieFilter): MovieAggregateResult
}
#######################
@@ -32,162 +38,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -199,11 +205,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -212,77 +219,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -290,47 +298,62 @@ input StringHashFilter {
#######################
type AddMovieDirectorPayload {
- movieDirector(filter: MovieDirectorFilter, order: MovieDirectorOrder, first: Int, offset: Int): [MovieDirector]
- numUids: Int
+ movieDirector(
+ filter: MovieDirectorFilter
+ order: MovieDirectorOrder
+ first: Int
+ offset: Int
+ ): [MovieDirector]
+ numUids: Int
}
type AddMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ numUids: Int
}
type DeleteMovieDirectorPayload {
- movieDirector(filter: MovieDirectorFilter, order: MovieDirectorOrder, first: Int, offset: Int): [MovieDirector]
- msg: String
- numUids: Int
+ movieDirector(
+ filter: MovieDirectorFilter
+ order: MovieDirectorOrder
+ first: Int
+ offset: Int
+ ): [MovieDirector]
+ msg: String
+ numUids: Int
}
type DeleteMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- msg: String
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ msg: String
+ numUids: Int
}
type MovieAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type MovieDirectorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type UpdateMovieDirectorPayload {
- movieDirector(filter: MovieDirectorFilter, order: MovieDirectorOrder, first: Int, offset: Int): [MovieDirector]
- numUids: Int
+ movieDirector(
+ filter: MovieDirectorFilter
+ order: MovieDirectorOrder
+ first: Int
+ offset: Int
+ ): [MovieDirector]
+ numUids: Int
}
type UpdateMoviePayload {
- movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- numUids: Int
+ movie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ numUids: Int
}
#######################
@@ -338,21 +361,21 @@ type UpdateMoviePayload {
#######################
enum MovieDirectorHasFilter {
- name
- directed
+ name
+ directed
}
enum MovieDirectorOrderable {
- name
+ name
}
enum MovieHasFilter {
- name
- director
+ name
+ director
}
enum MovieOrderable {
- name
+ name
}
#######################
@@ -360,72 +383,72 @@ enum MovieOrderable {
#######################
input AddMovieDirectorInput {
- name: String!
- directed: [MovieRef]
+ name: String!
+ directed: [MovieRef]
}
input AddMovieInput {
- name: String!
+ name: String!
}
input MovieDirectorFilter {
- id: [ID!]
- has: [MovieDirectorHasFilter]
- and: [MovieDirectorFilter]
- or: [MovieDirectorFilter]
- not: MovieDirectorFilter
+ id: [ID!]
+ has: [MovieDirectorHasFilter]
+ and: [MovieDirectorFilter]
+ or: [MovieDirectorFilter]
+ not: MovieDirectorFilter
}
input MovieDirectorOrder {
- asc: MovieDirectorOrderable
- desc: MovieDirectorOrderable
- then: MovieDirectorOrder
+ asc: MovieDirectorOrderable
+ desc: MovieDirectorOrderable
+ then: MovieDirectorOrder
}
input MovieDirectorPatch {
- name: String
- directed: [MovieRef]
+ name: String
+ directed: [MovieRef]
}
input MovieDirectorRef {
- id: ID
- name: String
- directed: [MovieRef]
+ id: ID
+ name: String
+ directed: [MovieRef]
}
input MovieFilter {
- id: [ID!]
- has: [MovieHasFilter]
- and: [MovieFilter]
- or: [MovieFilter]
- not: MovieFilter
+ id: [ID!]
+ has: [MovieHasFilter]
+ and: [MovieFilter]
+ or: [MovieFilter]
+ not: MovieFilter
}
input MovieOrder {
- asc: MovieOrderable
- desc: MovieOrderable
- then: MovieOrder
+ asc: MovieOrderable
+ desc: MovieOrderable
+ then: MovieOrder
}
input MoviePatch {
- name: String
+ name: String
}
input MovieRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input UpdateMovieDirectorInput {
- filter: MovieDirectorFilter!
- set: MovieDirectorPatch
- remove: MovieDirectorPatch
+ filter: MovieDirectorFilter!
+ set: MovieDirectorPatch
+ remove: MovieDirectorPatch
}
input UpdateMovieInput {
- filter: MovieFilter!
- set: MoviePatch
- remove: MoviePatch
+ filter: MovieFilter!
+ set: MoviePatch
+ remove: MoviePatch
}
#######################
@@ -433,12 +456,17 @@ input UpdateMovieInput {
#######################
type Query {
- getMovie(id: ID!): Movie
- queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
- aggregateMovie(filter: MovieFilter): MovieAggregateResult
- getMovieDirector(id: ID!): MovieDirector
- queryMovieDirector(filter: MovieDirectorFilter, order: MovieDirectorOrder, first: Int, offset: Int): [MovieDirector]
- aggregateMovieDirector(filter: MovieDirectorFilter): MovieDirectorAggregateResult
+ getMovie(id: ID!): Movie
+ queryMovie(filter: MovieFilter, order: MovieOrder, first: Int, offset: Int): [Movie]
+ aggregateMovie(filter: MovieFilter): MovieAggregateResult
+ getMovieDirector(id: ID!): MovieDirector
+ queryMovieDirector(
+ filter: MovieDirectorFilter
+ order: MovieDirectorOrder
+ first: Int
+ offset: Int
+ ): [MovieDirector]
+ aggregateMovieDirector(filter: MovieDirectorFilter): MovieDirectorAggregateResult
}
#######################
@@ -446,11 +474,10 @@ type Query {
#######################
type Mutation {
- addMovie(input: [AddMovieInput!]!): AddMoviePayload
- updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
- deleteMovie(filter: MovieFilter!): DeleteMoviePayload
- addMovieDirector(input: [AddMovieDirectorInput!]!): AddMovieDirectorPayload
- updateMovieDirector(input: UpdateMovieDirectorInput!): UpdateMovieDirectorPayload
- deleteMovieDirector(filter: MovieDirectorFilter!): DeleteMovieDirectorPayload
+ addMovie(input: [AddMovieInput!]!): AddMoviePayload
+ updateMovie(input: UpdateMovieInput!): UpdateMoviePayload
+ deleteMovie(filter: MovieFilter!): DeleteMoviePayload
+ addMovieDirector(input: [AddMovieDirectorInput!]!): AddMovieDirectorPayload
+ updateMovieDirector(input: UpdateMovieDirectorInput!): UpdateMovieDirectorPayload
+ deleteMovieDirector(filter: MovieDirectorFilter!): DeleteMovieDirectorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-all-empty.graphql b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-all-empty.graphql
index 2cf10c23814..675fcdcbba8 100644
--- a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-all-empty.graphql
+++ b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-all-empty.graphql
@@ -3,20 +3,20 @@
#######################
type X {
- name(filter: YFilter, first: Int, offset: Int): [Y]
- f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
- nameAggregate(filter: YFilter): YAggregateResult
- f1Aggregate(filter: YFilter): YAggregateResult
+ name(filter: YFilter, first: Int, offset: Int): [Y]
+ f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
+ nameAggregate(filter: YFilter): YAggregateResult
+ f1Aggregate(filter: YFilter): YAggregateResult
}
type Y {
- f1(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
- f1Aggregate(filter: XFilter): XAggregateResult
+ f1(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
+ f1Aggregate(filter: XFilter): XAggregateResult
}
type Z {
- add(filter: XFilter, first: Int, offset: Int): [X]
- addAggregate(filter: XFilter): XAggregateResult
+ add(filter: XFilter, first: Int, offset: Int): [X]
+ addAggregate(filter: XFilter): XAggregateResult
}
#######################
@@ -35,162 +35,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -202,11 +202,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -215,77 +216,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -293,53 +295,53 @@ input StringHashFilter {
#######################
type AddXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ numUids: Int
}
type AddZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type DeleteXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- msg: String
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ msg: String
+ numUids: Int
}
type DeleteYPayload {
- y(filter: YFilter, first: Int, offset: Int): [Y]
- msg: String
- numUids: Int
+ y(filter: YFilter, first: Int, offset: Int): [Y]
+ msg: String
+ numUids: Int
}
type DeleteZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- msg: String
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ msg: String
+ numUids: Int
}
type UpdateXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ numUids: Int
}
type UpdateZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type XAggregateResult {
- count: Int
+ count: Int
}
type YAggregateResult {
- count: Int
+ count: Int
}
type ZAggregateResult {
- count: Int
+ count: Int
}
#######################
@@ -347,16 +349,16 @@ type ZAggregateResult {
#######################
enum XHasFilter {
- name
- f1
+ name
+ f1
}
enum YHasFilter {
- f1
+ f1
}
enum ZHasFilter {
- add
+ add
}
#######################
@@ -364,21 +366,21 @@ enum ZHasFilter {
#######################
input XFilter {
- has: [XHasFilter]
- and: [XFilter]
- or: [XFilter]
- not: XFilter
+ has: [XHasFilter]
+ and: [XFilter]
+ or: [XFilter]
+ not: XFilter
}
input YFilter {
- not: YFilter
+ not: YFilter
}
input ZFilter {
- has: [ZHasFilter]
- and: [ZFilter]
- or: [ZFilter]
- not: ZFilter
+ has: [ZHasFilter]
+ and: [ZFilter]
+ or: [ZFilter]
+ not: ZFilter
}
#######################
@@ -386,12 +388,12 @@ input ZFilter {
#######################
type Query {
- queryX(filter: XFilter, first: Int, offset: Int): [X]
- aggregateX(filter: XFilter): XAggregateResult
- queryY(filter: YFilter, first: Int, offset: Int): [Y]
- aggregateY(filter: YFilter): YAggregateResult
- queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
- aggregateZ(filter: ZFilter): ZAggregateResult
+ queryX(filter: XFilter, first: Int, offset: Int): [X]
+ aggregateX(filter: XFilter): XAggregateResult
+ queryY(filter: YFilter, first: Int, offset: Int): [Y]
+ aggregateY(filter: YFilter): YAggregateResult
+ queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
+ aggregateZ(filter: ZFilter): ZAggregateResult
}
#######################
@@ -399,8 +401,7 @@ type Query {
#######################
type Mutation {
- deleteX(filter: XFilter!): DeleteXPayload
- deleteY(filter: YFilter!): DeleteYPayload
- deleteZ(filter: ZFilter!): DeleteZPayload
+ deleteX(filter: XFilter!): DeleteXPayload
+ deleteY(filter: YFilter!): DeleteYPayload
+ deleteZ(filter: ZFilter!): DeleteZPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-circular.graphql b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-circular.graphql
index 78e536d76ed..c05def0d14f 100644
--- a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-circular.graphql
+++ b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-circular.graphql
@@ -3,24 +3,24 @@
#######################
type X {
- f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
- f3(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "~f3")
- f1Aggregate(filter: YFilter): YAggregateResult
- f3Aggregate(filter: ZFilter): ZAggregateResult
+ f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
+ f3(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "~f3")
+ f1Aggregate(filter: YFilter): YAggregateResult
+ f3Aggregate(filter: ZFilter): ZAggregateResult
}
type Y {
- f1(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
- f2(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "f2")
- f1Aggregate(filter: XFilter): XAggregateResult
- f2Aggregate(filter: ZFilter): ZAggregateResult
+ f1(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
+ f2(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "f2")
+ f1Aggregate(filter: XFilter): XAggregateResult
+ f2Aggregate(filter: ZFilter): ZAggregateResult
}
type Z {
- f2(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "~f2")
- f3(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "f3")
- f2Aggregate(filter: YFilter): YAggregateResult
- f3Aggregate(filter: XFilter): XAggregateResult
+ f2(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "~f2")
+ f3(filter: XFilter, first: Int, offset: Int): [X] @dgraph(pred: "f3")
+ f2Aggregate(filter: YFilter): YAggregateResult
+ f3Aggregate(filter: XFilter): XAggregateResult
}
#######################
@@ -39,162 +39,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -206,11 +206,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -219,77 +220,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -297,63 +299,63 @@ input StringHashFilter {
#######################
type AddXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ numUids: Int
}
type AddYPayload {
- y(filter: YFilter, first: Int, offset: Int): [Y]
- numUids: Int
+ y(filter: YFilter, first: Int, offset: Int): [Y]
+ numUids: Int
}
type AddZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type DeleteXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- msg: String
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ msg: String
+ numUids: Int
}
type DeleteYPayload {
- y(filter: YFilter, first: Int, offset: Int): [Y]
- msg: String
- numUids: Int
+ y(filter: YFilter, first: Int, offset: Int): [Y]
+ msg: String
+ numUids: Int
}
type DeleteZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- msg: String
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ msg: String
+ numUids: Int
}
type UpdateXPayload {
- x(filter: XFilter, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, first: Int, offset: Int): [X]
+ numUids: Int
}
type UpdateYPayload {
- y(filter: YFilter, first: Int, offset: Int): [Y]
- numUids: Int
+ y(filter: YFilter, first: Int, offset: Int): [Y]
+ numUids: Int
}
type UpdateZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type XAggregateResult {
- count: Int
+ count: Int
}
type YAggregateResult {
- count: Int
+ count: Int
}
type ZAggregateResult {
- count: Int
+ count: Int
}
#######################
@@ -361,18 +363,18 @@ type ZAggregateResult {
#######################
enum XHasFilter {
- f1
- f3
+ f1
+ f3
}
enum YHasFilter {
- f1
- f2
+ f1
+ f2
}
enum ZHasFilter {
- f2
- f3
+ f2
+ f3
}
#######################
@@ -380,78 +382,78 @@ enum ZHasFilter {
#######################
input AddXInput {
- f1: [YRef]
+ f1: [YRef]
}
input AddYInput {
- f2: [ZRef]
+ f2: [ZRef]
}
input AddZInput {
- f3: [XRef]
+ f3: [XRef]
}
input UpdateXInput {
- filter: XFilter!
- set: XPatch
- remove: XPatch
+ filter: XFilter!
+ set: XPatch
+ remove: XPatch
}
input UpdateYInput {
- filter: YFilter!
- set: YPatch
- remove: YPatch
+ filter: YFilter!
+ set: YPatch
+ remove: YPatch
}
input UpdateZInput {
- filter: ZFilter!
- set: ZPatch
- remove: ZPatch
+ filter: ZFilter!
+ set: ZPatch
+ remove: ZPatch
}
input XFilter {
- has: [XHasFilter]
- and: [XFilter]
- or: [XFilter]
- not: XFilter
+ has: [XHasFilter]
+ and: [XFilter]
+ or: [XFilter]
+ not: XFilter
}
input XPatch {
- f1: [YRef]
+ f1: [YRef]
}
input XRef {
- f1: [YRef]
+ f1: [YRef]
}
input YFilter {
- has: [YHasFilter]
- and: [YFilter]
- or: [YFilter]
- not: YFilter
+ has: [YHasFilter]
+ and: [YFilter]
+ or: [YFilter]
+ not: YFilter
}
input YPatch {
- f2: [ZRef]
+ f2: [ZRef]
}
input YRef {
- f2: [ZRef]
+ f2: [ZRef]
}
input ZFilter {
- has: [ZHasFilter]
- and: [ZFilter]
- or: [ZFilter]
- not: ZFilter
+ has: [ZHasFilter]
+ and: [ZFilter]
+ or: [ZFilter]
+ not: ZFilter
}
input ZPatch {
- f3: [XRef]
+ f3: [XRef]
}
input ZRef {
- f3: [XRef]
+ f3: [XRef]
}
#######################
@@ -459,12 +461,12 @@ input ZRef {
#######################
type Query {
- queryX(filter: XFilter, first: Int, offset: Int): [X]
- aggregateX(filter: XFilter): XAggregateResult
- queryY(filter: YFilter, first: Int, offset: Int): [Y]
- aggregateY(filter: YFilter): YAggregateResult
- queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
- aggregateZ(filter: ZFilter): ZAggregateResult
+ queryX(filter: XFilter, first: Int, offset: Int): [X]
+ aggregateX(filter: XFilter): XAggregateResult
+ queryY(filter: YFilter, first: Int, offset: Int): [Y]
+ aggregateY(filter: YFilter): YAggregateResult
+ queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
+ aggregateZ(filter: ZFilter): ZAggregateResult
}
#######################
@@ -472,14 +474,13 @@ type Query {
#######################
type Mutation {
- addX(input: [AddXInput!]!): AddXPayload
- updateX(input: UpdateXInput!): UpdateXPayload
- deleteX(filter: XFilter!): DeleteXPayload
- addY(input: [AddYInput!]!): AddYPayload
- updateY(input: UpdateYInput!): UpdateYPayload
- deleteY(filter: YFilter!): DeleteYPayload
- addZ(input: [AddZInput!]!): AddZPayload
- updateZ(input: UpdateZInput!): UpdateZPayload
- deleteZ(filter: ZFilter!): DeleteZPayload
+ addX(input: [AddXInput!]!): AddXPayload
+ updateX(input: UpdateXInput!): UpdateXPayload
+ deleteX(filter: XFilter!): DeleteXPayload
+ addY(input: [AddYInput!]!): AddYPayload
+ updateY(input: UpdateYInput!): UpdateYPayload
+ deleteY(filter: YFilter!): DeleteYPayload
+ addZ(input: [AddZInput!]!): AddZPayload
+ updateZ(input: UpdateZInput!): UpdateZPayload
+ deleteZ(filter: ZFilter!): DeleteZPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-custom-mutation.graphql b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-custom-mutation.graphql
index e94e759d18b..63cf8e888d1 100644
--- a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-custom-mutation.graphql
+++ b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-custom-mutation.graphql
@@ -3,12 +3,12 @@
#######################
type User {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
input UserInput {
- name: String!
+ name: String!
}
#######################
@@ -27,162 +27,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -194,11 +194,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -207,77 +208,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -285,25 +287,25 @@ input StringHashFilter {
#######################
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
#######################
@@ -311,11 +313,11 @@ type UserAggregateResult {
#######################
enum UserHasFilter {
- name
+ name
}
enum UserOrderable {
- name
+ name
}
#######################
@@ -323,36 +325,36 @@ enum UserOrderable {
#######################
input AddUserInput {
- name: String!
+ name: String!
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- name: String
+ name: String
}
input UserRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
#######################
@@ -360,9 +362,9 @@ input UserRef {
#######################
type Query {
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -370,9 +372,9 @@ type Query {
#######################
type Mutation {
- addMyFavouriteUsers(input: [UserInput!]!): [User] @custom(http: {url:"http://my-api.com",method:"POST",body:"{ data: $input }"})
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addMyFavouriteUsers(input: [UserInput!]!): [User]
+ @custom(http: { url: "http://my-api.com", method: "POST", body: "{ data: $input }" })
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-directLink.graphql b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-directLink.graphql
index e26f5cc4672..8d105fcc8fa 100644
--- a/graphql/schema/testdata/schemagen/output/filter-cleanSchema-directLink.graphql
+++ b/graphql/schema/testdata/schemagen/output/filter-cleanSchema-directLink.graphql
@@ -3,22 +3,22 @@
#######################
type X {
- f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
- name: String
- id: ID
- f1Aggregate(filter: YFilter): YAggregateResult
+ f1(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f1")
+ name: String
+ id: ID
+ f1Aggregate(filter: YFilter): YAggregateResult
}
type Y {
- f2(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "~f2")
- f1(filter: XFilter, order: XOrder, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
- f2Aggregate(filter: ZFilter): ZAggregateResult
- f1Aggregate(filter: XFilter): XAggregateResult
+ f2(filter: ZFilter, first: Int, offset: Int): [Z] @dgraph(pred: "~f2")
+ f1(filter: XFilter, order: XOrder, first: Int, offset: Int): [X] @dgraph(pred: "~f1")
+ f2Aggregate(filter: ZFilter): ZAggregateResult
+ f1Aggregate(filter: XFilter): XAggregateResult
}
type Z {
- f2(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f2")
- f2Aggregate(filter: YFilter): YAggregateResult
+ f2(filter: YFilter, first: Int, offset: Int): [Y] @dgraph(pred: "f2")
+ f2Aggregate(filter: YFilter): YAggregateResult
}
#######################
@@ -37,162 +37,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -204,11 +204,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -217,77 +218,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -295,55 +297,55 @@ input StringHashFilter {
#######################
type AddXPayload {
- x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
+ numUids: Int
}
type AddZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type DeleteXPayload {
- x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
- msg: String
- numUids: Int
+ x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
+ msg: String
+ numUids: Int
}
type DeleteYPayload {
- y(filter: YFilter, first: Int, offset: Int): [Y]
- msg: String
- numUids: Int
+ y(filter: YFilter, first: Int, offset: Int): [Y]
+ msg: String
+ numUids: Int
}
type DeleteZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- msg: String
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ msg: String
+ numUids: Int
}
type UpdateXPayload {
- x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
- numUids: Int
+ x(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
+ numUids: Int
}
type UpdateZPayload {
- z(filter: ZFilter, first: Int, offset: Int): [Z]
- numUids: Int
+ z(filter: ZFilter, first: Int, offset: Int): [Z]
+ numUids: Int
}
type XAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type YAggregateResult {
- count: Int
+ count: Int
}
type ZAggregateResult {
- count: Int
+ count: Int
}
#######################
@@ -351,21 +353,21 @@ type ZAggregateResult {
#######################
enum XHasFilter {
- f1
- name
+ f1
+ name
}
enum XOrderable {
- name
+ name
}
enum YHasFilter {
- f2
- f1
+ f2
+ f1
}
enum ZHasFilter {
- f2
+ f2
}
#######################
@@ -373,47 +375,47 @@ enum ZHasFilter {
#######################
input AddXInput {
- name: String
+ name: String
}
input UpdateXInput {
- filter: XFilter!
- set: XPatch
- remove: XPatch
+ filter: XFilter!
+ set: XPatch
+ remove: XPatch
}
input XFilter {
- id: [ID!]
- has: [XHasFilter]
- and: [XFilter]
- or: [XFilter]
- not: XFilter
+ id: [ID!]
+ has: [XHasFilter]
+ and: [XFilter]
+ or: [XFilter]
+ not: XFilter
}
input XOrder {
- asc: XOrderable
- desc: XOrderable
- then: XOrder
+ asc: XOrderable
+ desc: XOrderable
+ then: XOrder
}
input XPatch {
- name: String
+ name: String
}
input XRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input YFilter {
- not: YFilter
+ not: YFilter
}
input ZFilter {
- has: [ZHasFilter]
- and: [ZFilter]
- or: [ZFilter]
- not: ZFilter
+ has: [ZHasFilter]
+ and: [ZFilter]
+ or: [ZFilter]
+ not: ZFilter
}
#######################
@@ -421,13 +423,13 @@ input ZFilter {
#######################
type Query {
- getX(id: ID!): X
- queryX(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
- aggregateX(filter: XFilter): XAggregateResult
- queryY(filter: YFilter, first: Int, offset: Int): [Y]
- aggregateY(filter: YFilter): YAggregateResult
- queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
- aggregateZ(filter: ZFilter): ZAggregateResult
+ getX(id: ID!): X
+ queryX(filter: XFilter, order: XOrder, first: Int, offset: Int): [X]
+ aggregateX(filter: XFilter): XAggregateResult
+ queryY(filter: YFilter, first: Int, offset: Int): [Y]
+ aggregateY(filter: YFilter): YAggregateResult
+ queryZ(filter: ZFilter, first: Int, offset: Int): [Z]
+ aggregateZ(filter: ZFilter): ZAggregateResult
}
#######################
@@ -435,10 +437,9 @@ type Query {
#######################
type Mutation {
- addX(input: [AddXInput!]!): AddXPayload
- updateX(input: UpdateXInput!): UpdateXPayload
- deleteX(filter: XFilter!): DeleteXPayload
- deleteY(filter: YFilter!): DeleteYPayload
- deleteZ(filter: ZFilter!): DeleteZPayload
+ addX(input: [AddXInput!]!): AddXPayload
+ updateX(input: UpdateXInput!): UpdateXPayload
+ deleteX(filter: XFilter!): DeleteXPayload
+ deleteY(filter: YFilter!): DeleteYPayload
+ deleteZ(filter: ZFilter!): DeleteZPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/generate-directive.graphql b/graphql/schema/testdata/schemagen/output/generate-directive.graphql
index b5cda816303..cb3e0dd0406 100644
--- a/graphql/schema/testdata/schemagen/output/generate-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/generate-directive.graphql
@@ -2,24 +2,34 @@
# Input Schema
#######################
-interface Character @secret(field: "password") @generate(query: {get:false,password:false}, subscription: false) {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
-}
-
-type Human implements Character @generate(query: {aggregate:true}, subscription: true) @secret(field: "password") {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- totalCredits: Int
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
-}
-
-type Person @withSubscription @generate(query: {get:false,query:true,password:true,aggregate:false}, mutation: {add:false,delete:false}, subscription: false) {
- id: ID!
- name: String!
+interface Character
+ @secret(field: "password")
+ @generate(query: { get: false, password: false }, subscription: false) {
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+}
+
+type Human implements Character
+ @generate(query: { aggregate: true }, subscription: true)
+ @secret(field: "password") {
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ totalCredits: Int
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+}
+
+type Person
+ @withSubscription
+ @generate(
+ query: { get: false, query: true, password: true, aggregate: false }
+ mutation: { add: false, delete: false }
+ subscription: false
+ ) {
+ id: ID!
+ name: String!
}
#######################
@@ -38,162 +48,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -205,11 +215,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -218,77 +229,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -296,57 +308,57 @@ input StringHashFilter {
#######################
type AddHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type CharacterAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- msg: String
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ msg: String
+ numUids: Int
}
type DeleteHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- msg: String
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ msg: String
+ numUids: Int
}
type HumanAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- totalCreditsMin: Int
- totalCreditsMax: Int
- totalCreditsSum: Int
- totalCreditsAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ totalCreditsMin: Int
+ totalCreditsMax: Int
+ totalCreditsSum: Int
+ totalCreditsAvg: Float
}
type PersonAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type UpdateCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ numUids: Int
}
type UpdateHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type UpdatePersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ numUids: Int
}
#######################
@@ -354,31 +366,31 @@ type UpdatePersonPayload {
#######################
enum CharacterHasFilter {
- name
- friends
+ name
+ friends
}
enum CharacterOrderable {
- name
+ name
}
enum HumanHasFilter {
- name
- friends
- totalCredits
+ name
+ friends
+ totalCredits
}
enum HumanOrderable {
- name
- totalCredits
+ name
+ totalCredits
}
enum PersonHasFilter {
- name
+ name
}
enum PersonOrderable {
- name
+ name
}
#######################
@@ -386,106 +398,106 @@ enum PersonOrderable {
#######################
input AddHumanInput {
- name: String!
- friends: [CharacterRef]
- totalCredits: Int
- password: String!
+ name: String!
+ friends: [CharacterRef]
+ totalCredits: Int
+ password: String!
}
input CharacterFilter {
- id: [ID!]
- name: StringExactFilter
- has: [CharacterHasFilter]
- and: [CharacterFilter]
- or: [CharacterFilter]
- not: CharacterFilter
+ id: [ID!]
+ name: StringExactFilter
+ has: [CharacterHasFilter]
+ and: [CharacterFilter]
+ or: [CharacterFilter]
+ not: CharacterFilter
}
input CharacterOrder {
- asc: CharacterOrderable
- desc: CharacterOrderable
- then: CharacterOrder
+ asc: CharacterOrderable
+ desc: CharacterOrderable
+ then: CharacterOrder
}
input CharacterPatch {
- name: String
- friends: [CharacterRef]
- password: String
+ name: String
+ friends: [CharacterRef]
+ password: String
}
input CharacterRef {
- id: ID!
+ id: ID!
}
input HumanFilter {
- id: [ID!]
- name: StringExactFilter
- has: [HumanHasFilter]
- and: [HumanFilter]
- or: [HumanFilter]
- not: HumanFilter
+ id: [ID!]
+ name: StringExactFilter
+ has: [HumanHasFilter]
+ and: [HumanFilter]
+ or: [HumanFilter]
+ not: HumanFilter
}
input HumanOrder {
- asc: HumanOrderable
- desc: HumanOrderable
- then: HumanOrder
+ asc: HumanOrderable
+ desc: HumanOrderable
+ then: HumanOrder
}
input HumanPatch {
- name: String
- friends: [CharacterRef]
- totalCredits: Int
- password: String
+ name: String
+ friends: [CharacterRef]
+ totalCredits: Int
+ password: String
}
input HumanRef {
- id: ID
- name: String
- friends: [CharacterRef]
- totalCredits: Int
- password: String
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ totalCredits: Int
+ password: String
}
input PersonFilter {
- id: [ID!]
- has: [PersonHasFilter]
- and: [PersonFilter]
- or: [PersonFilter]
- not: PersonFilter
+ id: [ID!]
+ has: [PersonHasFilter]
+ and: [PersonFilter]
+ or: [PersonFilter]
+ not: PersonFilter
}
input PersonOrder {
- asc: PersonOrderable
- desc: PersonOrderable
- then: PersonOrder
+ asc: PersonOrderable
+ desc: PersonOrderable
+ then: PersonOrder
}
input PersonPatch {
- name: String
+ name: String
}
input PersonRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input UpdateCharacterInput {
- filter: CharacterFilter!
- set: CharacterPatch
- remove: CharacterPatch
+ filter: CharacterFilter!
+ set: CharacterPatch
+ remove: CharacterPatch
}
input UpdateHumanInput {
- filter: HumanFilter!
- set: HumanPatch
- remove: HumanPatch
+ filter: HumanFilter!
+ set: HumanPatch
+ remove: HumanPatch
}
input UpdatePersonInput {
- filter: PersonFilter!
- set: PersonPatch
- remove: PersonPatch
+ filter: PersonFilter!
+ set: PersonPatch
+ remove: PersonPatch
}
#######################
@@ -493,13 +505,18 @@ input UpdatePersonInput {
#######################
type Query {
- queryCharacter(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
- getHuman(id: ID!): Human
- checkHumanPassword(id: ID!, password: String!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
- queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ queryCharacter(
+ filter: CharacterFilter
+ order: CharacterOrder
+ first: Int
+ offset: Int
+ ): [Character]
+ aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
+ getHuman(id: ID!): Human
+ checkHumanPassword(id: ID!, password: String!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
}
#######################
@@ -507,12 +524,12 @@ type Query {
#######################
type Mutation {
- updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
- deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
- addHuman(input: [AddHumanInput!]!): AddHumanPayload
- updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
- deleteHuman(filter: HumanFilter!): DeleteHumanPayload
- updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
+ updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
+ deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
+ addHuman(input: [AddHumanInput!]!): AddHumanPayload
+ updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
+ deleteHuman(filter: HumanFilter!): DeleteHumanPayload
+ updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
}
#######################
@@ -520,8 +537,8 @@ type Mutation {
#######################
type Subscription {
- getHuman(id: ID!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
- queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ getHuman(id: ID!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
}
diff --git a/graphql/schema/testdata/schemagen/output/geo-type.graphql b/graphql/schema/testdata/schemagen/output/geo-type.graphql
index 290522facbb..d1bda13f933 100644
--- a/graphql/schema/testdata/schemagen/output/geo-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/geo-type.graphql
@@ -3,14 +3,14 @@
#######################
type Hotel {
- id: ID!
- name: String!
- location: Point @search
- secretLocation: Point
- area: Polygon @search
- secretArea: Polygon
- branches: MultiPolygon @search
- secretBranches: MultiPolygon
+ id: ID!
+ name: String!
+ location: Point @search
+ secretLocation: Point
+ area: Polygon @search
+ secretArea: Polygon
+ branches: MultiPolygon @search
+ secretBranches: MultiPolygon
}
#######################
@@ -29,162 +29,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -196,11 +196,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -209,77 +210,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -287,25 +289,25 @@ input StringHashFilter {
#######################
type AddHotelPayload {
- hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
- numUids: Int
+ hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
+ numUids: Int
}
type DeleteHotelPayload {
- hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
- msg: String
- numUids: Int
+ hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
+ msg: String
+ numUids: Int
}
type HotelAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type UpdateHotelPayload {
- hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
- numUids: Int
+ hotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
+ numUids: Int
}
#######################
@@ -313,17 +315,17 @@ type UpdateHotelPayload {
#######################
enum HotelHasFilter {
- name
- location
- secretLocation
- area
- secretArea
- branches
- secretBranches
+ name
+ location
+ secretLocation
+ area
+ secretArea
+ branches
+ secretBranches
}
enum HotelOrderable {
- name
+ name
}
#######################
@@ -331,57 +333,57 @@ enum HotelOrderable {
#######################
input AddHotelInput {
- name: String!
- location: PointRef
- secretLocation: PointRef
- area: PolygonRef
- secretArea: PolygonRef
- branches: MultiPolygonRef
- secretBranches: MultiPolygonRef
+ name: String!
+ location: PointRef
+ secretLocation: PointRef
+ area: PolygonRef
+ secretArea: PolygonRef
+ branches: MultiPolygonRef
+ secretBranches: MultiPolygonRef
}
input HotelFilter {
- id: [ID!]
- location: PointGeoFilter
- area: PolygonGeoFilter
- branches: PolygonGeoFilter
- has: [HotelHasFilter]
- and: [HotelFilter]
- or: [HotelFilter]
- not: HotelFilter
+ id: [ID!]
+ location: PointGeoFilter
+ area: PolygonGeoFilter
+ branches: PolygonGeoFilter
+ has: [HotelHasFilter]
+ and: [HotelFilter]
+ or: [HotelFilter]
+ not: HotelFilter
}
input HotelOrder {
- asc: HotelOrderable
- desc: HotelOrderable
- then: HotelOrder
+ asc: HotelOrderable
+ desc: HotelOrderable
+ then: HotelOrder
}
input HotelPatch {
- name: String
- location: PointRef
- secretLocation: PointRef
- area: PolygonRef
- secretArea: PolygonRef
- branches: MultiPolygonRef
- secretBranches: MultiPolygonRef
+ name: String
+ location: PointRef
+ secretLocation: PointRef
+ area: PolygonRef
+ secretArea: PolygonRef
+ branches: MultiPolygonRef
+ secretBranches: MultiPolygonRef
}
input HotelRef {
- id: ID
- name: String
- location: PointRef
- secretLocation: PointRef
- area: PolygonRef
- secretArea: PolygonRef
- branches: MultiPolygonRef
- secretBranches: MultiPolygonRef
+ id: ID
+ name: String
+ location: PointRef
+ secretLocation: PointRef
+ area: PolygonRef
+ secretArea: PolygonRef
+ branches: MultiPolygonRef
+ secretBranches: MultiPolygonRef
}
input UpdateHotelInput {
- filter: HotelFilter!
- set: HotelPatch
- remove: HotelPatch
+ filter: HotelFilter!
+ set: HotelPatch
+ remove: HotelPatch
}
#######################
@@ -389,9 +391,9 @@ input UpdateHotelInput {
#######################
type Query {
- getHotel(id: ID!): Hotel
- queryHotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
- aggregateHotel(filter: HotelFilter): HotelAggregateResult
+ getHotel(id: ID!): Hotel
+ queryHotel(filter: HotelFilter, order: HotelOrder, first: Int, offset: Int): [Hotel]
+ aggregateHotel(filter: HotelFilter): HotelAggregateResult
}
#######################
@@ -399,8 +401,7 @@ type Query {
#######################
type Mutation {
- addHotel(input: [AddHotelInput!]!): AddHotelPayload
- updateHotel(input: UpdateHotelInput!): UpdateHotelPayload
- deleteHotel(filter: HotelFilter!): DeleteHotelPayload
+ addHotel(input: [AddHotelInput!]!): AddHotelPayload
+ updateHotel(input: UpdateHotelInput!): UpdateHotelPayload
+ deleteHotel(filter: HotelFilter!): DeleteHotelPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/hasInverse-with-interface-having-directive.graphql b/graphql/schema/testdata/schemagen/output/hasInverse-with-interface-having-directive.graphql
index 4a92ac3aa88..60863ef7572 100644
--- a/graphql/schema/testdata/schemagen/output/hasInverse-with-interface-having-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasInverse-with-interface-having-directive.graphql
@@ -3,33 +3,34 @@
#######################
type Author {
- id: ID!
- name: String! @search(by: [hash])
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post] @hasInverse(field: author)
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID!
+ name: String! @search(by: [hash])
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ @hasInverse(field: author)
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
interface Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
}
type Question implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
- answered: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ answered: Boolean
}
type Answer implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
- markedUseful: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ markedUseful: Boolean
}
#######################
@@ -48,162 +49,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -215,11 +216,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -228,77 +230,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -306,92 +309,92 @@ input StringHashFilter {
#######################
type AddAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type AnswerAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- msg: String
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ msg: String
+ numUids: Int
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type DeleteQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- msg: String
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type QuestionAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type UpdateAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type UpdateQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
#######################
@@ -399,47 +402,47 @@ type UpdateQuestionPayload {
#######################
enum AnswerHasFilter {
- text
- datePublished
- author
- markedUseful
+ text
+ datePublished
+ author
+ markedUseful
}
enum AnswerOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum AuthorHasFilter {
- name
- posts
+ name
+ posts
}
enum AuthorOrderable {
- name
+ name
}
enum PostHasFilter {
- text
- datePublished
- author
+ text
+ datePublished
+ author
}
enum PostOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum QuestionHasFilter {
- text
- datePublished
- author
- answered
+ text
+ datePublished
+ author
+ answered
}
enum QuestionOrderable {
- text
- datePublished
+ text
+ datePublished
}
#######################
@@ -447,160 +450,160 @@ enum QuestionOrderable {
#######################
input AddAnswerInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ markedUseful: Boolean
}
input AddAuthorInput {
- name: String!
- posts: [PostRef]
+ name: String!
+ posts: [PostRef]
}
input AddQuestionInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ answered: Boolean
}
input AnswerFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [AnswerHasFilter]
- and: [AnswerFilter]
- or: [AnswerFilter]
- not: AnswerFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [AnswerHasFilter]
+ and: [AnswerFilter]
+ or: [AnswerFilter]
+ not: AnswerFilter
}
input AnswerOrder {
- asc: AnswerOrderable
- desc: AnswerOrderable
- then: AnswerOrder
+ asc: AnswerOrderable
+ desc: AnswerOrderable
+ then: AnswerOrder
}
input AnswerPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AnswerRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- posts: [PostRef]
+ name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ posts: [PostRef]
}
input PostFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
}
input PostRef {
- id: ID!
+ id: ID!
}
input QuestionFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [QuestionHasFilter]
- and: [QuestionFilter]
- or: [QuestionFilter]
- not: QuestionFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [QuestionHasFilter]
+ and: [QuestionFilter]
+ or: [QuestionFilter]
+ not: QuestionFilter
}
input QuestionOrder {
- asc: QuestionOrderable
- desc: QuestionOrderable
- then: QuestionOrder
+ asc: QuestionOrderable
+ desc: QuestionOrderable
+ then: QuestionOrder
}
input QuestionPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input QuestionRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input UpdateAnswerInput {
- filter: AnswerFilter!
- set: AnswerPatch
- remove: AnswerPatch
+ filter: AnswerFilter!
+ set: AnswerPatch
+ remove: AnswerPatch
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
input UpdateQuestionInput {
- filter: QuestionFilter!
- set: QuestionPatch
- remove: QuestionPatch
+ filter: QuestionFilter!
+ set: QuestionPatch
+ remove: QuestionPatch
}
#######################
@@ -608,18 +611,18 @@ input UpdateQuestionInput {
#######################
type Query {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getQuestion(id: ID!): Question
- queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
- getAnswer(id: ID!): Answer
- queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getQuestion(id: ID!): Question
+ queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
+ getAnswer(id: ID!): Answer
+ queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
}
#######################
@@ -627,16 +630,15 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
- updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
- deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
- addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
- updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
- deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
+ updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
+ deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
+ addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
+ updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
+ deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/hasInverse-with-interface.graphql b/graphql/schema/testdata/schemagen/output/hasInverse-with-interface.graphql
index 6a0e7001198..ea2d33a0b0f 100644
--- a/graphql/schema/testdata/schemagen/output/hasInverse-with-interface.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasInverse-with-interface.graphql
@@ -3,35 +3,37 @@
#######################
type Author {
- id: ID!
- name: String! @search(by: [hash])
- questions(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question] @hasInverse(field: author)
- answers(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer] @hasInverse(field: author)
- questionsAggregate(filter: QuestionFilter): QuestionAggregateResult
- answersAggregate(filter: AnswerFilter): AnswerAggregateResult
+ id: ID!
+ name: String! @search(by: [hash])
+ questions(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ @hasInverse(field: author)
+ answers(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ @hasInverse(field: author)
+ questionsAggregate(filter: QuestionFilter): QuestionAggregateResult
+ answersAggregate(filter: AnswerFilter): AnswerAggregateResult
}
interface Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author!
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author!
}
type Question implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: questions)
- answered: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: questions)
+ answered: Boolean
}
type Answer implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: answers)
- markedUseful: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: answers)
+ markedUseful: Boolean
}
#######################
@@ -50,162 +52,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -217,11 +219,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -230,77 +233,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -308,92 +312,92 @@ input StringHashFilter {
#######################
type AddAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type AnswerAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- msg: String
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ msg: String
+ numUids: Int
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type DeleteQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- msg: String
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type QuestionAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type UpdateAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type UpdateQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
#######################
@@ -401,48 +405,48 @@ type UpdateQuestionPayload {
#######################
enum AnswerHasFilter {
- text
- datePublished
- author
- markedUseful
+ text
+ datePublished
+ author
+ markedUseful
}
enum AnswerOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum AuthorHasFilter {
- name
- questions
- answers
+ name
+ questions
+ answers
}
enum AuthorOrderable {
- name
+ name
}
enum PostHasFilter {
- text
- datePublished
- author
+ text
+ datePublished
+ author
}
enum PostOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum QuestionHasFilter {
- text
- datePublished
- author
- answered
+ text
+ datePublished
+ author
+ answered
}
enum QuestionOrderable {
- text
- datePublished
+ text
+ datePublished
}
#######################
@@ -450,163 +454,163 @@ enum QuestionOrderable {
#######################
input AddAnswerInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ markedUseful: Boolean
}
input AddAuthorInput {
- name: String!
- questions: [QuestionRef]
- answers: [AnswerRef]
+ name: String!
+ questions: [QuestionRef]
+ answers: [AnswerRef]
}
input AddQuestionInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ answered: Boolean
}
input AnswerFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [AnswerHasFilter]
- and: [AnswerFilter]
- or: [AnswerFilter]
- not: AnswerFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [AnswerHasFilter]
+ and: [AnswerFilter]
+ or: [AnswerFilter]
+ not: AnswerFilter
}
input AnswerOrder {
- asc: AnswerOrderable
- desc: AnswerOrderable
- then: AnswerOrder
+ asc: AnswerOrderable
+ desc: AnswerOrderable
+ then: AnswerOrder
}
input AnswerPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AnswerRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- questions: [QuestionRef]
- answers: [AnswerRef]
+ name: String
+ questions: [QuestionRef]
+ answers: [AnswerRef]
}
input AuthorRef {
- id: ID
- name: String
- questions: [QuestionRef]
- answers: [AnswerRef]
+ id: ID
+ name: String
+ questions: [QuestionRef]
+ answers: [AnswerRef]
}
input PostFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
}
input PostRef {
- id: ID!
+ id: ID!
}
input QuestionFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [QuestionHasFilter]
- and: [QuestionFilter]
- or: [QuestionFilter]
- not: QuestionFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [QuestionHasFilter]
+ and: [QuestionFilter]
+ or: [QuestionFilter]
+ not: QuestionFilter
}
input QuestionOrder {
- asc: QuestionOrderable
- desc: QuestionOrderable
- then: QuestionOrder
+ asc: QuestionOrderable
+ desc: QuestionOrderable
+ then: QuestionOrder
}
input QuestionPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input QuestionRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input UpdateAnswerInput {
- filter: AnswerFilter!
- set: AnswerPatch
- remove: AnswerPatch
+ filter: AnswerFilter!
+ set: AnswerPatch
+ remove: AnswerPatch
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
input UpdateQuestionInput {
- filter: QuestionFilter!
- set: QuestionPatch
- remove: QuestionPatch
+ filter: QuestionFilter!
+ set: QuestionPatch
+ remove: QuestionPatch
}
#######################
@@ -614,18 +618,18 @@ input UpdateQuestionInput {
#######################
type Query {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getQuestion(id: ID!): Question
- queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
- getAnswer(id: ID!): Answer
- queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getQuestion(id: ID!): Question
+ queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
+ getAnswer(id: ID!): Answer
+ queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
}
#######################
@@ -633,16 +637,15 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
- updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
- deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
- addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
- updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
- deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
+ updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
+ deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
+ addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
+ updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
+ deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/hasInverse-with-type-having-directive.graphql b/graphql/schema/testdata/schemagen/output/hasInverse-with-type-having-directive.graphql
index 4a92ac3aa88..60863ef7572 100644
--- a/graphql/schema/testdata/schemagen/output/hasInverse-with-type-having-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasInverse-with-type-having-directive.graphql
@@ -3,33 +3,34 @@
#######################
type Author {
- id: ID!
- name: String! @search(by: [hash])
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post] @hasInverse(field: author)
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID!
+ name: String! @search(by: [hash])
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ @hasInverse(field: author)
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
interface Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
}
type Question implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
- answered: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ answered: Boolean
}
type Answer implements Post {
- id: ID!
- text: String @search(by: [fulltext])
- datePublished: DateTime @search
- author(filter: AuthorFilter): Author! @hasInverse(field: posts)
- markedUseful: Boolean
+ id: ID!
+ text: String @search(by: [fulltext])
+ datePublished: DateTime @search
+ author(filter: AuthorFilter): Author! @hasInverse(field: posts)
+ markedUseful: Boolean
}
#######################
@@ -48,162 +49,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -215,11 +216,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -228,77 +230,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -306,92 +309,92 @@ input StringHashFilter {
#######################
type AddAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type AnswerAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- msg: String
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ msg: String
+ numUids: Int
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type DeleteQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- msg: String
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type QuestionAggregateResult {
- count: Int
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type UpdateAnswerPayload {
- answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- numUids: Int
+ answer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ numUids: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type UpdateQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
#######################
@@ -399,47 +402,47 @@ type UpdateQuestionPayload {
#######################
enum AnswerHasFilter {
- text
- datePublished
- author
- markedUseful
+ text
+ datePublished
+ author
+ markedUseful
}
enum AnswerOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum AuthorHasFilter {
- name
- posts
+ name
+ posts
}
enum AuthorOrderable {
- name
+ name
}
enum PostHasFilter {
- text
- datePublished
- author
+ text
+ datePublished
+ author
}
enum PostOrderable {
- text
- datePublished
+ text
+ datePublished
}
enum QuestionHasFilter {
- text
- datePublished
- author
- answered
+ text
+ datePublished
+ author
+ answered
}
enum QuestionOrderable {
- text
- datePublished
+ text
+ datePublished
}
#######################
@@ -447,160 +450,160 @@ enum QuestionOrderable {
#######################
input AddAnswerInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ markedUseful: Boolean
}
input AddAuthorInput {
- name: String!
- posts: [PostRef]
+ name: String!
+ posts: [PostRef]
}
input AddQuestionInput {
- text: String
- datePublished: DateTime
- author: AuthorRef!
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef!
+ answered: Boolean
}
input AnswerFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [AnswerHasFilter]
- and: [AnswerFilter]
- or: [AnswerFilter]
- not: AnswerFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [AnswerHasFilter]
+ and: [AnswerFilter]
+ or: [AnswerFilter]
+ not: AnswerFilter
}
input AnswerOrder {
- asc: AnswerOrderable
- desc: AnswerOrderable
- then: AnswerOrder
+ asc: AnswerOrderable
+ desc: AnswerOrderable
+ then: AnswerOrder
}
input AnswerPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AnswerRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- markedUseful: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ markedUseful: Boolean
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- posts: [PostRef]
+ name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ posts: [PostRef]
}
input PostFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
}
input PostRef {
- id: ID!
+ id: ID!
}
input QuestionFilter {
- id: [ID!]
- text: StringFullTextFilter
- datePublished: DateTimeFilter
- has: [QuestionHasFilter]
- and: [QuestionFilter]
- or: [QuestionFilter]
- not: QuestionFilter
+ id: [ID!]
+ text: StringFullTextFilter
+ datePublished: DateTimeFilter
+ has: [QuestionHasFilter]
+ and: [QuestionFilter]
+ or: [QuestionFilter]
+ not: QuestionFilter
}
input QuestionOrder {
- asc: QuestionOrderable
- desc: QuestionOrderable
- then: QuestionOrder
+ asc: QuestionOrderable
+ desc: QuestionOrderable
+ then: QuestionOrder
}
input QuestionPatch {
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input QuestionRef {
- id: ID
- text: String
- datePublished: DateTime
- author: AuthorRef
- answered: Boolean
+ id: ID
+ text: String
+ datePublished: DateTime
+ author: AuthorRef
+ answered: Boolean
}
input UpdateAnswerInput {
- filter: AnswerFilter!
- set: AnswerPatch
- remove: AnswerPatch
+ filter: AnswerFilter!
+ set: AnswerPatch
+ remove: AnswerPatch
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
input UpdateQuestionInput {
- filter: QuestionFilter!
- set: QuestionPatch
- remove: QuestionPatch
+ filter: QuestionFilter!
+ set: QuestionPatch
+ remove: QuestionPatch
}
#######################
@@ -608,18 +611,18 @@ input UpdateQuestionInput {
#######################
type Query {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getQuestion(id: ID!): Question
- queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
- getAnswer(id: ID!): Answer
- queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
- aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getQuestion(id: ID!): Question
+ queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
+ getAnswer(id: ID!): Answer
+ queryAnswer(filter: AnswerFilter, order: AnswerOrder, first: Int, offset: Int): [Answer]
+ aggregateAnswer(filter: AnswerFilter): AnswerAggregateResult
}
#######################
@@ -627,16 +630,15 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
- updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
- deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
- addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
- updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
- deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
+ updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
+ deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
+ addAnswer(input: [AddAnswerInput!]!): AddAnswerPayload
+ updateAnswer(input: UpdateAnswerInput!): UpdateAnswerPayload
+ deleteAnswer(filter: AnswerFilter!): DeleteAnswerPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/hasInverse.graphql b/graphql/schema/testdata/schemagen/output/hasInverse.graphql
index c9e38071c69..b6aec055aca 100644
--- a/graphql/schema/testdata/schemagen/output/hasInverse.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasInverse.graphql
@@ -3,14 +3,14 @@
#######################
type Post {
- id: ID!
- author(filter: AuthorFilter): Author! @hasInverse(field: "posts")
+ id: ID!
+ author(filter: AuthorFilter): Author! @hasInverse(field: "posts")
}
type Author {
- id: ID!
- posts(filter: PostFilter, first: Int, offset: Int): [Post!]! @hasInverse(field: "author")
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID!
+ posts(filter: PostFilter, first: Int, offset: Int): [Post!]! @hasInverse(field: "author")
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
#######################
@@ -29,162 +29,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -196,11 +196,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -209,77 +210,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -287,43 +289,43 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
+ count: Int
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
+ count: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -331,11 +333,11 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- posts
+ posts
}
enum PostHasFilter {
- author
+ author
}
#######################
@@ -343,57 +345,57 @@ enum PostHasFilter {
#######################
input AddAuthorInput {
- posts: [PostRef!]!
+ posts: [PostRef!]!
}
input AddPostInput {
- author: AuthorRef!
+ author: AuthorRef!
}
input AuthorFilter {
- id: [ID!]
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorPatch {
- posts: [PostRef!]
+ posts: [PostRef!]
}
input AuthorRef {
- id: ID
- posts: [PostRef!]
+ id: ID
+ posts: [PostRef!]
}
input PostFilter {
- id: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostPatch {
- author: AuthorRef
+ author: AuthorRef
}
input PostRef {
- id: ID
- author: AuthorRef
+ id: ID
+ author: AuthorRef
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -401,12 +403,12 @@ input UpdatePostInput {
#######################
type Query {
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
}
#######################
@@ -414,11 +416,10 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/hasInverse_withSubscription.graphql b/graphql/schema/testdata/schemagen/output/hasInverse_withSubscription.graphql
index 36b180d322c..e225b95ed13 100644
--- a/graphql/schema/testdata/schemagen/output/hasInverse_withSubscription.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasInverse_withSubscription.graphql
@@ -3,14 +3,14 @@
#######################
type Post {
- id: ID!
- author(filter: AuthorFilter): Author! @hasInverse(field: "posts")
+ id: ID!
+ author(filter: AuthorFilter): Author! @hasInverse(field: "posts")
}
type Author @withSubscription {
- id: ID!
- posts(filter: PostFilter, first: Int, offset: Int): [Post!]! @hasInverse(field: "author")
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID!
+ posts(filter: PostFilter, first: Int, offset: Int): [Post!]! @hasInverse(field: "author")
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
#######################
@@ -29,162 +29,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -196,11 +196,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -209,77 +210,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -287,43 +289,43 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
+ count: Int
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
+ count: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -331,11 +333,11 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- posts
+ posts
}
enum PostHasFilter {
- author
+ author
}
#######################
@@ -343,57 +345,57 @@ enum PostHasFilter {
#######################
input AddAuthorInput {
- posts: [PostRef!]!
+ posts: [PostRef!]!
}
input AddPostInput {
- author: AuthorRef!
+ author: AuthorRef!
}
input AuthorFilter {
- id: [ID!]
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorPatch {
- posts: [PostRef!]
+ posts: [PostRef!]
}
input AuthorRef {
- id: ID
- posts: [PostRef!]
+ id: ID
+ posts: [PostRef!]
}
input PostFilter {
- id: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostPatch {
- author: AuthorRef
+ author: AuthorRef
}
input PostRef {
- id: ID
- author: AuthorRef
+ id: ID
+ author: AuthorRef
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -401,12 +403,12 @@ input UpdatePostInput {
#######################
type Query {
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
}
#######################
@@ -414,12 +416,12 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
}
#######################
@@ -427,7 +429,7 @@ type Mutation {
#######################
type Subscription {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
}
diff --git a/graphql/schema/testdata/schemagen/output/hasfilter.graphql b/graphql/schema/testdata/schemagen/output/hasfilter.graphql
index 41964b18b97..9606e1900cc 100644
--- a/graphql/schema/testdata/schemagen/output/hasfilter.graphql
+++ b/graphql/schema/testdata/schemagen/output/hasfilter.graphql
@@ -3,16 +3,16 @@
#######################
interface I {
- id: ID!
+ id: ID!
}
type T implements I {
- id: ID!
- text: String
+ id: ID!
+ text: String
}
type B {
- name: String
+ name: String
}
#######################
@@ -31,162 +31,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -198,11 +198,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -211,77 +212,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -289,57 +291,57 @@ input StringHashFilter {
#######################
type AddBPayload {
- b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
- numUids: Int
+ b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
+ numUids: Int
}
type AddTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ numUids: Int
}
type BAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteBPayload {
- b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
- msg: String
- numUids: Int
+ b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
+ msg: String
+ numUids: Int
}
type DeleteIPayload {
- i(filter: IFilter, first: Int, offset: Int): [I]
- msg: String
- numUids: Int
+ i(filter: IFilter, first: Int, offset: Int): [I]
+ msg: String
+ numUids: Int
}
type DeleteTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- msg: String
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ msg: String
+ numUids: Int
}
type IAggregateResult {
- count: Int
+ count: Int
}
type TAggregateResult {
- count: Int
- textMin: String
- textMax: String
+ count: Int
+ textMin: String
+ textMax: String
}
type UpdateBPayload {
- b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
- numUids: Int
+ b(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
+ numUids: Int
}
type UpdateTPayload {
- t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- numUids: Int
+ t(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ numUids: Int
}
#######################
@@ -347,19 +349,19 @@ type UpdateTPayload {
#######################
enum BHasFilter {
- name
+ name
}
enum BOrderable {
- name
+ name
}
enum THasFilter {
- text
+ text
}
enum TOrderable {
- text
+ text
}
#######################
@@ -367,76 +369,76 @@ enum TOrderable {
#######################
input AddBInput {
- name: String
+ name: String
}
input AddTInput {
- text: String
+ text: String
}
input BFilter {
- has: [BHasFilter]
- and: [BFilter]
- or: [BFilter]
- not: BFilter
+ has: [BHasFilter]
+ and: [BFilter]
+ or: [BFilter]
+ not: BFilter
}
input BOrder {
- asc: BOrderable
- desc: BOrderable
- then: BOrder
+ asc: BOrderable
+ desc: BOrderable
+ then: BOrder
}
input BPatch {
- name: String
+ name: String
}
input BRef {
- name: String
+ name: String
}
input IFilter {
- id: [ID!]
- not: IFilter
+ id: [ID!]
+ not: IFilter
}
input IRef {
- id: ID!
+ id: ID!
}
input TFilter {
- id: [ID!]
- has: [THasFilter]
- and: [TFilter]
- or: [TFilter]
- not: TFilter
+ id: [ID!]
+ has: [THasFilter]
+ and: [TFilter]
+ or: [TFilter]
+ not: TFilter
}
input TOrder {
- asc: TOrderable
- desc: TOrderable
- then: TOrder
+ asc: TOrderable
+ desc: TOrderable
+ then: TOrder
}
input TPatch {
- text: String
+ text: String
}
input TRef {
- id: ID
- text: String
+ id: ID
+ text: String
}
input UpdateBInput {
- filter: BFilter!
- set: BPatch
- remove: BPatch
+ filter: BFilter!
+ set: BPatch
+ remove: BPatch
}
input UpdateTInput {
- filter: TFilter!
- set: TPatch
- remove: TPatch
+ filter: TFilter!
+ set: TPatch
+ remove: TPatch
}
#######################
@@ -444,14 +446,14 @@ input UpdateTInput {
#######################
type Query {
- getI(id: ID!): I
- queryI(filter: IFilter, first: Int, offset: Int): [I]
- aggregateI(filter: IFilter): IAggregateResult
- getT(id: ID!): T
- queryT(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
- aggregateT(filter: TFilter): TAggregateResult
- queryB(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
- aggregateB(filter: BFilter): BAggregateResult
+ getI(id: ID!): I
+ queryI(filter: IFilter, first: Int, offset: Int): [I]
+ aggregateI(filter: IFilter): IAggregateResult
+ getT(id: ID!): T
+ queryT(filter: TFilter, order: TOrder, first: Int, offset: Int): [T]
+ aggregateT(filter: TFilter): TAggregateResult
+ queryB(filter: BFilter, order: BOrder, first: Int, offset: Int): [B]
+ aggregateB(filter: BFilter): BAggregateResult
}
#######################
@@ -459,12 +461,11 @@ type Query {
#######################
type Mutation {
- deleteI(filter: IFilter!): DeleteIPayload
- addT(input: [AddTInput!]!): AddTPayload
- updateT(input: UpdateTInput!): UpdateTPayload
- deleteT(filter: TFilter!): DeleteTPayload
- addB(input: [AddBInput!]!): AddBPayload
- updateB(input: UpdateBInput!): UpdateBPayload
- deleteB(filter: BFilter!): DeleteBPayload
+ deleteI(filter: IFilter!): DeleteIPayload
+ addT(input: [AddTInput!]!): AddTPayload
+ updateT(input: UpdateTInput!): UpdateTPayload
+ deleteT(filter: TFilter!): DeleteTPayload
+ addB(input: [AddBInput!]!): AddBPayload
+ updateB(input: UpdateBInput!): UpdateBPayload
+ deleteB(filter: BFilter!): DeleteBPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/ignore-unsupported-directive.graphql b/graphql/schema/testdata/schemagen/output/ignore-unsupported-directive.graphql
index 517bca9bbe4..16ca925f8fc 100644
--- a/graphql/schema/testdata/schemagen/output/ignore-unsupported-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/ignore-unsupported-directive.graphql
@@ -3,15 +3,15 @@
#######################
enum Role {
- Admin
- User
+ Admin
+ User
}
type Product {
- id: ID!
- price: Float! @search
- name: String! @search @dgraph(pred: "p")
- name2: String! @search @dgraph(pred: "p")
+ id: ID!
+ price: Float! @search
+ name: String! @search @dgraph(pred: "p")
+ name2: String! @search @dgraph(pred: "p")
}
#######################
@@ -30,162 +30,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -197,11 +197,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -210,77 +211,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -288,31 +290,31 @@ input StringHashFilter {
#######################
type AddProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
type DeleteProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- msg: String
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ msg: String
+ numUids: Int
}
type ProductAggregateResult {
- count: Int
- priceMin: Float
- priceMax: Float
- priceSum: Float
- priceAvg: Float
- nameMin: String
- nameMax: String
- name2Min: String
- name2Max: String
+ count: Int
+ priceMin: Float
+ priceMax: Float
+ priceSum: Float
+ priceAvg: Float
+ nameMin: String
+ nameMax: String
+ name2Min: String
+ name2Max: String
}
type UpdateProductPayload {
- product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- numUids: Int
+ product(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ numUids: Int
}
#######################
@@ -320,15 +322,15 @@ type UpdateProductPayload {
#######################
enum ProductHasFilter {
- price
- name
- name2
+ price
+ name
+ name2
}
enum ProductOrderable {
- price
- name
- name2
+ price
+ name
+ name2
}
#######################
@@ -336,45 +338,45 @@ enum ProductOrderable {
#######################
input AddProductInput {
- price: Float!
- name: String!
- name2: String!
+ price: Float!
+ name: String!
+ name2: String!
}
input ProductFilter {
- id: [ID!]
- price: FloatFilter
- name: StringTermFilter
- name2: StringTermFilter
- has: [ProductHasFilter]
- and: [ProductFilter]
- or: [ProductFilter]
- not: ProductFilter
+ id: [ID!]
+ price: FloatFilter
+ name: StringTermFilter
+ name2: StringTermFilter
+ has: [ProductHasFilter]
+ and: [ProductFilter]
+ or: [ProductFilter]
+ not: ProductFilter
}
input ProductOrder {
- asc: ProductOrderable
- desc: ProductOrderable
- then: ProductOrder
+ asc: ProductOrderable
+ desc: ProductOrderable
+ then: ProductOrder
}
input ProductPatch {
- price: Float
- name: String
- name2: String
+ price: Float
+ name: String
+ name2: String
}
input ProductRef {
- id: ID
- price: Float
- name: String
- name2: String
+ id: ID
+ price: Float
+ name: String
+ name2: String
}
input UpdateProductInput {
- filter: ProductFilter!
- set: ProductPatch
- remove: ProductPatch
+ filter: ProductFilter!
+ set: ProductPatch
+ remove: ProductPatch
}
#######################
@@ -382,9 +384,9 @@ input UpdateProductInput {
#######################
type Query {
- getProduct(id: ID!): Product
- queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
- aggregateProduct(filter: ProductFilter): ProductAggregateResult
+ getProduct(id: ID!): Product
+ queryProduct(filter: ProductFilter, order: ProductOrder, first: Int, offset: Int): [Product]
+ aggregateProduct(filter: ProductFilter): ProductAggregateResult
}
#######################
@@ -392,8 +394,7 @@ type Query {
#######################
type Mutation {
- addProduct(input: [AddProductInput!]!): AddProductPayload
- updateProduct(input: UpdateProductInput!): UpdateProductPayload
- deleteProduct(filter: ProductFilter!): DeleteProductPayload
+ addProduct(input: [AddProductInput!]!): AddProductPayload
+ updateProduct(input: UpdateProductInput!): UpdateProductPayload
+ deleteProduct(filter: ProductFilter!): DeleteProductPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/interface-with-dgraph-pred.graphql b/graphql/schema/testdata/schemagen/output/interface-with-dgraph-pred.graphql
index ecd7d1905e0..cad1b0d0f77 100644
--- a/graphql/schema/testdata/schemagen/output/interface-with-dgraph-pred.graphql
+++ b/graphql/schema/testdata/schemagen/output/interface-with-dgraph-pred.graphql
@@ -3,24 +3,26 @@
#######################
type Object {
- id: ID!
- name: String
- ownedBy(filter: PersonFilter): Person @dgraph(pred: "Object.owner")
+ id: ID!
+ name: String
+ ownedBy(filter: PersonFilter): Person @dgraph(pred: "Object.owner")
}
type BusinessMan implements Person {
- id: ID!
- name: String
- owns(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object] @dgraph(pred: "~Object.owner")
- companyName: String
- ownsAggregate(filter: ObjectFilter): ObjectAggregateResult
+ id: ID!
+ name: String
+ owns(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ @dgraph(pred: "~Object.owner")
+ companyName: String
+ ownsAggregate(filter: ObjectFilter): ObjectAggregateResult
}
interface Person {
- id: ID!
- name: String
- owns(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object] @dgraph(pred: "~Object.owner")
- ownsAggregate(filter: ObjectFilter): ObjectAggregateResult
+ id: ID!
+ name: String
+ owns(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ @dgraph(pred: "~Object.owner")
+ ownsAggregate(filter: ObjectFilter): ObjectAggregateResult
}
#######################
@@ -39,162 +41,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -206,11 +208,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -219,77 +222,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -297,66 +301,81 @@ input StringHashFilter {
#######################
type AddBusinessManPayload {
- businessMan(filter: BusinessManFilter, order: BusinessManOrder, first: Int, offset: Int): [BusinessMan]
- numUids: Int
+ businessMan(
+ filter: BusinessManFilter
+ order: BusinessManOrder
+ first: Int
+ offset: Int
+ ): [BusinessMan]
+ numUids: Int
}
type AddObjectPayload {
- object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
- numUids: Int
+ object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ numUids: Int
}
type BusinessManAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- companyNameMin: String
- companyNameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ companyNameMin: String
+ companyNameMax: String
}
type DeleteBusinessManPayload {
- businessMan(filter: BusinessManFilter, order: BusinessManOrder, first: Int, offset: Int): [BusinessMan]
- msg: String
- numUids: Int
+ businessMan(
+ filter: BusinessManFilter
+ order: BusinessManOrder
+ first: Int
+ offset: Int
+ ): [BusinessMan]
+ msg: String
+ numUids: Int
}
type DeleteObjectPayload {
- object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
- msg: String
- numUids: Int
+ object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ msg: String
+ numUids: Int
}
type DeletePersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- msg: String
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ msg: String
+ numUids: Int
}
type ObjectAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type PersonAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type UpdateBusinessManPayload {
- businessMan(filter: BusinessManFilter, order: BusinessManOrder, first: Int, offset: Int): [BusinessMan]
- numUids: Int
+ businessMan(
+ filter: BusinessManFilter
+ order: BusinessManOrder
+ first: Int
+ offset: Int
+ ): [BusinessMan]
+ numUids: Int
}
type UpdateObjectPayload {
- object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
- numUids: Int
+ object(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ numUids: Int
}
type UpdatePersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ numUids: Int
}
#######################
@@ -364,32 +383,32 @@ type UpdatePersonPayload {
#######################
enum BusinessManHasFilter {
- name
- owns
- companyName
+ name
+ owns
+ companyName
}
enum BusinessManOrderable {
- name
- companyName
+ name
+ companyName
}
enum ObjectHasFilter {
- name
- ownedBy
+ name
+ ownedBy
}
enum ObjectOrderable {
- name
+ name
}
enum PersonHasFilter {
- name
- owns
+ name
+ owns
}
enum PersonOrderable {
- name
+ name
}
#######################
@@ -397,103 +416,103 @@ enum PersonOrderable {
#######################
input AddBusinessManInput {
- name: String
- companyName: String
+ name: String
+ companyName: String
}
input AddObjectInput {
- name: String
- ownedBy: PersonRef
+ name: String
+ ownedBy: PersonRef
}
input BusinessManFilter {
- id: [ID!]
- has: [BusinessManHasFilter]
- and: [BusinessManFilter]
- or: [BusinessManFilter]
- not: BusinessManFilter
+ id: [ID!]
+ has: [BusinessManHasFilter]
+ and: [BusinessManFilter]
+ or: [BusinessManFilter]
+ not: BusinessManFilter
}
input BusinessManOrder {
- asc: BusinessManOrderable
- desc: BusinessManOrderable
- then: BusinessManOrder
+ asc: BusinessManOrderable
+ desc: BusinessManOrderable
+ then: BusinessManOrder
}
input BusinessManPatch {
- name: String
- companyName: String
+ name: String
+ companyName: String
}
input BusinessManRef {
- id: ID
- name: String
- companyName: String
+ id: ID
+ name: String
+ companyName: String
}
input ObjectFilter {
- id: [ID!]
- has: [ObjectHasFilter]
- and: [ObjectFilter]
- or: [ObjectFilter]
- not: ObjectFilter
+ id: [ID!]
+ has: [ObjectHasFilter]
+ and: [ObjectFilter]
+ or: [ObjectFilter]
+ not: ObjectFilter
}
input ObjectOrder {
- asc: ObjectOrderable
- desc: ObjectOrderable
- then: ObjectOrder
+ asc: ObjectOrderable
+ desc: ObjectOrderable
+ then: ObjectOrder
}
input ObjectPatch {
- name: String
- ownedBy: PersonRef
+ name: String
+ ownedBy: PersonRef
}
input ObjectRef {
- id: ID
- name: String
- ownedBy: PersonRef
+ id: ID
+ name: String
+ ownedBy: PersonRef
}
input PersonFilter {
- id: [ID!]
- has: [PersonHasFilter]
- and: [PersonFilter]
- or: [PersonFilter]
- not: PersonFilter
+ id: [ID!]
+ has: [PersonHasFilter]
+ and: [PersonFilter]
+ or: [PersonFilter]
+ not: PersonFilter
}
input PersonOrder {
- asc: PersonOrderable
- desc: PersonOrderable
- then: PersonOrder
+ asc: PersonOrderable
+ desc: PersonOrderable
+ then: PersonOrder
}
input PersonPatch {
- name: String
+ name: String
}
input PersonRef {
- id: ID!
+ id: ID!
}
input UpdateBusinessManInput {
- filter: BusinessManFilter!
- set: BusinessManPatch
- remove: BusinessManPatch
+ filter: BusinessManFilter!
+ set: BusinessManPatch
+ remove: BusinessManPatch
}
input UpdateObjectInput {
- filter: ObjectFilter!
- set: ObjectPatch
- remove: ObjectPatch
+ filter: ObjectFilter!
+ set: ObjectPatch
+ remove: ObjectPatch
}
input UpdatePersonInput {
- filter: PersonFilter!
- set: PersonPatch
- remove: PersonPatch
+ filter: PersonFilter!
+ set: PersonPatch
+ remove: PersonPatch
}
#######################
@@ -501,15 +520,20 @@ input UpdatePersonInput {
#######################
type Query {
- getObject(id: ID!): Object
- queryObject(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
- aggregateObject(filter: ObjectFilter): ObjectAggregateResult
- getBusinessMan(id: ID!): BusinessMan
- queryBusinessMan(filter: BusinessManFilter, order: BusinessManOrder, first: Int, offset: Int): [BusinessMan]
- aggregateBusinessMan(filter: BusinessManFilter): BusinessManAggregateResult
- getPerson(id: ID!): Person
- queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- aggregatePerson(filter: PersonFilter): PersonAggregateResult
+ getObject(id: ID!): Object
+ queryObject(filter: ObjectFilter, order: ObjectOrder, first: Int, offset: Int): [Object]
+ aggregateObject(filter: ObjectFilter): ObjectAggregateResult
+ getBusinessMan(id: ID!): BusinessMan
+ queryBusinessMan(
+ filter: BusinessManFilter
+ order: BusinessManOrder
+ first: Int
+ offset: Int
+ ): [BusinessMan]
+ aggregateBusinessMan(filter: BusinessManFilter): BusinessManAggregateResult
+ getPerson(id: ID!): Person
+ queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ aggregatePerson(filter: PersonFilter): PersonAggregateResult
}
#######################
@@ -517,13 +541,12 @@ type Query {
#######################
type Mutation {
- addObject(input: [AddObjectInput!]!): AddObjectPayload
- updateObject(input: UpdateObjectInput!): UpdateObjectPayload
- deleteObject(filter: ObjectFilter!): DeleteObjectPayload
- addBusinessMan(input: [AddBusinessManInput!]!): AddBusinessManPayload
- updateBusinessMan(input: UpdateBusinessManInput!): UpdateBusinessManPayload
- deleteBusinessMan(filter: BusinessManFilter!): DeleteBusinessManPayload
- updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
- deletePerson(filter: PersonFilter!): DeletePersonPayload
+ addObject(input: [AddObjectInput!]!): AddObjectPayload
+ updateObject(input: UpdateObjectInput!): UpdateObjectPayload
+ deleteObject(filter: ObjectFilter!): DeleteObjectPayload
+ addBusinessMan(input: [AddBusinessManInput!]!): AddBusinessManPayload
+ updateBusinessMan(input: UpdateBusinessManInput!): UpdateBusinessManPayload
+ deleteBusinessMan(filter: BusinessManFilter!): DeleteBusinessManPayload
+ updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
+ deletePerson(filter: PersonFilter!): DeletePersonPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/interface-with-id-directive.graphql b/graphql/schema/testdata/schemagen/output/interface-with-id-directive.graphql
index 5230262e99f..4a29180d8ff 100644
--- a/graphql/schema/testdata/schemagen/output/interface-with-id-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/interface-with-id-directive.graphql
@@ -3,20 +3,20 @@
#######################
interface LibraryItem {
- refID: String! @id(interface: false)
- itemID: String! @id(interface: true)
+ refID: String! @id(interface: false)
+ itemID: String! @id(interface: true)
}
type Book implements LibraryItem {
- refID: String! @id(interface: false)
- itemID: String! @id(interface: true)
- title: String
- author: String
+ refID: String! @id(interface: false)
+ itemID: String! @id(interface: true)
+ title: String
+ author: String
}
type Library {
- items(filter: LibraryItemFilter, order: LibraryItemOrder, first: Int, offset: Int): [LibraryItem]
- itemsAggregate(filter: LibraryItemFilter): LibraryItemAggregateResult
+ items(filter: LibraryItemFilter, order: LibraryItemOrder, first: Int, offset: Int): [LibraryItem]
+ itemsAggregate(filter: LibraryItemFilter): LibraryItemAggregateResult
}
#######################
@@ -35,162 +35,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -202,11 +202,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -215,77 +216,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -293,70 +295,80 @@ input StringHashFilter {
#######################
type AddBookPayload {
- book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
- numUids: Int
+ book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
+ numUids: Int
}
type AddLibraryPayload {
- library(filter: LibraryFilter, first: Int, offset: Int): [Library]
- numUids: Int
+ library(filter: LibraryFilter, first: Int, offset: Int): [Library]
+ numUids: Int
}
type BookAggregateResult {
- count: Int
- refIDMin: String
- refIDMax: String
- itemIDMin: String
- itemIDMax: String
- titleMin: String
- titleMax: String
- authorMin: String
- authorMax: String
+ count: Int
+ refIDMin: String
+ refIDMax: String
+ itemIDMin: String
+ itemIDMax: String
+ titleMin: String
+ titleMax: String
+ authorMin: String
+ authorMax: String
}
type DeleteBookPayload {
- book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
- msg: String
- numUids: Int
+ book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
+ msg: String
+ numUids: Int
}
type DeleteLibraryItemPayload {
- libraryItem(filter: LibraryItemFilter, order: LibraryItemOrder, first: Int, offset: Int): [LibraryItem]
- msg: String
- numUids: Int
+ libraryItem(
+ filter: LibraryItemFilter
+ order: LibraryItemOrder
+ first: Int
+ offset: Int
+ ): [LibraryItem]
+ msg: String
+ numUids: Int
}
type DeleteLibraryPayload {
- library(filter: LibraryFilter, first: Int, offset: Int): [Library]
- msg: String
- numUids: Int
+ library(filter: LibraryFilter, first: Int, offset: Int): [Library]
+ msg: String
+ numUids: Int
}
type LibraryAggregateResult {
- count: Int
+ count: Int
}
type LibraryItemAggregateResult {
- count: Int
- refIDMin: String
- refIDMax: String
- itemIDMin: String
- itemIDMax: String
+ count: Int
+ refIDMin: String
+ refIDMax: String
+ itemIDMin: String
+ itemIDMax: String
}
type UpdateBookPayload {
- book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
- numUids: Int
+ book(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
+ numUids: Int
}
type UpdateLibraryItemPayload {
- libraryItem(filter: LibraryItemFilter, order: LibraryItemOrder, first: Int, offset: Int): [LibraryItem]
- numUids: Int
+ libraryItem(
+ filter: LibraryItemFilter
+ order: LibraryItemOrder
+ first: Int
+ offset: Int
+ ): [LibraryItem]
+ numUids: Int
}
type UpdateLibraryPayload {
- library(filter: LibraryFilter, first: Int, offset: Int): [Library]
- numUids: Int
+ library(filter: LibraryFilter, first: Int, offset: Int): [Library]
+ numUids: Int
}
#######################
@@ -364,31 +376,31 @@ type UpdateLibraryPayload {
#######################
enum BookHasFilter {
- refID
- itemID
- title
- author
+ refID
+ itemID
+ title
+ author
}
enum BookOrderable {
- refID
- itemID
- title
- author
+ refID
+ itemID
+ title
+ author
}
enum LibraryHasFilter {
- items
+ items
}
enum LibraryItemHasFilter {
- refID
- itemID
+ refID
+ itemID
}
enum LibraryItemOrderable {
- refID
- itemID
+ refID
+ itemID
}
#######################
@@ -396,100 +408,100 @@ enum LibraryItemOrderable {
#######################
input AddBookInput {
- refID: String!
- itemID: String!
- title: String
- author: String
+ refID: String!
+ itemID: String!
+ title: String
+ author: String
}
input AddLibraryInput {
- items: [LibraryItemRef]
+ items: [LibraryItemRef]
}
input BookFilter {
- refID: StringHashFilter
- itemID: StringHashFilter
- has: [BookHasFilter]
- and: [BookFilter]
- or: [BookFilter]
- not: BookFilter
+ refID: StringHashFilter
+ itemID: StringHashFilter
+ has: [BookHasFilter]
+ and: [BookFilter]
+ or: [BookFilter]
+ not: BookFilter
}
input BookOrder {
- asc: BookOrderable
- desc: BookOrderable
- then: BookOrder
+ asc: BookOrderable
+ desc: BookOrderable
+ then: BookOrder
}
input BookPatch {
- refID: String
- itemID: String
- title: String
- author: String
+ refID: String
+ itemID: String
+ title: String
+ author: String
}
input BookRef {
- refID: String
- itemID: String
- title: String
- author: String
+ refID: String
+ itemID: String
+ title: String
+ author: String
}
input LibraryFilter {
- has: [LibraryHasFilter]
- and: [LibraryFilter]
- or: [LibraryFilter]
- not: LibraryFilter
+ has: [LibraryHasFilter]
+ and: [LibraryFilter]
+ or: [LibraryFilter]
+ not: LibraryFilter
}
input LibraryItemFilter {
- refID: StringHashFilter
- itemID: StringHashFilter
- has: [LibraryItemHasFilter]
- and: [LibraryItemFilter]
- or: [LibraryItemFilter]
- not: LibraryItemFilter
+ refID: StringHashFilter
+ itemID: StringHashFilter
+ has: [LibraryItemHasFilter]
+ and: [LibraryItemFilter]
+ or: [LibraryItemFilter]
+ not: LibraryItemFilter
}
input LibraryItemOrder {
- asc: LibraryItemOrderable
- desc: LibraryItemOrderable
- then: LibraryItemOrder
+ asc: LibraryItemOrderable
+ desc: LibraryItemOrderable
+ then: LibraryItemOrder
}
input LibraryItemPatch {
- refID: String
- itemID: String
+ refID: String
+ itemID: String
}
input LibraryItemRef {
- refID: String!
+ refID: String!
}
input LibraryPatch {
- items: [LibraryItemRef]
+ items: [LibraryItemRef]
}
input LibraryRef {
- items: [LibraryItemRef]
+ items: [LibraryItemRef]
}
input UpdateBookInput {
- filter: BookFilter!
- set: BookPatch
- remove: BookPatch
+ filter: BookFilter!
+ set: BookPatch
+ remove: BookPatch
}
input UpdateLibraryInput {
- filter: LibraryFilter!
- set: LibraryPatch
- remove: LibraryPatch
+ filter: LibraryFilter!
+ set: LibraryPatch
+ remove: LibraryPatch
}
input UpdateLibraryItemInput {
- filter: LibraryItemFilter!
- set: LibraryItemPatch
- remove: LibraryItemPatch
+ filter: LibraryItemFilter!
+ set: LibraryItemPatch
+ remove: LibraryItemPatch
}
#######################
@@ -497,14 +509,22 @@ input UpdateLibraryItemInput {
#######################
type Query {
- getLibraryItem(refID: String, itemID: String): LibraryItem @deprecated(reason: "@id argument for get query on interface is being deprecated. Only those @id fields which have interface argument set to true will be available in getQuery argument on interface post v21.11.0, please update your schema accordingly.")
- queryLibraryItem(filter: LibraryItemFilter, order: LibraryItemOrder, first: Int, offset: Int): [LibraryItem]
- aggregateLibraryItem(filter: LibraryItemFilter): LibraryItemAggregateResult
- getBook(refID: String, itemID: String): Book
- queryBook(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
- aggregateBook(filter: BookFilter): BookAggregateResult
- queryLibrary(filter: LibraryFilter, first: Int, offset: Int): [Library]
- aggregateLibrary(filter: LibraryFilter): LibraryAggregateResult
+ getLibraryItem(refID: String, itemID: String): LibraryItem
+ @deprecated(
+ reason: "@id argument for get query on interface is being deprecated. Only those @id fields which have interface argument set to true will be available in getQuery argument on interface post v21.11.0, please update your schema accordingly."
+ )
+ queryLibraryItem(
+ filter: LibraryItemFilter
+ order: LibraryItemOrder
+ first: Int
+ offset: Int
+ ): [LibraryItem]
+ aggregateLibraryItem(filter: LibraryItemFilter): LibraryItemAggregateResult
+ getBook(refID: String, itemID: String): Book
+ queryBook(filter: BookFilter, order: BookOrder, first: Int, offset: Int): [Book]
+ aggregateBook(filter: BookFilter): BookAggregateResult
+ queryLibrary(filter: LibraryFilter, first: Int, offset: Int): [Library]
+ aggregateLibrary(filter: LibraryFilter): LibraryAggregateResult
}
#######################
@@ -512,13 +532,12 @@ type Query {
#######################
type Mutation {
- updateLibraryItem(input: UpdateLibraryItemInput!): UpdateLibraryItemPayload
- deleteLibraryItem(filter: LibraryItemFilter!): DeleteLibraryItemPayload
- addBook(input: [AddBookInput!]!, upsert: Boolean): AddBookPayload
- updateBook(input: UpdateBookInput!): UpdateBookPayload
- deleteBook(filter: BookFilter!): DeleteBookPayload
- addLibrary(input: [AddLibraryInput!]!): AddLibraryPayload
- updateLibrary(input: UpdateLibraryInput!): UpdateLibraryPayload
- deleteLibrary(filter: LibraryFilter!): DeleteLibraryPayload
+ updateLibraryItem(input: UpdateLibraryItemInput!): UpdateLibraryItemPayload
+ deleteLibraryItem(filter: LibraryItemFilter!): DeleteLibraryItemPayload
+ addBook(input: [AddBookInput!]!, upsert: Boolean): AddBookPayload
+ updateBook(input: UpdateBookInput!): UpdateBookPayload
+ deleteBook(filter: BookFilter!): DeleteBookPayload
+ addLibrary(input: [AddLibraryInput!]!): AddLibraryPayload
+ updateLibrary(input: UpdateLibraryInput!): UpdateLibraryPayload
+ deleteLibrary(filter: LibraryFilter!): DeleteLibraryPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/interface-with-no-ids.graphql b/graphql/schema/testdata/schemagen/output/interface-with-no-ids.graphql
index 229a55e6141..581845221d5 100644
--- a/graphql/schema/testdata/schemagen/output/interface-with-no-ids.graphql
+++ b/graphql/schema/testdata/schemagen/output/interface-with-no-ids.graphql
@@ -3,18 +3,18 @@
#######################
interface Message {
- text: String
+ text: String
}
type Question implements Message {
- text: String
- askedBy(filter: UserFilter): User
+ text: String
+ askedBy(filter: UserFilter): User
}
type User {
- name: String
- messages(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- messagesAggregate(filter: MessageFilter): MessageAggregateResult
+ name: String
+ messages(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ messagesAggregate(filter: MessageFilter): MessageAggregateResult
}
#######################
@@ -33,162 +33,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -200,11 +200,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -213,77 +214,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -291,64 +293,64 @@ input StringHashFilter {
#######################
type AddQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- msg: String
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ msg: String
+ numUids: Int
}
type DeleteQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- msg: String
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type MessageAggregateResult {
- count: Int
- textMin: String
- textMax: String
+ count: Int
+ textMin: String
+ textMax: String
}
type QuestionAggregateResult {
- count: Int
- textMin: String
- textMax: String
+ count: Int
+ textMin: String
+ textMax: String
}
type UpdateMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ numUids: Int
}
type UpdateQuestionPayload {
- question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- numUids: Int
+ question(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
#######################
@@ -356,29 +358,29 @@ type UserAggregateResult {
#######################
enum MessageHasFilter {
- text
+ text
}
enum MessageOrderable {
- text
+ text
}
enum QuestionHasFilter {
- text
- askedBy
+ text
+ askedBy
}
enum QuestionOrderable {
- text
+ text
}
enum UserHasFilter {
- name
- messages
+ name
+ messages
}
enum UserOrderable {
- name
+ name
}
#######################
@@ -386,91 +388,91 @@ enum UserOrderable {
#######################
input AddQuestionInput {
- text: String
- askedBy: UserRef
+ text: String
+ askedBy: UserRef
}
input AddUserInput {
- name: String
+ name: String
}
input MessageFilter {
- has: [MessageHasFilter]
- and: [MessageFilter]
- or: [MessageFilter]
- not: MessageFilter
+ has: [MessageHasFilter]
+ and: [MessageFilter]
+ or: [MessageFilter]
+ not: MessageFilter
}
input MessageOrder {
- asc: MessageOrderable
- desc: MessageOrderable
- then: MessageOrder
+ asc: MessageOrderable
+ desc: MessageOrderable
+ then: MessageOrder
}
input MessagePatch {
- text: String
+ text: String
}
input QuestionFilter {
- has: [QuestionHasFilter]
- and: [QuestionFilter]
- or: [QuestionFilter]
- not: QuestionFilter
+ has: [QuestionHasFilter]
+ and: [QuestionFilter]
+ or: [QuestionFilter]
+ not: QuestionFilter
}
input QuestionOrder {
- asc: QuestionOrderable
- desc: QuestionOrderable
- then: QuestionOrder
+ asc: QuestionOrderable
+ desc: QuestionOrderable
+ then: QuestionOrder
}
input QuestionPatch {
- text: String
- askedBy: UserRef
+ text: String
+ askedBy: UserRef
}
input QuestionRef {
- text: String
- askedBy: UserRef
+ text: String
+ askedBy: UserRef
}
input UpdateMessageInput {
- filter: MessageFilter!
- set: MessagePatch
- remove: MessagePatch
+ filter: MessageFilter!
+ set: MessagePatch
+ remove: MessagePatch
}
input UpdateQuestionInput {
- filter: QuestionFilter!
- set: QuestionPatch
- remove: QuestionPatch
+ filter: QuestionFilter!
+ set: QuestionPatch
+ remove: QuestionPatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- name: String
+ name: String
}
input UserRef {
- name: String
+ name: String
}
#######################
@@ -478,12 +480,12 @@ input UserRef {
#######################
type Query {
- queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- aggregateMessage(filter: MessageFilter): MessageAggregateResult
- queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
- aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ aggregateMessage(filter: MessageFilter): MessageAggregateResult
+ queryQuestion(filter: QuestionFilter, order: QuestionOrder, first: Int, offset: Int): [Question]
+ aggregateQuestion(filter: QuestionFilter): QuestionAggregateResult
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -491,13 +493,12 @@ type Query {
#######################
type Mutation {
- updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
- deleteMessage(filter: MessageFilter!): DeleteMessagePayload
- addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
- updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
- deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
+ deleteMessage(filter: MessageFilter!): DeleteMessagePayload
+ addQuestion(input: [AddQuestionInput!]!): AddQuestionPayload
+ updateQuestion(input: UpdateQuestionInput!): UpdateQuestionPayload
+ deleteQuestion(filter: QuestionFilter!): DeleteQuestionPayload
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/interfaces-with-types-and-password.graphql b/graphql/schema/testdata/schemagen/output/interfaces-with-types-and-password.graphql
index d274d2470db..8599070e74a 100644
--- a/graphql/schema/testdata/schemagen/output/interfaces-with-types-and-password.graphql
+++ b/graphql/schema/testdata/schemagen/output/interfaces-with-types-and-password.graphql
@@ -3,43 +3,43 @@
#######################
interface Character @secret(field: "password") {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
type Human implements Character @secret(field: "password") {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- totalCredits: Int
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
- starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ totalCredits: Int
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
}
type Droid implements Character @secret(field: "password") {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- primaryFunction: String
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ primaryFunction: String
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
enum Episode {
- NEWHOPE
- EMPIRE
- JEDI
+ NEWHOPE
+ EMPIRE
+ JEDI
}
type Starship {
- id: ID!
- name: String! @search(by: [term])
- length: Float
+ id: ID!
+ name: String! @search(by: [term])
+ length: Float
}
#######################
@@ -58,162 +58,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -225,11 +225,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -238,77 +239,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -316,96 +318,96 @@ input StringHashFilter {
#######################
type AddDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type AddHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type AddStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
type CharacterAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- msg: String
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ msg: String
+ numUids: Int
}
type DeleteDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- msg: String
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ msg: String
+ numUids: Int
}
type DeleteHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- msg: String
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ msg: String
+ numUids: Int
}
type DeleteStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- msg: String
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ msg: String
+ numUids: Int
}
type DroidAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- primaryFunctionMin: String
- primaryFunctionMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ primaryFunctionMin: String
+ primaryFunctionMax: String
}
type HumanAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- totalCreditsMin: Int
- totalCreditsMax: Int
- totalCreditsSum: Int
- totalCreditsAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ totalCreditsMin: Int
+ totalCreditsMax: Int
+ totalCreditsSum: Int
+ totalCreditsAvg: Float
}
type StarshipAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- lengthMin: Float
- lengthMax: Float
- lengthSum: Float
- lengthAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ lengthMin: Float
+ lengthMax: Float
+ lengthSum: Float
+ lengthAvg: Float
}
type UpdateCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ numUids: Int
}
type UpdateDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type UpdateHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type UpdateStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
#######################
@@ -413,48 +415,48 @@ type UpdateStarshipPayload {
#######################
enum CharacterHasFilter {
- name
- friends
- appearsIn
+ name
+ friends
+ appearsIn
}
enum CharacterOrderable {
- name
+ name
}
enum DroidHasFilter {
- name
- friends
- appearsIn
- primaryFunction
+ name
+ friends
+ appearsIn
+ primaryFunction
}
enum DroidOrderable {
- name
- primaryFunction
+ name
+ primaryFunction
}
enum HumanHasFilter {
- name
- friends
- appearsIn
- starships
- totalCredits
+ name
+ friends
+ appearsIn
+ starships
+ totalCredits
}
enum HumanOrderable {
- name
- totalCredits
+ name
+ totalCredits
}
enum StarshipHasFilter {
- name
- length
+ name
+ length
}
enum StarshipOrderable {
- name
- length
+ name
+ length
}
#######################
@@ -462,175 +464,175 @@ enum StarshipOrderable {
#######################
input AddDroidInput {
- name: String!
- friends: [CharacterRef]
- appearsIn: [Episode!]!
- primaryFunction: String
- password: String!
+ name: String!
+ friends: [CharacterRef]
+ appearsIn: [Episode!]!
+ primaryFunction: String
+ password: String!
}
input AddHumanInput {
- name: String!
- friends: [CharacterRef]
- appearsIn: [Episode!]!
- starships: [StarshipRef]
- totalCredits: Int
- password: String!
+ name: String!
+ friends: [CharacterRef]
+ appearsIn: [Episode!]!
+ starships: [StarshipRef]
+ totalCredits: Int
+ password: String!
}
input AddStarshipInput {
- name: String!
- length: Float
+ name: String!
+ length: Float
}
input CharacterFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [CharacterHasFilter]
- and: [CharacterFilter]
- or: [CharacterFilter]
- not: CharacterFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [CharacterHasFilter]
+ and: [CharacterFilter]
+ or: [CharacterFilter]
+ not: CharacterFilter
}
input CharacterOrder {
- asc: CharacterOrderable
- desc: CharacterOrderable
- then: CharacterOrder
+ asc: CharacterOrderable
+ desc: CharacterOrderable
+ then: CharacterOrder
}
input CharacterPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- password: String
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ password: String
}
input CharacterRef {
- id: ID!
+ id: ID!
}
input DroidFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [DroidHasFilter]
- and: [DroidFilter]
- or: [DroidFilter]
- not: DroidFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [DroidHasFilter]
+ and: [DroidFilter]
+ or: [DroidFilter]
+ not: DroidFilter
}
input DroidOrder {
- asc: DroidOrderable
- desc: DroidOrderable
- then: DroidOrder
+ asc: DroidOrderable
+ desc: DroidOrderable
+ then: DroidOrder
}
input DroidPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- primaryFunction: String
- password: String
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ primaryFunction: String
+ password: String
}
input DroidRef {
- id: ID
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- primaryFunction: String
- password: String
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ primaryFunction: String
+ password: String
}
input Episode_hash {
- eq: Episode
- in: [Episode]
+ eq: Episode
+ in: [Episode]
}
input HumanFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [HumanHasFilter]
- and: [HumanFilter]
- or: [HumanFilter]
- not: HumanFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [HumanHasFilter]
+ and: [HumanFilter]
+ or: [HumanFilter]
+ not: HumanFilter
}
input HumanOrder {
- asc: HumanOrderable
- desc: HumanOrderable
- then: HumanOrder
+ asc: HumanOrderable
+ desc: HumanOrderable
+ then: HumanOrder
}
input HumanPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
- password: String
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
+ password: String
}
input HumanRef {
- id: ID
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
- password: String
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
+ password: String
}
input StarshipFilter {
- id: [ID!]
- name: StringTermFilter
- has: [StarshipHasFilter]
- and: [StarshipFilter]
- or: [StarshipFilter]
- not: StarshipFilter
+ id: [ID!]
+ name: StringTermFilter
+ has: [StarshipHasFilter]
+ and: [StarshipFilter]
+ or: [StarshipFilter]
+ not: StarshipFilter
}
input StarshipOrder {
- asc: StarshipOrderable
- desc: StarshipOrderable
- then: StarshipOrder
+ asc: StarshipOrderable
+ desc: StarshipOrderable
+ then: StarshipOrder
}
input StarshipPatch {
- name: String
- length: Float
+ name: String
+ length: Float
}
input StarshipRef {
- id: ID
- name: String
- length: Float
+ id: ID
+ name: String
+ length: Float
}
input UpdateCharacterInput {
- filter: CharacterFilter!
- set: CharacterPatch
- remove: CharacterPatch
+ filter: CharacterFilter!
+ set: CharacterPatch
+ remove: CharacterPatch
}
input UpdateDroidInput {
- filter: DroidFilter!
- set: DroidPatch
- remove: DroidPatch
+ filter: DroidFilter!
+ set: DroidPatch
+ remove: DroidPatch
}
input UpdateHumanInput {
- filter: HumanFilter!
- set: HumanPatch
- remove: HumanPatch
+ filter: HumanFilter!
+ set: HumanPatch
+ remove: HumanPatch
}
input UpdateStarshipInput {
- filter: StarshipFilter!
- set: StarshipPatch
- remove: StarshipPatch
+ filter: StarshipFilter!
+ set: StarshipPatch
+ remove: StarshipPatch
}
#######################
@@ -638,21 +640,26 @@ input UpdateStarshipInput {
#######################
type Query {
- getCharacter(id: ID!): Character
- checkCharacterPassword(id: ID!, password: String!): Character
- queryCharacter(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
- getHuman(id: ID!): Human
- checkHumanPassword(id: ID!, password: String!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
- getDroid(id: ID!): Droid
- checkDroidPassword(id: ID!, password: String!): Droid
- queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- aggregateDroid(filter: DroidFilter): DroidAggregateResult
- getStarship(id: ID!): Starship
- queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
+ getCharacter(id: ID!): Character
+ checkCharacterPassword(id: ID!, password: String!): Character
+ queryCharacter(
+ filter: CharacterFilter
+ order: CharacterOrder
+ first: Int
+ offset: Int
+ ): [Character]
+ aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
+ getHuman(id: ID!): Human
+ checkHumanPassword(id: ID!, password: String!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ getDroid(id: ID!): Droid
+ checkDroidPassword(id: ID!, password: String!): Droid
+ queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ aggregateDroid(filter: DroidFilter): DroidAggregateResult
+ getStarship(id: ID!): Starship
+ queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
}
#######################
@@ -660,16 +667,15 @@ type Query {
#######################
type Mutation {
- updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
- deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
- addHuman(input: [AddHumanInput!]!): AddHumanPayload
- updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
- deleteHuman(filter: HumanFilter!): DeleteHumanPayload
- addDroid(input: [AddDroidInput!]!): AddDroidPayload
- updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
- deleteDroid(filter: DroidFilter!): DeleteDroidPayload
- addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
- updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
- deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
+ updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
+ deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
+ addHuman(input: [AddHumanInput!]!): AddHumanPayload
+ updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
+ deleteHuman(filter: HumanFilter!): DeleteHumanPayload
+ addDroid(input: [AddDroidInput!]!): AddDroidPayload
+ updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
+ deleteDroid(filter: DroidFilter!): DeleteDroidPayload
+ addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
+ updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
+ deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/interfaces-with-types.graphql b/graphql/schema/testdata/schemagen/output/interfaces-with-types.graphql
index 5557ee1e3df..488ed7779b6 100644
--- a/graphql/schema/testdata/schemagen/output/interfaces-with-types.graphql
+++ b/graphql/schema/testdata/schemagen/output/interfaces-with-types.graphql
@@ -3,43 +3,43 @@
#######################
interface Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
type Human implements Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- totalCredits: Int
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
- starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ totalCredits: Int
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
}
type Droid implements Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- appearsIn: [Episode!]! @search
- primaryFunction: String
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ appearsIn: [Episode!]! @search
+ primaryFunction: String
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
enum Episode {
- NEWHOPE
- EMPIRE
- JEDI
+ NEWHOPE
+ EMPIRE
+ JEDI
}
type Starship {
- id: ID!
- name: String! @search(by: [term])
- length: Float
+ id: ID!
+ name: String! @search(by: [term])
+ length: Float
}
#######################
@@ -58,162 +58,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -225,11 +225,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -238,77 +239,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -316,96 +318,96 @@ input StringHashFilter {
#######################
type AddDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type AddHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type AddStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
type CharacterAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- msg: String
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ msg: String
+ numUids: Int
}
type DeleteDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- msg: String
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ msg: String
+ numUids: Int
}
type DeleteHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- msg: String
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ msg: String
+ numUids: Int
}
type DeleteStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- msg: String
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ msg: String
+ numUids: Int
}
type DroidAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- primaryFunctionMin: String
- primaryFunctionMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ primaryFunctionMin: String
+ primaryFunctionMax: String
}
type HumanAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- totalCreditsMin: Int
- totalCreditsMax: Int
- totalCreditsSum: Int
- totalCreditsAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ totalCreditsMin: Int
+ totalCreditsMax: Int
+ totalCreditsSum: Int
+ totalCreditsAvg: Float
}
type StarshipAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- lengthMin: Float
- lengthMax: Float
- lengthSum: Float
- lengthAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ lengthMin: Float
+ lengthMax: Float
+ lengthSum: Float
+ lengthAvg: Float
}
type UpdateCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ numUids: Int
}
type UpdateDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type UpdateHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type UpdateStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
#######################
@@ -413,48 +415,48 @@ type UpdateStarshipPayload {
#######################
enum CharacterHasFilter {
- name
- friends
- appearsIn
+ name
+ friends
+ appearsIn
}
enum CharacterOrderable {
- name
+ name
}
enum DroidHasFilter {
- name
- friends
- appearsIn
- primaryFunction
+ name
+ friends
+ appearsIn
+ primaryFunction
}
enum DroidOrderable {
- name
- primaryFunction
+ name
+ primaryFunction
}
enum HumanHasFilter {
- name
- friends
- appearsIn
- starships
- totalCredits
+ name
+ friends
+ appearsIn
+ starships
+ totalCredits
}
enum HumanOrderable {
- name
- totalCredits
+ name
+ totalCredits
}
enum StarshipHasFilter {
- name
- length
+ name
+ length
}
enum StarshipOrderable {
- name
- length
+ name
+ length
}
#######################
@@ -462,168 +464,168 @@ enum StarshipOrderable {
#######################
input AddDroidInput {
- name: String!
- friends: [CharacterRef]
- appearsIn: [Episode!]!
- primaryFunction: String
+ name: String!
+ friends: [CharacterRef]
+ appearsIn: [Episode!]!
+ primaryFunction: String
}
input AddHumanInput {
- name: String!
- friends: [CharacterRef]
- appearsIn: [Episode!]!
- starships: [StarshipRef]
- totalCredits: Int
+ name: String!
+ friends: [CharacterRef]
+ appearsIn: [Episode!]!
+ starships: [StarshipRef]
+ totalCredits: Int
}
input AddStarshipInput {
- name: String!
- length: Float
+ name: String!
+ length: Float
}
input CharacterFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [CharacterHasFilter]
- and: [CharacterFilter]
- or: [CharacterFilter]
- not: CharacterFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [CharacterHasFilter]
+ and: [CharacterFilter]
+ or: [CharacterFilter]
+ not: CharacterFilter
}
input CharacterOrder {
- asc: CharacterOrderable
- desc: CharacterOrderable
- then: CharacterOrder
+ asc: CharacterOrderable
+ desc: CharacterOrderable
+ then: CharacterOrder
}
input CharacterPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
}
input CharacterRef {
- id: ID!
+ id: ID!
}
input DroidFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [DroidHasFilter]
- and: [DroidFilter]
- or: [DroidFilter]
- not: DroidFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [DroidHasFilter]
+ and: [DroidFilter]
+ or: [DroidFilter]
+ not: DroidFilter
}
input DroidOrder {
- asc: DroidOrderable
- desc: DroidOrderable
- then: DroidOrder
+ asc: DroidOrderable
+ desc: DroidOrderable
+ then: DroidOrder
}
input DroidPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- primaryFunction: String
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ primaryFunction: String
}
input DroidRef {
- id: ID
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- primaryFunction: String
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ primaryFunction: String
}
input Episode_hash {
- eq: Episode
- in: [Episode]
+ eq: Episode
+ in: [Episode]
}
input HumanFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [HumanHasFilter]
- and: [HumanFilter]
- or: [HumanFilter]
- not: HumanFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [HumanHasFilter]
+ and: [HumanFilter]
+ or: [HumanFilter]
+ not: HumanFilter
}
input HumanOrder {
- asc: HumanOrderable
- desc: HumanOrderable
- then: HumanOrder
+ asc: HumanOrderable
+ desc: HumanOrderable
+ then: HumanOrder
}
input HumanPatch {
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
}
input HumanRef {
- id: ID
- name: String
- friends: [CharacterRef]
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
}
input StarshipFilter {
- id: [ID!]
- name: StringTermFilter
- has: [StarshipHasFilter]
- and: [StarshipFilter]
- or: [StarshipFilter]
- not: StarshipFilter
+ id: [ID!]
+ name: StringTermFilter
+ has: [StarshipHasFilter]
+ and: [StarshipFilter]
+ or: [StarshipFilter]
+ not: StarshipFilter
}
input StarshipOrder {
- asc: StarshipOrderable
- desc: StarshipOrderable
- then: StarshipOrder
+ asc: StarshipOrderable
+ desc: StarshipOrderable
+ then: StarshipOrder
}
input StarshipPatch {
- name: String
- length: Float
+ name: String
+ length: Float
}
input StarshipRef {
- id: ID
- name: String
- length: Float
+ id: ID
+ name: String
+ length: Float
}
input UpdateCharacterInput {
- filter: CharacterFilter!
- set: CharacterPatch
- remove: CharacterPatch
+ filter: CharacterFilter!
+ set: CharacterPatch
+ remove: CharacterPatch
}
input UpdateDroidInput {
- filter: DroidFilter!
- set: DroidPatch
- remove: DroidPatch
+ filter: DroidFilter!
+ set: DroidPatch
+ remove: DroidPatch
}
input UpdateHumanInput {
- filter: HumanFilter!
- set: HumanPatch
- remove: HumanPatch
+ filter: HumanFilter!
+ set: HumanPatch
+ remove: HumanPatch
}
input UpdateStarshipInput {
- filter: StarshipFilter!
- set: StarshipPatch
- remove: StarshipPatch
+ filter: StarshipFilter!
+ set: StarshipPatch
+ remove: StarshipPatch
}
#######################
@@ -631,18 +633,23 @@ input UpdateStarshipInput {
#######################
type Query {
- getCharacter(id: ID!): Character
- queryCharacter(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
- getHuman(id: ID!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
- getDroid(id: ID!): Droid
- queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- aggregateDroid(filter: DroidFilter): DroidAggregateResult
- getStarship(id: ID!): Starship
- queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
+ getCharacter(id: ID!): Character
+ queryCharacter(
+ filter: CharacterFilter
+ order: CharacterOrder
+ first: Int
+ offset: Int
+ ): [Character]
+ aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
+ getHuman(id: ID!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ getDroid(id: ID!): Droid
+ queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ aggregateDroid(filter: DroidFilter): DroidAggregateResult
+ getStarship(id: ID!): Starship
+ queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
}
#######################
@@ -650,16 +657,15 @@ type Query {
#######################
type Mutation {
- updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
- deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
- addHuman(input: [AddHumanInput!]!): AddHumanPayload
- updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
- deleteHuman(filter: HumanFilter!): DeleteHumanPayload
- addDroid(input: [AddDroidInput!]!): AddDroidPayload
- updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
- deleteDroid(filter: DroidFilter!): DeleteDroidPayload
- addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
- updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
- deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
+ updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
+ deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
+ addHuman(input: [AddHumanInput!]!): AddHumanPayload
+ updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
+ deleteHuman(filter: HumanFilter!): DeleteHumanPayload
+ addDroid(input: [AddDroidInput!]!): AddDroidPayload
+ updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
+ deleteDroid(filter: DroidFilter!): DeleteDroidPayload
+ addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
+ updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
+ deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/lambda-directive.graphql b/graphql/schema/testdata/schemagen/output/lambda-directive.graphql
index 2b81f7d964e..1f0f4eee988 100644
--- a/graphql/schema/testdata/schemagen/output/lambda-directive.graphql
+++ b/graphql/schema/testdata/schemagen/output/lambda-directive.graphql
@@ -3,10 +3,10 @@
#######################
type User {
- id: ID!
- firstName: String!
- lastName: String!
- fullName: String @lambda
+ id: ID!
+ firstName: String!
+ lastName: String!
+ fullName: String @lambda
}
#######################
@@ -25,162 +25,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -192,11 +192,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -205,77 +206,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -283,27 +285,27 @@ input StringHashFilter {
#######################
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- firstNameMin: String
- firstNameMax: String
- lastNameMin: String
- lastNameMax: String
+ count: Int
+ firstNameMin: String
+ firstNameMax: String
+ lastNameMin: String
+ lastNameMax: String
}
#######################
@@ -311,13 +313,13 @@ type UserAggregateResult {
#######################
enum UserHasFilter {
- firstName
- lastName
+ firstName
+ lastName
}
enum UserOrderable {
- firstName
- lastName
+ firstName
+ lastName
}
#######################
@@ -325,39 +327,39 @@ enum UserOrderable {
#######################
input AddUserInput {
- firstName: String!
- lastName: String!
+ firstName: String!
+ lastName: String!
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- firstName: String
- lastName: String
+ firstName: String
+ lastName: String
}
input UserRef {
- id: ID
- firstName: String
- lastName: String
+ id: ID
+ firstName: String
+ lastName: String
}
#######################
@@ -365,10 +367,10 @@ input UserRef {
#######################
type Query {
- queryUserNames(id: [ID!]!): [String] @lambda
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ queryUserNames(id: [ID!]!): [String] @lambda
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -376,9 +378,8 @@ type Query {
#######################
type Mutation {
- createUser(firstName: String!, lastName: String!): User @lambda
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ createUser(firstName: String!, lastName: String!): User @lambda
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/language-tags.graphql b/graphql/schema/testdata/schemagen/output/language-tags.graphql
index 174a6cde433..2e21d265153 100644
--- a/graphql/schema/testdata/schemagen/output/language-tags.graphql
+++ b/graphql/schema/testdata/schemagen/output/language-tags.graphql
@@ -3,23 +3,23 @@
#######################
interface Node {
- f1: String
+ f1: String
}
type Person implements Node {
- f1: String
- f1Hi: String @dgraph(pred: "Node.f1@hi")
- f2: String @dgraph(pred: "T.f@no")
- f3: String @dgraph(pred: "f3@en")
- name: String! @id
- nameHi: String @dgraph(pred: "Person.name@hi") @search(by: ["term","exact"])
- nameEn: String @dgraph(pred: "Person.name@en") @search(by: [regexp])
- nameHiEn: String @dgraph(pred: "Person.name@hi:en")
- nameHi_En_Untag: String @dgraph(pred: "Person.name@hi:en:.")
- name_Untag_AnyLang: String @dgraph(pred: "Person.name@.")
- address: String @search(by: [fulltext])
- addressHi: String @dgraph(pred: "Person.address@hi")
- professionEn: String @dgraph(pred: "Person.profession@en")
+ f1: String
+ f1Hi: String @dgraph(pred: "Node.f1@hi")
+ f2: String @dgraph(pred: "T.f@no")
+ f3: String @dgraph(pred: "f3@en")
+ name: String! @id
+ nameHi: String @dgraph(pred: "Person.name@hi") @search(by: ["term", "exact"])
+ nameEn: String @dgraph(pred: "Person.name@en") @search(by: [regexp])
+ nameHiEn: String @dgraph(pred: "Person.name@hi:en")
+ nameHi_En_Untag: String @dgraph(pred: "Person.name@hi:en:.")
+ name_Untag_AnyLang: String @dgraph(pred: "Person.name@.")
+ address: String @search(by: [fulltext])
+ addressHi: String @dgraph(pred: "Person.address@hi")
+ professionEn: String @dgraph(pred: "Person.profession@en")
}
#######################
@@ -38,162 +38,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -205,11 +205,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -218,77 +219,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -296,66 +298,66 @@ input StringHashFilter {
#######################
type AddPersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ numUids: Int
}
type DeleteNodePayload {
- node(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
- msg: String
- numUids: Int
+ node(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
+ msg: String
+ numUids: Int
}
type DeletePersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- msg: String
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ msg: String
+ numUids: Int
}
type NodeAggregateResult {
- count: Int
- f1Min: String
- f1Max: String
+ count: Int
+ f1Min: String
+ f1Max: String
}
type PersonAggregateResult {
- count: Int
- f1Min: String
- f1Max: String
- f1HiMin: String
- f1HiMax: String
- f2Min: String
- f2Max: String
- f3Min: String
- f3Max: String
- nameMin: String
- nameMax: String
- nameHiMin: String
- nameHiMax: String
- nameEnMin: String
- nameEnMax: String
- nameHiEnMin: String
- nameHiEnMax: String
- nameHi_En_UntagMin: String
- nameHi_En_UntagMax: String
- name_Untag_AnyLangMin: String
- name_Untag_AnyLangMax: String
- addressMin: String
- addressMax: String
- addressHiMin: String
- addressHiMax: String
- professionEnMin: String
- professionEnMax: String
+ count: Int
+ f1Min: String
+ f1Max: String
+ f1HiMin: String
+ f1HiMax: String
+ f2Min: String
+ f2Max: String
+ f3Min: String
+ f3Max: String
+ nameMin: String
+ nameMax: String
+ nameHiMin: String
+ nameHiMax: String
+ nameEnMin: String
+ nameEnMax: String
+ nameHiEnMin: String
+ nameHiEnMax: String
+ nameHi_En_UntagMin: String
+ nameHi_En_UntagMax: String
+ name_Untag_AnyLangMin: String
+ name_Untag_AnyLangMax: String
+ addressMin: String
+ addressMax: String
+ addressHiMin: String
+ addressHiMax: String
+ professionEnMin: String
+ professionEnMax: String
}
type UpdateNodePayload {
- node(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
- numUids: Int
+ node(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
+ numUids: Int
}
type UpdatePersonPayload {
- person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- numUids: Int
+ person(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ numUids: Int
}
#######################
@@ -363,39 +365,39 @@ type UpdatePersonPayload {
#######################
enum NodeHasFilter {
- f1
+ f1
}
enum NodeOrderable {
- f1
+ f1
}
enum PersonHasFilter {
- f1
- f1Hi
- f2
- f3
- name
- nameHi
- nameEn
- name_Untag_AnyLang
- address
- addressHi
- professionEn
+ f1
+ f1Hi
+ f2
+ f3
+ name
+ nameHi
+ nameEn
+ name_Untag_AnyLang
+ address
+ addressHi
+ professionEn
}
enum PersonOrderable {
- f1
- f1Hi
- f2
- f3
- name
- nameHi
- nameEn
- name_Untag_AnyLang
- address
- addressHi
- professionEn
+ f1
+ f1Hi
+ f2
+ f3
+ name
+ nameHi
+ nameEn
+ name_Untag_AnyLang
+ address
+ addressHi
+ professionEn
}
#######################
@@ -403,100 +405,100 @@ enum PersonOrderable {
#######################
input AddPersonInput {
- f1: String
- f1Hi: String
- f2: String
- f3: String
- name: String!
- nameHi: String
- nameEn: String
- address: String
- addressHi: String
- professionEn: String
+ f1: String
+ f1Hi: String
+ f2: String
+ f3: String
+ name: String!
+ nameHi: String
+ nameEn: String
+ address: String
+ addressHi: String
+ professionEn: String
}
input NodeFilter {
- has: [NodeHasFilter]
- and: [NodeFilter]
- or: [NodeFilter]
- not: NodeFilter
+ has: [NodeHasFilter]
+ and: [NodeFilter]
+ or: [NodeFilter]
+ not: NodeFilter
}
input NodeOrder {
- asc: NodeOrderable
- desc: NodeOrderable
- then: NodeOrder
+ asc: NodeOrderable
+ desc: NodeOrderable
+ then: NodeOrder
}
input NodePatch {
- f1: String
+ f1: String
}
input PersonFilter {
- name: StringHashFilter
- nameHi: StringExactFilter_StringTermFilter
- nameEn: StringRegExpFilter
- address: StringFullTextFilter
- has: [PersonHasFilter]
- and: [PersonFilter]
- or: [PersonFilter]
- not: PersonFilter
+ name: StringHashFilter
+ nameHi: StringExactFilter_StringTermFilter
+ nameEn: StringRegExpFilter
+ address: StringFullTextFilter
+ has: [PersonHasFilter]
+ and: [PersonFilter]
+ or: [PersonFilter]
+ not: PersonFilter
}
input PersonOrder {
- asc: PersonOrderable
- desc: PersonOrderable
- then: PersonOrder
+ asc: PersonOrderable
+ desc: PersonOrderable
+ then: PersonOrder
}
input PersonPatch {
- f1: String
- f1Hi: String
- f2: String
- f3: String
- name: String
- nameHi: String
- nameEn: String
- address: String
- addressHi: String
- professionEn: String
+ f1: String
+ f1Hi: String
+ f2: String
+ f3: String
+ name: String
+ nameHi: String
+ nameEn: String
+ address: String
+ addressHi: String
+ professionEn: String
}
input PersonRef {
- f1: String
- f1Hi: String
- f2: String
- f3: String
- name: String
- nameHi: String
- nameEn: String
- address: String
- addressHi: String
- professionEn: String
+ f1: String
+ f1Hi: String
+ f2: String
+ f3: String
+ name: String
+ nameHi: String
+ nameEn: String
+ address: String
+ addressHi: String
+ professionEn: String
}
input StringExactFilter_StringTermFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
- allofterms: String
- anyofterms: String
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
+ allofterms: String
+ anyofterms: String
}
input UpdateNodeInput {
- filter: NodeFilter!
- set: NodePatch
- remove: NodePatch
+ filter: NodeFilter!
+ set: NodePatch
+ remove: NodePatch
}
input UpdatePersonInput {
- filter: PersonFilter!
- set: PersonPatch
- remove: PersonPatch
+ filter: PersonFilter!
+ set: PersonPatch
+ remove: PersonPatch
}
#######################
@@ -504,11 +506,11 @@ input UpdatePersonInput {
#######################
type Query {
- queryNode(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
- aggregateNode(filter: NodeFilter): NodeAggregateResult
- getPerson(name: String!): Person
- queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
- aggregatePerson(filter: PersonFilter): PersonAggregateResult
+ queryNode(filter: NodeFilter, order: NodeOrder, first: Int, offset: Int): [Node]
+ aggregateNode(filter: NodeFilter): NodeAggregateResult
+ getPerson(name: String!): Person
+ queryPerson(filter: PersonFilter, order: PersonOrder, first: Int, offset: Int): [Person]
+ aggregatePerson(filter: PersonFilter): PersonAggregateResult
}
#######################
@@ -516,10 +518,9 @@ type Query {
#######################
type Mutation {
- updateNode(input: UpdateNodeInput!): UpdateNodePayload
- deleteNode(filter: NodeFilter!): DeleteNodePayload
- addPerson(input: [AddPersonInput!]!, upsert: Boolean): AddPersonPayload
- updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
- deletePerson(filter: PersonFilter!): DeletePersonPayload
+ updateNode(input: UpdateNodeInput!): UpdateNodePayload
+ deleteNode(filter: NodeFilter!): DeleteNodePayload
+ addPerson(input: [AddPersonInput!]!, upsert: Boolean): AddPersonPayload
+ updatePerson(input: UpdatePersonInput!): UpdatePersonPayload
+ deletePerson(filter: PersonFilter!): DeletePersonPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/no-id-field-with-searchables.graphql b/graphql/schema/testdata/schemagen/output/no-id-field-with-searchables.graphql
index 66f3178e52c..d2a6d41a48e 100644
--- a/graphql/schema/testdata/schemagen/output/no-id-field-with-searchables.graphql
+++ b/graphql/schema/testdata/schemagen/output/no-id-field-with-searchables.graphql
@@ -3,7 +3,7 @@
#######################
type Post {
- content: String! @search
+ content: String! @search
}
#######################
@@ -22,162 +22,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -189,11 +189,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -202,77 +203,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -280,25 +282,25 @@ input StringHashFilter {
#######################
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- contentMin: String
- contentMax: String
+ count: Int
+ contentMin: String
+ contentMax: String
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -306,11 +308,11 @@ type UpdatePostPayload {
#######################
enum PostHasFilter {
- content
+ content
}
enum PostOrderable {
- content
+ content
}
#######################
@@ -318,35 +320,35 @@ enum PostOrderable {
#######################
input AddPostInput {
- content: String!
+ content: String!
}
input PostFilter {
- content: StringTermFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ content: StringTermFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- content: String
+ content: String
}
input PostRef {
- content: String
+ content: String
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -354,8 +356,8 @@ input UpdatePostInput {
#######################
type Query {
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
}
#######################
@@ -363,8 +365,7 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/no-id-field.graphql b/graphql/schema/testdata/schemagen/output/no-id-field.graphql
index 1b45c328a3a..994156cfb62 100644
--- a/graphql/schema/testdata/schemagen/output/no-id-field.graphql
+++ b/graphql/schema/testdata/schemagen/output/no-id-field.graphql
@@ -3,20 +3,20 @@
#######################
type Post {
- content: String!
- author(filter: AuthorFilter): Author!
- genre(filter: GenreFilter): Genre
+ content: String!
+ author(filter: AuthorFilter): Author!
+ genre(filter: GenreFilter): Genre
}
type Author {
- id: ID
- name: String
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID
+ name: String
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
type Genre {
- name: String!
+ name: String!
}
#######################
@@ -35,162 +35,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -202,11 +202,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -215,77 +216,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -293,69 +295,69 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeleteGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- msg: String
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type GenreAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type PostAggregateResult {
- count: Int
- contentMin: String
- contentMax: String
+ count: Int
+ contentMin: String
+ contentMax: String
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdateGenrePayload {
- genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- numUids: Int
+ genre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -363,30 +365,30 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- name
- posts
+ name
+ posts
}
enum AuthorOrderable {
- name
+ name
}
enum GenreHasFilter {
- name
+ name
}
enum GenreOrderable {
- name
+ name
}
enum PostHasFilter {
- content
- author
- genre
+ content
+ author
+ genre
}
enum PostOrderable {
- content
+ content
}
#######################
@@ -394,107 +396,107 @@ enum PostOrderable {
#######################
input AddAuthorInput {
- name: String
- posts: [PostRef]
+ name: String
+ posts: [PostRef]
}
input AddGenreInput {
- name: String!
+ name: String!
}
input AddPostInput {
- content: String!
- author: AuthorRef!
- genre: GenreRef
+ content: String!
+ author: AuthorRef!
+ genre: GenreRef
}
input AuthorFilter {
- id: [ID!]
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- posts: [PostRef]
+ name: String
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- posts: [PostRef]
+ id: ID
+ name: String
+ posts: [PostRef]
}
input GenreFilter {
- has: [GenreHasFilter]
- and: [GenreFilter]
- or: [GenreFilter]
- not: GenreFilter
+ has: [GenreHasFilter]
+ and: [GenreFilter]
+ or: [GenreFilter]
+ not: GenreFilter
}
input GenreOrder {
- asc: GenreOrderable
- desc: GenreOrderable
- then: GenreOrder
+ asc: GenreOrderable
+ desc: GenreOrderable
+ then: GenreOrder
}
input GenrePatch {
- name: String
+ name: String
}
input GenreRef {
- name: String
+ name: String
}
input PostFilter {
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- content: String
- author: AuthorRef
- genre: GenreRef
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input PostRef {
- content: String
- author: AuthorRef
- genre: GenreRef
+ content: String
+ author: AuthorRef
+ genre: GenreRef
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdateGenreInput {
- filter: GenreFilter!
- set: GenrePatch
- remove: GenrePatch
+ filter: GenreFilter!
+ set: GenrePatch
+ remove: GenrePatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -502,13 +504,13 @@ input UpdatePostInput {
#######################
type Query {
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
- aggregateGenre(filter: GenreFilter): GenreAggregateResult
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ queryGenre(filter: GenreFilter, order: GenreOrder, first: Int, offset: Int): [Genre]
+ aggregateGenre(filter: GenreFilter): GenreAggregateResult
}
#######################
@@ -516,14 +518,13 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- addGenre(input: [AddGenreInput!]!): AddGenrePayload
- updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
- deleteGenre(filter: GenreFilter!): DeleteGenrePayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addGenre(input: [AddGenreInput!]!): AddGenrePayload
+ updateGenre(input: UpdateGenreInput!): UpdateGenrePayload
+ deleteGenre(filter: GenreFilter!): DeleteGenrePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/password-type.graphql b/graphql/schema/testdata/schemagen/output/password-type.graphql
index 9b637e1796e..2ac7bb558e9 100644
--- a/graphql/schema/testdata/schemagen/output/password-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/password-type.graphql
@@ -3,8 +3,8 @@
#######################
type Author @secret(field: "pwd") {
- name: String! @id
- token: String
+ name: String! @id
+ token: String
}
#######################
@@ -23,162 +23,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -190,11 +190,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -203,77 +204,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -281,27 +283,27 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- tokenMin: String
- tokenMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ tokenMin: String
+ tokenMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
#######################
@@ -309,13 +311,13 @@ type UpdateAuthorPayload {
#######################
enum AuthorHasFilter {
- name
- token
+ name
+ token
}
enum AuthorOrderable {
- name
- token
+ name
+ token
}
#######################
@@ -323,41 +325,41 @@ enum AuthorOrderable {
#######################
input AddAuthorInput {
- name: String!
- token: String
- pwd: String!
+ name: String!
+ token: String
+ pwd: String!
}
input AuthorFilter {
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- token: String
- pwd: String
+ name: String
+ token: String
+ pwd: String
}
input AuthorRef {
- name: String
- token: String
- pwd: String
+ name: String
+ token: String
+ pwd: String
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
#######################
@@ -365,10 +367,10 @@ input UpdateAuthorInput {
#######################
type Query {
- getAuthor(name: String!): Author
- checkAuthorPassword(name: String!, pwd: String!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getAuthor(name: String!): Author
+ checkAuthorPassword(name: String!, pwd: String!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
}
#######################
@@ -376,8 +378,7 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addAuthor(input: [AddAuthorInput!]!, upsert: Boolean): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/random.graphql b/graphql/schema/testdata/schemagen/output/random.graphql
index 3d0c9112385..a15b78d4ee0 100644
--- a/graphql/schema/testdata/schemagen/output/random.graphql
+++ b/graphql/schema/testdata/schemagen/output/random.graphql
@@ -3,26 +3,26 @@
#######################
type Mission {
- id: ID!
- crew: [Astronaut]
- designation: String!
- startDate: String
- endDate: String
+ id: ID!
+ crew: [Astronaut]
+ designation: String!
+ startDate: String
+ endDate: String
}
type Astronaut @key(fields: "id") @extends {
- id: ID! @external
- missions: [Mission]
+ id: ID! @external
+ missions: [Mission]
}
type User @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type Car {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
#######################
@@ -41,162 +41,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -214,68 +214,68 @@ directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJ
directive @cacheControl(maxAge: Int!) on QUERY
input IntFilter {
- eq: Int
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -283,19 +283,19 @@ input StringHashFilter {
#######################
type CarAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type MissionAggregateResult {
- count: Int
- designationMin: String
- designationMax: String
- startDateMin: String
- startDateMax: String
- endDateMin: String
- endDateMax: String
+ count: Int
+ designationMin: String
+ designationMax: String
+ startDateMin: String
+ startDateMax: String
+ endDateMin: String
+ endDateMax: String
}
#######################
@@ -303,24 +303,24 @@ type MissionAggregateResult {
#######################
enum CarHasFilter {
- name
+ name
}
enum CarOrderable {
- name
+ name
}
enum MissionHasFilter {
- crew
- designation
- startDate
- endDate
+ crew
+ designation
+ startDate
+ endDate
}
enum MissionOrderable {
- designation
- startDate
- endDate
+ designation
+ startDate
+ endDate
}
#######################
@@ -328,31 +328,31 @@ enum MissionOrderable {
#######################
input CarFilter {
- id: [ID!]
- has: CarHasFilter
- and: [CarFilter]
- or: [CarFilter]
- not: CarFilter
+ id: [ID!]
+ has: CarHasFilter
+ and: [CarFilter]
+ or: [CarFilter]
+ not: CarFilter
}
input CarOrder {
- asc: CarOrderable
- desc: CarOrderable
- then: CarOrder
+ asc: CarOrderable
+ desc: CarOrderable
+ then: CarOrder
}
input MissionFilter {
- id: [ID!]
- has: MissionHasFilter
- and: [MissionFilter]
- or: [MissionFilter]
- not: MissionFilter
+ id: [ID!]
+ has: MissionHasFilter
+ and: [MissionFilter]
+ or: [MissionFilter]
+ not: MissionFilter
}
input MissionOrder {
- asc: MissionOrderable
- desc: MissionOrderable
- then: MissionOrder
+ asc: MissionOrderable
+ desc: MissionOrderable
+ then: MissionOrder
}
#######################
@@ -360,12 +360,11 @@ input MissionOrder {
#######################
type Query {
- getMyFavoriteUsers(id: ID!): [User]
- getMission(id: ID!): Mission
- queryMission(filter: MissionFilter, order: MissionOrder, first: Int, offset: Int): [Mission]
- aggregateMission(filter: MissionFilter): MissionAggregateResult
- getCar(id: ID!): Car
- queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- aggregateCar(filter: CarFilter): CarAggregateResult
+ getMyFavoriteUsers(id: ID!): [User]
+ getMission(id: ID!): Mission
+ queryMission(filter: MissionFilter, order: MissionOrder, first: Int, offset: Int): [Mission]
+ aggregateMission(filter: MissionFilter): MissionAggregateResult
+ getCar(id: ID!): Car
+ queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ aggregateCar(filter: CarFilter): CarAggregateResult
}
-
diff --git a/graphql/schema/testdata/schemagen/output/searchables-references.graphql b/graphql/schema/testdata/schemagen/output/searchables-references.graphql
index 783620de915..7a04645004e 100644
--- a/graphql/schema/testdata/schemagen/output/searchables-references.graphql
+++ b/graphql/schema/testdata/schemagen/output/searchables-references.graphql
@@ -3,18 +3,18 @@
#######################
type Author {
- id: ID!
- name: String! @search(by: [hash])
- dob: DateTime
- posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- postsAggregate(filter: PostFilter): PostAggregateResult
+ id: ID!
+ name: String! @search(by: [hash])
+ dob: DateTime
+ posts(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ postsAggregate(filter: PostFilter): PostAggregateResult
}
type Post {
- postID: ID!
- title: String! @search(by: ["term","fulltext"])
- text: String @search(by: ["fulltext","term"])
- datePublished: DateTime
+ postID: ID!
+ title: String! @search(by: ["term", "fulltext"])
+ text: String @search(by: ["fulltext", "term"])
+ datePublished: DateTime
}
#######################
@@ -33,162 +33,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -200,11 +200,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -213,77 +214,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -291,53 +293,53 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- dobMin: DateTime
- dobMax: DateTime
+ count: Int
+ nameMin: String
+ nameMax: String
+ dobMin: DateTime
+ dobMax: DateTime
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- titleMin: String
- titleMax: String
- textMin: String
- textMax: String
- datePublishedMin: DateTime
- datePublishedMax: DateTime
+ count: Int
+ titleMin: String
+ titleMax: String
+ textMin: String
+ textMax: String
+ datePublishedMin: DateTime
+ datePublishedMax: DateTime
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -345,26 +347,26 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- name
- dob
- posts
+ name
+ dob
+ posts
}
enum AuthorOrderable {
- name
- dob
+ name
+ dob
}
enum PostHasFilter {
- title
- text
- datePublished
+ title
+ text
+ datePublished
}
enum PostOrderable {
- title
- text
- datePublished
+ title
+ text
+ datePublished
}
#######################
@@ -372,91 +374,91 @@ enum PostOrderable {
#######################
input AddAuthorInput {
- name: String!
- dob: DateTime
- posts: [PostRef]
+ name: String!
+ dob: DateTime
+ posts: [PostRef]
}
input AddPostInput {
- title: String!
- text: String
- datePublished: DateTime
+ title: String!
+ text: String
+ datePublished: DateTime
}
input AuthorFilter {
- id: [ID!]
- name: StringHashFilter
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ name: StringHashFilter
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
- dob: DateTime
- posts: [PostRef]
+ name: String
+ dob: DateTime
+ posts: [PostRef]
}
input AuthorRef {
- id: ID
- name: String
- dob: DateTime
- posts: [PostRef]
+ id: ID
+ name: String
+ dob: DateTime
+ posts: [PostRef]
}
input PostFilter {
- postID: [ID!]
- title: StringFullTextFilter_StringTermFilter
- text: StringFullTextFilter_StringTermFilter
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ postID: [ID!]
+ title: StringFullTextFilter_StringTermFilter
+ text: StringFullTextFilter_StringTermFilter
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- title: String
- text: String
- datePublished: DateTime
+ title: String
+ text: String
+ datePublished: DateTime
}
input PostRef {
- postID: ID
- title: String
- text: String
- datePublished: DateTime
+ postID: ID
+ title: String
+ text: String
+ datePublished: DateTime
}
input StringFullTextFilter_StringTermFilter {
- alloftext: String
- anyoftext: String
- allofterms: String
- anyofterms: String
+ alloftext: String
+ anyoftext: String
+ allofterms: String
+ anyofterms: String
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -464,12 +466,12 @@ input UpdatePostInput {
#######################
type Query {
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
- getPost(postID: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(postID: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
}
#######################
@@ -477,11 +479,10 @@ type Query {
#######################
type Mutation {
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/searchables.graphql b/graphql/schema/testdata/schemagen/output/searchables.graphql
index ac2387bae25..355030a84f5 100644
--- a/graphql/schema/testdata/schemagen/output/searchables.graphql
+++ b/graphql/schema/testdata/schemagen/output/searchables.graphql
@@ -3,38 +3,38 @@
#######################
type Post {
- postID: ID!
- title: String! @search(by: [term])
- titleByEverything: String! @search(by: ["term","fulltext","trigram","hash"])
- text: String @search(by: [fulltext])
- tags: [String] @search(by: [trigram])
- tagsHash: [String] @search(by: [hash])
- tagsExact: [String] @search(by: [exact])
- publishByYear: DateTime @search(by: [year])
- publishByMonth: DateTime @search(by: [month])
- publishByDay: DateTime @search(by: [day])
- publishByHour: DateTime @search(by: [hour])
- publishTimestamp: Int64 @search
- numViewers: Int64 @search(by: [int64])
- numLikes: Int @search
- score: Float @search
- isPublished: Boolean @search
- postType: PostType @search
- postTypeNonNull: PostType! @search
- postTypeList: [PostType] @search
- postTypeTrigram: PostType @search(by: [trigram])
- postTypeRegexp: PostType @search(by: [regexp])
- postTypeExact: [PostType] @search(by: [exact])
- postTypeHash: PostType @search(by: [hash])
- postTypeRegexpExact: PostType @search(by: ["exact","regexp"])
- postTypeHashRegexp: PostType @search(by: ["hash","regexp"])
- postTypeNone: PostType @search(by: [])
+ postID: ID!
+ title: String! @search(by: [term])
+ titleByEverything: String! @search(by: ["term", "fulltext", "trigram", "hash"])
+ text: String @search(by: [fulltext])
+ tags: [String] @search(by: [trigram])
+ tagsHash: [String] @search(by: [hash])
+ tagsExact: [String] @search(by: [exact])
+ publishByYear: DateTime @search(by: [year])
+ publishByMonth: DateTime @search(by: [month])
+ publishByDay: DateTime @search(by: [day])
+ publishByHour: DateTime @search(by: [hour])
+ publishTimestamp: Int64 @search
+ numViewers: Int64 @search(by: [int64])
+ numLikes: Int @search
+ score: Float @search
+ isPublished: Boolean @search
+ postType: PostType @search
+ postTypeNonNull: PostType! @search
+ postTypeList: [PostType] @search
+ postTypeTrigram: PostType @search(by: [trigram])
+ postTypeRegexp: PostType @search(by: [regexp])
+ postTypeExact: [PostType] @search(by: [exact])
+ postTypeHash: PostType @search(by: [hash])
+ postTypeRegexpExact: PostType @search(by: ["exact", "regexp"])
+ postTypeHashRegexp: PostType @search(by: ["hash", "regexp"])
+ postTypeNone: PostType @search(by: [])
}
enum PostType {
- Fact
- Question
- Opinion
+ Fact
+ Question
+ Opinion
}
#######################
@@ -53,162 +53,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -220,11 +220,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -233,77 +234,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -311,53 +313,53 @@ input StringHashFilter {
#######################
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- titleMin: String
- titleMax: String
- titleByEverythingMin: String
- titleByEverythingMax: String
- textMin: String
- textMax: String
- publishByYearMin: DateTime
- publishByYearMax: DateTime
- publishByMonthMin: DateTime
- publishByMonthMax: DateTime
- publishByDayMin: DateTime
- publishByDayMax: DateTime
- publishByHourMin: DateTime
- publishByHourMax: DateTime
- publishTimestampMin: Int64
- publishTimestampMax: Int64
- publishTimestampSum: Int64
- publishTimestampAvg: Float
- numViewersMin: Int64
- numViewersMax: Int64
- numViewersSum: Int64
- numViewersAvg: Float
- numLikesMin: Int
- numLikesMax: Int
- numLikesSum: Int
- numLikesAvg: Float
- scoreMin: Float
- scoreMax: Float
- scoreSum: Float
- scoreAvg: Float
+ count: Int
+ titleMin: String
+ titleMax: String
+ titleByEverythingMin: String
+ titleByEverythingMax: String
+ textMin: String
+ textMax: String
+ publishByYearMin: DateTime
+ publishByYearMax: DateTime
+ publishByMonthMin: DateTime
+ publishByMonthMax: DateTime
+ publishByDayMin: DateTime
+ publishByDayMax: DateTime
+ publishByHourMin: DateTime
+ publishByHourMax: DateTime
+ publishTimestampMin: Int64
+ publishTimestampMax: Int64
+ publishTimestampSum: Int64
+ publishTimestampAvg: Float
+ numViewersMin: Int64
+ numViewersMax: Int64
+ numViewersSum: Int64
+ numViewersAvg: Float
+ numLikesMin: Int
+ numLikesMax: Int
+ numLikesSum: Int
+ numLikesAvg: Float
+ scoreMin: Float
+ scoreMax: Float
+ scoreSum: Float
+ scoreAvg: Float
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -365,45 +367,45 @@ type UpdatePostPayload {
#######################
enum PostHasFilter {
- title
- titleByEverything
- text
- tags
- tagsHash
- tagsExact
- publishByYear
- publishByMonth
- publishByDay
- publishByHour
- publishTimestamp
- numViewers
- numLikes
- score
- isPublished
- postType
- postTypeNonNull
- postTypeList
- postTypeTrigram
- postTypeRegexp
- postTypeExact
- postTypeHash
- postTypeRegexpExact
- postTypeHashRegexp
- postTypeNone
+ title
+ titleByEverything
+ text
+ tags
+ tagsHash
+ tagsExact
+ publishByYear
+ publishByMonth
+ publishByDay
+ publishByHour
+ publishTimestamp
+ numViewers
+ numLikes
+ score
+ isPublished
+ postType
+ postTypeNonNull
+ postTypeList
+ postTypeTrigram
+ postTypeRegexp
+ postTypeExact
+ postTypeHash
+ postTypeRegexpExact
+ postTypeHashRegexp
+ postTypeNone
}
enum PostOrderable {
- title
- titleByEverything
- text
- publishByYear
- publishByMonth
- publishByDay
- publishByHour
- publishTimestamp
- numViewers
- numLikes
- score
+ title
+ titleByEverything
+ text
+ publishByYear
+ publishByMonth
+ publishByDay
+ publishByHour
+ publishTimestamp
+ numViewers
+ numLikes
+ score
}
#######################
@@ -411,175 +413,175 @@ enum PostOrderable {
#######################
input AddPostInput {
- title: String!
- titleByEverything: String!
- text: String
- tags: [String]
- tagsHash: [String]
- tagsExact: [String]
- publishByYear: DateTime
- publishByMonth: DateTime
- publishByDay: DateTime
- publishByHour: DateTime
- publishTimestamp: Int64
- numViewers: Int64
- numLikes: Int
- score: Float
- isPublished: Boolean
- postType: PostType
- postTypeNonNull: PostType!
- postTypeList: [PostType]
- postTypeTrigram: PostType
- postTypeRegexp: PostType
- postTypeExact: [PostType]
- postTypeHash: PostType
- postTypeRegexpExact: PostType
- postTypeHashRegexp: PostType
- postTypeNone: PostType
+ title: String!
+ titleByEverything: String!
+ text: String
+ tags: [String]
+ tagsHash: [String]
+ tagsExact: [String]
+ publishByYear: DateTime
+ publishByMonth: DateTime
+ publishByDay: DateTime
+ publishByHour: DateTime
+ publishTimestamp: Int64
+ numViewers: Int64
+ numLikes: Int
+ score: Float
+ isPublished: Boolean
+ postType: PostType
+ postTypeNonNull: PostType!
+ postTypeList: [PostType]
+ postTypeTrigram: PostType
+ postTypeRegexp: PostType
+ postTypeExact: [PostType]
+ postTypeHash: PostType
+ postTypeRegexpExact: PostType
+ postTypeHashRegexp: PostType
+ postTypeNone: PostType
}
input PostFilter {
- postID: [ID!]
- title: StringTermFilter
- titleByEverything: StringFullTextFilter_StringHashFilter_StringTermFilter_StringRegExpFilter
- text: StringFullTextFilter
- tags: StringRegExpFilter
- tagsHash: StringHashFilter
- tagsExact: StringExactFilter
- publishByYear: DateTimeFilter
- publishByMonth: DateTimeFilter
- publishByDay: DateTimeFilter
- publishByHour: DateTimeFilter
- publishTimestamp: Int64Filter
- numViewers: Int64Filter
- numLikes: IntFilter
- score: FloatFilter
- isPublished: Boolean
- postType: PostType_hash
- postTypeNonNull: PostType_hash
- postTypeList: PostType_hash
- postTypeTrigram: StringRegExpFilter
- postTypeRegexp: StringRegExpFilter
- postTypeExact: PostType_exact
- postTypeHash: PostType_hash
- postTypeRegexpExact: PostType_exact_StringRegExpFilter
- postTypeHashRegexp: PostType_hash_StringRegExpFilter
- postTypeNone: PostType_hash
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ postID: [ID!]
+ title: StringTermFilter
+ titleByEverything: StringFullTextFilter_StringHashFilter_StringTermFilter_StringRegExpFilter
+ text: StringFullTextFilter
+ tags: StringRegExpFilter
+ tagsHash: StringHashFilter
+ tagsExact: StringExactFilter
+ publishByYear: DateTimeFilter
+ publishByMonth: DateTimeFilter
+ publishByDay: DateTimeFilter
+ publishByHour: DateTimeFilter
+ publishTimestamp: Int64Filter
+ numViewers: Int64Filter
+ numLikes: IntFilter
+ score: FloatFilter
+ isPublished: Boolean
+ postType: PostType_hash
+ postTypeNonNull: PostType_hash
+ postTypeList: PostType_hash
+ postTypeTrigram: StringRegExpFilter
+ postTypeRegexp: StringRegExpFilter
+ postTypeExact: PostType_exact
+ postTypeHash: PostType_hash
+ postTypeRegexpExact: PostType_exact_StringRegExpFilter
+ postTypeHashRegexp: PostType_hash_StringRegExpFilter
+ postTypeNone: PostType_hash
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- title: String
- titleByEverything: String
- text: String
- tags: [String]
- tagsHash: [String]
- tagsExact: [String]
- publishByYear: DateTime
- publishByMonth: DateTime
- publishByDay: DateTime
- publishByHour: DateTime
- publishTimestamp: Int64
- numViewers: Int64
- numLikes: Int
- score: Float
- isPublished: Boolean
- postType: PostType
- postTypeNonNull: PostType
- postTypeList: [PostType]
- postTypeTrigram: PostType
- postTypeRegexp: PostType
- postTypeExact: [PostType]
- postTypeHash: PostType
- postTypeRegexpExact: PostType
- postTypeHashRegexp: PostType
- postTypeNone: PostType
+ title: String
+ titleByEverything: String
+ text: String
+ tags: [String]
+ tagsHash: [String]
+ tagsExact: [String]
+ publishByYear: DateTime
+ publishByMonth: DateTime
+ publishByDay: DateTime
+ publishByHour: DateTime
+ publishTimestamp: Int64
+ numViewers: Int64
+ numLikes: Int
+ score: Float
+ isPublished: Boolean
+ postType: PostType
+ postTypeNonNull: PostType
+ postTypeList: [PostType]
+ postTypeTrigram: PostType
+ postTypeRegexp: PostType
+ postTypeExact: [PostType]
+ postTypeHash: PostType
+ postTypeRegexpExact: PostType
+ postTypeHashRegexp: PostType
+ postTypeNone: PostType
}
input PostRef {
- postID: ID
- title: String
- titleByEverything: String
- text: String
- tags: [String]
- tagsHash: [String]
- tagsExact: [String]
- publishByYear: DateTime
- publishByMonth: DateTime
- publishByDay: DateTime
- publishByHour: DateTime
- publishTimestamp: Int64
- numViewers: Int64
- numLikes: Int
- score: Float
- isPublished: Boolean
- postType: PostType
- postTypeNonNull: PostType
- postTypeList: [PostType]
- postTypeTrigram: PostType
- postTypeRegexp: PostType
- postTypeExact: [PostType]
- postTypeHash: PostType
- postTypeRegexpExact: PostType
- postTypeHashRegexp: PostType
- postTypeNone: PostType
+ postID: ID
+ title: String
+ titleByEverything: String
+ text: String
+ tags: [String]
+ tagsHash: [String]
+ tagsExact: [String]
+ publishByYear: DateTime
+ publishByMonth: DateTime
+ publishByDay: DateTime
+ publishByHour: DateTime
+ publishTimestamp: Int64
+ numViewers: Int64
+ numLikes: Int
+ score: Float
+ isPublished: Boolean
+ postType: PostType
+ postTypeNonNull: PostType
+ postTypeList: [PostType]
+ postTypeTrigram: PostType
+ postTypeRegexp: PostType
+ postTypeExact: [PostType]
+ postTypeHash: PostType
+ postTypeRegexpExact: PostType
+ postTypeHashRegexp: PostType
+ postTypeNone: PostType
}
input PostType_exact {
- eq: PostType
- in: [PostType]
- le: PostType
- lt: PostType
- ge: PostType
- gt: PostType
- between: PostType
+ eq: PostType
+ in: [PostType]
+ le: PostType
+ lt: PostType
+ ge: PostType
+ gt: PostType
+ between: PostType
}
input PostType_exact_StringRegExpFilter {
- eq: PostType
- in: [PostType]
- le: PostType
- lt: PostType
- ge: PostType
- gt: PostType
- between: PostType
- regexp: String
+ eq: PostType
+ in: [PostType]
+ le: PostType
+ lt: PostType
+ ge: PostType
+ gt: PostType
+ between: PostType
+ regexp: String
}
input PostType_hash {
- eq: PostType
- in: [PostType]
+ eq: PostType
+ in: [PostType]
}
input PostType_hash_StringRegExpFilter {
- eq: PostType
- in: [PostType]
- regexp: String
+ eq: PostType
+ in: [PostType]
+ regexp: String
}
input StringFullTextFilter_StringHashFilter_StringTermFilter_StringRegExpFilter {
- alloftext: String
- anyoftext: String
- eq: String
- in: [String]
- allofterms: String
- anyofterms: String
- regexp: String
+ alloftext: String
+ anyoftext: String
+ eq: String
+ in: [String]
+ allofterms: String
+ anyofterms: String
+ regexp: String
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -587,9 +589,9 @@ input UpdatePostInput {
#######################
type Query {
- getPost(postID: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
+ getPost(postID: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
}
#######################
@@ -597,8 +599,7 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/single-type-with-enum.graphql b/graphql/schema/testdata/schemagen/output/single-type-with-enum.graphql
index 0645d3f34ae..5405db1dde2 100644
--- a/graphql/schema/testdata/schemagen/output/single-type-with-enum.graphql
+++ b/graphql/schema/testdata/schemagen/output/single-type-with-enum.graphql
@@ -3,16 +3,16 @@
#######################
type Post {
- id: ID!
- title: String!
- text: String
- postType: PostType!
+ id: ID!
+ title: String!
+ text: String
+ postType: PostType!
}
enum PostType {
- Statement
- Question
- Answer
+ Statement
+ Question
+ Answer
}
#######################
@@ -31,162 +31,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -198,11 +198,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -211,77 +212,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -289,27 +291,27 @@ input StringHashFilter {
#######################
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- titleMin: String
- titleMax: String
- textMin: String
- textMax: String
+ count: Int
+ titleMin: String
+ titleMax: String
+ textMin: String
+ textMax: String
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -317,14 +319,14 @@ type UpdatePostPayload {
#######################
enum PostHasFilter {
- title
- text
- postType
+ title
+ text
+ postType
}
enum PostOrderable {
- title
- text
+ title
+ text
}
#######################
@@ -332,42 +334,42 @@ enum PostOrderable {
#######################
input AddPostInput {
- title: String!
- text: String
- postType: PostType!
+ title: String!
+ text: String
+ postType: PostType!
}
input PostFilter {
- id: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- title: String
- text: String
- postType: PostType
+ title: String
+ text: String
+ postType: PostType
}
input PostRef {
- id: ID
- title: String
- text: String
- postType: PostType
+ id: ID
+ title: String
+ text: String
+ postType: PostType
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -375,9 +377,9 @@ input UpdatePostInput {
#######################
type Query {
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
}
#######################
@@ -385,8 +387,7 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/single-type.graphql b/graphql/schema/testdata/schemagen/output/single-type.graphql
index cd8dcc92926..cf2c87d1bf8 100644
--- a/graphql/schema/testdata/schemagen/output/single-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/single-type.graphql
@@ -3,11 +3,11 @@
#######################
type Message {
- id: ID!
- content: String!
- author: String
- uniqueId: Int64
- datePosted: DateTime
+ id: ID!
+ content: String!
+ author: String
+ uniqueId: Int64
+ datePosted: DateTime
}
#######################
@@ -26,162 +26,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -193,11 +193,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -206,77 +207,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -284,33 +286,33 @@ input StringHashFilter {
#######################
type AddMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ numUids: Int
}
type DeleteMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- msg: String
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ msg: String
+ numUids: Int
}
type MessageAggregateResult {
- count: Int
- contentMin: String
- contentMax: String
- authorMin: String
- authorMax: String
- uniqueIdMin: Int64
- uniqueIdMax: Int64
- uniqueIdSum: Int64
- uniqueIdAvg: Float
- datePostedMin: DateTime
- datePostedMax: DateTime
+ count: Int
+ contentMin: String
+ contentMax: String
+ authorMin: String
+ authorMax: String
+ uniqueIdMin: Int64
+ uniqueIdMax: Int64
+ uniqueIdSum: Int64
+ uniqueIdAvg: Float
+ datePostedMin: DateTime
+ datePostedMax: DateTime
}
type UpdateMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ numUids: Int
}
#######################
@@ -318,17 +320,17 @@ type UpdateMessagePayload {
#######################
enum MessageHasFilter {
- content
- author
- uniqueId
- datePosted
+ content
+ author
+ uniqueId
+ datePosted
}
enum MessageOrderable {
- content
- author
- uniqueId
- datePosted
+ content
+ author
+ uniqueId
+ datePosted
}
#######################
@@ -336,45 +338,45 @@ enum MessageOrderable {
#######################
input AddMessageInput {
- content: String!
- author: String
- uniqueId: Int64
- datePosted: DateTime
+ content: String!
+ author: String
+ uniqueId: Int64
+ datePosted: DateTime
}
input MessageFilter {
- id: [ID!]
- has: [MessageHasFilter]
- and: [MessageFilter]
- or: [MessageFilter]
- not: MessageFilter
+ id: [ID!]
+ has: [MessageHasFilter]
+ and: [MessageFilter]
+ or: [MessageFilter]
+ not: MessageFilter
}
input MessageOrder {
- asc: MessageOrderable
- desc: MessageOrderable
- then: MessageOrder
+ asc: MessageOrderable
+ desc: MessageOrderable
+ then: MessageOrder
}
input MessagePatch {
- content: String
- author: String
- uniqueId: Int64
- datePosted: DateTime
+ content: String
+ author: String
+ uniqueId: Int64
+ datePosted: DateTime
}
input MessageRef {
- id: ID
- content: String
- author: String
- uniqueId: Int64
- datePosted: DateTime
+ id: ID
+ content: String
+ author: String
+ uniqueId: Int64
+ datePosted: DateTime
}
input UpdateMessageInput {
- filter: MessageFilter!
- set: MessagePatch
- remove: MessagePatch
+ filter: MessageFilter!
+ set: MessagePatch
+ remove: MessagePatch
}
#######################
@@ -382,9 +384,9 @@ input UpdateMessageInput {
#######################
type Query {
- getMessage(id: ID!): Message
- queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- aggregateMessage(filter: MessageFilter): MessageAggregateResult
+ getMessage(id: ID!): Message
+ queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ aggregateMessage(filter: MessageFilter): MessageAggregateResult
}
#######################
@@ -392,8 +394,7 @@ type Query {
#######################
type Mutation {
- addMessage(input: [AddMessageInput!]!): AddMessagePayload
- updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
- deleteMessage(filter: MessageFilter!): DeleteMessagePayload
+ addMessage(input: [AddMessageInput!]!): AddMessagePayload
+ updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
+ deleteMessage(filter: MessageFilter!): DeleteMessagePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-implements-multiple-interfaces.graphql b/graphql/schema/testdata/schemagen/output/type-implements-multiple-interfaces.graphql
index fb367928dd7..1d5f2c3975c 100644
--- a/graphql/schema/testdata/schemagen/output/type-implements-multiple-interfaces.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-implements-multiple-interfaces.graphql
@@ -3,25 +3,25 @@
#######################
interface Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
interface Employee {
- employeeId: String!
- title: String!
+ employeeId: String!
+ title: String!
}
type Human implements Character & Employee {
- employeeId: String!
- title: String!
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- totalCredits: Int
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ employeeId: String!
+ title: String!
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ totalCredits: Int
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
#######################
@@ -40,162 +40,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -207,11 +207,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -220,77 +221,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -298,69 +300,69 @@ input StringHashFilter {
#######################
type AddHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type CharacterAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- msg: String
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ msg: String
+ numUids: Int
}
type DeleteEmployeePayload {
- employee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
- msg: String
- numUids: Int
+ employee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
+ msg: String
+ numUids: Int
}
type DeleteHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- msg: String
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ msg: String
+ numUids: Int
}
type EmployeeAggregateResult {
- count: Int
- employeeIdMin: String
- employeeIdMax: String
- titleMin: String
- titleMax: String
+ count: Int
+ employeeIdMin: String
+ employeeIdMax: String
+ titleMin: String
+ titleMax: String
}
type HumanAggregateResult {
- count: Int
- employeeIdMin: String
- employeeIdMax: String
- titleMin: String
- titleMax: String
- nameMin: String
- nameMax: String
- totalCreditsMin: Int
- totalCreditsMax: Int
- totalCreditsSum: Int
- totalCreditsAvg: Float
+ count: Int
+ employeeIdMin: String
+ employeeIdMax: String
+ titleMin: String
+ titleMax: String
+ nameMin: String
+ nameMax: String
+ totalCreditsMin: Int
+ totalCreditsMax: Int
+ totalCreditsSum: Int
+ totalCreditsAvg: Float
}
type UpdateCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ numUids: Int
}
type UpdateEmployeePayload {
- employee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
- numUids: Int
+ employee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
+ numUids: Int
}
type UpdateHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
#######################
@@ -368,37 +370,37 @@ type UpdateHumanPayload {
#######################
enum CharacterHasFilter {
- name
- friends
+ name
+ friends
}
enum CharacterOrderable {
- name
+ name
}
enum EmployeeHasFilter {
- employeeId
- title
+ employeeId
+ title
}
enum EmployeeOrderable {
- employeeId
- title
+ employeeId
+ title
}
enum HumanHasFilter {
- employeeId
- title
- name
- friends
- totalCredits
+ employeeId
+ title
+ name
+ friends
+ totalCredits
}
enum HumanOrderable {
- employeeId
- title
- name
- totalCredits
+ employeeId
+ title
+ name
+ totalCredits
}
#######################
@@ -406,103 +408,103 @@ enum HumanOrderable {
#######################
input AddHumanInput {
- employeeId: String!
- title: String!
- name: String!
- friends: [CharacterRef]
- totalCredits: Int
+ employeeId: String!
+ title: String!
+ name: String!
+ friends: [CharacterRef]
+ totalCredits: Int
}
input CharacterFilter {
- id: [ID!]
- name: StringExactFilter
- has: [CharacterHasFilter]
- and: [CharacterFilter]
- or: [CharacterFilter]
- not: CharacterFilter
+ id: [ID!]
+ name: StringExactFilter
+ has: [CharacterHasFilter]
+ and: [CharacterFilter]
+ or: [CharacterFilter]
+ not: CharacterFilter
}
input CharacterOrder {
- asc: CharacterOrderable
- desc: CharacterOrderable
- then: CharacterOrder
+ asc: CharacterOrderable
+ desc: CharacterOrderable
+ then: CharacterOrder
}
input CharacterPatch {
- name: String
- friends: [CharacterRef]
+ name: String
+ friends: [CharacterRef]
}
input CharacterRef {
- id: ID!
+ id: ID!
}
input EmployeeFilter {
- has: [EmployeeHasFilter]
- and: [EmployeeFilter]
- or: [EmployeeFilter]
- not: EmployeeFilter
+ has: [EmployeeHasFilter]
+ and: [EmployeeFilter]
+ or: [EmployeeFilter]
+ not: EmployeeFilter
}
input EmployeeOrder {
- asc: EmployeeOrderable
- desc: EmployeeOrderable
- then: EmployeeOrder
+ asc: EmployeeOrderable
+ desc: EmployeeOrderable
+ then: EmployeeOrder
}
input EmployeePatch {
- employeeId: String
- title: String
+ employeeId: String
+ title: String
}
input HumanFilter {
- id: [ID!]
- name: StringExactFilter
- has: [HumanHasFilter]
- and: [HumanFilter]
- or: [HumanFilter]
- not: HumanFilter
+ id: [ID!]
+ name: StringExactFilter
+ has: [HumanHasFilter]
+ and: [HumanFilter]
+ or: [HumanFilter]
+ not: HumanFilter
}
input HumanOrder {
- asc: HumanOrderable
- desc: HumanOrderable
- then: HumanOrder
+ asc: HumanOrderable
+ desc: HumanOrderable
+ then: HumanOrder
}
input HumanPatch {
- employeeId: String
- title: String
- name: String
- friends: [CharacterRef]
- totalCredits: Int
+ employeeId: String
+ title: String
+ name: String
+ friends: [CharacterRef]
+ totalCredits: Int
}
input HumanRef {
- id: ID
- employeeId: String
- title: String
- name: String
- friends: [CharacterRef]
- totalCredits: Int
+ id: ID
+ employeeId: String
+ title: String
+ name: String
+ friends: [CharacterRef]
+ totalCredits: Int
}
input UpdateCharacterInput {
- filter: CharacterFilter!
- set: CharacterPatch
- remove: CharacterPatch
+ filter: CharacterFilter!
+ set: CharacterPatch
+ remove: CharacterPatch
}
input UpdateEmployeeInput {
- filter: EmployeeFilter!
- set: EmployeePatch
- remove: EmployeePatch
+ filter: EmployeeFilter!
+ set: EmployeePatch
+ remove: EmployeePatch
}
input UpdateHumanInput {
- filter: HumanFilter!
- set: HumanPatch
- remove: HumanPatch
+ filter: HumanFilter!
+ set: HumanPatch
+ remove: HumanPatch
}
#######################
@@ -510,14 +512,19 @@ input UpdateHumanInput {
#######################
type Query {
- getCharacter(id: ID!): Character
- queryCharacter(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
- queryEmployee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
- aggregateEmployee(filter: EmployeeFilter): EmployeeAggregateResult
- getHuman(id: ID!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ getCharacter(id: ID!): Character
+ queryCharacter(
+ filter: CharacterFilter
+ order: CharacterOrder
+ first: Int
+ offset: Int
+ ): [Character]
+ aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
+ queryEmployee(filter: EmployeeFilter, order: EmployeeOrder, first: Int, offset: Int): [Employee]
+ aggregateEmployee(filter: EmployeeFilter): EmployeeAggregateResult
+ getHuman(id: ID!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
}
#######################
@@ -525,12 +532,11 @@ type Query {
#######################
type Mutation {
- updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
- deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
- updateEmployee(input: UpdateEmployeeInput!): UpdateEmployeePayload
- deleteEmployee(filter: EmployeeFilter!): DeleteEmployeePayload
- addHuman(input: [AddHumanInput!]!): AddHumanPayload
- updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
- deleteHuman(filter: HumanFilter!): DeleteHumanPayload
+ updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
+ deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
+ updateEmployee(input: UpdateEmployeeInput!): UpdateEmployeePayload
+ deleteEmployee(filter: EmployeeFilter!): DeleteEmployeePayload
+ addHuman(input: [AddHumanInput!]!): AddHumanPayload
+ updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
+ deleteHuman(filter: HumanFilter!): DeleteHumanPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-reference.graphql b/graphql/schema/testdata/schemagen/output/type-reference.graphql
index 6cd97099546..51b252900dc 100644
--- a/graphql/schema/testdata/schemagen/output/type-reference.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-reference.graphql
@@ -3,15 +3,15 @@
#######################
type Post {
- id: ID!
- title: String!
- text: String
- author(filter: AuthorFilter): Author!
+ id: ID!
+ title: String!
+ text: String
+ author(filter: AuthorFilter): Author!
}
type Author {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
#######################
@@ -30,162 +30,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -197,11 +197,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -210,77 +211,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -288,49 +290,49 @@ input StringHashFilter {
#######################
type AddAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type AddPostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
type AuthorAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- msg: String
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ msg: String
+ numUids: Int
}
type DeletePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- msg: String
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ msg: String
+ numUids: Int
}
type PostAggregateResult {
- count: Int
- titleMin: String
- titleMax: String
- textMin: String
- textMax: String
+ count: Int
+ titleMin: String
+ titleMax: String
+ textMin: String
+ textMax: String
}
type UpdateAuthorPayload {
- author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- numUids: Int
+ author(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ numUids: Int
}
type UpdatePostPayload {
- post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- numUids: Int
+ post(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ numUids: Int
}
#######################
@@ -338,22 +340,22 @@ type UpdatePostPayload {
#######################
enum AuthorHasFilter {
- name
+ name
}
enum AuthorOrderable {
- name
+ name
}
enum PostHasFilter {
- title
- text
- author
+ title
+ text
+ author
}
enum PostOrderable {
- title
- text
+ title
+ text
}
#######################
@@ -361,75 +363,75 @@ enum PostOrderable {
#######################
input AddAuthorInput {
- name: String!
+ name: String!
}
input AddPostInput {
- title: String!
- text: String
- author: AuthorRef!
+ title: String!
+ text: String
+ author: AuthorRef!
}
input AuthorFilter {
- id: [ID!]
- has: [AuthorHasFilter]
- and: [AuthorFilter]
- or: [AuthorFilter]
- not: AuthorFilter
+ id: [ID!]
+ has: [AuthorHasFilter]
+ and: [AuthorFilter]
+ or: [AuthorFilter]
+ not: AuthorFilter
}
input AuthorOrder {
- asc: AuthorOrderable
- desc: AuthorOrderable
- then: AuthorOrder
+ asc: AuthorOrderable
+ desc: AuthorOrderable
+ then: AuthorOrder
}
input AuthorPatch {
- name: String
+ name: String
}
input AuthorRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input PostFilter {
- id: [ID!]
- has: [PostHasFilter]
- and: [PostFilter]
- or: [PostFilter]
- not: PostFilter
+ id: [ID!]
+ has: [PostHasFilter]
+ and: [PostFilter]
+ or: [PostFilter]
+ not: PostFilter
}
input PostOrder {
- asc: PostOrderable
- desc: PostOrderable
- then: PostOrder
+ asc: PostOrderable
+ desc: PostOrderable
+ then: PostOrder
}
input PostPatch {
- title: String
- text: String
- author: AuthorRef
+ title: String
+ text: String
+ author: AuthorRef
}
input PostRef {
- id: ID
- title: String
- text: String
- author: AuthorRef
+ id: ID
+ title: String
+ text: String
+ author: AuthorRef
}
input UpdateAuthorInput {
- filter: AuthorFilter!
- set: AuthorPatch
- remove: AuthorPatch
+ filter: AuthorFilter!
+ set: AuthorPatch
+ remove: AuthorPatch
}
input UpdatePostInput {
- filter: PostFilter!
- set: PostPatch
- remove: PostPatch
+ filter: PostFilter!
+ set: PostPatch
+ remove: PostPatch
}
#######################
@@ -437,12 +439,12 @@ input UpdatePostInput {
#######################
type Query {
- getPost(id: ID!): Post
- queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
- aggregatePost(filter: PostFilter): PostAggregateResult
- getAuthor(id: ID!): Author
- queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
- aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
+ getPost(id: ID!): Post
+ queryPost(filter: PostFilter, order: PostOrder, first: Int, offset: Int): [Post]
+ aggregatePost(filter: PostFilter): PostAggregateResult
+ getAuthor(id: ID!): Author
+ queryAuthor(filter: AuthorFilter, order: AuthorOrder, first: Int, offset: Int): [Author]
+ aggregateAuthor(filter: AuthorFilter): AuthorAggregateResult
}
#######################
@@ -450,11 +452,10 @@ type Query {
#######################
type Mutation {
- addPost(input: [AddPostInput!]!): AddPostPayload
- updatePost(input: UpdatePostInput!): UpdatePostPayload
- deletePost(filter: PostFilter!): DeletePostPayload
- addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
- updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
- deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
+ addPost(input: [AddPostInput!]!): AddPostPayload
+ updatePost(input: UpdatePostInput!): UpdatePostPayload
+ deletePost(filter: PostFilter!): DeletePostPayload
+ addAuthor(input: [AddAuthorInput!]!): AddAuthorPayload
+ updateAuthor(input: UpdateAuthorInput!): UpdateAuthorPayload
+ deleteAuthor(filter: AuthorFilter!): DeleteAuthorPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-with-arguments-on-field.graphql b/graphql/schema/testdata/schemagen/output/type-with-arguments-on-field.graphql
index 8e1aebb77bc..a92c589fdcc 100644
--- a/graphql/schema/testdata/schemagen/output/type-with-arguments-on-field.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-with-arguments-on-field.graphql
@@ -3,16 +3,16 @@
#######################
interface Abstract {
- id: ID!
- name(random: Int!, size: String): String!
+ id: ID!
+ name(random: Int!, size: String): String!
}
type Message implements Abstract {
- id: ID!
- name(random: Int!, size: String): String!
- content(pick: Int!, name: String): String!
- author: String
- datePosted: DateTime
+ id: ID!
+ name(random: Int!, size: String): String!
+ content(pick: Int!, name: String): String!
+ author: String
+ datePosted: DateTime
}
#######################
@@ -31,162 +31,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -198,11 +198,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -211,77 +212,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -289,48 +291,48 @@ input StringHashFilter {
#######################
type AbstractAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type AddMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ numUids: Int
}
type DeleteAbstractPayload {
- abstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
- msg: String
- numUids: Int
+ abstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
+ msg: String
+ numUids: Int
}
type DeleteMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- msg: String
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ msg: String
+ numUids: Int
}
type MessageAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- contentMin: String
- contentMax: String
- authorMin: String
- authorMax: String
- datePostedMin: DateTime
- datePostedMax: DateTime
+ count: Int
+ nameMin: String
+ nameMax: String
+ contentMin: String
+ contentMax: String
+ authorMin: String
+ authorMax: String
+ datePostedMin: DateTime
+ datePostedMax: DateTime
}
type UpdateAbstractPayload {
- abstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
- numUids: Int
+ abstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
+ numUids: Int
}
type UpdateMessagePayload {
- message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- numUids: Int
+ message(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ numUids: Int
}
#######################
@@ -338,25 +340,25 @@ type UpdateMessagePayload {
#######################
enum AbstractHasFilter {
- name
+ name
}
enum AbstractOrderable {
- name
+ name
}
enum MessageHasFilter {
- name
- content
- author
- datePosted
+ name
+ content
+ author
+ datePosted
}
enum MessageOrderable {
- name
- content
- author
- datePosted
+ name
+ content
+ author
+ datePosted
}
#######################
@@ -364,73 +366,73 @@ enum MessageOrderable {
#######################
input AbstractFilter {
- id: [ID!]
- has: [AbstractHasFilter]
- and: [AbstractFilter]
- or: [AbstractFilter]
- not: AbstractFilter
+ id: [ID!]
+ has: [AbstractHasFilter]
+ and: [AbstractFilter]
+ or: [AbstractFilter]
+ not: AbstractFilter
}
input AbstractOrder {
- asc: AbstractOrderable
- desc: AbstractOrderable
- then: AbstractOrder
+ asc: AbstractOrderable
+ desc: AbstractOrderable
+ then: AbstractOrder
}
input AbstractPatch {
- name: String
+ name: String
}
input AbstractRef {
- id: ID!
+ id: ID!
}
input AddMessageInput {
- name: String!
- content: String!
- author: String
- datePosted: DateTime
+ name: String!
+ content: String!
+ author: String
+ datePosted: DateTime
}
input MessageFilter {
- id: [ID!]
- has: [MessageHasFilter]
- and: [MessageFilter]
- or: [MessageFilter]
- not: MessageFilter
+ id: [ID!]
+ has: [MessageHasFilter]
+ and: [MessageFilter]
+ or: [MessageFilter]
+ not: MessageFilter
}
input MessageOrder {
- asc: MessageOrderable
- desc: MessageOrderable
- then: MessageOrder
+ asc: MessageOrderable
+ desc: MessageOrderable
+ then: MessageOrder
}
input MessagePatch {
- name: String
- content: String
- author: String
- datePosted: DateTime
+ name: String
+ content: String
+ author: String
+ datePosted: DateTime
}
input MessageRef {
- id: ID
- name: String
- content: String
- author: String
- datePosted: DateTime
+ id: ID
+ name: String
+ content: String
+ author: String
+ datePosted: DateTime
}
input UpdateAbstractInput {
- filter: AbstractFilter!
- set: AbstractPatch
- remove: AbstractPatch
+ filter: AbstractFilter!
+ set: AbstractPatch
+ remove: AbstractPatch
}
input UpdateMessageInput {
- filter: MessageFilter!
- set: MessagePatch
- remove: MessagePatch
+ filter: MessageFilter!
+ set: MessagePatch
+ remove: MessagePatch
}
#######################
@@ -438,12 +440,12 @@ input UpdateMessageInput {
#######################
type Query {
- getAbstract(id: ID!): Abstract
- queryAbstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
- aggregateAbstract(filter: AbstractFilter): AbstractAggregateResult
- getMessage(id: ID!): Message
- queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
- aggregateMessage(filter: MessageFilter): MessageAggregateResult
+ getAbstract(id: ID!): Abstract
+ queryAbstract(filter: AbstractFilter, order: AbstractOrder, first: Int, offset: Int): [Abstract]
+ aggregateAbstract(filter: AbstractFilter): AbstractAggregateResult
+ getMessage(id: ID!): Message
+ queryMessage(filter: MessageFilter, order: MessageOrder, first: Int, offset: Int): [Message]
+ aggregateMessage(filter: MessageFilter): MessageAggregateResult
}
#######################
@@ -451,10 +453,9 @@ type Query {
#######################
type Mutation {
- updateAbstract(input: UpdateAbstractInput!): UpdateAbstractPayload
- deleteAbstract(filter: AbstractFilter!): DeleteAbstractPayload
- addMessage(input: [AddMessageInput!]!): AddMessagePayload
- updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
- deleteMessage(filter: MessageFilter!): DeleteMessagePayload
+ updateAbstract(input: UpdateAbstractInput!): UpdateAbstractPayload
+ deleteAbstract(filter: AbstractFilter!): DeleteAbstractPayload
+ addMessage(input: [AddMessageInput!]!): AddMessagePayload
+ updateMessage(input: UpdateMessageInput!): UpdateMessagePayload
+ deleteMessage(filter: MessageFilter!): DeleteMessagePayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-with-custom-field-on-dgraph-type.graphql b/graphql/schema/testdata/schemagen/output/type-with-custom-field-on-dgraph-type.graphql
index b530551a062..a0b7f564819 100644
--- a/graphql/schema/testdata/schemagen/output/type-with-custom-field-on-dgraph-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-with-custom-field-on-dgraph-type.graphql
@@ -3,15 +3,16 @@
#######################
type Car {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type User {
- id: ID!
- name: String @custom(http: {url:"http://mock:8888/userNames",method:"GET",body:"{uid: $id}"})
- age: Int! @search
- cars: [Car] @custom(http: {url:"http://mock:8888/cars",method:"GET",body:"{uid: $id}"})
+ id: ID!
+ name: String
+ @custom(http: { url: "http://mock:8888/userNames", method: "GET", body: "{uid: $id}" })
+ age: Int! @search
+ cars: [Car] @custom(http: { url: "http://mock:8888/cars", method: "GET", body: "{uid: $id}" })
}
#######################
@@ -30,162 +31,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -197,11 +198,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -210,77 +212,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -288,49 +291,49 @@ input StringHashFilter {
#######################
type AddCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ numUids: Int
}
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type CarAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- msg: String
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ msg: String
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateCarPayload {
- car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- numUids: Int
+ car(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- ageMin: Int
- ageMax: Int
- ageSum: Int
- ageAvg: Float
+ count: Int
+ ageMin: Int
+ ageMax: Int
+ ageSum: Int
+ ageAvg: Float
}
#######################
@@ -338,19 +341,19 @@ type UserAggregateResult {
#######################
enum CarHasFilter {
- name
+ name
}
enum CarOrderable {
- name
+ name
}
enum UserHasFilter {
- age
+ age
}
enum UserOrderable {
- age
+ age
}
#######################
@@ -358,70 +361,70 @@ enum UserOrderable {
#######################
input AddCarInput {
- name: String!
+ name: String!
}
input AddUserInput {
- age: Int!
+ age: Int!
}
input CarFilter {
- id: [ID!]
- has: [CarHasFilter]
- and: [CarFilter]
- or: [CarFilter]
- not: CarFilter
+ id: [ID!]
+ has: [CarHasFilter]
+ and: [CarFilter]
+ or: [CarFilter]
+ not: CarFilter
}
input CarOrder {
- asc: CarOrderable
- desc: CarOrderable
- then: CarOrder
+ asc: CarOrderable
+ desc: CarOrderable
+ then: CarOrder
}
input CarPatch {
- name: String
+ name: String
}
input CarRef {
- id: ID
- name: String
+ id: ID
+ name: String
}
input UpdateCarInput {
- filter: CarFilter!
- set: CarPatch
- remove: CarPatch
+ filter: CarFilter!
+ set: CarPatch
+ remove: CarPatch
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- age: IntFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ age: IntFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- age: Int
+ age: Int
}
input UserRef {
- id: ID
- age: Int
+ id: ID
+ age: Int
}
#######################
@@ -429,12 +432,12 @@ input UserRef {
#######################
type Query {
- getCar(id: ID!): Car
- queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
- aggregateCar(filter: CarFilter): CarAggregateResult
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getCar(id: ID!): Car
+ queryCar(filter: CarFilter, order: CarOrder, first: Int, offset: Int): [Car]
+ aggregateCar(filter: CarFilter): CarAggregateResult
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -442,11 +445,10 @@ type Query {
#######################
type Mutation {
- addCar(input: [AddCarInput!]!): AddCarPayload
- updateCar(input: UpdateCarInput!): UpdateCarPayload
- deleteCar(filter: CarFilter!): DeleteCarPayload
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addCar(input: [AddCarInput!]!): AddCarPayload
+ updateCar(input: UpdateCarInput!): UpdateCarPayload
+ deleteCar(filter: CarFilter!): DeleteCarPayload
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-with-custom-fields-on-remote-type.graphql b/graphql/schema/testdata/schemagen/output/type-with-custom-fields-on-remote-type.graphql
index e2f05334889..b3607aaeec1 100644
--- a/graphql/schema/testdata/schemagen/output/type-with-custom-fields-on-remote-type.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-with-custom-fields-on-remote-type.graphql
@@ -3,15 +3,16 @@
#######################
type Car @remote {
- id: ID!
- name: String!
+ id: ID!
+ name: String!
}
type User {
- id: ID!
- name: String @custom(http: {url:"http://mock:8888/userNames",method:"GET",body:"{uid: $id}"})
- age: Int! @search
- cars: [Car] @custom(http: {url:"http://mock:8888/cars",method:"GET",body:"{uid: $id}"})
+ id: ID!
+ name: String
+ @custom(http: { url: "http://mock:8888/userNames", method: "GET", body: "{uid: $id}" })
+ age: Int! @search
+ cars: [Car] @custom(http: { url: "http://mock:8888/cars", method: "GET", body: "{uid: $id}" })
}
#######################
@@ -30,162 +31,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -197,11 +198,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -210,77 +212,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -288,27 +291,27 @@ input StringHashFilter {
#######################
type AddUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type DeleteUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- msg: String
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ msg: String
+ numUids: Int
}
type UpdateUserPayload {
- user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- numUids: Int
+ user(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ numUids: Int
}
type UserAggregateResult {
- count: Int
- ageMin: Int
- ageMax: Int
- ageSum: Int
- ageAvg: Float
+ count: Int
+ ageMin: Int
+ ageMax: Int
+ ageSum: Int
+ ageAvg: Float
}
#######################
@@ -316,11 +319,11 @@ type UserAggregateResult {
#######################
enum UserHasFilter {
- age
+ age
}
enum UserOrderable {
- age
+ age
}
#######################
@@ -328,37 +331,37 @@ enum UserOrderable {
#######################
input AddUserInput {
- age: Int!
+ age: Int!
}
input UpdateUserInput {
- filter: UserFilter!
- set: UserPatch
- remove: UserPatch
+ filter: UserFilter!
+ set: UserPatch
+ remove: UserPatch
}
input UserFilter {
- id: [ID!]
- age: IntFilter
- has: [UserHasFilter]
- and: [UserFilter]
- or: [UserFilter]
- not: UserFilter
+ id: [ID!]
+ age: IntFilter
+ has: [UserHasFilter]
+ and: [UserFilter]
+ or: [UserFilter]
+ not: UserFilter
}
input UserOrder {
- asc: UserOrderable
- desc: UserOrderable
- then: UserOrder
+ asc: UserOrderable
+ desc: UserOrderable
+ then: UserOrder
}
input UserPatch {
- age: Int
+ age: Int
}
input UserRef {
- id: ID
- age: Int
+ id: ID
+ age: Int
}
#######################
@@ -366,9 +369,9 @@ input UserRef {
#######################
type Query {
- getUser(id: ID!): User
- queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
- aggregateUser(filter: UserFilter): UserAggregateResult
+ getUser(id: ID!): User
+ queryUser(filter: UserFilter, order: UserOrder, first: Int, offset: Int): [User]
+ aggregateUser(filter: UserFilter): UserAggregateResult
}
#######################
@@ -376,8 +379,7 @@ type Query {
#######################
type Mutation {
- addUser(input: [AddUserInput!]!): AddUserPayload
- updateUser(input: UpdateUserInput!): UpdateUserPayload
- deleteUser(filter: UserFilter!): DeleteUserPayload
+ addUser(input: [AddUserInput!]!): AddUserPayload
+ updateUser(input: UpdateUserInput!): UpdateUserPayload
+ deleteUser(filter: UserFilter!): DeleteUserPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/type-without-orderables.graphql b/graphql/schema/testdata/schemagen/output/type-without-orderables.graphql
index 7dc66b26104..281b3651bae 100644
--- a/graphql/schema/testdata/schemagen/output/type-without-orderables.graphql
+++ b/graphql/schema/testdata/schemagen/output/type-without-orderables.graphql
@@ -3,10 +3,10 @@
#######################
type Data {
- id: ID!
- intList: [Int]
- stringList: [String]
- metaData(filter: DataFilter): Data
+ id: ID!
+ intList: [Int]
+ stringList: [String]
+ metaData(filter: DataFilter): Data
}
#######################
@@ -25,162 +25,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -192,11 +192,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -205,77 +206,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -283,23 +285,23 @@ input StringHashFilter {
#######################
type AddDataPayload {
- data(filter: DataFilter, first: Int, offset: Int): [Data]
- numUids: Int
+ data(filter: DataFilter, first: Int, offset: Int): [Data]
+ numUids: Int
}
type DataAggregateResult {
- count: Int
+ count: Int
}
type DeleteDataPayload {
- data(filter: DataFilter, first: Int, offset: Int): [Data]
- msg: String
- numUids: Int
+ data(filter: DataFilter, first: Int, offset: Int): [Data]
+ msg: String
+ numUids: Int
}
type UpdateDataPayload {
- data(filter: DataFilter, first: Int, offset: Int): [Data]
- numUids: Int
+ data(filter: DataFilter, first: Int, offset: Int): [Data]
+ numUids: Int
}
#######################
@@ -307,9 +309,9 @@ type UpdateDataPayload {
#######################
enum DataHasFilter {
- intList
- stringList
- metaData
+ intList
+ stringList
+ metaData
}
#######################
@@ -317,36 +319,36 @@ enum DataHasFilter {
#######################
input AddDataInput {
- intList: [Int]
- stringList: [String]
- metaData: DataRef
+ intList: [Int]
+ stringList: [String]
+ metaData: DataRef
}
input DataFilter {
- id: [ID!]
- has: [DataHasFilter]
- and: [DataFilter]
- or: [DataFilter]
- not: DataFilter
+ id: [ID!]
+ has: [DataHasFilter]
+ and: [DataFilter]
+ or: [DataFilter]
+ not: DataFilter
}
input DataPatch {
- intList: [Int]
- stringList: [String]
- metaData: DataRef
+ intList: [Int]
+ stringList: [String]
+ metaData: DataRef
}
input DataRef {
- id: ID
- intList: [Int]
- stringList: [String]
- metaData: DataRef
+ id: ID
+ intList: [Int]
+ stringList: [String]
+ metaData: DataRef
}
input UpdateDataInput {
- filter: DataFilter!
- set: DataPatch
- remove: DataPatch
+ filter: DataFilter!
+ set: DataPatch
+ remove: DataPatch
}
#######################
@@ -354,9 +356,9 @@ input UpdateDataInput {
#######################
type Query {
- getData(id: ID!): Data
- queryData(filter: DataFilter, first: Int, offset: Int): [Data]
- aggregateData(filter: DataFilter): DataAggregateResult
+ getData(id: ID!): Data
+ queryData(filter: DataFilter, first: Int, offset: Int): [Data]
+ aggregateData(filter: DataFilter): DataAggregateResult
}
#######################
@@ -364,8 +366,7 @@ type Query {
#######################
type Mutation {
- addData(input: [AddDataInput!]!): AddDataPayload
- updateData(input: UpdateDataInput!): UpdateDataPayload
- deleteData(filter: DataFilter!): DeleteDataPayload
+ addData(input: [AddDataInput!]!): AddDataPayload
+ updateData(input: UpdateDataInput!): UpdateDataPayload
+ deleteData(filter: DataFilter!): DeleteDataPayload
}
-
diff --git a/graphql/schema/testdata/schemagen/output/union.graphql b/graphql/schema/testdata/schemagen/output/union.graphql
index a9fcb67dcae..d0d9664120f 100644
--- a/graphql/schema/testdata/schemagen/output/union.graphql
+++ b/graphql/schema/testdata/schemagen/output/union.graphql
@@ -3,46 +3,46 @@
#######################
interface Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- enemyOf(filter: ResidentFilter): Resident
- appearsIn: [Episode!]! @search
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ enemyOf(filter: ResidentFilter): Resident
+ appearsIn: [Episode!]! @search
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
type Human implements Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- enemyOf(filter: ResidentFilter): Resident
- appearsIn: [Episode!]! @search
- starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- totalCredits: Int
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
- starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ enemyOf(filter: ResidentFilter): Resident
+ appearsIn: [Episode!]! @search
+ starships(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ totalCredits: Int
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ starshipsAggregate(filter: StarshipFilter): StarshipAggregateResult
}
type Droid implements Character {
- id: ID!
- name: String! @search(by: [exact])
- friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- enemyOf(filter: ResidentFilter): Resident
- appearsIn: [Episode!]! @search
- primaryFunction: String
- friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
+ id: ID!
+ name: String! @search(by: [exact])
+ friends(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ enemyOf(filter: ResidentFilter): Resident
+ appearsIn: [Episode!]! @search
+ primaryFunction: String
+ friendsAggregate(filter: CharacterFilter): CharacterAggregateResult
}
enum Episode {
- NEWHOPE
- EMPIRE
- JEDI
+ NEWHOPE
+ EMPIRE
+ JEDI
}
type Starship {
- id: ID!
- name: String! @search(by: [term])
- length: Float
+ id: ID!
+ name: String! @search(by: [term])
+ length: Float
}
union Resident = Human | Droid | Starship
@@ -50,10 +50,10 @@ union Resident = Human | Droid | Starship
union Tool @remote = Droid | Starship
type Planet {
- id: ID!
- name: String!
- residents(filter: ResidentFilter, first: Int, offset: Int): [Resident!] @dgraph(pred: "residents")
- bestTool: Tool @custom(http: {url:"http://mock:8888/tool/$id",method:"GET"})
+ id: ID!
+ name: String!
+ residents(filter: ResidentFilter, first: Int, offset: Int): [Resident!] @dgraph(pred: "residents")
+ bestTool: Tool @custom(http: { url: "http://mock:8888/tool/$id", method: "GET" })
}
#######################
@@ -72,162 +72,162 @@ For example: "1985-04-12T23:20:50.52Z" represents 20 mins 50.52 secs after the 2
"""
scalar DateTime
-input IntRange{
- min: Int!
- max: Int!
+input IntRange {
+ min: Int!
+ max: Int!
}
-input FloatRange{
- min: Float!
- max: Float!
+input FloatRange {
+ min: Float!
+ max: Float!
}
-input Int64Range{
- min: Int64!
- max: Int64!
+input Int64Range {
+ min: Int64!
+ max: Int64!
}
-input DateTimeRange{
- min: DateTime!
- max: DateTime!
+input DateTimeRange {
+ min: DateTime!
+ max: DateTime!
}
-input StringRange{
- min: String!
- max: String!
+input StringRange {
+ min: String!
+ max: String!
}
enum DgraphIndex {
- int
- int64
- float
- bool
- hash
- exact
- term
- fulltext
- trigram
- regexp
- year
- month
- day
- hour
- geo
- hnsw
+ int
+ int64
+ float
+ bool
+ hash
+ exact
+ term
+ fulltext
+ trigram
+ regexp
+ year
+ month
+ day
+ hour
+ geo
+ hnsw
}
input AuthRule {
- and: [AuthRule]
- or: [AuthRule]
- not: AuthRule
- rule: String
+ and: [AuthRule]
+ or: [AuthRule]
+ not: AuthRule
+ rule: String
}
enum HTTPMethod {
- GET
- POST
- PUT
- PATCH
- DELETE
+ GET
+ POST
+ PUT
+ PATCH
+ DELETE
}
enum Mode {
- BATCH
- SINGLE
+ BATCH
+ SINGLE
}
input CustomHTTP {
- url: String!
- method: HTTPMethod!
- body: String
- graphql: String
- mode: Mode
- forwardHeaders: [String!]
- secretHeaders: [String!]
- introspectionHeaders: [String!]
- skipIntrospection: Boolean
+ url: String!
+ method: HTTPMethod!
+ body: String
+ graphql: String
+ mode: Mode
+ forwardHeaders: [String!]
+ secretHeaders: [String!]
+ introspectionHeaders: [String!]
+ skipIntrospection: Boolean
}
input DgraphDefault {
- value: String
+ value: String
}
type Point {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input PointRef {
- longitude: Float!
- latitude: Float!
+ longitude: Float!
+ latitude: Float!
}
input NearFilter {
- distance: Float!
- coordinate: PointRef!
+ distance: Float!
+ coordinate: PointRef!
}
input PointGeoFilter {
- near: NearFilter
- within: WithinFilter
+ near: NearFilter
+ within: WithinFilter
}
type PointList {
- points: [Point!]!
+ points: [Point!]!
}
input PointListRef {
- points: [PointRef!]!
+ points: [PointRef!]!
}
type Polygon {
- coordinates: [PointList!]!
+ coordinates: [PointList!]!
}
input PolygonRef {
- coordinates: [PointListRef!]!
+ coordinates: [PointListRef!]!
}
type MultiPolygon {
- polygons: [Polygon!]!
+ polygons: [Polygon!]!
}
input MultiPolygonRef {
- polygons: [PolygonRef!]!
+ polygons: [PolygonRef!]!
}
input WithinFilter {
- polygon: PolygonRef!
+ polygon: PolygonRef!
}
input ContainsFilter {
- point: PointRef
- polygon: PolygonRef
+ point: PointRef
+ polygon: PolygonRef
}
input IntersectsFilter {
- polygon: PolygonRef
- multiPolygon: MultiPolygonRef
+ polygon: PolygonRef
+ multiPolygon: MultiPolygonRef
}
input PolygonGeoFilter {
- near: NearFilter
- within: WithinFilter
- contains: ContainsFilter
- intersects: IntersectsFilter
+ near: NearFilter
+ within: WithinFilter
+ contains: ContainsFilter
+ intersects: IntersectsFilter
}
input GenerateQueryParams {
- get: Boolean
- query: Boolean
- password: Boolean
- aggregate: Boolean
+ get: Boolean
+ query: Boolean
+ password: Boolean
+ aggregate: Boolean
}
input GenerateMutationParams {
- add: Boolean
- update: Boolean
- delete: Boolean
+ add: Boolean
+ update: Boolean
+ delete: Boolean
}
directive @hasInverse(field: String!) on FIELD_DEFINITION
@@ -239,11 +239,12 @@ directive @default(add: DgraphDefault, update: DgraphDefault) on FIELD_DEFINITIO
directive @withSubscription on OBJECT | INTERFACE | FIELD_DEFINITION
directive @secret(field: String!, pred: String) on OBJECT | INTERFACE
directive @auth(
- password: AuthRule
- query: AuthRule,
- add: AuthRule,
- update: AuthRule,
- delete: AuthRule) on OBJECT | INTERFACE
+ password: AuthRule
+ query: AuthRule
+ add: AuthRule
+ update: AuthRule
+ delete: AuthRule
+) on OBJECT | INTERFACE
directive @custom(http: CustomHTTP, dql: String) on FIELD_DEFINITION
directive @remote on OBJECT | INTERFACE | UNION | INPUT_OBJECT | ENUM
directive @remoteResponse(name: String) on FIELD_DEFINITION
@@ -252,77 +253,78 @@ directive @lambda on FIELD_DEFINITION
directive @lambdaOnMutate(add: Boolean, update: Boolean, delete: Boolean) on OBJECT | INTERFACE
directive @cacheControl(maxAge: Int!) on QUERY
directive @generate(
- query: GenerateQueryParams,
- mutation: GenerateMutationParams,
- subscription: Boolean) on OBJECT | INTERFACE
+ query: GenerateQueryParams
+ mutation: GenerateMutationParams
+ subscription: Boolean
+) on OBJECT | INTERFACE
input IntFilter {
- eq: Int
- in: [Int]
- le: Int
- lt: Int
- ge: Int
- gt: Int
- between: IntRange
+ eq: Int
+ in: [Int]
+ le: Int
+ lt: Int
+ ge: Int
+ gt: Int
+ between: IntRange
}
input Int64Filter {
- eq: Int64
- in: [Int64]
- le: Int64
- lt: Int64
- ge: Int64
- gt: Int64
- between: Int64Range
+ eq: Int64
+ in: [Int64]
+ le: Int64
+ lt: Int64
+ ge: Int64
+ gt: Int64
+ between: Int64Range
}
input FloatFilter {
- eq: Float
- in: [Float]
- le: Float
- lt: Float
- ge: Float
- gt: Float
- between: FloatRange
+ eq: Float
+ in: [Float]
+ le: Float
+ lt: Float
+ ge: Float
+ gt: Float
+ between: FloatRange
}
input DateTimeFilter {
- eq: DateTime
- in: [DateTime]
- le: DateTime
- lt: DateTime
- ge: DateTime
- gt: DateTime
- between: DateTimeRange
+ eq: DateTime
+ in: [DateTime]
+ le: DateTime
+ lt: DateTime
+ ge: DateTime
+ gt: DateTime
+ between: DateTimeRange
}
input StringTermFilter {
- allofterms: String
- anyofterms: String
+ allofterms: String
+ anyofterms: String
}
input StringRegExpFilter {
- regexp: String
+ regexp: String
}
input StringFullTextFilter {
- alloftext: String
- anyoftext: String
+ alloftext: String
+ anyoftext: String
}
input StringExactFilter {
- eq: String
- in: [String]
- le: String
- lt: String
- ge: String
- gt: String
- between: StringRange
+ eq: String
+ in: [String]
+ le: String
+ lt: String
+ ge: String
+ gt: String
+ between: StringRange
}
input StringHashFilter {
- eq: String
- in: [String]
+ eq: String
+ in: [String]
}
#######################
@@ -330,118 +332,118 @@ input StringHashFilter {
#######################
type AddDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type AddHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type AddPlanetPayload {
- planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
- numUids: Int
+ planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
+ numUids: Int
}
type AddStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
type CharacterAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type DeleteCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- msg: String
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ msg: String
+ numUids: Int
}
type DeleteDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- msg: String
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ msg: String
+ numUids: Int
}
type DeleteHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- msg: String
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ msg: String
+ numUids: Int
}
type DeletePlanetPayload {
- planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
- msg: String
- numUids: Int
+ planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
+ msg: String
+ numUids: Int
}
type DeleteStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- msg: String
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ msg: String
+ numUids: Int
}
type DroidAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- primaryFunctionMin: String
- primaryFunctionMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
+ primaryFunctionMin: String
+ primaryFunctionMax: String
}
type HumanAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- totalCreditsMin: Int
- totalCreditsMax: Int
- totalCreditsSum: Int
- totalCreditsAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ totalCreditsMin: Int
+ totalCreditsMax: Int
+ totalCreditsSum: Int
+ totalCreditsAvg: Float
}
type PlanetAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
+ count: Int
+ nameMin: String
+ nameMax: String
}
type StarshipAggregateResult {
- count: Int
- nameMin: String
- nameMax: String
- lengthMin: Float
- lengthMax: Float
- lengthSum: Float
- lengthAvg: Float
+ count: Int
+ nameMin: String
+ nameMax: String
+ lengthMin: Float
+ lengthMax: Float
+ lengthSum: Float
+ lengthAvg: Float
}
type UpdateCharacterPayload {
- character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- numUids: Int
+ character(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
+ numUids: Int
}
type UpdateDroidPayload {
- droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- numUids: Int
+ droid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ numUids: Int
}
type UpdateHumanPayload {
- human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- numUids: Int
+ human(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ numUids: Int
}
type UpdatePlanetPayload {
- planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
- numUids: Int
+ planet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
+ numUids: Int
}
type UpdateStarshipPayload {
- starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- numUids: Int
+ starship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ numUids: Int
}
#######################
@@ -449,66 +451,66 @@ type UpdateStarshipPayload {
#######################
enum CharacterHasFilter {
- name
- friends
- enemyOf
- appearsIn
+ name
+ friends
+ enemyOf
+ appearsIn
}
enum CharacterOrderable {
- name
+ name
}
enum DroidHasFilter {
- name
- friends
- enemyOf
- appearsIn
- primaryFunction
+ name
+ friends
+ enemyOf
+ appearsIn
+ primaryFunction
}
enum DroidOrderable {
- name
- primaryFunction
+ name
+ primaryFunction
}
enum HumanHasFilter {
- name
- friends
- enemyOf
- appearsIn
- starships
- totalCredits
+ name
+ friends
+ enemyOf
+ appearsIn
+ starships
+ totalCredits
}
enum HumanOrderable {
- name
- totalCredits
+ name
+ totalCredits
}
enum PlanetHasFilter {
- name
- residents
+ name
+ residents
}
enum PlanetOrderable {
- name
+ name
}
enum ResidentType {
- Human
- Droid
- Starship
+ Human
+ Droid
+ Starship
}
enum StarshipHasFilter {
- name
- length
+ name
+ length
}
enum StarshipOrderable {
- name
- length
+ name
+ length
}
#######################
@@ -516,224 +518,224 @@ enum StarshipOrderable {
#######################
input AddDroidInput {
- name: String!
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]!
- primaryFunction: String
+ name: String!
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]!
+ primaryFunction: String
}
input AddHumanInput {
- name: String!
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]!
- starships: [StarshipRef]
- totalCredits: Int
+ name: String!
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]!
+ starships: [StarshipRef]
+ totalCredits: Int
}
input AddPlanetInput {
- name: String!
- residents: [ResidentRef!]
+ name: String!
+ residents: [ResidentRef!]
}
input AddStarshipInput {
- name: String!
- length: Float
+ name: String!
+ length: Float
}
input CharacterFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [CharacterHasFilter]
- and: [CharacterFilter]
- or: [CharacterFilter]
- not: CharacterFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [CharacterHasFilter]
+ and: [CharacterFilter]
+ or: [CharacterFilter]
+ not: CharacterFilter
}
input CharacterOrder {
- asc: CharacterOrderable
- desc: CharacterOrderable
- then: CharacterOrder
+ asc: CharacterOrderable
+ desc: CharacterOrderable
+ then: CharacterOrder
}
input CharacterPatch {
- name: String
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]
+ name: String
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]
}
input CharacterRef {
- id: ID!
+ id: ID!
}
input DroidFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [DroidHasFilter]
- and: [DroidFilter]
- or: [DroidFilter]
- not: DroidFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [DroidHasFilter]
+ and: [DroidFilter]
+ or: [DroidFilter]
+ not: DroidFilter
}
input DroidOrder {
- asc: DroidOrderable
- desc: DroidOrderable
- then: DroidOrder
+ asc: DroidOrderable
+ desc: DroidOrderable
+ then: DroidOrder
}
input DroidPatch {
- name: String
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]
- primaryFunction: String
+ name: String
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]
+ primaryFunction: String
}
input DroidRef {
- id: ID
- name: String
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]
- primaryFunction: String
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]
+ primaryFunction: String
}
input Episode_hash {
- eq: Episode
- in: [Episode]
+ eq: Episode
+ in: [Episode]
}
input HumanFilter {
- id: [ID!]
- name: StringExactFilter
- appearsIn: Episode_hash
- has: [HumanHasFilter]
- and: [HumanFilter]
- or: [HumanFilter]
- not: HumanFilter
+ id: [ID!]
+ name: StringExactFilter
+ appearsIn: Episode_hash
+ has: [HumanHasFilter]
+ and: [HumanFilter]
+ or: [HumanFilter]
+ not: HumanFilter
}
input HumanOrder {
- asc: HumanOrderable
- desc: HumanOrderable
- then: HumanOrder
+ asc: HumanOrderable
+ desc: HumanOrderable
+ then: HumanOrder
}
input HumanPatch {
- name: String
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
+ name: String
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
}
input HumanRef {
- id: ID
- name: String
- friends: [CharacterRef]
- enemyOf: ResidentRef
- appearsIn: [Episode!]
- starships: [StarshipRef]
- totalCredits: Int
+ id: ID
+ name: String
+ friends: [CharacterRef]
+ enemyOf: ResidentRef
+ appearsIn: [Episode!]
+ starships: [StarshipRef]
+ totalCredits: Int
}
input PlanetFilter {
- id: [ID!]
- has: [PlanetHasFilter]
- and: [PlanetFilter]
- or: [PlanetFilter]
- not: PlanetFilter
+ id: [ID!]
+ has: [PlanetHasFilter]
+ and: [PlanetFilter]
+ or: [PlanetFilter]
+ not: PlanetFilter
}
input PlanetOrder {
- asc: PlanetOrderable
- desc: PlanetOrderable
- then: PlanetOrder
+ asc: PlanetOrderable
+ desc: PlanetOrderable
+ then: PlanetOrder
}
input PlanetPatch {
- name: String
- residents: [ResidentRef!]
+ name: String
+ residents: [ResidentRef!]
}
input PlanetRef {
- id: ID
- name: String
- residents: [ResidentRef!]
+ id: ID
+ name: String
+ residents: [ResidentRef!]
}
input ResidentFilter {
- memberTypes: [ResidentType!]
- humanFilter: HumanFilter
- droidFilter: DroidFilter
- starshipFilter: StarshipFilter
+ memberTypes: [ResidentType!]
+ humanFilter: HumanFilter
+ droidFilter: DroidFilter
+ starshipFilter: StarshipFilter
}
input ResidentRef {
- humanRef: HumanRef
- droidRef: DroidRef
- starshipRef: StarshipRef
+ humanRef: HumanRef
+ droidRef: DroidRef
+ starshipRef: StarshipRef
}
input StarshipFilter {
- id: [ID!]
- name: StringTermFilter
- has: [StarshipHasFilter]
- and: [StarshipFilter]
- or: [StarshipFilter]
- not: StarshipFilter
+ id: [ID!]
+ name: StringTermFilter
+ has: [StarshipHasFilter]
+ and: [StarshipFilter]
+ or: [StarshipFilter]
+ not: StarshipFilter
}
input StarshipOrder {
- asc: StarshipOrderable
- desc: StarshipOrderable
- then: StarshipOrder
+ asc: StarshipOrderable
+ desc: StarshipOrderable
+ then: StarshipOrder
}
input StarshipPatch {
- name: String
- length: Float
+ name: String
+ length: Float
}
input StarshipRef {
- id: ID
- name: String
- length: Float
+ id: ID
+ name: String
+ length: Float
}
input UpdateCharacterInput {
- filter: CharacterFilter!
- set: CharacterPatch
- remove: CharacterPatch
+ filter: CharacterFilter!
+ set: CharacterPatch
+ remove: CharacterPatch
}
input UpdateDroidInput {
- filter: DroidFilter!
- set: DroidPatch
- remove: DroidPatch
+ filter: DroidFilter!
+ set: DroidPatch
+ remove: DroidPatch
}
input UpdateHumanInput {
- filter: HumanFilter!
- set: HumanPatch
- remove: HumanPatch
+ filter: HumanFilter!
+ set: HumanPatch
+ remove: HumanPatch
}
input UpdatePlanetInput {
- filter: PlanetFilter!
- set: PlanetPatch
- remove: PlanetPatch
+ filter: PlanetFilter!
+ set: PlanetPatch
+ remove: PlanetPatch
}
input UpdateStarshipInput {
- filter: StarshipFilter!
- set: StarshipPatch
- remove: StarshipPatch
+ filter: StarshipFilter!
+ set: StarshipPatch
+ remove: StarshipPatch
}
#######################
@@ -741,21 +743,26 @@ input UpdateStarshipInput {
#######################
type Query {
- getCharacter(id: ID!): Character
- queryCharacter(filter: CharacterFilter, order: CharacterOrder, first: Int, offset: Int): [Character]
- aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
- getHuman(id: ID!): Human
- queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
- aggregateHuman(filter: HumanFilter): HumanAggregateResult
- getDroid(id: ID!): Droid
- queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
- aggregateDroid(filter: DroidFilter): DroidAggregateResult
- getStarship(id: ID!): Starship
- queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
- aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
- getPlanet(id: ID!): Planet
- queryPlanet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
- aggregatePlanet(filter: PlanetFilter): PlanetAggregateResult
+ getCharacter(id: ID!): Character
+ queryCharacter(
+ filter: CharacterFilter
+ order: CharacterOrder
+ first: Int
+ offset: Int
+ ): [Character]
+ aggregateCharacter(filter: CharacterFilter): CharacterAggregateResult
+ getHuman(id: ID!): Human
+ queryHuman(filter: HumanFilter, order: HumanOrder, first: Int, offset: Int): [Human]
+ aggregateHuman(filter: HumanFilter): HumanAggregateResult
+ getDroid(id: ID!): Droid
+ queryDroid(filter: DroidFilter, order: DroidOrder, first: Int, offset: Int): [Droid]
+ aggregateDroid(filter: DroidFilter): DroidAggregateResult
+ getStarship(id: ID!): Starship
+ queryStarship(filter: StarshipFilter, order: StarshipOrder, first: Int, offset: Int): [Starship]
+ aggregateStarship(filter: StarshipFilter): StarshipAggregateResult
+ getPlanet(id: ID!): Planet
+ queryPlanet(filter: PlanetFilter, order: PlanetOrder, first: Int, offset: Int): [Planet]
+ aggregatePlanet(filter: PlanetFilter): PlanetAggregateResult
}
#######################
@@ -763,19 +770,18 @@ type Query {
#######################
type Mutation {
- updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
- deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
- addHuman(input: [AddHumanInput!]!): AddHumanPayload
- updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
- deleteHuman(filter: HumanFilter!): DeleteHumanPayload
- addDroid(input: [AddDroidInput!]!): AddDroidPayload
- updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
- deleteDroid(filter: DroidFilter!): DeleteDroidPayload
- addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
- updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
- deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
- addPlanet(input: [AddPlanetInput!]!): AddPlanetPayload
- updatePlanet(input: UpdatePlanetInput!): UpdatePlanetPayload
- deletePlanet(filter: PlanetFilter!): DeletePlanetPayload
+ updateCharacter(input: UpdateCharacterInput!): UpdateCharacterPayload
+ deleteCharacter(filter: CharacterFilter!): DeleteCharacterPayload
+ addHuman(input: [AddHumanInput!]!): AddHumanPayload
+ updateHuman(input: UpdateHumanInput!): UpdateHumanPayload
+ deleteHuman(filter: HumanFilter!): DeleteHumanPayload
+ addDroid(input: [AddDroidInput!]!): AddDroidPayload
+ updateDroid(input: UpdateDroidInput!): UpdateDroidPayload
+ deleteDroid(filter: DroidFilter!): DeleteDroidPayload
+ addStarship(input: [AddStarshipInput!]!): AddStarshipPayload
+ updateStarship(input: UpdateStarshipInput!): UpdateStarshipPayload
+ deleteStarship(filter: StarshipFilter!): DeleteStarshipPayload
+ addPlanet(input: [AddPlanetInput!]!): AddPlanetPayload
+ updatePlanet(input: UpdatePlanetInput!): UpdatePlanetPayload
+ deletePlanet(filter: PlanetFilter!): DeletePlanetPayload
}
-
diff --git a/graphql/testdata/custom_bench/README.md b/graphql/testdata/custom_bench/README.md
index 4ae04cf7c2f..f8941e7a000 100644
--- a/graphql/testdata/custom_bench/README.md
+++ b/graphql/testdata/custom_bench/README.md
@@ -1,52 +1,56 @@
# README
-### About
+## About
+
This directory contains some scripts and resources which were used to perform benchmarking and
profiling of normal and `@custom` HTTP queries. `@custom` HTTP queries were benchmarked for both
-SINGLE and BATCH mode over REST. Please have a look at the [discuss post](https://discuss.dgraph.io/t/graphql-query-mutation-benchmarking-result/8604/5)
-to find out more about the results.
+SINGLE and BATCH mode over REST. Please have a look at the
+[discuss post](https://discuss.dgraph.io/t/graphql-query-mutation-benchmarking-result/8604/5) to
+find out more about the results.
+
+## Usage
-### Usage
-* First, generate some data for the Restaurant schema provided with [datagen](../datagen). Follow
-the datagen README on how to do that. At the end of that, you will have a `~/__data` directory
-, that is all we need to get started.
-* Find out the `maxTxnTs` for that data, by starting zero and alpha in that directory and sending
-a GET request to `/state` endpoint of alpha. Search for `maxTxnTs` in the HTTP response, and you
-will get the value. Copy that value. Set `maxTxnTs` const in [graphql_profiler.go](profiling/graphql_profiler.go)
-to that value. Now, stop the alpha and zero.
-* Copy that data directory `~/__data` inside [profiling](profiling) directory as `__data`.
-* Copy `schema.graphql` from [datagen](../datagen) inside [profiling/__data](profiling/__data).
-* Now, make sure no other dgraph instance is running on your host machine or in docker. These
-scripts use the default ports, so they may conflict. Also, be sure that your system has enough
-RAM, otherwise, some queries may lead to OOM and killing of alpha processes on host machine and
-docker.
-* Also make sure that `localhost:9000` is available, as the `dgraph_api_server` uses that port.
-* Now, checkout dgraph to `abhimanyu/benchmarking` branch & do a `make install`. We will use
- dgraph binary built from that branch, as it exposes a header to measure GraphQL layer time.
-* Change your current working directory to the directory containing this README file.
-* `$ go build dgraph_api_server.go`
-* `$ nohup ./dgraph_api_server > dgraph_api_server.out &` - nohup is useful if you are on ssh.
-* `$ cd profiling`
-* `$ go build graphql_profiler.go`
-* `$ nohup ./graphql_profiler > graphql_profiler.out &`
+- First, generate some data for the Restaurant schema provided with [datagen](../datagen). Follow
+ the datagen README on how to do that. At the end of that, you will have a `~/__data` directory;
+ that is all we need to get started.
+- Find out the `maxTxnTs` for that data by starting zero and alpha in that directory and sending a
+ GET request to the `/state` endpoint of alpha. Search for `maxTxnTs` in the HTTP response to get
+ the value (a Go sketch of this step follows this diff). Set the `maxTxnTs` const in
+ [graphql_profiler.go](profiling/graphql_profiler.go) to that value. Now, stop the alpha and zero.
+- Copy that data directory `~/__data` inside [profiling](profiling) directory as `__data`.
+- Copy `schema.graphql` from [datagen](../datagen) inside [profiling/\_\_data](profiling/__data).
+- Now, make sure no other dgraph instance is running on your host machine or in docker. These
+ scripts use the default ports, so they may conflict. Also, be sure that your system has enough
+ RAM; otherwise, some queries may lead to OOM, killing alpha processes on the host machine and in
+ docker.
+- Also make sure that `localhost:9000` is available, as the `dgraph_api_server` uses that port.
+- Now, check out the `abhimanyu/benchmarking` branch of dgraph and run `make install`. We will use
+ the dgraph binary built from that branch, as it exposes a header to measure GraphQL layer time.
+- Change your current working directory to the directory containing this README file.
+- `$ go build dgraph_api_server.go`
+- `$ nohup ./dgraph_api_server > dgraph_api_server.out &` - nohup is useful if you are on ssh.
+- `$ cd profiling`
+- `$ go build graphql_profiler.go`
+- `$ nohup ./graphql_profiler > graphql_profiler.out &`
-The last step should start the profiler. It will keep collecting all the benchmarking and
-profiling information for you. If you are on ssh, you can exit now and come back later to find
-the results inside [profiling/results](profiling/results) directory. For each benchmark schema and
-its corresponding queries, you will get the results inside respective sub-directories inside the
-results directory. The profiler also writes a log file named `graphql_profiler.log`. You can look
-at that, `graphql_profiler.out`, or `dgraph_api_server.out` to find out more about any errors that
-may happen during the run.
+The last step should start the profiler. It will keep collecting all the benchmarking and profiling
+information for you. If you are on ssh, you can exit now and come back later to find the results
+inside the [profiling/results](profiling/results) directory. For each benchmark schema and its
+corresponding queries, you will get the results in respective sub-directories of the results
+directory. The profiler also writes a log file named `graphql_profiler.log`. You can look at that,
+`graphql_profiler.out`, or `dgraph_api_server.out` to find out more about any errors that may happen
+during the run.
+
+## How it works
-### How does it work
There are many directories inside [profiling/benchmarks](profiling/benchmarks) directory. Each
-directory contains a `schema.graphql` file and another `queries` directory, which in-turn
-contains some `.query` files. Each `.query` file contains a query which is run against the
-corresponding schema.
+directory contains a `schema.graphql` file and another `queries` directory, which in turn contains
+some `.query` files. Each `.query` file contains a query which is run against the corresponding
+schema.
-The schema file in [0-th benchmark](profiling/benchmarks/0) is a simple schema. It does not have
-any custom directives. So, when queries are run against this schema, it would just collect
-benchmark data for pure GraphQL layer.
+The schema file in [0-th benchmark](profiling/benchmarks/0) is a simple schema. It does not have any
+custom directives. So, when queries are run against this schema, the profiler just collects
+benchmark data for the pure GraphQL layer.
The rest of i-th benchmark directories contain schemas with `@custom` directive, varying over SINGLE
and BATCH mode and also where the `@custom` is applied.
@@ -54,19 +58,19 @@ and BATCH mode and also where the `@custom` is applied.
The profiler first starts dgraph zero and alpha in docker with the simple schema contained in the
`__data` directory. The docker instance serves as the final API server for `@custom` HTTP calls.
Then, for each benchmarking schema it starts a dgraph instance on host, applying that schema and
-performing all the queries for that schema against the host dgraph instance. The
-`dgraph_api_server` acts as the necessary middleware between the host dgraph instance and the
-docker dgraph instance.
+performing all the queries for that schema against the host dgraph instance. The `dgraph_api_server`
+acts as the necessary middleware between the host dgraph instance and the docker dgraph instance.
For each schema, the collected benchmarking and profiling results are saved inside a sub-directory
of results directory. This is done for each query for that schema. The main files to look for are:
-* `_Stats.txt`: This contains the overall average results for all queries for a schema.
-* `_Durations.txt`: This contains the actual and average results for each query.
-* `*_tracing.txt`: These files contain the Errors and Extensions reported by the GraphQL layer
- for that query.
-* `*heap*.prof`: Files named like this are heap profiles. There are 3 kinds of heap profiles
- saved for each query. Pre, during and post. There may be many `during` profiles as a query may
- take a long time to complete.
-* `*profile.prof`: Files named like this are CPU profiles for that query.
-You will need to use `go tool pprof` to analyze these CPU and heap profiles.
\ No newline at end of file
+- `_Stats.txt`: This contains the overall average results for all queries for a schema.
+- `_Durations.txt`: This contains the actual and average results for each query.
+- `*_tracing.txt`: These files contain the Errors and Extensions reported by the GraphQL layer for
+ that query.
+- `*heap*.prof`: Files named like this are heap profiles. There are 3 kinds of heap profiles saved
+ for each query: pre, during, and post. There may be many `during` profiles as a query may take a
+ long time to complete.
+- `*profile.prof`: Files named like this are CPU profiles for that query.
+
+You will need to use `go tool pprof` to analyze these CPU and heap profiles.
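The `maxTxnTs` step in the README above is easy to script. Below is a minimal Go sketch; the `/state` endpoint and the `maxTxnTs` field name come from the README itself, while the port (`localhost:8080`, alpha's default HTTP port) and the exact response shape are assumptions:

```go
// Sketch: read maxTxnTs from a running alpha's /state endpoint.
// Assumptions: alpha serves HTTP on localhost:8080 and /state returns
// JSON with a top-level "maxTxnTs" number; adjust both if your setup differs.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:8080/state")
	if err != nil {
		log.Fatalf("GET /state: %v", err)
	}
	defer resp.Body.Close()

	var state struct {
		MaxTxnTs uint64 `json:"maxTxnTs"`
	}
	if err := json.NewDecoder(resp.Body).Decode(&state); err != nil {
		log.Fatalf("decode /state response: %v", err)
	}
	// This is the value to copy into the maxTxnTs const in graphql_profiler.go.
	fmt.Println("maxTxnTs =", state.MaxTxnTs)
}
```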
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/0/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/0/schema.graphql
index 1c65e7e21bc..6af200b0c80 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/0/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/0/schema.graphql
@@ -1,60 +1,60 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/1_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/1_batch/schema.graphql
index 96b564bdf46..d7aa86ffd17 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/1_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/1_batch/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
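In the BATCH-mode schema above, the `@custom` directive points Dgraph at `http://localhost:9000/getBatchType`. A stand-in for that route could look like the sketch below. This is not the repository's `dgraph_api_server`; it assumes the usual BATCH contract, where Dgraph POSTs a JSON array built from the per-node `body` template (`{id: $id}`) and expects a JSON array of field values back in the same order:

```go
// Sketch of a stand-in for the getBatchType route used by the BATCH-mode
// schemas. Assumed contract: request body is a JSON array of {"id": ...}
// objects, response is a JSON array of field values in the same order.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/getBatchType", func(w http.ResponseWriter, r *http.Request) {
		field := r.URL.Query().Get("field") // e.g. "name"
		typ := r.URL.Query().Get("type")    // e.g. "Restaurant"

		var batch []struct {
			ID string `json:"id"`
		}
		if err := json.NewDecoder(r.Body).Decode(&batch); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}

		// One dummy result per incoming body, preserving order.
		results := make([]string, len(batch))
		for i, b := range batch {
			results[i] = fmt.Sprintf("%s-%s-%s", typ, field, b.ID)
		}
		w.Header().Set("Content-Type", "application/json")
		if err := json.NewEncoder(w).Encode(results); err != nil {
			log.Printf("encode response: %v", err)
		}
	})
	log.Fatal(http.ListenAndServe(":9000", nil))
}
```

Because one request covers every parent node in the result, BATCH mode trades per-node calls for a single round trip, which is exactly the difference these benchmarks measure against SINGLE mode.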
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/1_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/1_single/schema.graphql
index 2f0474cf274..450b56f8dfc 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/1_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/1_single/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
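The SINGLE-mode variant above substitutes `$id` into the URL instead, so a stand-in route reads its inputs from query parameters. Again a hedged sketch, not the repository's `dgraph_api_server`: it assumes Dgraph issues one GET per node and accepts a single JSON value as the custom field's result:

```go
// Sketch of a stand-in for the getType route used by the SINGLE-mode
// schemas. Assumed contract: one GET per node with id/field/type in the
// query string, and a single JSON value in the response.
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	http.HandleFunc("/getType", func(w http.ResponseWriter, r *http.Request) {
		q := r.URL.Query()
		id, field, typ := q.Get("id"), q.Get("field"), q.Get("type")
		w.Header().Set("Content-Type", "application/json")
		// Return one JSON string as this node's field value.
		if err := json.NewEncoder(w).Encode(fmt.Sprintf("%s-%s-%s", typ, field, id)); err != nil {
			log.Printf("encode response: %v", err)
		}
	})
	log.Fatal(http.ListenAndServe(":9000", nil))
}
```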
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/2_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/2_batch/schema.graphql
index 9dd9b705264..94a6d453242 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/2_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/2_batch/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/2_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/2_single/schema.graphql
index 8ac89e8539e..b2836a7ecc0 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/2_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/2_single/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/3_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/3_batch/schema.graphql
index 0f2065e3638..50b72b1e063 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/3_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/3_batch/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/3_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/3_single/schema.graphql
index a3a4896f5c0..c47e4740640 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/3_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/3_single/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/4_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/4_batch/schema.graphql
index 0c3f031c077..0d9ce1f447d 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/4_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/4_batch/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=price&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=price&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/4_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/4_single/schema.graphql
index a6d3dd809cb..72ca216707c 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/4_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/4_single/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/5_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/5_batch/schema.graphql
index be76055c5a4..08294eb8bda 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/5_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/5_batch/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Cuisine"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Cuisine"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
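
With `mode: BATCH`, Dgraph builds the `body` template (`{id: $id}`) once per parent node, POSTs the collected JSON list to the endpoint, and expects a list of field values back in the same order. A minimal Go sketch of what a mock `getBatchType` server compatible with these schemas could look like (the port and query parameters come from the URLs above; the exact response shape used by the benchmark server is an assumption):

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

// batchInput mirrors the body template "{id: $id}" from the schemas above.
type batchInput struct {
	ID string `json:"id"`
}

func main() {
	// BATCH mode: Dgraph POSTs one JSON object per parent node, collected
	// into a list, e.g. [{"id":"0x1"},{"id":"0x2"}].
	http.HandleFunc("/getBatchType", func(w http.ResponseWriter, r *http.Request) {
		field := r.URL.Query().Get("field") // e.g. "name"
		typ := r.URL.Query().Get("type")    // e.g. "Dish"

		var inputs []batchInput
		if err := json.NewDecoder(r.Body).Decode(&inputs); err != nil {
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
		// Assumed response shape: one synthetic field value per input, same order.
		results := make([]string, 0, len(inputs))
		for _, in := range inputs {
			results = append(results, fmt.Sprintf("%s-%s-%s", typ, field, in.ID))
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(results)
	})
	log.Fatal(http.ListenAndServe(":9000", nil))
}
```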
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/5_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/5_single/schema.graphql
index bd4e8224745..7c6451c6bcd 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/5_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/5_single/schema.graphql
@@ -1,65 +1,68 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Cuisine"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Cuisine"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
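
With `mode: SINGLE`, by contrast, Dgraph resolves one node at a time: `$id` is substituted into the URL and one GET is issued per node, while `skipIntrospection: true` skips introspecting the remote endpoint's schema. A matching sketch of a mock `getType` handler, under the same assumptions as the batch version above:

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// SINGLE mode: one GET per node, with $id substituted into the URL,
	// e.g. /getType?id=0x1&field=name&type=Dish.
	http.HandleFunc("/getType", func(w http.ResponseWriter, r *http.Request) {
		q := r.URL.Query()
		// Assumed response shape: a single JSON value for the requested field.
		value := fmt.Sprintf("%s-%s-%s", q.Get("type"), q.Get("field"), q.Get("id"))
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(value)
	})
	log.Fatal(http.ListenAndServe(":9000", nil))
}
```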
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/6_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/6_batch/schema.graphql
index d90a766c971..d16311e0b5d 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/6_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/6_batch/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/6_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/6_single/schema.graphql
index 7ce3ad73188..5fb6d2f0072 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/6_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/6_single/schema.graphql
@@ -1,70 +1,76 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/7_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/7_batch/schema.graphql
index 06587fd51f9..8d91b9074db 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/7_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/7_batch/schema.graphql
@@ -1,80 +1,92 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=price&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=price&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/7_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/7_single/schema.graphql
index 5d93e9a1e36..fd97bb6f45b 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/7_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/7_single/schema.graphql
@@ -1,80 +1,92 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/8_batch/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/8_batch/schema.graphql
index 0120e194d95..6803d06d733 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/8_batch/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/8_batch/schema.graphql
@@ -1,85 +1,100 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=rating&type=Restaurant"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Cuisine"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Cuisine"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=name&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getBatchType?field=price&type=Dish"
- method: POST
- mode: BATCH
- body: "{id: $id}"
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=name&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getBatchType?field=price&type=Dish"
+ method: POST
+ mode: BATCH
+ body: "{id: $id}"
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/custom_bench/profiling/benchmarks/8_single/schema.graphql b/graphql/testdata/custom_bench/profiling/benchmarks/8_single/schema.graphql
index b78a0052aa1..df1f2c8e9d1 100644
--- a/graphql/testdata/custom_bench/profiling/benchmarks/8_single/schema.graphql
+++ b/graphql/testdata/custom_bench/profiling/benchmarks/8_single/schema.graphql
@@ -1,85 +1,100 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- addr: RestaurantAddress!
- rating: Float @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=rating&type=Restaurant"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Cuisine"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Cuisine"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- pic: String
- price: Float! @custom(http: {
- url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
- method: GET
- mode: SINGLE
- skipIntrospection: true
- })
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
+ id: ID!
+ name: String!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=name&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ pic: String
+ price: Float!
+ @custom(
+ http: {
+ url: "http://localhost:9000/getType?id=$id&field=price&type=Dish"
+ method: GET
+ mode: SINGLE
+ skipIntrospection: true
+ }
+ )
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
}
diff --git a/graphql/testdata/datagen/README.md b/graphql/testdata/datagen/README.md
index 4c206e322c8..4db3edcde17 100644
--- a/graphql/testdata/datagen/README.md
+++ b/graphql/testdata/datagen/README.md
@@ -1,40 +1,44 @@
# README
-### About
-This is `datagen`. A command line tool to generate data using Dgraph's GraphQL API. At present
-, it is written for a [specific schema](schema.graphql), and so generates data only for that
- schema. It uses an existing [dataset](data/zomato-restaurants-data.zip) which contains data
- about Restaurants, while it generates Dish data at random.
+## About
+
+This is `datagen`, a command-line tool that generates data using Dgraph's GraphQL API. At present,
+it is written for a [specific schema](schema.graphql), and so generates data only for that schema.
+It uses an existing [dataset](data/zomato-restaurants-data.zip) which contains data about
+Restaurants, while it generates Dish data at random.
-### Usage
+
+## Usage
It needs a running dgraph instance to work. So, let's start a dgraph instance first. Follow these
- steps:
-* `$ mkdir ~/__data && cd ~/__data` - we will start dgraph zero and alpha in this directory, so
- that the data is stored here, and can be reused later whenever required.
-* `$ dgraph zero`
-* `$ dgraph alpha`
-
-Now, change your working directory to the directory containing this README file, and run
- following commands:
+steps:
+
+- `$ mkdir ~/__data && cd ~/__data` - we will start dgraph zero and alpha in this directory, so that
+ the data is stored here, and can be reused later whenever required.
+- `$ dgraph zero`
+- `$ dgraph alpha`
+
+Now, change your working directory to the directory containing this README file, and run the
+following commands:
+
1. `$ go build`
2. `$ curl -X POST localhost:8080/admin/schema --data-binary '@schema.graphql'`
3. `$ unzip data/zomato-restaurants-data.zip -d data`
-4. The above command will output some JSON files in the data directory. Out of them `file1.json
-` is corrupt, rest will work.
+4. The above command will output some JSON files in the data directory. Of these, `file1.json` is
+   corrupt; the rest will work.
5. Edit the `conf.yaml`:
- * set `restaurantDataFilePath` to `data/file2.json` - i.e., we are importing the data in `file2
- .json` to dgraph.
- * set `maxErrorsInRestaurantAddition` to `1000000`. Basically, a very high value, as some of
- the restaurants are duplicates in and across the data files.
- * set `maxDishes4AnyRestaurant` to `1000` - i.e., every restaurant will have at max 1000 Dishes.
- * `authorizationHeader` and `jwt` in configuration refer to the header and JWT values for
- `@auth` directive, if you have any in your schema. In the schema given with this, there is
- no `@auth` directive, so no need to pay any attention to them.
+  - set `restaurantDataFilePath` to `data/file2.json` - i.e., we are importing the data in
+    `file2.json` to dgraph.
+  - set `maxErrorsInRestaurantAddition` to `1000000`. This is a deliberately high value, as some of
+    the restaurants are duplicated within and across the data files.
+  - set `maxDishes4AnyRestaurant` to `1000` - i.e., every restaurant will have at most 1000 Dishes.
+  - `authorizationHeader` and `jwt` in the configuration refer to the header and JWT values for the
+    `@auth` directive, if you have one in your schema. The schema given with this tool has no
+    `@auth` directive, so you can ignore them.
6. `$ ./datagen --config conf.yaml` - this will start the data generator using the configuration
- file. Once it finishes, all the data in `data/file2.json` would have been imported into dgraph.
+   file. Once it finishes, all the data in `data/file2.json` will have been imported into dgraph.
7. Repeat steps 5 & 6 with different data files. i.e., keep setting `restaurantDataFilePath` to
- other data files and importing them.
-8. This is all that is required to import the data in dgraph. Now you can stop alpha and zero
-, and keep the `~/__data` directory safe to reuse it later.
-
- You can always look for help with `$ ./datagen --help`
\ No newline at end of file
+ other data files and importing them.
+8. This is all that is required to import the data into dgraph. Now you can stop alpha and zero,
+   and keep the `~/__data` directory safe to reuse later.
+
+You can always get help with `$ ./datagen --help`.
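
A minimal Go equivalent of step 2's schema push, in case you are driving the setup from code rather than curl (the file name and the `localhost:8080` admin endpoint are taken from the steps above; the content type is an assumption, as `/admin/schema` accepts the raw schema body):

```go
package main

import (
	"bytes"
	"log"
	"net/http"
	"os"
)

func main() {
	// Equivalent of: curl -X POST localhost:8080/admin/schema --data-binary '@schema.graphql'
	schema, err := os.ReadFile("schema.graphql")
	if err != nil {
		log.Fatal(err)
	}
	// Content type is an assumption; the endpoint reads the raw body.
	resp, err := http.Post("http://localhost:8080/admin/schema",
		"application/octet-stream", bytes.NewReader(schema))
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	log.Println("schema push status:", resp.Status)
}
```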
diff --git a/graphql/testdata/datagen/schema.graphql b/graphql/testdata/datagen/schema.graphql
index 10fbc16fffc..6af200b0c80 100644
--- a/graphql/testdata/datagen/schema.graphql
+++ b/graphql/testdata/datagen/schema.graphql
@@ -1,60 +1,60 @@
type Country {
- cid: ID!
- id: String! @id
- name: String!
- cities: [City]
+ cid: ID!
+ id: String! @id
+ name: String!
+ cities: [City]
}
type City {
- cid: ID!
- id: String! @id
- name: String!
- country: Country! @hasInverse(field: cities)
- restaurants: [RestaurantAddress] @hasInverse(field: city)
+ cid: ID!
+ id: String! @id
+ name: String!
+ country: Country! @hasInverse(field: cities)
+ restaurants: [RestaurantAddress] @hasInverse(field: city)
}
interface Location {
- id: ID!
- lat: Float!
- long: Float!
- address: String!
- locality: String!
- city: City!
- zipcode: Int
+ id: ID!
+ lat: Float!
+ long: Float!
+ address: String!
+ locality: String!
+ city: City!
+ zipcode: Int
}
type RestaurantAddress implements Location {
- restaurant: Restaurant! @hasInverse(field: addr)
+ restaurant: Restaurant! @hasInverse(field: addr)
}
type Restaurant {
- id: ID!
- xid: String! @id
- name: String!
- pic: String
- addr: RestaurantAddress!
- rating: Float
- costFor2: Float
- currency: String
- cuisines: [Cuisine]
- dishes: [Dish] @hasInverse(field: servedBy)
- createdAt: DateTime!
+ id: ID!
+ xid: String! @id
+ name: String!
+ pic: String
+ addr: RestaurantAddress!
+ rating: Float
+ costFor2: Float
+ currency: String
+ cuisines: [Cuisine]
+ dishes: [Dish] @hasInverse(field: servedBy)
+ createdAt: DateTime!
}
type Cuisine {
- id: ID!
- name: String! @id
- restaurants: [Restaurant] @hasInverse(field: cuisines)
- dishes: [Dish] @hasInverse(field: cuisine)
+ id: ID!
+ name: String! @id
+ restaurants: [Restaurant] @hasInverse(field: cuisines)
+ dishes: [Dish] @hasInverse(field: cuisine)
}
type Dish {
- id: ID!
- name: String!
- pic: String
- price: Float!
- description: String
- isVeg: Boolean!
- cuisine: Cuisine
- servedBy: Restaurant!
-}
\ No newline at end of file
+ id: ID!
+ name: String!
+ pic: String
+ price: Float!
+ description: String
+ isVeg: Boolean!
+ cuisine: Cuisine
+ servedBy: Restaurant!
+}
diff --git a/logo-dark.png b/logo-dark.png
index 3620c4f3b87f3723f0953c2c603fd1872e7477fc..e7bffaeb4d7368816615f6cbbbaed8d5b5c6127e 100644
GIT binary patch
literal 6102
literal 13481
diff --git a/logo.png b/logo.png
index 67b556d84c94e94467d8112f63ffb44140638592..e4a1b4b41ca64a4229c1e1bb11510b8635644d02 100644
GIT binary patch
literal 5747