From 74ea449586957d635462b680f77b6bb423c47769 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Sun, 14 Feb 2021 17:37:55 +0100 Subject: [PATCH 1/4] [CI] Update test suite names --- .ci/DockerFile | 2 +- .ci/functions/imports.sh | 5 ++--- .ci/run-elasticsearch.sh | 6 +++--- .ci/run-repository.sh | 2 +- .ci/test-matrix.yml | 4 ++-- .dockerignore | 3 ++- Makefile.toml | 30 ++++++++++++++++-------------- 7 files changed, 27 insertions(+), 25 deletions(-) diff --git a/.ci/DockerFile b/.ci/DockerFile index 37b88954..22c3889e 100644 --- a/.ci/DockerFile +++ b/.ci/DockerFile @@ -28,4 +28,4 @@ COPY elasticsearch/src ./elasticsearch/src COPY elasticsearch/build.rs ./elasticsearch/build.rs COPY yaml_test_runner ./yaml_test_runner -RUN cargo build --tests \ No newline at end of file +RUN cargo build --tests diff --git a/.ci/functions/imports.sh b/.ci/functions/imports.sh index e022a3be..3fb28cc3 100644 --- a/.ci/functions/imports.sh +++ b/.ci/functions/imports.sh @@ -18,7 +18,7 @@ require_stack_version if [[ -z $es_node_name ]]; then # only set these once set -euo pipefail - export TEST_SUITE=${TEST_SUITE-oss} + export TEST_SUITE=${TEST_SUITE-free} export RUNSCRIPTS=${RUNSCRIPTS-} export DETACH=${DETACH-false} export CLEANUP=${CLEANUP-false} @@ -27,8 +27,7 @@ if [[ -z $es_node_name ]]; then export elastic_password=changeme export elasticsearch_image=elasticsearch export elasticsearch_url=https://elastic:${elastic_password}@${es_node_name}:9200 - if [[ $TEST_SUITE != "xpack" ]]; then - export elasticsearch_image=elasticsearch-${TEST_SUITE} + if [[ $TEST_SUITE != "platinum" ]]; then export elasticsearch_url=http://${es_node_name}:9200 fi export external_elasticsearch_url=${elasticsearch_url/$es_node_name/localhost} diff --git a/.ci/run-elasticsearch.sh b/.ci/run-elasticsearch.sh index dcca5ac6..d3cf6ba3 100755 --- a/.ci/run-elasticsearch.sh +++ b/.ci/run-elasticsearch.sh @@ -4,7 +4,7 @@ # to form a cluster suitable for running the REST API tests. # # Export the STACK_VERSION variable, eg. '8.0.0-SNAPSHOT'. -# Export the TEST_SUITE variable, eg. 'oss' or 'xpack' defaults to 'oss'. +# Export the TEST_SUITE variable, eg. 'free' or 'platinum' defaults to 'free'. 
# Export the NUMBER_OF_NODES variable to start more than 1 node # Version 1.1.0 @@ -38,7 +38,7 @@ environment=($(cat <<-END --env repositories.url.allowed_urls=http://snapshot.test* END )) -if [[ "$TEST_SUITE" == "xpack" ]]; then +if [[ "$TEST_SUITE" == "platinum" ]]; then environment+=($(cat <<-END --env ELASTIC_PASSWORD=$elastic_password --env xpack.license.self_generated.type=trial @@ -63,7 +63,7 @@ END fi cert_validation_flags="" -if [[ "$TEST_SUITE" == "xpack" ]]; then +if [[ "$TEST_SUITE" == "platinum" ]]; then cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1" fi diff --git a/.ci/run-repository.sh b/.ci/run-repository.sh index 5809b44d..923b917c 100644 --- a/.ci/run-repository.sh +++ b/.ci/run-repository.sh @@ -3,7 +3,7 @@ # STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease) -# TEST_SUITE -- which test suite to run: oss or xpack +# TEST_SUITE -- which test suite to run: free or platinum # ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE # RUST_TOOLCHAIN -- Rust toolchain version to compile and run tests script_path=$(dirname $(realpath -s $0)) diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml index 37bccfa9..d0414033 100644 --- a/.ci/test-matrix.yml +++ b/.ci/test-matrix.yml @@ -4,8 +4,8 @@ STACK_VERSION: - 8.0.0-SNAPSHOT TEST_SUITE: - - oss - - xpack + - free + - platinum RUST_TOOLCHAIN: - latest diff --git a/.dockerignore b/.dockerignore index 120a8907..f3cb8460 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,8 +6,9 @@ target docs .vscode .idea +*.iml api_generator/src/bin yaml_test_runner/tests -!yaml_test_runner/tests/common \ No newline at end of file +!yaml_test_runner/tests/common diff --git a/Makefile.toml b/Makefile.toml index e482a1ef..6b3b2e10 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -4,23 +4,23 @@ default_to_workspace = false [env] # Determines the version of Elasticsearch docker container used STACK_VERSION = "8.0.0-SNAPSHOT" -# Determines the distribution of docker container used. Either xpack or oss -TEST_SUITE = "xpack" +# Determines the distribution of docker container used. 
Either platinum or free +TEST_SUITE = "platinum" # Set publish flags to dry-run by default, to force user to explicitly define for publishing CARGO_MAKE_CARGO_PUBLISH_FLAGS = "--dry-run" -[tasks.set-oss-env] +[tasks.set-free-env] category = "Elasticsearch" -description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when oss test suite used" +description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when free test suite used" private = true -condition = { env = { "TEST_SUITE" = "oss" }, env_not_set = ["ELASTICSEARCH_URL"] } +condition = { env = { "TEST_SUITE" = "free" }, env_not_set = ["ELASTICSEARCH_URL"] } env = { "ELASTICSEARCH_URL" = "http://localhost:9200" } -[tasks.set-xpack-env] +[tasks.set-platinum-env] category = "Elasticsearch" -description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when xpack test suite used" +description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when platinum test suite used" private = true -condition = { env = { "TEST_SUITE" = "xpack" }, env_not_set = ["ELASTICSEARCH_URL"] } +condition = { env = { "TEST_SUITE" = "platinum" }, env_not_set = ["ELASTICSEARCH_URL"] } env = { "ELASTICSEARCH_URL" = "https://elastic:changeme@localhost:9200" } [tasks.run-yaml-test-runner] @@ -52,7 +52,7 @@ dependencies = ["generate-yaml-tests"] [tasks.test-elasticsearch] category = "Elasticsearch" private = true -condition = { env_set = [ "ELASTICSEARCH_URL" ], env = { "TEST_SUITE" = "xpack" } } +condition = { env_set = [ "ELASTICSEARCH_URL" ], env = { "TEST_SUITE" = "platinum" } } command = "cargo" args = ["test", "-p", "elasticsearch"] dependencies = ["start-elasticsearch"] @@ -85,7 +85,7 @@ dependencies = ["install-cargo2junit"] category = "Elasticsearch" private = true condition = { env_set = [ "STACK_VERSION", "TEST_SUITE" ], env_false = ["CARGO_MAKE_CI"] } -dependencies = ["set-oss-env", "set-xpack-env"] +dependencies = ["set-free-env", "set-platinum-env"] [tasks.run-elasticsearch.linux] command = "./.ci/run-elasticsearch.sh" @@ -120,17 +120,19 @@ exec cargo publish %{CARGO_MAKE_CARGO_PUBLISH_FLAGS} [tasks.start-elasticsearch] extend = "run-elasticsearch" +private = false description = "Starts Elasticsearch docker container with the given version and distribution" env = { "CLEANUP" = false, "DETACH" = true } [tasks.stop-elasticsearch] extend = "run-elasticsearch" +private = false description = "Stops Elasticsearch docker container, if running" env = { "CLEANUP" = true, "DETACH" = false } [tasks.test-yaml] category = "Elasticsearch" -description = "Generates and runs yaml_test_runner package xpack/oss tests against a given Elasticsearch version" +description = "Generates and runs yaml_test_runner package platinum/free tests against a given Elasticsearch version" condition = { env_set = [ "STACK_VERSION", "TEST_SUITE" ] } dependencies = ["generate-yaml-tests", "create-test-results-dir", "test-yaml-test-runner", "test-yaml-test-runner-ci", "convert-test-results-junit"] run_task = "stop-elasticsearch" @@ -146,7 +148,7 @@ args = ["test", "-p", "api_generator"] category = "Elasticsearch" clear = true description = "Runs elasticsearch package tests against a given Elasticsearch version" -env = { "TEST_SUITE" = { value = "xpack", condition = { env_set = ["TEST_SUITE"] } } } +env = { "TEST_SUITE" = { value = "platinum", condition = { env_set = ["TEST_SUITE"] } } } dependencies = ["test-elasticsearch"] run_task = "stop-elasticsearch" @@ 
-217,7 +219,7 @@ script = [''' echo - start-elasticsearch: Starts Elasticsearch docker container with the given version and distribution echo - stop-elasticsearch: Stops Elasticsearch docker container, if running echo - echo - test-yaml: Generates and runs yaml_test_runner package xpack/oss tests against a given Elasticsearch version + echo - test-yaml: Generates and runs yaml_test_runner package platinum/free tests against a given Elasticsearch version echo - test-generator: Generates and runs api_generator package tests echo - test: Runs elasticsearch package tests against a given Elasticsearch version echo @@ -230,7 +232,7 @@ script = [''' echo echo Most tasks use these environment variables: echo - STACK_VERSION (default '${STACK_VERSION}'): the version of Elasticsearch - echo - TEST_SUITE ('oss' or 'xpack', default '${TEST_SUITE}'): the distribution of Elasticsearch + echo - TEST_SUITE ('free' or 'platinum', default '${TEST_SUITE}'): the distribution of Elasticsearch echo - CI (default not set): set when running on CI to determine whether to start Elasticsearch and format test output as JSON echo echo Run 'cargo make --list-all-steps' for a complete list of available tasks. From 8069327a3ae072e828609812ee1c09610d0535a9 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Sun, 14 Feb 2021 17:38:56 +0100 Subject: [PATCH 2/4] [es] Generate client code --- elasticsearch/src/async_search.rs | 3 ++- elasticsearch/src/cat.rs | 3 ++- elasticsearch/src/ccr.rs | 3 ++- elasticsearch/src/cluster.rs | 3 ++- elasticsearch/src/dangling_indices.rs | 3 ++- elasticsearch/src/enrich.rs | 3 ++- elasticsearch/src/graph.rs | 3 ++- elasticsearch/src/ilm.rs | 3 ++- elasticsearch/src/indices.rs | 3 ++- elasticsearch/src/ingest.rs | 3 ++- elasticsearch/src/license.rs | 3 ++- elasticsearch/src/migration.rs | 3 ++- elasticsearch/src/ml.rs | 3 ++- elasticsearch/src/nodes.rs | 3 ++- elasticsearch/src/root/mod.rs | 3 ++- elasticsearch/src/security.rs | 3 ++- elasticsearch/src/slm.rs | 3 ++- elasticsearch/src/snapshot.rs | 3 ++- elasticsearch/src/sql.rs | 3 ++- elasticsearch/src/ssl.rs | 3 ++- elasticsearch/src/tasks.rs | 3 ++- elasticsearch/src/transform.rs | 3 ++- elasticsearch/src/watcher.rs | 3 ++- elasticsearch/src/xpack.rs | 3 ++- 24 files changed, 48 insertions(+), 24 deletions(-) diff --git a/elasticsearch/src/async_search.rs b/elasticsearch/src/async_search.rs index 1c6dfc9a..73101c7d 100644 --- a/elasticsearch/src/async_search.rs +++ b/elasticsearch/src/async_search.rs @@ -30,7 +30,8 @@ //! let you asynchronously execute a search request, monitor its progress, and retrieve //! partial results as they become available. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/cat.rs b/elasticsearch/src/cat.rs index 4158787c..b83739f2 100644 --- a/elasticsearch/src/cat.rs +++ b/elasticsearch/src/cat.rs @@ -119,7 +119,8 @@ //! ``` //! -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/ccr.rs b/elasticsearch/src/ccr.rs index a0458a67..474447b1 100644 --- a/elasticsearch/src/ccr.rs +++ b/elasticsearch/src/ccr.rs @@ -32,7 +32,8 @@ //! - Disaster recovery in case a primary cluster fails. A secondary cluster can serve as a hot backup //! - Geo-proximity so that reads can be served locally -# ! 
[ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/cluster.rs b/elasticsearch/src/cluster.rs index b1485d53..32c63964 100644 --- a/elasticsearch/src/cluster.rs +++ b/elasticsearch/src/cluster.rs @@ -29,7 +29,8 @@ //! [Manage settings](https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster.html), //! perform operations, and retrieve information about an Elasticsearch cluster. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/dangling_indices.rs b/elasticsearch/src/dangling_indices.rs index 96e9f122..a165cde6 100644 --- a/elasticsearch/src/dangling_indices.rs +++ b/elasticsearch/src/dangling_indices.rs @@ -33,7 +33,8 @@ //! //! The dangling indices APIs can list, import and delete dangling indices. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/enrich.rs b/elasticsearch/src/enrich.rs index 63576b06..65b90030 100644 --- a/elasticsearch/src/enrich.rs +++ b/elasticsearch/src/enrich.rs @@ -31,7 +31,8 @@ //! as part of an [ingest pipeline](../ingest/index.html), to add data from your existing indices //! to incoming documents during ingest. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/graph.rs b/elasticsearch/src/graph.rs index 3c4580ca..3ab94aba 100644 --- a/elasticsearch/src/graph.rs +++ b/elasticsearch/src/graph.rs @@ -30,7 +30,8 @@ //! indices, [inferring relationships across documents](https://www.elastic.co/what-is/elasticsearch-graph), //! and allowing the [exploration of such relationships](https://www.elastic.co/guide/en/elasticsearch/reference/master/graph-explore-api.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/ilm.rs b/elasticsearch/src/ilm.rs index ffc54ad0..0e0370c1 100644 --- a/elasticsearch/src/ilm.rs +++ b/elasticsearch/src/ilm.rs @@ -34,7 +34,8 @@ //! template used to create them. Update the policy to modify the lifecycle of both new //! and existing indices. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/indices.rs b/elasticsearch/src/indices.rs index ae41d9eb..077153a0 100644 --- a/elasticsearch/src/indices.rs +++ b/elasticsearch/src/indices.rs @@ -29,7 +29,8 @@ //! [Manage individual indices](https://www.elastic.co/guide/en/elasticsearch/reference/master/indices.html), //! index settings, aliases, mappings, and index templates. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/ingest.rs b/elasticsearch/src/ingest.rs index f0a1ccee..41ceae71 100644 --- a/elasticsearch/src/ingest.rs +++ b/elasticsearch/src/ingest.rs @@ -42,7 +42,8 @@ //! be conditionally executed, and failures within pipelines can be explicitly handled by defining //! processors to execute in the event of failure. -# ! 
[ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/license.rs b/elasticsearch/src/license.rs index cdbe5307..707db8a7 100644 --- a/elasticsearch/src/license.rs +++ b/elasticsearch/src/license.rs @@ -33,7 +33,8 @@ //! - Start indefinite use of the Basic license features //! - Get the status of trial and basic license features -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/migration.rs b/elasticsearch/src/migration.rs index 1306eb1f..f6506610 100644 --- a/elasticsearch/src/migration.rs +++ b/elasticsearch/src/migration.rs @@ -28,7 +28,8 @@ //! //! [Simplify upgrading X-Pack indices from one version to another](https://www.elastic.co/guide/en/elasticsearch/reference/master/migration-api.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/ml.rs b/elasticsearch/src/ml.rs index be707d01..39e141f1 100644 --- a/elasticsearch/src/ml.rs +++ b/elasticsearch/src/ml.rs @@ -28,7 +28,8 @@ //! //! [Perform machine learning anomaly detection activities](https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-apis.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/nodes.rs b/elasticsearch/src/nodes.rs index 795b030b..f7858b34 100644 --- a/elasticsearch/src/nodes.rs +++ b/elasticsearch/src/nodes.rs @@ -29,7 +29,8 @@ //! Manage settings, perform operations, and retrieve information about the //! [nodes in an Elasticsearch cluster](https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/root/mod.rs b/elasticsearch/src/root/mod.rs index 646c6d6b..ddf9d19d 100644 --- a/elasticsearch/src/root/mod.rs +++ b/elasticsearch/src/root/mod.rs @@ -24,7 +24,8 @@ // cargo make generate-api // ----------------------------------------------- -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/security.rs b/elasticsearch/src/security.rs index 55f0fcb6..d71b852b 100644 --- a/elasticsearch/src/security.rs +++ b/elasticsearch/src/security.rs @@ -37,7 +37,8 @@ //! - Authenticate users against an OpenID Connect or SAML authentication realm when using a //! custom web application other than Kibana -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/slm.rs b/elasticsearch/src/slm.rs index 7db66376..50e5c507 100644 --- a/elasticsearch/src/slm.rs +++ b/elasticsearch/src/slm.rs @@ -33,7 +33,8 @@ //! indices. Snapshot Lifecycle Management can also perform deletion of older snapshots based on a //! configurable retention policy. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/snapshot.rs b/elasticsearch/src/snapshot.rs index a8f80d64..2507e53c 100644 --- a/elasticsearch/src/snapshot.rs +++ b/elasticsearch/src/snapshot.rs @@ -31,7 +31,8 @@ //! 
indices or the entire cluster, stored in a repository on a shared filesystem or a remote repository //! on S3, HDFS, Azure, Google Cloud storage, and more. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/sql.rs b/elasticsearch/src/sql.rs index 72b6da24..2141c56a 100644 --- a/elasticsearch/src/sql.rs +++ b/elasticsearch/src/sql.rs @@ -28,7 +28,8 @@ //! //! [Execute SQL queries against Elasticsearch indices and return results in tabular format](https://www.elastic.co/guide/en/elasticsearch/reference/master/xpack-sql.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/ssl.rs b/elasticsearch/src/ssl.rs index 43bfd170..0c333a0d 100644 --- a/elasticsearch/src/ssl.rs +++ b/elasticsearch/src/ssl.rs @@ -28,7 +28,8 @@ //! //! [Retrieve information about the X.509 certificates used to encrypt communications in the cluster](https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-ssl.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/tasks.rs b/elasticsearch/src/tasks.rs index c37fabdb..78589a12 100644 --- a/elasticsearch/src/tasks.rs +++ b/elasticsearch/src/tasks.rs @@ -28,7 +28,8 @@ //! //! [Manage tasks currently executing on one or more nodes in the cluster](https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html). -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/transform.rs b/elasticsearch/src/transform.rs index 324dfa2b..5f4048d1 100644 --- a/elasticsearch/src/transform.rs +++ b/elasticsearch/src/transform.rs @@ -30,7 +30,8 @@ //! can be used to copy data from source indices, transforms it, and persists it into an //! entity-centric destination index. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/watcher.rs b/elasticsearch/src/watcher.rs index b62c6c1d..7d7788d9 100644 --- a/elasticsearch/src/watcher.rs +++ b/elasticsearch/src/watcher.rs @@ -29,7 +29,8 @@ //! Enable [watching for changes or anomalies in data and perform the necessary actions in response](https://www.elastic.co/guide/en/elasticsearch/reference/master/xpack-alerting.html), //! by creating and managing watches that take action based on a met condition. -# ! [ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ diff --git a/elasticsearch/src/xpack.rs b/elasticsearch/src/xpack.rs index 7680767e..26192a57 100644 --- a/elasticsearch/src/xpack.rs +++ b/elasticsearch/src/xpack.rs @@ -28,7 +28,8 @@ //! //! Provide general information about the installed X-Pack features and their usage. -# ! 
[ allow ( unused_imports ) ]use crate::{ +#![allow(unused_imports)] +use crate::{ client::Elasticsearch, error::Error, http::{ From 0cb25aeee6fea3c992389c7c14c5b01246d57275 Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Sun, 14 Feb 2021 17:40:47 +0100 Subject: [PATCH 3/4] [yaml-tests] Update test suite names, fix delete_indices, improve error reporting --- CONTRIBUTING.md | 6 +-- Makefile.toml | 4 +- yaml_test_runner/src/generator.rs | 8 ++-- yaml_test_runner/src/github.rs | 2 +- yaml_test_runner/src/main.rs | 9 +++-- yaml_test_runner/src/step/do.rs | 2 +- yaml_test_runner/tests/common/client.rs | 29 +++++++++++++ yaml_test_runner/tests/common/macros.rs | 54 ++++++++++++++++++++----- 8 files changed, 88 insertions(+), 26 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e8f986b3..7c90562e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,10 +79,10 @@ The `Elasticsearch` category of steps are specifically defined for this project - `STACK_VERSION`: Elasticsearch version like `7.9.0` or can be a snapshot release like `7.x-SNAPSHOT` - - `TEST_SUITE`: Elasticsearch distribution of `oss` or `xpack` + - `TEST_SUITE`: Elasticsearch distribution of `free` or `platinum` ```sh - cargo make test-yaml --env STACK_VERSION=7.9.0 --env TEST_SUITE=oss + cargo make test-yaml --env STACK_VERSION=7.9.0 --env TEST_SUITE=free ``` ### Packages @@ -198,4 +198,4 @@ if wishing to use the MSVC debugger with Rust in VS code, which may be preferred } ``` -3. Add `"debug.allowBreakpointsEverywhere": true` to VS code settings.json \ No newline at end of file +3. Add `"debug.allowBreakpointsEverywhere": true` to VS code settings.json diff --git a/Makefile.toml b/Makefile.toml index 6b3b2e10..1e3d9962 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -3,9 +3,9 @@ default_to_workspace = false [env] # Determines the version of Elasticsearch docker container used -STACK_VERSION = "8.0.0-SNAPSHOT" +STACK_VERSION = { value = "8.0.0-SNAPSHOT", condition = { env_not_set = ["STACK_VERSION"] }} # Determines the distribution of docker container used. 
Either platinum or free -TEST_SUITE = "platinum" +TEST_SUITE = { value = "free", condition = { env_not_set = ["TEST_SUITE"] }} # Set publish flags to dry-run by default, to force user to explicitly define for publishing CARGO_MAKE_CARGO_PUBLISH_FLAGS = "--dry-run" diff --git a/yaml_test_runner/src/generator.rs b/yaml_test_runner/src/generator.rs index 56ecaec9..28b6101a 100644 --- a/yaml_test_runner/src/generator.rs +++ b/yaml_test_runner/src/generator.rs @@ -36,7 +36,7 @@ use yaml_rust::{Yaml, YamlLoader}; /// The test suite to compile #[derive(Debug, PartialEq)] pub enum TestSuite { - Oss, + Free, XPack, } @@ -121,7 +121,7 @@ impl<'a> YamlTests<'a> { let (setup_fn, setup_call) = Self::generate_fixture(&self.setup); let (teardown_fn, teardown_call) = Self::generate_fixture(&self.teardown); let general_setup_call = match self.suite { - TestSuite::Oss => quote!(client::general_oss_setup().await?;), + TestSuite::Free => quote!(client::general_oss_setup().await?;), TestSuite::XPack => quote!(client::general_xpack_setup().await?;), }; @@ -406,9 +406,9 @@ pub fn generate_tests_from_yaml( } match top_dir.as_str() { - "oss" => TestSuite::Oss, + "free" => TestSuite::Free, "xpack" => TestSuite::XPack, - _ => panic!("Unknown test suite"), + _ => panic!("Unknown test suite {:?}", path), } }; diff --git a/yaml_test_runner/src/github.rs b/yaml_test_runner/src/github.rs index f8a29c3c..c4eaf9ec 100644 --- a/yaml_test_runner/src/github.rs +++ b/yaml_test_runner/src/github.rs @@ -67,7 +67,7 @@ pub fn download_test_suites(branch: &str, download_dir: &PathBuf) -> Result<(), let file = entry?; let path = file.path()?; if oss_test.is_match(&path) { - write_test_file(download_dir, "oss", file)?; + write_test_file(download_dir, "free", file)?; } else if xpack_test.is_match(&path) { write_test_file(download_dir, "xpack", file)?; } diff --git a/yaml_test_runner/src/main.rs b/yaml_test_runner/src/main.rs index a011b2a4..161c9c70 100644 --- a/yaml_test_runner/src/main.rs +++ b/yaml_test_runner/src/main.rs @@ -148,13 +148,14 @@ fn branch_suite_and_version_from_elasticsearch( .danger_accept_invalid_certs(true) .build()?; + let suite = match std::env::var("TEST_SUITE") { + Err(_) => panic!("Env var TEST_SUITE is not defined"), + Ok(ref s) if s == "free" => TestSuite::Free, + _ => TestSuite::XPack, + }; let mut response = client.get(url).send()?; let json: Value = response.json()?; let branch = json["version"]["build_hash"].as_str().unwrap().to_string(); - let suite = match json["version"]["build_flavor"].as_str().unwrap() { - "oss" => TestSuite::Oss, - _ => TestSuite::XPack, - }; // any prerelease part needs to be trimmed because the semver crate only allows // a version with a prerelease to match against predicates, if at least one predicate diff --git a/yaml_test_runner/src/step/do.rs b/yaml_test_runner/src/step/do.rs index 8fd63006..715d3bc7 100644 --- a/yaml_test_runner/src/step/do.rs +++ b/yaml_test_runner/src/step/do.rs @@ -38,7 +38,7 @@ impl ToTokens for Catch { fn to_tokens(&self, tokens: &mut Tokens) { fn http_status_code(status_code: u16, tokens: &mut Tokens) { tokens.append(quote! 
{ - assert_status_code!(response.status_code(), #status_code); + assert_response_status_code!(response, #status_code); }); } diff --git a/yaml_test_runner/tests/common/client.rs b/yaml_test_runner/tests/common/client.rs index 59d8704e..b6d1356e 100644 --- a/yaml_test_runner/tests/common/client.rs +++ b/yaml_test_runner/tests/common/client.rs @@ -247,6 +247,35 @@ async fn wait_for_yellow_status(client: &Elasticsearch) -> Result<(), Error> { } async fn delete_indices(client: &Elasticsearch) -> Result<(), Error> { + // Hand-crafted request as the indices.delete_data_stream spec doesn't yet have the + // "expand_wildcards" parameter that is needed to delete ILM data streams + // + // Not deleting data streams yields errors like this when trying to delete hidden indices: + // { + // "type":"illegal_argument_exception" + // "reason":"index [.ds-ilm-history-5-2021.02.14-000001] is the write index for data + // stream [ilm-history-5] and cannot be deleted" + // } + // + // Quoting the docs: + // You cannot delete the current write index of a data stream. To delete the index, + // you must roll over the data stream so a new write index is created. You can then use + // the delete index API to delete the previous write index. + // + let delete_response = client + .transport() + .send( + Method::Delete, + "/_data_stream/*", + elasticsearch::http::headers::HeaderMap::new(), + Some(&[("expand_wildcards", "hidden")]), + None::<()>, // body + None, // timeout + ) + .await?; + + assert_response_success!(delete_response); + let delete_response = client .indices() .delete(IndicesDeleteParts::Index(&["*"])) diff --git a/yaml_test_runner/tests/common/macros.rs b/yaml_test_runner/tests/common/macros.rs index 592aa1ce..9ec262aa 100644 --- a/yaml_test_runner/tests/common/macros.rs +++ b/yaml_test_runner/tests/common/macros.rs @@ -17,14 +17,21 @@ * under the License. */ /// Asserts that a [Response] has a status code >=200 and <300 + #[macro_export] macro_rules! assert_response_success { ($response:ident) => {{ - assert!( - $response.status_code().is_success(), - "expected response to be successful but was {}", - $response.status_code().as_u16() - ); + let status_code = $response.status_code(); + if !status_code.is_success() { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be successful but was {}. Response: {}", + status_code, text + ); + } }}; } @@ -32,12 +39,37 @@ macro_rules! assert_response_success { #[macro_export] macro_rules! assert_response_success_or { ($response:ident, $status:expr) => {{ - assert!( - $response.status_code().is_success() || $response.status_code().as_u16() == $status, - "expected response to be successful or {} but was {}", - $status, - $response.status_code().as_u16() - ); + let status_code = $response.status_code(); + if !status_code.is_success() && status_code.as_u16() != $status { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be successful or {} but was {}", + $status, + status_code.as_u16() + ); + } + }}; +} + +/// Asserts that a [Response] has a status that matches the passed status +#[macro_export] +macro_rules! 
assert_response_status_code { + ($response:ident, $status:expr) => {{ + let status_code = $response.status_code(); + if status_code.as_u16() != $status { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be {} but was {}", + $status, + status_code.as_u16() + ); + } }}; } From f10b97820f9135632d96cfce66d92139408a7b6d Mon Sep 17 00:00:00 2001 From: Sylvain Wallez Date: Sun, 14 Feb 2021 19:40:32 +0100 Subject: [PATCH 4/4] Update skipped tests --- yaml_test_runner/skip.yml | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/yaml_test_runner/skip.yml b/yaml_test_runner/skip.yml index 5021aab6..183e73c2 100644 --- a/yaml_test_runner/skip.yml +++ b/yaml_test_runner/skip.yml @@ -11,6 +11,11 @@ features: # tests to skip generating and compiling a test for tests: + + xpack/license/30_enterprise_license.yml: + # Warning "Including [accept_enterprise] in get license requests is deprecated" is no more present + - "Installing enterprise license" + xpack/api_key/10_basic.yml: # Invalid regex: "^(${api_key_id_2}|${api_key_id_3})$" # --> missing expansion of stashed values @@ -45,6 +50,11 @@ tests: # this test fails because it can't access snapshot to restore it - "Create a source only snapshot and then restore it" + xpack/snapshot/20_operator_privileges_disabled.yml: + # expected value json["persistent"]["xpack"]["security"]["http"]["filter"]["deny"] to match String("example.com") + # but was String("tutorial.com") + - "Operator only settings can be set and restored by non-operator user when operator privileges is disabled" + # Test generator currently doesn't handle unsigned long values. # We skip all tests as even the setup code will fail to compile. xpack/unsigned_long/10_basic.yml: @@ -60,32 +70,36 @@ tests: xpack/unsigned_long/60_collapse.yml: - "*" - oss/cat.aliases/10_basic.yml: + free/cat.aliases/10_basic.yml: # this test fails as the regex needs a \n before the ending $ - "Multiple alias names" - oss/cat.indices/10_basic.yml: + free/cat.indices/10_basic.yml: # this test fails as the regex needs a \n before the ending $ - "Test cat indices using health status" - oss/indices.shard_stores/10_basic.yml: + free/indices.shard_stores/10_basic.yml: # uses number as a key into object. serde_json::Value expects a string key - "basic index test" - "multiple indices test" - oss/indices.flush/10_basic.yml: + free/indices.flush/10_basic.yml: # uses number as a key into object. serde_json::Value expects a string key - "Index synced flush rest test" - oss/indices.segments/10_basic.yml: + free/indices.segments/10_basic.yml: # uses number as a key into object. serde_json::Value expects a string key - "basic segments test" - oss/indices.stats/12_level.yml: + free/indices.stats/12_level.yml: # uses number as a key into object. serde_json::Value expects a string key - "Level - shards" - oss/search.aggregation/250_moving_fn.yml: + free/nodes.info/10_basic.yml: + # node has a lot more roles than those checked in the test + - "node_info role test" + + free/search.aggregation/250_moving_fn.yml: # The use of an invalid window interval results in a 400 response which looks like # it suppresses the sending of deprecation headers - "Bad window deprecated interval"
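
Several of the skips above share one root cause noted in their comments: the YAML tests index into response objects with numeric keys, while `serde_json::Value` objects are keyed by strings. A minimal, self-contained sketch of that limitation (illustrative only; the JSON shape below is invented and is not taken from any test file):

```rust
// Illustrative sketch: object keys in serde_json are Strings, so a YAML path segment
// written as a number (e.g. `shards.0`) has to be looked up as the string "0".
use serde_json::json;

fn main() {
    // Invented response fragment where a shard id appears as an object key.
    let body = json!({ "shards": { "0": { "stores": [ { "allocated": true } ] } } });

    // Key lookup works with a string key ...
    assert_eq!(body["shards"]["0"]["stores"][0]["allocated"], true);

    // ... while indexing the same object with a number is not a key lookup:
    // Index<usize> only applies to arrays and yields Value::Null here.
    assert!(body["shards"][0].is_null());
}
```

This is the reason the skip list keeps entries such as `free/indices.shard_stores/10_basic.yml` and `free/indices.segments/10_basic.yml`, whose assertions address shards by numeric id ("uses number as a key into object").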