diff --git a/.ci/DockerFile b/.ci/DockerFile index 37b88954..22c3889e 100644 --- a/.ci/DockerFile +++ b/.ci/DockerFile @@ -28,4 +28,4 @@ COPY elasticsearch/src ./elasticsearch/src COPY elasticsearch/build.rs ./elasticsearch/build.rs COPY yaml_test_runner ./yaml_test_runner -RUN cargo build --tests \ No newline at end of file +RUN cargo build --tests diff --git a/.ci/functions/imports.sh b/.ci/functions/imports.sh index e022a3be..3fb28cc3 100644 --- a/.ci/functions/imports.sh +++ b/.ci/functions/imports.sh @@ -18,7 +18,7 @@ require_stack_version if [[ -z $es_node_name ]]; then # only set these once set -euo pipefail - export TEST_SUITE=${TEST_SUITE-oss} + export TEST_SUITE=${TEST_SUITE-free} export RUNSCRIPTS=${RUNSCRIPTS-} export DETACH=${DETACH-false} export CLEANUP=${CLEANUP-false} @@ -27,8 +27,7 @@ if [[ -z $es_node_name ]]; then export elastic_password=changeme export elasticsearch_image=elasticsearch export elasticsearch_url=https://elastic:${elastic_password}@${es_node_name}:9200 - if [[ $TEST_SUITE != "xpack" ]]; then - export elasticsearch_image=elasticsearch-${TEST_SUITE} + if [[ $TEST_SUITE != "platinum" ]]; then export elasticsearch_url=http://${es_node_name}:9200 fi export external_elasticsearch_url=${elasticsearch_url/$es_node_name/localhost} diff --git a/.ci/run-elasticsearch.sh b/.ci/run-elasticsearch.sh index f9d13906..3fb71425 100755 --- a/.ci/run-elasticsearch.sh +++ b/.ci/run-elasticsearch.sh @@ -4,7 +4,7 @@ # to form a cluster suitable for running the REST API tests. # # Export the STACK_VERSION variable, eg. '8.0.0-SNAPSHOT'. -# Export the TEST_SUITE variable, eg. 'oss' or 'xpack' defaults to 'oss'. +# Export the TEST_SUITE variable, eg. 'free' or 'platinum' defaults to 'free'. 
# Export the NUMBER_OF_NODES variable to start more than 1 node # Version 1.1.0 @@ -38,7 +38,7 @@ environment=($(cat <<-END --env repositories.url.allowed_urls=http://snapshot.test* END )) -if [[ "$TEST_SUITE" == "xpack" ]]; then +if [[ "$TEST_SUITE" == "platinum" ]]; then environment+=($(cat <<-END --env ELASTIC_PASSWORD=$elastic_password --env xpack.license.self_generated.type=trial @@ -63,7 +63,7 @@ END fi cert_validation_flags="" -if [[ "$TEST_SUITE" == "xpack" ]]; then +if [[ "$TEST_SUITE" == "platinum" ]]; then cert_validation_flags="--insecure --cacert /usr/share/elasticsearch/config/certs/ca.crt --resolve ${es_node_name}:443:127.0.0.1" fi diff --git a/.ci/run-repository.sh b/.ci/run-repository.sh index 08729ba7..bcb27753 100644 --- a/.ci/run-repository.sh +++ b/.ci/run-repository.sh @@ -3,7 +3,7 @@ # STACK_VERSION -- version e.g Major.Minor.Patch(-Prelease) -# TEST_SUITE -- which test suite to run: oss or xpack +# TEST_SUITE -- which test suite to run: free or platinum # ELASTICSEARCH_URL -- The url at which elasticsearch is reachable, a default is composed based on STACK_VERSION and TEST_SUITE # RUST_TOOLCHAIN -- Rust toolchain version to compile and run tests script_path=$(dirname $(realpath -s $0)) diff --git a/.ci/test-matrix.yml b/.ci/test-matrix.yml index 553ca54e..6936e68c 100644 --- a/.ci/test-matrix.yml +++ b/.ci/test-matrix.yml @@ -4,8 +4,8 @@ STACK_VERSION: - 7.x-SNAPSHOT TEST_SUITE: - - oss - - xpack + - free + - platinum RUST_TOOLCHAIN: - latest diff --git a/.dockerignore b/.dockerignore index 120a8907..f3cb8460 100644 --- a/.dockerignore +++ b/.dockerignore @@ -6,8 +6,9 @@ target docs .vscode .idea +*.iml api_generator/src/bin yaml_test_runner/tests -!yaml_test_runner/tests/common \ No newline at end of file +!yaml_test_runner/tests/common diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index e8f986b3..7c90562e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -79,10 +79,10 @@ The `Elasticsearch` category of steps are specifically defined 
for this project - `STACK_VERSION`: Elasticsearch version like `7.9.0` or can be a snapshot release like `7.x-SNAPSHOT` - - `TEST_SUITE`: Elasticsearch distribution of `oss` or `xpack` + - `TEST_SUITE`: Elasticsearch distribution of `free` or `platinum` ```sh - cargo make test-yaml --env STACK_VERSION=7.9.0 --env TEST_SUITE=oss + cargo make test-yaml --env STACK_VERSION=7.9.0 --env TEST_SUITE=free ``` ### Packages @@ -198,4 +198,4 @@ if wishing to use the MSVC debugger with Rust in VS code, which may be preferred } ``` -3. Add `"debug.allowBreakpointsEverywhere": true` to VS code settings.json \ No newline at end of file +3. Add `"debug.allowBreakpointsEverywhere": true` to VS code settings.json diff --git a/Makefile.toml b/Makefile.toml index 5ef8c347..823adabe 100644 --- a/Makefile.toml +++ b/Makefile.toml @@ -3,24 +3,24 @@ default_to_workspace = false [env] # Determines the version of Elasticsearch docker container used -STACK_VERSION = "7.x-SNAPSHOT" -# Determines the distribution of docker container used. Either xpack or oss -TEST_SUITE = "xpack" +STACK_VERSION = { value = "7.x-SNAPSHOT", condition = { env_not_set = ["STACK_VERSION"] }} +# Determines the distribution of docker container used. 
Either platinum or free +TEST_SUITE = { value = "free", condition = { env_not_set = ["TEST_SUITE"] }} # Set publish flags to dry-run by default, to force user to explicitly define for publishing CARGO_MAKE_CARGO_PUBLISH_FLAGS = "--dry-run" -[tasks.set-oss-env] +[tasks.set-free-env] category = "Elasticsearch" -description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when oss test suite used" +description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when free test suite used" private = true -condition = { env = { "TEST_SUITE" = "oss" }, env_not_set = ["ELASTICSEARCH_URL"] } +condition = { env = { "TEST_SUITE" = "free" }, env_not_set = ["ELASTICSEARCH_URL"] } env = { "ELASTICSEARCH_URL" = "http://localhost:9200" } -[tasks.set-xpack-env] +[tasks.set-platinum-env] category = "Elasticsearch" -description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when xpack test suite used" +description = "Sets ELASTICSEARCH_URL environment variable if not already set for later tasks when platinum test suite used" private = true -condition = { env = { "TEST_SUITE" = "xpack" }, env_not_set = ["ELASTICSEARCH_URL"] } +condition = { env = { "TEST_SUITE" = "platinum" }, env_not_set = ["ELASTICSEARCH_URL"] } env = { "ELASTICSEARCH_URL" = "https://elastic:changeme@localhost:9200" } [tasks.run-yaml-test-runner] @@ -52,7 +52,7 @@ dependencies = ["generate-yaml-tests"] [tasks.test-elasticsearch] category = "Elasticsearch" private = true -condition = { env_set = [ "ELASTICSEARCH_URL" ], env = { "TEST_SUITE" = "xpack" } } +condition = { env_set = [ "ELASTICSEARCH_URL" ], env = { "TEST_SUITE" = "platinum" } } command = "cargo" args = ["test", "-p", "elasticsearch"] dependencies = ["start-elasticsearch"] @@ -85,7 +85,7 @@ dependencies = ["install-cargo2junit"] category = "Elasticsearch" private = true condition = { env_set = [ "STACK_VERSION", "TEST_SUITE" ], env_false = ["CARGO_MAKE_CI"] } 
-dependencies = ["set-oss-env", "set-xpack-env"] +dependencies = ["set-free-env", "set-platinum-env"] [tasks.run-elasticsearch.linux] command = "./.ci/run-elasticsearch.sh" @@ -120,17 +120,19 @@ exec cargo publish %{CARGO_MAKE_CARGO_PUBLISH_FLAGS} [tasks.start-elasticsearch] extend = "run-elasticsearch" +private = false description = "Starts Elasticsearch docker container with the given version and distribution" env = { "CLEANUP" = false, "DETACH" = true } [tasks.stop-elasticsearch] extend = "run-elasticsearch" +private = false description = "Stops Elasticsearch docker container, if running" env = { "CLEANUP" = true, "DETACH" = false } [tasks.test-yaml] category = "Elasticsearch" -description = "Generates and runs yaml_test_runner package xpack/oss tests against a given Elasticsearch version" +description = "Generates and runs yaml_test_runner package platinum/free tests against a given Elasticsearch version" condition = { env_set = [ "STACK_VERSION", "TEST_SUITE" ] } dependencies = ["generate-yaml-tests", "create-test-results-dir", "test-yaml-test-runner", "test-yaml-test-runner-ci", "convert-test-results-junit"] run_task = "stop-elasticsearch" @@ -146,7 +148,7 @@ args = ["test", "-p", "api_generator"] category = "Elasticsearch" clear = true description = "Runs elasticsearch package tests against a given Elasticsearch version" -env = { "TEST_SUITE" = { value = "xpack", condition = { env_set = ["TEST_SUITE"] } } } +env = { "TEST_SUITE" = { value = "platinum", condition = { env_set = ["TEST_SUITE"] } } } dependencies = ["test-elasticsearch"] run_task = "stop-elasticsearch" @@ -167,7 +169,7 @@ description = "Generate Elasticsearch client documentation and opens in browser" clear = true category = "Elasticsearch" command = "cargo" -args = ["doc", "-p", "elasticsearch", "--no-deps", "--open"] +args = ["doc", "-p", "elasticsearch", "--no-deps", "--open", "--all-features"] [tasks.generate-release-notes] category = "Elasticsearch" @@ -217,7 +219,7 @@ script = [''' echo - 
start-elasticsearch: Starts Elasticsearch docker container with the given version and distribution echo - stop-elasticsearch: Stops Elasticsearch docker container, if running echo - echo - test-yaml: Generates and runs yaml_test_runner package xpack/oss tests against a given Elasticsearch version + echo - test-yaml: Generates and runs yaml_test_runner package platinum/free tests against a given Elasticsearch version echo - test-generator: Generates and runs api_generator package tests echo - test: Runs elasticsearch package tests against a given Elasticsearch version echo @@ -230,7 +232,7 @@ script = [''' echo echo Most tasks use these environment variables: echo - STACK_VERSION (default '${STACK_VERSION}'): the version of Elasticsearch - echo - TEST_SUITE ('oss' or 'xpack', default '${TEST_SUITE}'): the distribution of Elasticsearch + echo - TEST_SUITE ('free' or 'platinum', default '${TEST_SUITE}'): the distribution of Elasticsearch echo - CI (default not set): set when running on CI to determine whether to start Elasticsearch and format test output as JSON echo echo Run 'cargo make --list-all-steps' for a complete list of available tasks. diff --git a/README.md b/README.md index 83087064..262801dd 100644 --- a/README.md +++ b/README.md @@ -46,6 +46,11 @@ The following are a list of Cargo features that can be enabled or disabled: - **native-tls** *(enabled by default)*: Enables TLS functionality provided by `native-tls`. - **rustls-tls**: Enables TLS functionality provided by `rustls`. +- **beta-apis**: Enables beta APIs. Beta APIs are on track to become stable and permanent features. Use them with + caution because it is possible that breaking changes are made to these APIs in a minor version. +- **experimental-apis**: Enables experimental APIs. Experimental APIs are just that - an experiment. An experimental + API might have breaking changes in any future version, or it might even be removed entirely. This feature also + enables `beta-apis`. 
## Getting started diff --git a/api_generator/Cargo.toml b/api_generator/Cargo.toml index 68377642..a4516cfe 100644 --- a/api_generator/Cargo.toml +++ b/api_generator/Cargo.toml @@ -16,7 +16,9 @@ flate2 = "~1" globset = "~0.4" Inflector = "0.11.4" indicatif = "0.12.0" +itertools = "0.10.0" lazy_static = "1.4.0" +log = "0.4.8" path-slash = "0.1.3" quote = "~0.3" reduce = "0.1.2" @@ -26,6 +28,7 @@ semver = "0.9.0" serde = "~1" serde_json = "~1" serde_derive = "~1" +simple_logger = "1.9.0" syn = { version = "~0.11", features = ["full"] } tar = "~0.4" toml = "0.5.6" diff --git a/api_generator/docs/namespaces/autoscaling.md b/api_generator/docs/namespaces/autoscaling.md new file mode 100644 index 00000000..46513be4 --- /dev/null +++ b/api_generator/docs/namespaces/autoscaling.md @@ -0,0 +1,5 @@ +Autoscaling APIs + +The [autoscaling feature](https://www.elastic.co/guide/en/elasticsearch/reference/master/xpack-autoscaling.html) +enables an operator to configure tiers of nodes that self-monitor whether or not they need to scale based on an +operator-defined policy. diff --git a/api_generator/docs/namespaces/eql.md b/api_generator/docs/namespaces/eql.md new file mode 100644 index 00000000..903e300f --- /dev/null +++ b/api_generator/docs/namespaces/eql.md @@ -0,0 +1,6 @@ +EQL APIs + +[Event Query Language (EQL)](https://www.elastic.co/guide/en/elasticsearch/reference/master/eql.html) is a query +language for event-based time series data, such as logs, metrics, and traces. + + diff --git a/api_generator/docs/namespaces/logstash.md b/api_generator/docs/namespaces/logstash.md new file mode 100644 index 00000000..b6c04701 --- /dev/null +++ b/api_generator/docs/namespaces/logstash.md @@ -0,0 +1,4 @@ +Logstash APIs + +The [Logstash APIs](https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-apis.html) are used to +manage pipelines used by Logstash Central Management. 
diff --git a/api_generator/docs/namespaces/monitoring.md b/api_generator/docs/namespaces/monitoring.md new file mode 100644 index 00000000..41cb74b9 --- /dev/null +++ b/api_generator/docs/namespaces/monitoring.md @@ -0,0 +1,4 @@ +Monitoring APIs + +The Elastic Stack [monitoring features](https://www.elastic.co/guide/en/elasticsearch/reference/master/monitor-elasticsearch-cluster.html) +provide a way to keep a pulse on the health and performance of your Elasticsearch cluster. diff --git a/api_generator/docs/namespaces/rollup.md b/api_generator/docs/namespaces/rollup.md new file mode 100644 index 00000000..71fa9444 --- /dev/null +++ b/api_generator/docs/namespaces/rollup.md @@ -0,0 +1,5 @@ +Rollup APIs + +The Elastic Stack [data rollup features](https://www.elastic.co/guide/en/elasticsearch/reference/master/xpack-rollup.html) +provide a means to summarize and store historical data so that it can still be used for analysis, but at a fraction of +the storage cost of raw data. diff --git a/api_generator/docs/namespaces/searchable_snapshots.md b/api_generator/docs/namespaces/searchable_snapshots.md new file mode 100644 index 00000000..c903089e --- /dev/null +++ b/api_generator/docs/namespaces/searchable_snapshots.md @@ -0,0 +1,6 @@ +Searchable snapshot APIs + +[Searchable snapshots](https://www.elastic.co/guide/en/elasticsearch/reference/master/searchable-snapshots.html) let +you reduce your operating costs by using snapshots for resiliency rather than maintaining replica shards within a +cluster. This can result in significant cost savings for less frequently searched data. + diff --git a/api_generator/docs/namespaces/text_structure.md b/api_generator/docs/namespaces/text_structure.md new file mode 100644 index 00000000..42ab69e7 --- /dev/null +++ b/api_generator/docs/namespaces/text_structure.md @@ -0,0 +1,4 @@ +Text structure APIs + +Determines the structure of text and other information that will be useful to import its contents to an Elasticsearch +index. 
diff --git a/api_generator/src/bin/run.rs b/api_generator/src/bin/run.rs index d9166ede..ad4a5a98 100644 --- a/api_generator/src/bin/run.rs +++ b/api_generator/src/bin/run.rs @@ -28,6 +28,11 @@ use std::{ }; fn main() -> Result<(), failure::Error> { + simple_logger::SimpleLogger::new() + .with_level(log::LevelFilter::Info) + .init() + .unwrap(); + // This must be run from the repo root directory, with cargo make generate-api let download_dir = fs::canonicalize(PathBuf::from("./api_generator/rest_specs"))?; let generated_dir = fs::canonicalize(PathBuf::from("./elasticsearch/src"))?; diff --git a/api_generator/src/generator/code_gen/mod.rs b/api_generator/src/generator/code_gen/mod.rs index 5136b008..37095d65 100644 --- a/api_generator/src/generator/code_gen/mod.rs +++ b/api_generator/src/generator/code_gen/mod.rs @@ -22,7 +22,7 @@ pub mod request; pub mod root; pub mod url; -use crate::generator::TypeKind; +use crate::generator::{Stability, TypeKind}; use inflector::Inflector; use quote::Tokens; use std::str; @@ -72,6 +72,22 @@ fn doc>(comment: I) -> syn::Attribute { } } +fn stability_doc(stability: Stability) -> Option { + match stability { + Stability::Experimental => Some(doc(r#"  +# Optional, experimental +This requires the `experimental-apis` feature. Can have breaking changes in future +versions or might even be removed entirely. + "#)), + Stability::Beta => Some(doc(r#"  +# Optional, beta +This requires the `beta-apis` feature. On track to become stable but breaking changes can +happen in minor versions. 
+ "#)), + Stability::Stable => None, + } +} + /// AST for an expression parsed from quoted tokens pub fn parse_expr(input: quote::Tokens) -> syn::Expr { syn::parse_expr(input.to_string().as_ref()).unwrap() diff --git a/api_generator/src/generator/code_gen/namespace_clients.rs b/api_generator/src/generator/code_gen/namespace_clients.rs index b8e3e0c6..b27b26a1 100644 --- a/api_generator/src/generator/code_gen/namespace_clients.rs +++ b/api_generator/src/generator/code_gen/namespace_clients.rs @@ -28,12 +28,20 @@ use std::path::PathBuf; pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result, failure::Error> { let mut output = Vec::new(); - for (namespace, namespace_methods) in &api.namespaces { + for (namespace_name, namespace) in &api.namespaces { let mut tokens = Tokens::new(); + if let Some(attr) = namespace.stability.inner_cfg_attr() { + tokens.append(attr); + } + if let Some(mut attr) = stability_doc(namespace.stability) { + attr.style = syn::AttrStyle::Inner; + tokens.append(quote! 
{ #attr }); + } + tokens.append(use_declarations()); - let namespace_pascal_case = namespace.to_pascal_case(); - let namespace_replaced_pascal_case = namespace.replace("_", " ").to_pascal_case(); + let namespace_pascal_case = namespace_name.to_pascal_case(); + let namespace_replaced_pascal_case = namespace_name.replace("_", " ").to_pascal_case(); let namespace_client_name = ident(&namespace_pascal_case); let name_for_docs = match namespace_replaced_pascal_case.as_ref() { "Ccr" => "Cross Cluster Replication", @@ -53,9 +61,10 @@ pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result, "Creates a new instance of [{}]", &namespace_pascal_case )); - let namespace_name = ident(namespace.to_string()); + let namespace_name = ident(namespace_name.to_string()); - let (builders, methods): (Vec, Vec) = namespace_methods + let (builders, methods): (Vec, Vec) = namespace + .endpoints() .iter() .map(|(name, endpoint)| { let builder_name = format!("{}{}", &namespace_pascal_case, name.to_pascal_case()); @@ -72,14 +81,19 @@ pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result, }) .unzip(); + let cfg_attr = namespace.stability.outer_cfg_attr(); + let cfg_doc = stability_doc(namespace.stability); tokens.append(quote!( #(#builders)* #namespace_doc + #cfg_doc + #cfg_attr pub struct #namespace_client_name<'a> { transport: &'a Transport } + #cfg_attr impl<'a> #namespace_client_name<'a> { #new_namespace_client_doc pub fn new(transport: &'a Transport) -> Self { @@ -95,6 +109,7 @@ pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result, #(#methods)* } + #cfg_attr impl Elasticsearch { #namespace_fn_doc pub fn #namespace_name(&self) -> #namespace_client_name { @@ -104,7 +119,7 @@ pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result, )); let generated = tokens.to_string(); - output.push((namespace.to_string(), generated)); + output.push((namespace_name.to_string(), generated)); } Ok(output) diff --git a/api_generator/src/generator/code_gen/params.rs 
b/api_generator/src/generator/code_gen/params.rs index 169d4e0b..51491d77 100644 --- a/api_generator/src/generator/code_gen/params.rs +++ b/api_generator/src/generator/code_gen/params.rs @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ +use crate::generator::code_gen::stability_doc; use crate::generator::*; use inflector::Inflector; use quote::Tokens; @@ -64,9 +65,14 @@ fn generate_param(tokens: &mut Tokens, e: &ApiEnum) { None => None, }; + let cfg_attr = e.stability.outer_cfg_attr(); + let cfg_doc = stability_doc(e.stability); + let generated_enum_tokens = quote!( - #[derive(Debug, PartialEq, Deserialize, Serialize, Clone, Copy)] #doc + #cfg_doc + #cfg_attr + #[derive(Debug, PartialEq, Deserialize, Serialize, Clone, Copy)] pub enum #name { #(#[serde(rename = #renames)] #variants),* } diff --git a/api_generator/src/generator/code_gen/request/request_builder.rs b/api_generator/src/generator/code_gen/request/request_builder.rs index be1cb973..5febdc9e 100644 --- a/api_generator/src/generator/code_gen/request/request_builder.rs +++ b/api_generator/src/generator/code_gen/request/request_builder.rs @@ -626,19 +626,27 @@ impl<'a> RequestBuilder<'a> { api_name_for_docs )); + let cfg_attr = endpoint.stability.outer_cfg_attr(); + let cfg_doc = stability_doc(endpoint.stability); + quote! 
{ + #cfg_attr #enum_struct + #cfg_attr #enum_impl - #[derive(Clone, Debug)] #[doc = #builder_doc] + #cfg_doc + #cfg_attr + #[derive(Clone, Debug)] pub struct #builder_expr { transport: &'a Transport, parts: #enum_ty, #(#fields),*, } + #cfg_attr #builder_impl { #new_fn #(#builder_fns)* @@ -669,6 +677,9 @@ impl<'a> RequestBuilder<'a> { is_root_method: bool, enum_builder: &EnumBuilder, ) -> Tokens { + let cfg_attr = endpoint.stability.outer_cfg_attr(); + let cfg_doc = stability_doc(endpoint.stability); + let builder_ident = ident(builder_name); let (fn_name, builder_ident_ret) = { @@ -726,6 +737,8 @@ impl<'a> RequestBuilder<'a> { if enum_builder.contains_single_parameterless_part() { quote!( #method_doc + #cfg_doc + #cfg_attr pub fn #fn_name(&'a self) -> #builder_ident_ret { #builder_ident::new(#clone_expr) } @@ -734,6 +747,8 @@ impl<'a> RequestBuilder<'a> { let (enum_ty, _, _) = enum_builder.clone().build(); quote!( #method_doc + #cfg_doc + #cfg_attr pub fn #fn_name(&'a self, parts: #enum_ty) -> #builder_ident_ret { #builder_ident::new(#clone_expr, parts) } diff --git a/api_generator/src/generator/code_gen/root.rs b/api_generator/src/generator/code_gen/root.rs index db36284e..635102de 100644 --- a/api_generator/src/generator/code_gen/root.rs +++ b/api_generator/src/generator/code_gen/root.rs @@ -32,6 +32,7 @@ pub fn generate(api: &Api, docs_dir: &PathBuf) -> Result // AST for builder structs and methods let (builders, methods): (Vec, Vec) = api .root + .endpoints() .iter() .map(|(name, endpoint)| { let builder_name = name.to_pascal_case(); diff --git a/api_generator/src/generator/code_gen/url/enum_builder.rs b/api_generator/src/generator/code_gen/url/enum_builder.rs index 03ac8944..88986a3f 100644 --- a/api_generator/src/generator/code_gen/url/enum_builder.rs +++ b/api_generator/src/generator/code_gen/url/enum_builder.rs @@ -311,7 +311,7 @@ mod tests { #![cfg_attr(rustfmt, rustfmt_skip)] use super::*; - use crate::generator::{Url, Path, HttpMethod, Body, Deprecated, 
Type, TypeKind, Documentation, ast_eq}; + use crate::generator::{Url, Path, HttpMethod, Body, Deprecated, Type, TypeKind, Documentation, ast_eq, Stability}; use std::collections::BTreeMap; use crate::generator::code_gen::url::url_builder::PathString; @@ -326,7 +326,8 @@ mod tests { description: None, url: None, }, - stability: "stable".to_string(), + stability: Stability::Stable, + deprecated: None, url: Url { paths: vec![ Path { diff --git a/api_generator/src/generator/mod.rs b/api_generator/src/generator/mod.rs index 4e38494a..9c90f349 100644 --- a/api_generator/src/generator/mod.rs +++ b/api_generator/src/generator/mod.rs @@ -34,14 +34,17 @@ use std::{ }; #[cfg(test)] -use quote::{ToTokens, Tokens}; +use quote::ToTokens; +use quote::Tokens; use semver::Version; use void::Void; pub mod code_gen; pub mod output; +use itertools::Itertools; use output::{merge_file, write_file}; +use std::cmp::Ordering; lazy_static! { static ref VERSION: Version = semver::Version::parse(env!("CARGO_PKG_VERSION")).unwrap(); @@ -63,9 +66,9 @@ pub struct Api { /// parameters that are common to all API methods pub common_params: BTreeMap, /// root API methods e.g. Search, Index - pub root: BTreeMap, + pub root: ApiNamespace, /// namespace client methods e.g. 
Indices.Create, Ml.PutJob - pub namespaces: BTreeMap>, + pub namespaces: BTreeMap, /// enums in parameters pub enums: Vec, } @@ -79,9 +82,9 @@ impl Api { pub fn endpoint_for_api_call(&self, api_call: &str) -> Option<&ApiEndpoint> { let api_call_path: Vec<&str> = api_call.split('.').collect(); match api_call_path.len() { - 1 => self.root.get(api_call_path[0]), + 1 => self.root.endpoints().get(api_call_path[0]), _ => match self.namespaces.get(api_call_path[0]) { - Some(namespace) => namespace.get(api_call_path[1]), + Some(namespace) => namespace.endpoints().get(api_call_path[1]), None => None, }, } @@ -204,6 +207,37 @@ pub struct Deprecated { pub description: String, } +impl PartialOrd for Deprecated { + fn partial_cmp(&self, other: &Self) -> Option { + match ( + Version::parse(&self.version), + Version::parse(&other.version), + ) { + (Err(_), _) => None, + (_, Err(_)) => None, + (Ok(self_version), Ok(other_version)) => self_version.partial_cmp(&other_version), + } + } +} + +impl Deprecated { + /// Combine optional deprecations, keeping either lack of deprecation or the highest version + pub fn combine<'a>( + left: &'a Option, + right: &'a Option, + ) -> &'a Option { + if let (Some(leftd), Some(rightd)) = (left, right) { + if leftd > rightd { + left + } else { + right + } + } else { + &None + } + } +} + /// An API url path #[derive(Debug, PartialEq, Deserialize, Clone)] pub struct Path { @@ -347,14 +381,51 @@ where deserializer.deserialize_any(StringOrStruct(PhantomData)) } +/// Stability level of an API endpoint. Ordering defines increasing stability level, i.e. +/// `beta` is "more stable" than `experimental`. 
+#[derive(Debug, Eq, PartialEq, Deserialize, Clone, Copy, Ord, PartialOrd)] +pub enum Stability { + #[serde(rename = "experimental")] + Experimental, + #[serde(rename = "beta")] + Beta, + #[serde(rename = "stable")] + Stable, +} + +impl Stability { + pub fn feature_name(self) -> Option<&'static str> { + match self { + Stability::Experimental => Some("experimental-apis"), + Stability::Beta => Some("beta-apis"), + Stability::Stable => None, + } + } + + /// Returns the (optional) feature configuration for this stability level as an outer + /// attribute, for use e.g. on function definitions. + pub fn outer_cfg_attr(self) -> Option { + let feature_name = self.feature_name(); + feature_name.map(|name| quote!(#[cfg(feature = #name)])) + } + + /// Returns the (optional) feature configuration for this stability level as an inner + /// attribute, for use e.g. at the top of a module source file + pub fn inner_cfg_attr(self) -> Option { + let feature_name = self.feature_name(); + feature_name.map(|name| quote!(#![cfg(feature = #name)])) + } +} + /// An API endpoint defined in the REST API specs #[derive(Debug, PartialEq, Deserialize, Clone)] pub struct ApiEndpoint { pub full_name: Option, #[serde(deserialize_with = "string_or_struct")] documentation: Documentation, - pub stability: String, + pub stability: Stability, pub url: Url, + pub deprecated: Option, #[serde(default = "BTreeMap::new")] pub params: BTreeMap, pub body: Option, @@ -382,6 +453,34 @@ impl ApiEndpoint { } } +pub struct ApiNamespace { + stability: Stability, + endpoints: BTreeMap, +} + +impl ApiNamespace { + pub fn new() -> Self { + ApiNamespace { + stability: Stability::Experimental, // will grow in stability as we add endpoints + endpoints: BTreeMap::new(), + } + } + + pub fn add(&mut self, name: String, endpoint: ApiEndpoint) { + // Stability of a namespace is that of the most stable of its endpoints + self.stability = Stability::max(self.stability, endpoint.stability); + self.endpoints.insert(name, 
endpoint); + } + + pub fn stability(&self) -> Stability { + self.stability + } + + pub fn endpoints(&self) -> &BTreeMap { + &self.endpoints + } +} + /// Common parameters accepted by all API endpoints #[derive(Debug, PartialEq, Deserialize, Clone)] pub struct Common { @@ -396,6 +495,7 @@ pub struct ApiEnum { pub name: String, pub description: Option, pub values: Vec, + pub stability: Stability, // inherited from the declaring API } impl Hash for ApiEnum { @@ -499,7 +599,7 @@ pub use bulk::*; /// Reads Api from a directory of REST Api specs pub fn read_api(branch: &str, download_dir: &PathBuf) -> Result { let paths = fs::read_dir(download_dir)?; - let mut namespaces = BTreeMap::new(); + let mut namespaces = BTreeMap::::new(); let mut enums: HashSet = HashSet::new(); let mut common_params = BTreeMap::new(); let root_key = "root"; @@ -517,8 +617,8 @@ pub fn read_api(branch: &str, download_dir: &PathBuf) -> Result Result = + let endpoints: BTreeMap = serde_json::from_reader(reader).map_err(|e| super::error::ParseError { message: format!("Failed to parse {} because: {}", name, e), })?; // get the first (and only) endpoint name and endpoint body - let mut first_endpoint = endpoint.into_iter().next().unwrap(); - first_endpoint.1.full_name = Some(first_endpoint.0.clone()); + let (name, mut endpoint) = endpoints.into_iter().next().unwrap(); + endpoint.full_name = Some(name.clone()); // sort the HTTP methods so that we can easily pattern match on them later - for path in first_endpoint.1.url.paths.iter_mut() { + for path in endpoint.url.paths.iter_mut() { path.methods.sort(); } - Ok(first_endpoint) + // endpoint deprecation is the "least deprecated" of its paths + let deprecation = endpoint + .url + .paths + .iter() + .map(|p| &p.deprecated) + .fold1(|d1, d2| Deprecated::combine(d1, d2)) + .unwrap_or(&None); + + if let Some(deprecated) = deprecation { + endpoint.deprecated = Some(Deprecated { + version: deprecated.version.clone(), + description: "Deprecated via one of the 
child items".to_string(), + }) + } + + Ok((name, endpoint)) } /// deserializes Common from a file @@ -627,3 +744,30 @@ where pub fn ast_eq(expected: Tokens, actual: T) { assert_eq!(expected, quote!(#actual)); } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn stability_ordering() { + assert!(Stability::Beta > Stability::Experimental); + assert!(Stability::Stable > Stability::Beta); + } + + #[test] + fn combine_deprecations() { + let d1 = Some(Deprecated { + version: "7.5.0".to_string(), + description: "foo".to_string(), + }); + + let d2 = Some(Deprecated { + version: "7.6.0".to_string(), + description: "foo".to_string(), + }); + + assert_eq!(&d2, Deprecated::combine(&d1, &d2)); + assert_eq!(&None, Deprecated::combine(&d1, &None)); + } +} diff --git a/api_generator/src/generator/output.rs b/api_generator/src/generator/output.rs index 9c1d5c43..08697fed 100644 --- a/api_generator/src/generator/output.rs +++ b/api_generator/src/generator/output.rs @@ -59,6 +59,8 @@ pub fn write_file( file.write_all(line.as_bytes())?; file.write_all(b"\n")?; } + } else { + warn!("Missing docs file {:?}", docs) } } diff --git a/api_generator/src/lib.rs b/api_generator/src/lib.rs index a23289fc..42f2228a 100644 --- a/api_generator/src/lib.rs +++ b/api_generator/src/lib.rs @@ -4,6 +4,9 @@ #[macro_use] extern crate lazy_static; +#[macro_use] +extern crate log; + #[macro_use] extern crate quote; diff --git a/elasticsearch/Cargo.toml b/elasticsearch/Cargo.toml index acd201e6..3f1b1e2b 100644 --- a/elasticsearch/Cargo.toml +++ b/elasticsearch/Cargo.toml @@ -17,6 +17,10 @@ all-features = true [features] default = ["native-tls"] +# beta and experimental APIs +beta-apis = [] +experimental-apis = ["beta-apis"] + # optional TLS native-tls = ["reqwest/native-tls"] rustls-tls = ["reqwest/rustls-tls"] diff --git a/yaml_test_runner/Cargo.toml b/yaml_test_runner/Cargo.toml index 5e2ce9a3..fc3bdaf8 100644 --- a/yaml_test_runner/Cargo.toml +++ b/yaml_test_runner/Cargo.toml @@ -9,13 +9,13 @@ 
repository = "https://github.com/elastic/elasticsearch-rs" license = "Apache-2.0" [dependencies] -elasticsearch = { path = "./../elasticsearch" } +elasticsearch = { path = "./../elasticsearch", features = ["experimental-apis"]} api_generator = { path = "./../api_generator" } base64 = "^0.11" clap = "~2" failure = "0.1.6" -itertools = "0.8.2" +itertools = "0.10.0" Inflector = "0.11.4" lazy_static = "1.4.0" log = "0.4.8" diff --git a/yaml_test_runner/src/generator.rs b/yaml_test_runner/src/generator.rs index 56ecaec9..28b6101a 100644 --- a/yaml_test_runner/src/generator.rs +++ b/yaml_test_runner/src/generator.rs @@ -36,7 +36,7 @@ use yaml_rust::{Yaml, YamlLoader}; /// The test suite to compile #[derive(Debug, PartialEq)] pub enum TestSuite { - Oss, + Free, XPack, } @@ -121,7 +121,7 @@ impl<'a> YamlTests<'a> { let (setup_fn, setup_call) = Self::generate_fixture(&self.setup); let (teardown_fn, teardown_call) = Self::generate_fixture(&self.teardown); let general_setup_call = match self.suite { - TestSuite::Oss => quote!(client::general_oss_setup().await?;), + TestSuite::Free => quote!(client::general_oss_setup().await?;), TestSuite::XPack => quote!(client::general_xpack_setup().await?;), }; @@ -406,9 +406,9 @@ pub fn generate_tests_from_yaml( } match top_dir.as_str() { - "oss" => TestSuite::Oss, + "free" => TestSuite::Free, "xpack" => TestSuite::XPack, - _ => panic!("Unknown test suite"), + _ => panic!("Unknown test suite {:?}", path), } }; diff --git a/yaml_test_runner/src/github.rs b/yaml_test_runner/src/github.rs index f8a29c3c..c4eaf9ec 100644 --- a/yaml_test_runner/src/github.rs +++ b/yaml_test_runner/src/github.rs @@ -67,7 +67,7 @@ pub fn download_test_suites(branch: &str, download_dir: &PathBuf) -> Result<(), let file = entry?; let path = file.path()?; if oss_test.is_match(&path) { - write_test_file(download_dir, "oss", file)?; + write_test_file(download_dir, "free", file)?; } else if xpack_test.is_match(&path) { write_test_file(download_dir, "xpack", file)?; 
} diff --git a/yaml_test_runner/src/main.rs b/yaml_test_runner/src/main.rs index a011b2a4..161c9c70 100644 --- a/yaml_test_runner/src/main.rs +++ b/yaml_test_runner/src/main.rs @@ -148,13 +148,14 @@ fn branch_suite_and_version_from_elasticsearch( .danger_accept_invalid_certs(true) .build()?; + let suite = match std::env::var("TEST_SUITE") { + Err(_) => panic!("Env var TEST_SUITE is not defined"), + Ok(ref s) if s == "free" => TestSuite::Free, + _ => TestSuite::XPack, + }; let mut response = client.get(url).send()?; let json: Value = response.json()?; let branch = json["version"]["build_hash"].as_str().unwrap().to_string(); - let suite = match json["version"]["build_flavor"].as_str().unwrap() { - "oss" => TestSuite::Oss, - _ => TestSuite::XPack, - }; // any prerelease part needs to be trimmed because the semver crate only allows // a version with a prerelease to match against predicates, if at least one predicate diff --git a/yaml_test_runner/src/step/do.rs b/yaml_test_runner/src/step/do.rs index 8fd63006..715d3bc7 100644 --- a/yaml_test_runner/src/step/do.rs +++ b/yaml_test_runner/src/step/do.rs @@ -38,7 +38,7 @@ impl ToTokens for Catch { fn to_tokens(&self, tokens: &mut Tokens) { fn http_status_code(status_code: u16, tokens: &mut Tokens) { tokens.append(quote! 
{ - assert_status_code!(response.status_code(), #status_code); + assert_response_status_code!(response, #status_code); }); } diff --git a/yaml_test_runner/tests/common/client.rs b/yaml_test_runner/tests/common/client.rs index 84a671da..8c09797a 100644 --- a/yaml_test_runner/tests/common/client.rs +++ b/yaml_test_runner/tests/common/client.rs @@ -247,6 +247,35 @@ async fn wait_for_yellow_status(client: &Elasticsearch) -> Result<(), Error> { } async fn delete_indices(client: &Elasticsearch) -> Result<(), Error> { + // Hand-crafted request as the indices.delete_data_stream spec doesn't yet have the + // "expand_wildcards" parameter that is needed to delete ILM data streams + // + // Not deleting data streams yields errors like this when trying to delete hidden indices: + // { + // "type":"illegal_argument_exception" + // "reason":"index [.ds-ilm-history-5-2021.02.14-000001] is the write index for data + // stream [ilm-history-5] and cannot be deleted" + // } + // + // Quoting the docs: + // You cannot delete the current write index of a data stream. To delete the index, + // you must roll over the data stream so a new write index is created. You can then use + // the delete index API to delete the previous write index. + // + let delete_response = client + .transport() + .send( + Method::Delete, + "/_data_stream/*", + elasticsearch::http::headers::HeaderMap::new(), + Some(&[("expand_wildcards", "hidden")]), + None::<()>, // body + None, // timeout + ) + .await?; + + assert_response_success!(delete_response); + let delete_response = client .indices() .delete(IndicesDeleteParts::Index(&["*"])) diff --git a/yaml_test_runner/tests/common/macros.rs b/yaml_test_runner/tests/common/macros.rs index 592aa1ce..9ec262aa 100644 --- a/yaml_test_runner/tests/common/macros.rs +++ b/yaml_test_runner/tests/common/macros.rs @@ -17,14 +17,21 @@ * under the License. */ /// Asserts that a [Response] has a status code >=200 and <300 + #[macro_export] macro_rules! 
assert_response_success { ($response:ident) => {{ - assert!( - $response.status_code().is_success(), - "expected response to be successful but was {}", - $response.status_code().as_u16() - ); + let status_code = $response.status_code(); + if !status_code.is_success() { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be successful but was {}. Response: {}", + status_code, text + ); + } }}; } @@ -32,12 +39,37 @@ macro_rules! assert_response_success { #[macro_export] macro_rules! assert_response_success_or { ($response:ident, $status:expr) => {{ - assert!( - $response.status_code().is_success() || $response.status_code().as_u16() == $status, - "expected response to be successful or {} but was {}", - $status, - $response.status_code().as_u16() - ); + let status_code = $response.status_code(); + if !status_code.is_success() && status_code.as_u16() != $status { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be successful or {} but was {}", + $status, + status_code.as_u16() + ); + } + }}; +} + +/// Asserts that a [Response] has a status that matches the passed status +#[macro_export] +macro_rules! assert_response_status_code { + ($response:ident, $status:expr) => {{ + let status_code = $response.status_code(); + if status_code.as_u16() != $status { + let text = $response + .text() + .await + .unwrap_or("[no response]".to_string()); + panic!( + "expected response to be {} but was {}", + $status, + status_code.as_u16() + ); + } }}; }