From 1869680a11882bba819333ce14354d36afd7968a Mon Sep 17 00:00:00 2001
From: Andriy Redko
Date: Wed, 3 Apr 2024 13:53:34 -0400
Subject: [PATCH 1/6] Fix bulk API ignores ingest pipeline for upsert (update version) (#13056)

Signed-off-by: Andriy Redko
---
 .../resources/rest-api-spec/test/ingest/70_bulk.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/70_bulk.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/70_bulk.yml
index d95b1239b1cf2..d7be48a92908c 100644
--- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/70_bulk.yml
+++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/70_bulk.yml
@@ -149,8 +149,8 @@ teardown:
 ---
 "Test bulk honors pipeline in update action with upsert":
   - skip:
-      version: " - 2.99.99"
-      reason: "fixed in 3.0.0"
+      version: " - 2.13.99"
+      reason: "fixed in 2.14.0"
 
   - do:
       bulk:

From c00e8631b42e0a12039b8f5796e5b54f429e2a1f Mon Sep 17 00:00:00 2001
From: Sandesh Kumar
Date: Wed, 3 Apr 2024 11:09:52 -0700
Subject: [PATCH 2/6] Bump up commons-compress to 1.26.1 to fix CVE (#12627)

* Bump up commons-compress to 1.26.0 to fix CVE

Signed-off-by: Aman Khare

* Change log entry

Signed-off-by: Aman Khare

* Update ignoreMissingClasses

Signed-off-by: Aman Khare

* Update commons-codec and commons-lang3 dependencies also

Signed-off-by: Aman Khare

* Upgrade commons-codec to 1.16.1

Signed-off-by: Aman Khare

* Add commons-io dependency in plugin-cli build.gradle

Signed-off-by: Sandesh Kumar

* Revert "Update ignoreMissingClasses"

This reverts commit d92fbdafdccf9c01f6e628568770ad9f6e85aaaa.

Signed-off-by: Sandesh Kumar

* Adding SHA for commons-io-2.15.1.jar

Signed-off-by: Sandesh Kumar

* adding license, notice files for commons-io

Signed-off-by: Sandesh Kumar

* Add missing classes for thirdPartyAudit

Signed-off-by: Sandesh Kumar

* Refactor

Signed-off-by: Sandesh Kumar

* Test commit - to be reverted

Signed-off-by: Sandesh Kumar

* Bump commons-compress to 1.26.1, tika to 2.9.1

Signed-off-by: Sandesh Kumar

* Remove Charsets class from exclusion list - not missing

Signed-off-by: Sandesh Kumar

* Update tika to 2.9.2

Signed-off-by: Sandesh Kumar

* commons-io 2.16.0

Signed-off-by: Sandesh Kumar

* Refactor commons-io dependency mentions to avoid manual version setting/update

Signed-off-by: Sandesh Kumar

---------

Signed-off-by: Aman Khare
Signed-off-by: Sandesh Kumar
Co-authored-by: Aman Khare
---
 CHANGELOG.md | 4 +
 buildSrc/build.gradle | 6 +-
 buildSrc/version.properties | 7 +-
 .../rest/licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 distribution/tools/plugin-cli/build.gradle | 7 +-
 .../licenses/commons-compress-1.24.0.jar.sha1 | 1 -
 .../licenses/commons-compress-1.26.1.jar.sha1 | 1 +
 .../licenses/commons-io-2.16.0.jar.sha1 | 1 +
 .../licenses/commons-io-LICENSE.txt | 202 ++++++++++++++++++
 .../plugin-cli/licenses/commons-io-NOTICE.txt | 5 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 plugins/discovery-azure-classic/build.gradle | 2 +-
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 plugins/ingest-attachment/build.gradle | 20 +-
 .../apache-mime4j-core-0.8.11.jar.sha1 | 1 +
 .../apache-mime4j-core-0.8.8.jar.sha1 | 1 -
 .../apache-mime4j-dom-0.8.11.jar.sha1 | 1 +
 .../licenses/apache-mime4j-dom-0.8.8.jar.sha1 | 1 -
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-compress-1.24.0.jar.sha1 | 1 -
 .../licenses/commons-compress-1.26.1.jar.sha1 | 1 +
 .../licenses/commons-lang3-3.13.0.jar.sha1 | 1 -
 .../licenses/commons-lang3-3.14.0.jar.sha1 | 1 +
 .../licenses/fontbox-2.0.27.jar.sha1 | 1 -
 .../licenses/fontbox-2.0.31.jar.sha1 | 1 +
 .../licenses/pdfbox-2.0.27.jar.sha1 | 1 -
 .../licenses/pdfbox-2.0.31.jar.sha1 | 1 +
 .../licenses/poi-5.2.3.jar.sha1 | 1 -
 .../licenses/poi-5.2.5.jar.sha1 | 1 +
 .../licenses/poi-ooxml-5.2.3.jar.sha1 | 1 -
 .../licenses/poi-ooxml-5.2.5.jar.sha1 | 1 +
 .../licenses/poi-ooxml-lite-5.2.3.jar.sha1 | 1 -
 .../licenses/poi-ooxml-lite-5.2.5.jar.sha1 | 1 +
 .../licenses/poi-scratchpad-5.2.3.jar.sha1 | 1 -
 .../licenses/poi-scratchpad-5.2.5.jar.sha1 | 1 +
 .../licenses/tika-core-2.6.0.jar.sha1 | 1 -
 .../licenses/tika-core-2.9.2.jar.sha1 | 1 +
 .../tika-langdetect-optimaize-2.6.0.jar.sha1 | 1 -
 .../tika-langdetect-optimaize-2.9.2.jar.sha1 | 1 +
 .../tika-parser-apple-module-2.9.2.jar.sha1 | 1 +
 .../tika-parser-html-module-2.9.2.jar.sha1 | 1 +
 ...ika-parser-microsoft-module-2.9.2.jar.sha1 | 1 +
 ...ka-parser-miscoffice-module-2.9.2.jar.sha1 | 1 +
 .../tika-parser-pdf-module-2.9.2.jar.sha1 | 1 +
 .../tika-parser-text-module-2.9.2.jar.sha1 | 1 +
 .../tika-parser-xml-module-2.9.2.jar.sha1 | 1 +
 .../tika-parser-xmp-commons-2.9.2.jar.sha1 | 1 +
 .../tika-parser-zip-commons-2.9.2.jar.sha1 | 1 +
 ...ka-parsers-standard-package-2.6.0.jar.sha1 | 1 -
 ...ka-parsers-standard-package-2.9.2.jar.sha1 | 1 +
 .../licenses/commons-lang3-3.13.0.jar.sha1 | 1 -
 .../licenses/commons-lang3-3.14.0.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 plugins/repository-hdfs/build.gradle | 2 +-
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 .../licenses/commons-compress-1.24.0.jar.sha1 | 1 -
 .../licenses/commons-compress-1.26.1.jar.sha1 | 1 +
 .../licenses/commons-codec-1.15.jar.sha1 | 1 -
 .../licenses/commons-codec-1.16.1.jar.sha1 | 1 +
 73 files changed, 277 insertions(+), 42 deletions(-)
 delete mode 100644 client/rest/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 client/rest/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 client/sniffer/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 client/sniffer/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 distribution/tools/plugin-cli/licenses/commons-compress-1.24.0.jar.sha1
 create mode 100644 distribution/tools/plugin-cli/licenses/commons-compress-1.26.1.jar.sha1
 create mode 100644 distribution/tools/plugin-cli/licenses/commons-io-2.16.0.jar.sha1
 create mode 100644 distribution/tools/plugin-cli/licenses/commons-io-LICENSE.txt
 create mode 100644 distribution/tools/plugin-cli/licenses/commons-io-NOTICE.txt
 delete mode 100644 plugins/analysis-phonetic/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/analysis-phonetic/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/crypto-kms/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/crypto-kms/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/discovery-azure-classic/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/discovery-azure-classic/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/discovery-ec2/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/discovery-ec2/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/discovery-gce/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/discovery-gce/licenses/commons-codec-1.16.1.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.11.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.8.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.11.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.8.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.24.0.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.26.1.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/commons-lang3-3.14.0.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/fontbox-2.0.27.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/fontbox-2.0.31.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/pdfbox-2.0.27.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/pdfbox-2.0.31.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/poi-5.2.3.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/poi-5.2.5.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/poi-ooxml-5.2.3.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/poi-ooxml-5.2.5.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.3.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.5.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/poi-scratchpad-5.2.3.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/poi-scratchpad-5.2.5.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/tika-core-2.6.0.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-core-2.9.2.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.6.0.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-apple-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-html-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-microsoft-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-miscoffice-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-pdf-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-text-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-xml-module-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-xmp-commons-2.9.2.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parser-zip-commons-2.9.2.jar.sha1
 delete mode 100644 plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.6.0.jar.sha1
 create mode 100644 plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.9.2.jar.sha1
 delete mode 100644 plugins/repository-azure/licenses/commons-lang3-3.13.0.jar.sha1
 create mode 100644 plugins/repository-azure/licenses/commons-lang3-3.14.0.jar.sha1
 delete mode 100644 plugins/repository-gcs/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/repository-gcs/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/repository-hdfs/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-codec-1.16.1.jar.sha1
 delete mode 100644 plugins/repository-hdfs/licenses/commons-compress-1.24.0.jar.sha1
 create mode 100644 plugins/repository-hdfs/licenses/commons-compress-1.26.1.jar.sha1
 delete mode 100644 plugins/repository-s3/licenses/commons-codec-1.15.jar.sha1
 create mode 100644 plugins/repository-s3/licenses/commons-codec-1.16.1.jar.sha1

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6d81f7e2ff21d..1f9febf7013a9 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -120,6 +120,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Bump `net.minidev:json-smart` from 2.5.0 to 2.5.1 ([#12893](https://github.com/opensearch-project/OpenSearch/pull/12893))
 - Bump `netty` from 4.1.107.Final to 4.1.108.Final ([#12924](https://github.com/opensearch-project/OpenSearch/pull/12924))
 - Bump `commons-io:commons-io` from 2.15.1 to 2.16.0 ([#12996](https://github.com/opensearch-project/OpenSearch/pull/12996), [#12998](https://github.com/opensearch-project/OpenSearch/pull/12998), [#12999](https://github.com/opensearch-project/OpenSearch/pull/12999))
+- Bump `org.apache.commons:commons-compress` from 1.24.0 to 1.26.1 ([#12627](https://github.com/opensearch-project/OpenSearch/pull/12627))
+- Bump `commons-codec:commons-codec` from 1.15 to 1.16.1 ([#12627](https://github.com/opensearch-project/OpenSearch/pull/12627))
+- Bump `org.apache.commons:commons-lang3` from 3.13.0 to 3.14.0 ([#12627](https://github.com/opensearch-project/OpenSearch/pull/12627))
+- Bump Apache Tika from 2.6.0 to 2.9.2 ([#12627](https://github.com/opensearch-project/OpenSearch/pull/12627))
 
 ### Changed
 - [BWC and API enforcement] Enforcing the presence of API annotations at build time ([#12872](https://github.com/opensearch-project/OpenSearch/pull/12872))
diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle
index 0562ecc6ee61b..c68cc0406d3a6 100644
--- a/buildSrc/build.gradle
+++ b/buildSrc/build.gradle
@@ -102,14 +102,14 @@ dependencies {
   api localGroovy()
 
-  api 'commons-codec:commons-codec:1.16.0'
-  api 'org.apache.commons:commons-compress:1.25.0'
+  api "commons-codec:commons-codec:${props.getProperty('commonscodec')}"
+  api "org.apache.commons:commons-compress:${props.getProperty('commonscompress')}"
   api 'org.apache.ant:ant:1.10.14'
   api 'com.netflix.nebula:gradle-extra-configurations-plugin:10.0.0'
   api 'com.netflix.nebula:nebula-publishing-plugin:21.0.0'
   api 'com.netflix.nebula:gradle-info-plugin:12.1.6'
   api 'org.apache.rat:apache-rat:0.15'
-  api 'commons-io:commons-io:2.15.1'
+  api "commons-io:commons-io:${props.getProperty('commonsio')}"
   api "net.java.dev.jna:jna:5.14.0"
   api 'com.github.johnrengelman:shadow:8.1.1'
   api 'org.jdom:jdom2:2.0.6.1'
diff --git a/buildSrc/version.properties b/buildSrc/version.properties
index 5c9cd25bb79ad..c60f42dc1bb89 100644
--- a/buildSrc/version.properties
+++ b/buildSrc/version.properties
@@ -40,9 +40,10 @@ httpclient = 4.5.14
 httpcore = 4.4.16
 httpasyncclient = 4.1.5
 commonslogging = 1.2
-commonscodec = 1.15
-commonslang = 3.13.0
-commonscompress = 1.24.0
+commonscodec = 1.16.1
+commonslang = 3.14.0
+commonscompress = 1.26.1
+commonsio = 2.16.0
 # plugin dependencies
 aws = 2.20.86
 reactivestreams = 1.0.4
diff --git a/client/rest/licenses/commons-codec-1.15.jar.sha1 b/client/rest/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/client/rest/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/client/rest/licenses/commons-codec-1.16.1.jar.sha1 b/client/rest/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/client/rest/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/client/sniffer/licenses/commons-codec-1.15.jar.sha1 b/client/sniffer/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/client/sniffer/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/client/sniffer/licenses/commons-codec-1.16.1.jar.sha1 b/client/sniffer/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/client/sniffer/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle
index f40fb1c4b0a9f..446dbaad8466e 100644
--- a/distribution/tools/plugin-cli/build.gradle
+++ b/distribution/tools/plugin-cli/build.gradle
@@ -44,7 +44,7 @@ dependencies {
   testRuntimeOnly("com.google.guava:guava:${versions.guava}") {
     transitive = false
   }
-
+  api "commons-io:commons-io:${versions.commonsio}"
   implementation "org.apache.commons:commons-compress:${versions.commonscompress}"
 }
@@ -104,5 +104,8 @@ thirdPartyAudit.ignoreMissingClasses(
   'org.tukaani.xz.MemoryLimitException',
   'org.tukaani.xz.UnsupportedOptionsException',
   'org.tukaani.xz.XZ',
-  'org.tukaani.xz.XZOutputStream'
+  'org.tukaani.xz.XZOutputStream',
+  'org.apache.commons.codec.digest.PureJavaCrc32C',
+  'org.apache.commons.codec.digest.XXHash32',
+  'org.apache.commons.lang3.reflect.FieldUtils'
 )
diff --git a/distribution/tools/plugin-cli/licenses/commons-compress-1.24.0.jar.sha1 b/distribution/tools/plugin-cli/licenses/commons-compress-1.24.0.jar.sha1
deleted file mode 100644
index 23999d1bfbde4..0000000000000
--- a/distribution/tools/plugin-cli/licenses/commons-compress-1.24.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b4b1b5a3d9573b2970fddab236102c0a4d27d35e
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/commons-compress-1.26.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/commons-compress-1.26.1.jar.sha1
new file mode 100644
index 0000000000000..912bda85de18a
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/commons-compress-1.26.1.jar.sha1
@@ -0,0 +1 @@
+44331c1130c370e726a2e1a3e6fba6d2558ef04a
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/commons-io-2.16.0.jar.sha1 b/distribution/tools/plugin-cli/licenses/commons-io-2.16.0.jar.sha1
new file mode 100644
index 0000000000000..6a7b638719fa3
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/commons-io-2.16.0.jar.sha1
@@ -0,0 +1 @@
+27875a7935f1ddcc13267eb6fae1f719e0409572
\ No newline at end of file
diff --git a/distribution/tools/plugin-cli/licenses/commons-io-LICENSE.txt b/distribution/tools/plugin-cli/licenses/commons-io-LICENSE.txt
new file mode 100644
index 0000000000000..d645695673349
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/commons-io-LICENSE.txt
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!) The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/distribution/tools/plugin-cli/licenses/commons-io-NOTICE.txt b/distribution/tools/plugin-cli/licenses/commons-io-NOTICE.txt
new file mode 100644
index 0000000000000..a6b77d1eb6089
--- /dev/null
+++ b/distribution/tools/plugin-cli/licenses/commons-io-NOTICE.txt
@@ -0,0 +1,5 @@
+Apache Commons IO
+Copyright 2002-2014 The Apache Software Foundation
+
+This product includes software developed at
+The Apache Software Foundation (http://www.apache.org/).
diff --git a/plugins/analysis-phonetic/licenses/commons-codec-1.15.jar.sha1 b/plugins/analysis-phonetic/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/analysis-phonetic/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/analysis-phonetic/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/analysis-phonetic/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/analysis-phonetic/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/commons-codec-1.15.jar.sha1 b/plugins/crypto-kms/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/crypto-kms/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/crypto-kms/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/crypto-kms/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/crypto-kms/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle
index 68bda0933daa7..7f34cec94499c 100644
--- a/plugins/discovery-azure-classic/build.gradle
+++ b/plugins/discovery-azure-classic/build.gradle
@@ -53,7 +53,7 @@ dependencies {
   api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}"
   api "commons-codec:commons-codec:${versions.commonscodec}"
   api "commons-lang:commons-lang:2.6"
-  api "commons-io:commons-io:2.16.0"
+  api "commons-io:commons-io:${versions.commonsio}"
   api 'javax.mail:mail:1.4.7'
   api 'javax.inject:javax.inject:1'
   api "com.sun.jersey:jersey-client:${versions.jersey}"
diff --git a/plugins/discovery-azure-classic/licenses/commons-codec-1.15.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/discovery-azure-classic/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/discovery-azure-classic/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/discovery-azure-classic/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/commons-codec-1.15.jar.sha1 b/plugins/discovery-ec2/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/discovery-ec2/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/discovery-ec2/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/discovery-ec2/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/discovery-ec2/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/commons-codec-1.15.jar.sha1 b/plugins/discovery-gce/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/discovery-gce/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/discovery-gce/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/discovery-gce/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/discovery-gce/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle
index a931f45802318..4749aa911886d 100644
--- a/plugins/ingest-attachment/build.gradle
+++ b/plugins/ingest-attachment/build.gradle
@@ -38,10 +38,10 @@ opensearchplugin {
 }
 
 versions << [
-  'tika'  : '2.6.0',
-  'pdfbox': '2.0.27',
-  'poi'   : '5.2.3',
-  'mime4j': '0.8.8'
+  'tika'  : '2.9.2',
+  'pdfbox': '2.0.31',
+  'poi'   : '5.2.5',
+  'mime4j': '0.8.11'
 ]
 
 dependencies {
@@ -50,6 +50,16 @@ dependencies {
   api "org.apache.tika:tika-parsers:${versions.tika}"
   // Required for the various document parsers
   api "org.apache.tika:tika-parsers-standard-package:${versions.tika}"
+  api "org.apache.tika:tika-parser-apple-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-html-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-microsoft-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-miscoffice-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-pdf-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-text-module:${versions.tika}"
+  api "org.apache.tika:tika-parser-xml-module:${versions.tika}"
+  // Utilities consumed by document parsers
+  api "org.apache.tika:tika-parser-xmp-commons:${versions.tika}"
+  api "org.apache.tika:tika-parser-zip-commons:${versions.tika}"
   // Required for language detection
   api "org.apache.tika:tika-langdetect-optimaize:${versions.tika}"
   // Optimaize libraries/dependencies
   runtimeOnly "com.google.guava:guava:${versions.guava}"
   // Other dependencies
   api 'org.tukaani:xz:1.9'
-  api 'commons-io:commons-io:2.16.0'
+  api "commons-io:commons-io:${versions.commonsio}"
   api "org.slf4j:slf4j-api:${versions.slf4j}"
 
   // character set detection
diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.11.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.11.jar.sha1
new file mode 100644
index 0000000000000..82d9bf2617ce6
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.11.jar.sha1
@@ -0,0 +1 @@
+6d1eb5f7b84eaa9d38fca13b761f01c693aef3da
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.8.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.8.jar.sha1
deleted file mode 100644
index 77c36691d36b5..0000000000000
--- a/plugins/ingest-attachment/licenses/apache-mime4j-core-0.8.8.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7330de23c52f71617cbec7f1d2760dae32e687cd
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.11.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.11.jar.sha1
new file mode 100644
index 0000000000000..7a494aba6a231
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.11.jar.sha1
@@ -0,0 +1 @@
+f0d42ab9a5832b5f5d05afc004b31245b838e0fc
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.8.jar.sha1 b/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.8.jar.sha1
deleted file mode 100644
index fb9c5fed27162..0000000000000
--- a/plugins/ingest-attachment/licenses/apache-mime4j-dom-0.8.8.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-e76715563a6bd150f84ccb0adb920aec8faf4779
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-codec-1.15.jar.sha1 b/plugins/ingest-attachment/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/ingest-attachment/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.24.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.24.0.jar.sha1
deleted file mode 100644
index 23999d1bfbde4..0000000000000
--- a/plugins/ingest-attachment/licenses/commons-compress-1.24.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b4b1b5a3d9573b2970fddab236102c0a4d27d35e
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.26.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.26.1.jar.sha1
new file mode 100644
index 0000000000000..912bda85de18a
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/commons-compress-1.26.1.jar.sha1
@@ -0,0 +1 @@
+44331c1130c370e726a2e1a3e6fba6d2558ef04a
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1
deleted file mode 100644
index d0c2f2486ee1f..0000000000000
--- a/plugins/ingest-attachment/licenses/commons-lang3-3.13.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b7263237aa89c1f99b327197c41d0669707a462e
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/commons-lang3-3.14.0.jar.sha1 b/plugins/ingest-attachment/licenses/commons-lang3-3.14.0.jar.sha1
new file mode 100644
index 0000000000000..d783e07e40902
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/commons-lang3-3.14.0.jar.sha1
@@ -0,0 +1 @@
+1ed471194b02f2c6cb734a0cd6f6f107c673afae
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.27.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.27.jar.sha1
deleted file mode 100644
index d578dffbfa3f6..0000000000000
--- a/plugins/ingest-attachment/licenses/fontbox-2.0.27.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-d08c064d18b2b149da937d15c0d1708cba03f29d
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.31.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.31.jar.sha1
new file mode 100644
index 0000000000000..d45d45a66e072
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/fontbox-2.0.31.jar.sha1
@@ -0,0 +1 @@
+96999ecdb7324bf718b88724818fa62f81286c36
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.27.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.27.jar.sha1
deleted file mode 100644
index 4f670b7f95e8c..0000000000000
--- a/plugins/ingest-attachment/licenses/pdfbox-2.0.27.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-416a9dfce3714116bfdf793b15368df04266845f
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.31.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.31.jar.sha1
new file mode 100644
index 0000000000000..fa256ed9a65d2
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.31.jar.sha1
@@ -0,0 +1 @@
+29b25053099bc30784a766ccb821417e06f4b8a1
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-5.2.3.jar.sha1 b/plugins/ingest-attachment/licenses/poi-5.2.3.jar.sha1
deleted file mode 100644
index 3d8b3daf606ad..0000000000000
--- a/plugins/ingest-attachment/licenses/poi-5.2.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2fb22ae74ad5aea6af1a9c64b9542f2ccf348604
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-5.2.5.jar.sha1 b/plugins/ingest-attachment/licenses/poi-5.2.5.jar.sha1
new file mode 100644
index 0000000000000..0eca17726eb0b
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/poi-5.2.5.jar.sha1
@@ -0,0 +1 @@
+7e00f6b2f76375fe89022d5a7db8acb71cbd55f5
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-5.2.3.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-5.2.3.jar.sha1
deleted file mode 100644
index 8371593cf0841..0000000000000
--- a/plugins/ingest-attachment/licenses/poi-ooxml-5.2.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-02efd11c940adb18c03eb9ce7ad88fc40ee6a196
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-5.2.5.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-5.2.5.jar.sha1
new file mode 100644
index 0000000000000..6b14be4461425
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/poi-ooxml-5.2.5.jar.sha1
@@ -0,0 +1 @@
+df9f2c52371eeba24db8ea8cafa77285c3cc0742
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.3.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.3.jar.sha1
deleted file mode 100644
index 5c6365876b7be..0000000000000
--- a/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-db113c8e9051b0ff967f4911fa20336c8325a7c5
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.5.jar.sha1 b/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.5.jar.sha1
new file mode 100644
index 0000000000000..f9a473173a297
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/poi-ooxml-lite-5.2.5.jar.sha1
@@ -0,0 +1 @@
+eaa61452d8f0d13080fbb4757a392f09f90e4c49
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.3.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.3.jar.sha1
deleted file mode 100644
index 3c8f92498f1a4..0000000000000
--- a/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-2a7fce47e22b7fedb1b277347ff4fe36d6eda50d
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.5.jar.sha1 b/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.5.jar.sha1
new file mode 100644
index 0000000000000..68665ddafd7d8
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/poi-scratchpad-5.2.5.jar.sha1
@@ -0,0 +1 @@
+fc600cf765a49d73935a6e48a5b84f4abcdd0518
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-core-2.6.0.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-2.6.0.jar.sha1
deleted file mode 100644
index c66c2f3f39401..0000000000000
--- a/plugins/ingest-attachment/licenses/tika-core-2.6.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-f6ed6356dd4a9bd269d873f65494376685e6192e
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-core-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..80635a63d29fe
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-core-2.9.2.jar.sha1
@@ -0,0 +1 @@
+796a21391780339e3d4862626339b49df170024e
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.6.0.jar.sha1 b/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.6.0.jar.sha1
deleted file mode 100644
index e7bc59bb5ae49..0000000000000
--- a/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.6.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-72b784a7bdab0ffde005fa64d15e3f077331d6fc
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..a4bb6d48c6a08
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-langdetect-optimaize-2.9.2.jar.sha1
@@ -0,0 +1 @@
+7a48a287e464b456a85c79f318d7bad7db201518
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-apple-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-apple-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..dbaee880d1251
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-apple-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+758dac27c246c51b019562bab7e266d2da6a6e01
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-html-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-html-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..b4806746301ef
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-html-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+47f6a4c46b92616d14e82cd7ad4d05cb43077b83
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-microsoft-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-microsoft-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..da1ae42bac652
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-microsoft-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+235a20823c02c699ce3d57f3d6b9550db05d91a9
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-miscoffice-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-miscoffice-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..7ceed9e1643b8
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-miscoffice-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+7688a4220d07c32b505230479f957cd495c0bef2
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-pdf-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-pdf-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..e780c1b92d525
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-pdf-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+4d0f0e3f6eff184040402094f4fabbb3c5c7d09f
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-text-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-text-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..6e56fcffc5f88
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-text-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+b3a93e538ba6cb4066aba96d629febf181ec9f92
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-xml-module-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-xml-module-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..27062077b92bf
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-xml-module-2.9.2.jar.sha1
@@ -0,0 +1 @@
+ff707716c0c4748ffeb21996aefa8d269b3eab5b
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-xmp-commons-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-xmp-commons-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..396e2655b14db
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-xmp-commons-2.9.2.jar.sha1
@@ -0,0 +1 @@
+69104107ff85194df5acf682178128771863e442
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parser-zip-commons-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parser-zip-commons-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..bda62033e4e8c
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parser-zip-commons-2.9.2.jar.sha1
@@ -0,0 +1 @@
+2fcea85a56f93a5c0cb81f3d6dd8673f3d81c598
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.6.0.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.6.0.jar.sha1
deleted file mode 100644
index 83c0777fcbe8a..0000000000000
--- a/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.6.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-00980e70b1df13c1236b750f0ca1462edd5d7417
\ No newline at end of file
diff --git a/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.9.2.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.9.2.jar.sha1
new file mode 100644
index 0000000000000..bb76974b6344e
--- /dev/null
+++ b/plugins/ingest-attachment/licenses/tika-parsers-standard-package-2.9.2.jar.sha1
@@ -0,0 +1 @@
+c8408deb51fa617ef4e912b4d161712e695d3a29
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/commons-lang3-3.13.0.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.13.0.jar.sha1
deleted file mode 100644
index d0c2f2486ee1f..0000000000000
--- a/plugins/repository-azure/licenses/commons-lang3-3.13.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b7263237aa89c1f99b327197c41d0669707a462e
\ No newline at end of file
diff --git a/plugins/repository-azure/licenses/commons-lang3-3.14.0.jar.sha1 b/plugins/repository-azure/licenses/commons-lang3-3.14.0.jar.sha1
new file mode 100644
index 0000000000000..d783e07e40902
--- /dev/null
+++ b/plugins/repository-azure/licenses/commons-lang3-3.14.0.jar.sha1
@@ -0,0 +1 @@
+1ed471194b02f2c6cb734a0cd6f6f107c673afae
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/commons-codec-1.15.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/repository-gcs/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/repository-gcs/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle
index 6faf0383d3ba2..2c51bb4cbea53 100644
--- a/plugins/repository-hdfs/build.gradle
+++ b/plugins/repository-hdfs/build.gradle
@@ -75,7 +75,7 @@ dependencies {
   api 'commons-collections:commons-collections:3.2.2'
   api "org.apache.commons:commons-compress:${versions.commonscompress}"
   api 'org.apache.commons:commons-configuration2:2.10.1'
-  api 'commons-io:commons-io:2.16.0'
+  api "commons-io:commons-io:${versions.commonsio}"
   api 'org.apache.commons:commons-lang3:3.14.0'
   implementation 'com.google.re2j:re2j:1.7'
   api 'javax.servlet:servlet-api:2.5'
diff --git a/plugins/repository-hdfs/licenses/commons-codec-1.15.jar.sha1 b/plugins/repository-hdfs/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/repository-hdfs/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-compress-1.24.0.jar.sha1 b/plugins/repository-hdfs/licenses/commons-compress-1.24.0.jar.sha1
deleted file mode 100644
index 23999d1bfbde4..0000000000000
--- a/plugins/repository-hdfs/licenses/commons-compress-1.24.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-b4b1b5a3d9573b2970fddab236102c0a4d27d35e
\ No newline at end of file
diff --git a/plugins/repository-hdfs/licenses/commons-compress-1.26.1.jar.sha1 b/plugins/repository-hdfs/licenses/commons-compress-1.26.1.jar.sha1
new file mode 100644
index 0000000000000..912bda85de18a
--- /dev/null
+++ b/plugins/repository-hdfs/licenses/commons-compress-1.26.1.jar.sha1
@@ -0,0 +1 @@
+44331c1130c370e726a2e1a3e6fba6d2558ef04a
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/commons-codec-1.15.jar.sha1 b/plugins/repository-s3/licenses/commons-codec-1.15.jar.sha1
deleted file mode 100644
index 62d99837b87e1..0000000000000
--- a/plugins/repository-s3/licenses/commons-codec-1.15.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-49d94806b6e3dc933dacbd8acb0fdbab8ebd1e5d
\ No newline at end of file
diff --git a/plugins/repository-s3/licenses/commons-codec-1.16.1.jar.sha1 b/plugins/repository-s3/licenses/commons-codec-1.16.1.jar.sha1
new file mode 100644
index 0000000000000..6b8803089c6d7
--- /dev/null
+++ b/plugins/repository-s3/licenses/commons-codec-1.16.1.jar.sha1
@@ -0,0 +1 @@
+47bd4d333fba53406f6c6c51884ddbca435c8862
\ No newline at end of file

From 434dc6157021658d98841e20814025bd1a7ae8f8 Mon Sep 17 00:00:00 2001
From: "opensearch-trigger-bot[bot]" <98922864+opensearch-trigger-bot[bot]@users.noreply.github.com>
Date: Wed, 3 Apr 2024 17:41:50 -0400
Subject: [PATCH 3/6] [AUTO] [main] Add bwc version 2.13.1. (#13039)

* Add bwc version 2.13.1

Signed-off-by: GitHub

* Fix version identifier

Signed-off-by: Kunal Kotwani

---------

Signed-off-by: GitHub
Signed-off-by: Kunal Kotwani
Co-authored-by: opensearch-ci-bot
Co-authored-by: Kunal Kotwani
---
 .ci/bwcVersions | 1 +
 libs/core/src/main/java/org/opensearch/Version.java | 1 +
 2 files changed, 2 insertions(+)

diff --git a/.ci/bwcVersions b/.ci/bwcVersions
index 78d8796c624d7..f3a9aa2787a80 100644
--- a/.ci/bwcVersions
+++ b/.ci/bwcVersions
@@ -30,4 +30,5 @@ BWC_VERSION:
   - "2.12.0"
   - "2.12.1"
   - "2.13.0"
+  - "2.13.1"
   - "2.14.0"
diff --git a/libs/core/src/main/java/org/opensearch/Version.java b/libs/core/src/main/java/org/opensearch/Version.java
index 56df46ae94d44..f312c484a4842 100644
--- a/libs/core/src/main/java/org/opensearch/Version.java
+++ b/libs/core/src/main/java/org/opensearch/Version.java
@@ -101,6 +101,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_2_12_0 = new Version(2120099, org.apache.lucene.util.Version.LUCENE_9_9_2);
     public static final Version V_2_12_1 = new Version(2120199, org.apache.lucene.util.Version.LUCENE_9_9_2);
     public static final Version V_2_13_0 = new Version(2130099, org.apache.lucene.util.Version.LUCENE_9_10_0);
+    public static final Version V_2_13_1 = new Version(2130199, org.apache.lucene.util.Version.LUCENE_9_10_0);
     public static final Version V_2_14_0 = new Version(2140099, org.apache.lucene.util.Version.LUCENE_9_10_0);
     public static final Version V_3_0_0 = new Version(3000099, org.apache.lucene.util.Version.LUCENE_9_11_0);
     public static final Version CURRENT = V_3_0_0;

From e8c5daf6225d10d7a172ec1376b40707f43a1ff8 Mon Sep 17 00:00:00 2001
From: Mohammad Qureshi <47198598+qreshi@users.noreply.github.com>
Date: Wed, 3 Apr 2024 16:27:21 -0700
Subject: [PATCH 4/6] [Derived Fields] Add support for emitting multiple values in DerivedFieldScripts (#12837)

---------

Signed-off-by: Mohammad Qureshi
Signed-off-by: Rishabh Maurya
Co-authored-by: Rishabh Maurya
---
 .../painless/PainlessModulePlugin.java | 6 +
 .../painless/spi/org.opensearch.derived.txt | 17 ++
 .../painless/DerivedFieldScriptTests.java | 227 ++++++++++++++++++
 .../mapper/DerivedFieldValueFetcher.java | 4 +-
 .../opensearch/script/DerivedFieldScript.java | 67 +++++-
 .../opensearch/script/ScriptEmitValues.java | 63 +++++
 .../mapper/DerivedFieldMapperQueryTests.java | 4 +-
 .../index/query/DerivedFieldQueryTests.java | 4 +-
 .../opensearch/script/MockScriptEngine.java | 4 +-
 9 files changed, 381 insertions(+), 15 deletions(-)
 create mode 100644 modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.derived.txt
 create mode 100644 modules/lang-painless/src/test/java/org/opensearch/painless/DerivedFieldScriptTests.java
 create mode 100644 server/src/main/java/org/opensearch/script/ScriptEmitValues.java

diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
index c7638b3c41c63..55dc23f665d2e 100644
--- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
+++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessModulePlugin.java
@@ -60,6 +60,7 @@
 import org.opensearch.repositories.RepositoriesService;
 import org.opensearch.rest.RestController;
 import org.opensearch.rest.RestHandler;
+import org.opensearch.script.DerivedFieldScript;
 import org.opensearch.script.IngestScript;
 import org.opensearch.script.ScoreScript;
 import org.opensearch.script.ScriptContext;
@@ -108,6 +109,11 @@ public final class PainlessModulePlugin extends Plugin implements ScriptPlugin,
         ingest.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.ingest.txt"));
         map.put(IngestScript.CONTEXT, ingest);
 
+        // Functions available to derived fields
+        List<Allowlist> derived = new ArrayList<>(Allowlist.BASE_ALLOWLISTS);
+        derived.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.derived.txt"));
+        map.put(DerivedFieldScript.CONTEXT, derived);
+
         allowlists = map;
     }
diff --git a/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.derived.txt b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.derived.txt
new file mode 100644
index 0000000000000..9a3dd4894b286
--- /dev/null
+++ b/modules/lang-painless/src/main/resources/org/opensearch/painless/spi/org.opensearch.derived.txt
@@ -0,0 +1,17 @@
+#
+# SPDX-License-Identifier: Apache-2.0
+#
+# The OpenSearch Contributors require contributions made to
+# this file be licensed under the Apache-2.0 license or a
+# compatible open source license.
+#
+
+# This file contains an allowlist for functions to be used in derived field context
+
+class org.opensearch.script.DerivedFieldScript @no_import {
+}
+
+static_import {
+    void emit(org.opensearch.script.DerivedFieldScript, Object) bound_to org.opensearch.script.ScriptEmitValues$EmitSingle
+    void emit(org.opensearch.script.DerivedFieldScript, double, double) bound_to org.opensearch.script.ScriptEmitValues$GeoPoint
+}
diff --git a/modules/lang-painless/src/test/java/org/opensearch/painless/DerivedFieldScriptTests.java b/modules/lang-painless/src/test/java/org/opensearch/painless/DerivedFieldScriptTests.java
new file mode 100644
index 0000000000000..2340e5b238ebb
--- /dev/null
+++ b/modules/lang-painless/src/test/java/org/opensearch/painless/DerivedFieldScriptTests.java
@@ -0,0 +1,227 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */ + +package org.opensearch.painless; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.memory.MemoryIndex; +import org.opensearch.common.collect.Tuple; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.common.settings.Settings; +import org.opensearch.index.fielddata.IndexGeoPointFieldData; +import org.opensearch.index.fielddata.IndexNumericFieldData; +import org.opensearch.index.fielddata.LeafGeoPointFieldData; +import org.opensearch.index.fielddata.LeafNumericFieldData; +import org.opensearch.index.fielddata.MultiGeoPointValues; +import org.opensearch.index.fielddata.SortedNumericDoubleValues; +import org.opensearch.index.fielddata.plain.AbstractLeafGeoPointFieldData; +import org.opensearch.index.fielddata.plain.LeafDoubleFieldData; +import org.opensearch.index.mapper.GeoPointFieldMapper.GeoPointFieldType; +import org.opensearch.index.mapper.MapperService; +import org.opensearch.index.mapper.NumberFieldMapper.NumberFieldType; +import org.opensearch.index.mapper.NumberFieldMapper.NumberType; +import org.opensearch.painless.spi.Allowlist; +import org.opensearch.painless.spi.AllowlistLoader; +import org.opensearch.script.DerivedFieldScript; +import org.opensearch.script.ScriptContext; +import org.opensearch.script.ScriptException; +import org.opensearch.search.lookup.LeafSearchLookup; +import org.opensearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyInt; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class DerivedFieldScriptTests extends ScriptTestCase { + + private static PainlessScriptEngine SCRIPT_ENGINE; + + @Override + public void setUp() throws Exception { + super.setUp(); + + // Adding derived field script to the contexts for the script engine + Map, List> contexts = newDefaultContexts(); + List allowlists = new ArrayList<>(Allowlist.BASE_ALLOWLISTS); + allowlists.add(AllowlistLoader.loadFromResourceFiles(Allowlist.class, "org.opensearch.derived.txt")); + contexts.put(DerivedFieldScript.CONTEXT, allowlists); + + SCRIPT_ENGINE = new PainlessScriptEngine(Settings.EMPTY, contexts); + } + + @Override + public void tearDown() throws Exception { + super.tearDown(); + SCRIPT_ENGINE = null; + } + + @Override + protected PainlessScriptEngine getEngine() { + return SCRIPT_ENGINE; + } + + private DerivedFieldScript.LeafFactory compile(String expression, SearchLookup lookup) { + DerivedFieldScript.Factory factory = SCRIPT_ENGINE.compile( + "derived_script_test", + expression, + DerivedFieldScript.CONTEXT, + Collections.emptyMap() + ); + return factory.newFactory(Collections.emptyMap(), lookup); + } + + public void testEmittingDoubleField() throws IOException { + // Mocking field value to be returned + NumberFieldType fieldType = new NumberFieldType("test_double_field", NumberType.DOUBLE); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fieldType("test_double_field")).thenReturn(fieldType); + + SortedNumericDoubleValues doubleValues = mock(SortedNumericDoubleValues.class); + when(doubleValues.docValueCount()).thenReturn(1); + when(doubleValues.advanceExact(anyInt())).thenReturn(true); + when(doubleValues.nextValue()).thenReturn(2.718); + + LeafNumericFieldData atomicFieldData = mock(LeafDoubleFieldData.class); // SortedNumericDoubleFieldData + 
when(atomicFieldData.getDoubleValues()).thenReturn(doubleValues); + + IndexNumericFieldData fieldData = mock(IndexNumericFieldData.class); // SortedNumericIndexFieldData + when(fieldData.getFieldName()).thenReturn("test_double_field"); + when(fieldData.load(any())).thenReturn(atomicFieldData); + + SearchLookup lookup = new SearchLookup(mapperService, (ignored, searchLookup) -> fieldData); + + // We don't need a real index, just need to construct a LeafReaderContext which cannot be mocked + MemoryIndex index = new MemoryIndex(); + LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); + + // Execute the script + DerivedFieldScript script = compile("emit(doc['test_double_field'].value)", lookup).newInstance(leafReaderContext); + script.setDocument(1); + script.execute(); + + List result = script.getEmittedValues(); + assertEquals(List.of(2.718), result); + } + + public void testEmittingGeoPoint() throws IOException { + // Mocking field value to be returned + GeoPointFieldType fieldType = new GeoPointFieldType("test_geo_field"); + MapperService mapperService = mock(MapperService.class); + when(mapperService.fieldType("test_geo_field")).thenReturn(fieldType); + + MultiGeoPointValues geoPointValues = mock(MultiGeoPointValues.class); + when(geoPointValues.docValueCount()).thenReturn(1); + when(geoPointValues.advanceExact(anyInt())).thenReturn(true); + when(geoPointValues.nextValue()).thenReturn(new GeoPoint(5, 8)); + + LeafGeoPointFieldData atomicFieldData = mock(AbstractLeafGeoPointFieldData.class); // LatLonPointDVLeafFieldData + when(atomicFieldData.getGeoPointValues()).thenReturn(geoPointValues); + + IndexGeoPointFieldData fieldData = mock(IndexGeoPointFieldData.class); + when(fieldData.getFieldName()).thenReturn("test_geo_field"); + when(fieldData.load(any())).thenReturn(atomicFieldData); + + SearchLookup lookup = new SearchLookup(mapperService, (ignored, searchLookup) -> fieldData); + + // We don't need a real index, just need to construct a LeafReaderContext which cannot be mocked + MemoryIndex index = new MemoryIndex(); + LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); + + // Execute the script + DerivedFieldScript script = compile("emit(doc['test_geo_field'].value.getLat(), doc['test_geo_field'].value.getLon())", lookup) + .newInstance(leafReaderContext); + script.setDocument(1); + script.execute(); + + List result = script.getEmittedValues(); + assertEquals(List.of(new Tuple<>(5.0, 8.0)), result); + } + + public void testEmittingMultipleValues() throws IOException { + SearchLookup lookup = mock(SearchLookup.class); + + // We don't need a real index, just need to construct a LeafReaderContext which cannot be mocked + MemoryIndex index = new MemoryIndex(); + LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); + + LeafSearchLookup leafSearchLookup = mock(LeafSearchLookup.class); + when(lookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafSearchLookup); + + // Execute the script + DerivedFieldScript script = compile( + "def l = new ArrayList(); l.add('test'); l.add('multiple'); l.add('values'); for (String x : l) emit(x)", + lookup + ).newInstance(leafReaderContext); + script.setDocument(1); + script.execute(); + + List result = script.getEmittedValues(); + assertEquals(List.of("test", "multiple", "values"), result); + } + + public void testExceedingByteSizeLimit() throws IOException { + SearchLookup lookup = mock(SearchLookup.class); + + // We don't 
need a real index, just need to construct a LeafReaderContext which cannot be mocked + MemoryIndex index = new MemoryIndex(); + LeafReaderContext leafReaderContext = index.createSearcher().getIndexReader().leaves().get(0); + + LeafSearchLookup leafSearchLookup = mock(LeafSearchLookup.class); + when(lookup.getLeafSearchLookup(leafReaderContext)).thenReturn(leafSearchLookup); + + // Emitting a large string to exceed the byte size limit + DerivedFieldScript stringScript = compile("for (int i = 0; i < 1024 * 1024; i++) emit('a' + i);", lookup).newInstance( + leafReaderContext + ); + expectThrows(ScriptException.class, () -> { + stringScript.setDocument(1); + stringScript.execute(); + }); + + // Emitting an integer to check byte size limit + DerivedFieldScript intScript = compile("for (int i = 0; i < 1024 * 1024; i++) emit(42)", lookup).newInstance(leafReaderContext); + expectThrows(ScriptException.class, "Expected IllegalStateException for exceeding byte size limit", () -> { + intScript.setDocument(1); + intScript.execute(); + }); + + // Emitting a long to check byte size limit + DerivedFieldScript longScript = compile("for (int i = 0; i < 1024 * 1024; i++) emit(1234567890123456789L)", lookup).newInstance( + leafReaderContext + ); + expectThrows(ScriptException.class, "Expected IllegalStateException for exceeding byte size limit", () -> { + longScript.setDocument(1); + longScript.execute(); + }); + + // Emitting a double to check byte size limit + DerivedFieldScript doubleScript = compile("for (int i = 0; i < 1024 * 1024; i++) emit(3.14159)", lookup).newInstance( + leafReaderContext + ); + expectThrows(ScriptException.class, "Expected IllegalStateException for exceeding byte size limit", () -> { + doubleScript.setDocument(1); + doubleScript.execute(); + }); + + // Emitting a GeoPoint to check byte size limit + DerivedFieldScript geoPointScript = compile("for (int i = 0; i < 1024 * 1024; i++) emit(1.23, 4.56);", lookup).newInstance( + leafReaderContext + ); + expectThrows(ScriptException.class, "Expected IllegalStateException for exceeding byte size limit", () -> { + geoPointScript.setDocument(1); + geoPointScript.execute(); + }); + } +} diff --git a/server/src/main/java/org/opensearch/index/mapper/DerivedFieldValueFetcher.java b/server/src/main/java/org/opensearch/index/mapper/DerivedFieldValueFetcher.java index f3bf0c613415a..40aa2f9890965 100644 --- a/server/src/main/java/org/opensearch/index/mapper/DerivedFieldValueFetcher.java +++ b/server/src/main/java/org/opensearch/index/mapper/DerivedFieldValueFetcher.java @@ -31,8 +31,8 @@ public DerivedFieldValueFetcher(DerivedFieldScript.LeafFactory derivedFieldScrip @Override public List fetchValues(SourceLookup lookup) { derivedFieldScript.setDocument(lookup.docId()); - // TODO: remove List.of() when derivedFieldScript.execute() returns list of objects. 
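
A note on the new contract, since the next hunk is where it lands: execute() no longer returns a single Object; scripts push values through emit(), which arrives here as addEmittedValue(Object), and callers drain getEmittedValues() after execute(). Below is a minimal sketch of that lifecycle, not part of the patch, written as a method-body fragment mirroring the anonymous subclasses in the tests above; params, searchLookup, leafReaderContext and docId are assumed to come from fixtures or mocks as in DerivedFieldScriptTests:

    // Hypothetical fragment (assumes surrounding test fixtures); mirrors the test pattern above.
    DerivedFieldScript script = new DerivedFieldScript(params, searchLookup, leafReaderContext) {
        @Override
        public void execute() {
            // Each call appends to the per-document list. addEmittedValue(...) also charges
            // the value against a 1 MB budget (a String counts its UTF-8 length, an Integer
            // counts Integer.BYTES, a lat/lon Tuple counts 2 * Double.BYTES) and throws
            // IllegalStateException once the budget is exceeded.
            addEmittedValue("value-a");
            addEmittedValue("value-b");
        }
    };
    script.setDocument(docId);                        // resets emittedValues and the byte budget
    script.execute();                                 // runs the body above
    List<Object> values = script.getEmittedValues();  // ["value-a", "value-b"]
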
-        return List.of(derivedFieldScript.execute());
+        derivedFieldScript.execute();
+        return derivedFieldScript.getEmittedValues();
     }
 
     public void setNextReader(LeafReaderContext context) {
diff --git a/server/src/main/java/org/opensearch/script/DerivedFieldScript.java b/server/src/main/java/org/opensearch/script/DerivedFieldScript.java
index 7f5b991950ec6..e9988ec5aeef2 100644
--- a/server/src/main/java/org/opensearch/script/DerivedFieldScript.java
+++ b/server/src/main/java/org/opensearch/script/DerivedFieldScript.java
@@ -9,14 +9,17 @@
 package org.opensearch.script;
 
 import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.logging.DeprecationLogger;
+import org.opensearch.common.collect.Tuple;
 import org.opensearch.index.fielddata.ScriptDocValues;
 import org.opensearch.search.lookup.LeafSearchLookup;
 import org.opensearch.search.lookup.SearchLookup;
 import org.opensearch.search.lookup.SourceLookup;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.function.Function;
 
@@ -30,7 +33,7 @@ public abstract class DerivedFieldScript {
 
     public static final String[] PARAMETERS = {};
     public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("derived_field", Factory.class);
-    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(DynamicMap.class);
+    private static final int MAX_BYTE_SIZE = 1024 * 1024; // Maximum allowed byte size (1 MB)
 
     private static final Map<String, Function<Object, Object>> PARAMS_FUNCTIONS = Map.of(
         "doc",
@@ -49,16 +52,27 @@ public abstract class DerivedFieldScript {
      */
     private final LeafSearchLookup leafLookup;
 
+    /**
+     * The field values emitted from the script.
+     */
+    private List<Object> emittedValues;
+
+    private int totalByteSize;
+
     public DerivedFieldScript(Map<String, Object> params, SearchLookup lookup, LeafReaderContext leafContext) {
         Map<String, Object> parameters = new HashMap<>(params);
         this.leafLookup = lookup.getLeafSearchLookup(leafContext);
         parameters.putAll(leafLookup.asMap());
         this.params = new DynamicMap(parameters, PARAMS_FUNCTIONS);
+        this.emittedValues = new ArrayList<>();
+        this.totalByteSize = 0;
     }
 
-    protected DerivedFieldScript() {
-        params = null;
-        leafLookup = null;
+    public DerivedFieldScript() {
+        this.params = null;
+        this.leafLookup = null;
+        this.emittedValues = new ArrayList<>();
+        this.totalByteSize = 0;
     }
 
     /**
@@ -75,14 +89,54 @@ public Map<String, ScriptDocValues<?>> getDoc() {
         return leafLookup.doc();
     }
 
+    /**
+     * Return the emitted values from the script execution.
+     */
+    public List<Object> getEmittedValues() {
+        return emittedValues;
+    }
+
     /**
      * Set the current document to run the script on next.
+     * Clears the emittedValues as well since they should be scoped per document.
*/ public void setDocument(int docid) { + this.emittedValues = new ArrayList<>(); + this.totalByteSize = 0; leafLookup.setDocument(docid); } - public abstract Object execute(); + public void addEmittedValue(Object o) { + int byteSize = getObjectByteSize(o); + int newTotalByteSize = totalByteSize + byteSize; + if (newTotalByteSize <= MAX_BYTE_SIZE) { + emittedValues.add(o); + totalByteSize = newTotalByteSize; + } else { + throw new IllegalStateException("Exceeded maximum allowed byte size for emitted values"); + } + } + + private int getObjectByteSize(Object obj) { + if (obj instanceof String) { + return ((String) obj).getBytes(StandardCharsets.UTF_8).length; + } else if (obj instanceof Integer) { + return Integer.BYTES; + } else if (obj instanceof Long) { + return Long.BYTES; + } else if (obj instanceof Double) { + return Double.BYTES; + } else if (obj instanceof Boolean) { + return Byte.BYTES; // Assuming 1 byte for boolean + } else if (obj instanceof Tuple) { + // Assuming each element in the tuple is a double for GeoPoint case + return Double.BYTES * 2; + } else { + throw new IllegalArgumentException("Unsupported object type passed in emit()"); + } + } + + public void execute() {} /** * A factory to construct {@link DerivedFieldScript} instances. @@ -95,7 +149,6 @@ public interface LeafFactory { /** * A factory to construct stateful {@link DerivedFieldScript} factories for a specific index. - * * @opensearch.internal */ public interface Factory extends ScriptFactory { diff --git a/server/src/main/java/org/opensearch/script/ScriptEmitValues.java b/server/src/main/java/org/opensearch/script/ScriptEmitValues.java new file mode 100644 index 0000000000000..5d12f36442179 --- /dev/null +++ b/server/src/main/java/org/opensearch/script/ScriptEmitValues.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.script; + +import org.opensearch.common.collect.Tuple; + +/** + * Values that can be emitted in a derived field script context. + *
<p>
+ * The emit function can be called multiple times within a script definition + * so the function will handle collecting the values over the script execution. + */ +public final class ScriptEmitValues { + + /** + * Takes in a single value and emits it + * Could be a long, double, String, etc. + */ + public static final class EmitSingle { + + private final DerivedFieldScript derivedFieldScript; + + public EmitSingle(DerivedFieldScript derivedFieldScript) { + this.derivedFieldScript = derivedFieldScript; + } + + // TODO: Keeping this generic for the time being due to limitations with + // binding methods with the same name and arity. + // Ideally, we should have an emit signature per derived field type and try to scope + // that to the respective script execution so the other emits aren't allowed. + // One way to do this could be to create implementations of the DerivedFieldScript.LeafFactory + // per field type where they each define their own emit() method and then the engine that executes + // it can have custom compilation logic to perform class bindings on that emit implementation. + public void emit(Object val) { + derivedFieldScript.addEmittedValue(val); + } + + } + + /** + * Emits a GeoPoint value + */ + public static final class GeoPoint { + + private final DerivedFieldScript derivedFieldScript; + + public GeoPoint(DerivedFieldScript derivedFieldScript) { + this.derivedFieldScript = derivedFieldScript; + } + + public void emit(double lat, double lon) { + derivedFieldScript.addEmittedValue(new Tuple<>(lat, lon)); + } + + } + +} diff --git a/server/src/test/java/org/opensearch/index/mapper/DerivedFieldMapperQueryTests.java b/server/src/test/java/org/opensearch/index/mapper/DerivedFieldMapperQueryTests.java index 64f4f1f3e083e..bd6d7b88ade28 100644 --- a/server/src/test/java/org/opensearch/index/mapper/DerivedFieldMapperQueryTests.java +++ b/server/src/test/java/org/opensearch/index/mapper/DerivedFieldMapperQueryTests.java @@ -197,8 +197,8 @@ public void setDocument(int docId) { } @Override - public Object execute() { - return raw_requests[docId][scriptIndex[0]]; + public void execute() { + addEmittedValue(raw_requests[docId][scriptIndex[0]]); } }; diff --git a/server/src/test/java/org/opensearch/index/query/DerivedFieldQueryTests.java b/server/src/test/java/org/opensearch/index/query/DerivedFieldQueryTests.java index 18d117fa8c0f5..1bb303a874b9a 100644 --- a/server/src/test/java/org/opensearch/index/query/DerivedFieldQueryTests.java +++ b/server/src/test/java/org/opensearch/index/query/DerivedFieldQueryTests.java @@ -67,8 +67,8 @@ public void testDerivedField() throws IOException { when(searchLookup.getLeafSearchLookup(ctx)).thenReturn(leafLookup); return new DerivedFieldScript(params, lookup, ctx) { @Override - public Object execute() { - return raw_requests[sourceLookup.docId()][2]; + public void execute() { + addEmittedValue(raw_requests[sourceLookup.docId()][2]); } }; }; diff --git a/test/framework/src/main/java/org/opensearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/opensearch/script/MockScriptEngine.java index 8c7e9718eb0cd..456b55883f91e 100644 --- a/test/framework/src/main/java/org/opensearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/opensearch/script/MockScriptEngine.java @@ -288,10 +288,10 @@ public double execute(Map params1, double[] values) { ctx ) { @Override - public Object execute() { + public void execute() { Map vars = new HashMap<>(derivedFieldsParams); vars.put("params", derivedFieldsParams); - return 
script.apply(vars); + script.apply(vars); } }; return context.factoryClazz.cast(factory); From 17641f65c06c3d537de03e4b91a25ae9a681de71 Mon Sep 17 00:00:00 2001 From: Sachin Kale Date: Thu, 4 Apr 2024 10:57:57 +0530 Subject: [PATCH 5/6] Remove duplicate methods from RemoteStoreBaseIntegTestCase (#13044) Signed-off-by: Sachin Kale --- .../RemoteStoreBaseIntegTestCase.java | 119 ------------------ .../test/OpenSearchIntegTestCase.java | 2 +- 2 files changed, 1 insertion(+), 120 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java index ba90cbe96e157..d7ad0daa43524 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteStoreBaseIntegTestCase.java @@ -28,7 +28,6 @@ import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.UUIDs; import org.opensearch.common.settings.Settings; -import org.opensearch.core.common.unit.ByteSizeUnit; import org.opensearch.core.index.Index; import org.opensearch.index.IndexModule; import org.opensearch.index.IndexService; @@ -57,11 +56,8 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; -import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT; -import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY; -import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.repositories.fs.ReloadableFsRepository.REPOSITORIES_FAILRATE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; @@ -191,121 +187,6 @@ protected BulkResponse indexBulk(String indexName, int numDocs) { return client().bulk(bulkRequest).actionGet(); } - public static Settings remoteStoreClusterSettings(String name, Path path) { - return remoteStoreClusterSettings(name, path, name, path); - } - - public static Settings remoteStoreClusterSettings( - String segmentRepoName, - Path segmentRepoPath, - String segmentRepoType, - String translogRepoName, - Path translogRepoPath, - String translogRepoType - ) { - Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put( - buildRemoteStoreNodeAttributes( - segmentRepoName, - segmentRepoPath, - segmentRepoType, - translogRepoName, - translogRepoPath, - translogRepoType, - false - ) - ); - return settingsBuilder.build(); - } - - public static Settings remoteStoreClusterSettings( - String segmentRepoName, - Path segmentRepoPath, - String translogRepoName, - Path translogRepoPath - ) { - Settings.Builder settingsBuilder = Settings.builder(); - settingsBuilder.put(buildRemoteStoreNodeAttributes(segmentRepoName, segmentRepoPath, translogRepoName, translogRepoPath, false)); - return settingsBuilder.build(); - } - - public static Settings buildRemoteStoreNodeAttributes( - String segmentRepoName, - Path segmentRepoPath, - String translogRepoName, - Path translogRepoPath, - boolean 
withRateLimiterAttributes - ) { - return buildRemoteStoreNodeAttributes( - segmentRepoName, - segmentRepoPath, - ReloadableFsRepository.TYPE, - translogRepoName, - translogRepoPath, - ReloadableFsRepository.TYPE, - withRateLimiterAttributes - ); - } - - public static Settings buildRemoteStoreNodeAttributes( - String segmentRepoName, - Path segmentRepoPath, - String segmentRepoType, - String translogRepoName, - Path translogRepoPath, - String translogRepoType, - boolean withRateLimiterAttributes - ) { - String segmentRepoTypeAttributeKey = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT, - segmentRepoName - ); - String segmentRepoSettingsAttributeKeyPrefix = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX, - segmentRepoName - ); - String translogRepoTypeAttributeKey = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT, - translogRepoName - ); - String translogRepoSettingsAttributeKeyPrefix = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX, - translogRepoName - ); - String stateRepoTypeAttributeKey = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_TYPE_ATTRIBUTE_KEY_FORMAT, - segmentRepoName - ); - String stateRepoSettingsAttributeKeyPrefix = String.format( - Locale.getDefault(), - "node.attr." + REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX, - segmentRepoName - ); - - Settings.Builder settings = Settings.builder() - .put("node.attr." + REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, segmentRepoName) - .put(segmentRepoTypeAttributeKey, segmentRepoType) - .put(segmentRepoSettingsAttributeKeyPrefix + "location", segmentRepoPath) - .put("node.attr." + REMOTE_STORE_TRANSLOG_REPOSITORY_NAME_ATTRIBUTE_KEY, translogRepoName) - .put(translogRepoTypeAttributeKey, translogRepoType) - .put(translogRepoSettingsAttributeKeyPrefix + "location", translogRepoPath) - .put("node.attr." 
+ REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY, segmentRepoName)
-            .put(stateRepoTypeAttributeKey, segmentRepoType)
-            .put(stateRepoSettingsAttributeKeyPrefix + "location", segmentRepoPath);
-
-        if (withRateLimiterAttributes) {
-            settings.put(segmentRepoSettingsAttributeKeyPrefix + "compress", randomBoolean())
-                .put(segmentRepoSettingsAttributeKeyPrefix + "chunk_size", 200, ByteSizeUnit.BYTES);
-        }
-
-        return settings.build();
-    }
-
     Settings defaultIndexSettings() {
         return Settings.builder()
             .put(super.indexSettings())
diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
index 664314245530e..f0f5576713042 100644
--- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
+++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java
@@ -2538,7 +2538,7 @@ public static Settings buildRemoteStoreNodeAttributes(
         );
     }
 
-    public static Settings buildRemoteStoreNodeAttributes(
+    private static Settings buildRemoteStoreNodeAttributes(
         String segmentRepoName,
         Path segmentRepoPath,
         String segmentRepoType,

From 9ab31f55e88cc2d023cad6e60a15a0e0e87dac38 Mon Sep 17 00:00:00 2001
From: Shubh Sahu
Date: Thu, 4 Apr 2024 16:09:56 +0530
Subject: [PATCH 6/6] Reject Resize index requests during remote store
 migration (#12686)

Signed-off-by: Shubh Sahu
---
 CHANGELOG.md                                  |   1 +
 .../ResizeIndexMigrationTestCase.java         | 208 ++++++++++++++++++
 .../indices/shrink/TransportResizeAction.java |  48 +++-
 .../shrink/TransportResizeActionTests.java    | 166 +++++++++++++-
 4 files changed, 418 insertions(+), 5 deletions(-)
 create mode 100644 server/src/internalClusterTest/java/org/opensearch/remotemigration/ResizeIndexMigrationTestCase.java

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1f9febf7013a9..13241de0035aa 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -113,6 +113,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Detect breaking changes on pull requests ([#9044](https://github.com/opensearch-project/OpenSearch/pull/9044))
 - Add cluster primary balance contraint for rebalancing with buffer ([#12656](https://github.com/opensearch-project/OpenSearch/pull/12656))
 - [Remote Store] Make translog transfer timeout configurable ([#12704](https://github.com/opensearch-project/OpenSearch/pull/12704))
+- Reject Resize index requests (i.e., split, shrink and clone) while DocRep to SegRep migration is in progress ([#12686](https://github.com/opensearch-project/OpenSearch/pull/12686))
 
 ### Dependencies
 - Bump `org.apache.commons:commons-configuration2` from 2.10.0 to 2.10.1 ([#12896](https://github.com/opensearch-project/OpenSearch/pull/12896))

diff --git a/server/src/internalClusterTest/java/org/opensearch/remotemigration/ResizeIndexMigrationTestCase.java b/server/src/internalClusterTest/java/org/opensearch/remotemigration/ResizeIndexMigrationTestCase.java
new file mode 100644
index 0000000000000..0548ce4a7955f
--- /dev/null
+++ b/server/src/internalClusterTest/java/org/opensearch/remotemigration/ResizeIndexMigrationTestCase.java
@@ -0,0 +1,208 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */ + +package org.opensearch.remotemigration; + +import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.opensearch.action.admin.indices.shrink.ResizeType; +import org.opensearch.action.support.ActiveShardCount; +import org.opensearch.client.Client; +import org.opensearch.common.settings.Settings; +import org.opensearch.indices.replication.common.ReplicationType; +import org.opensearch.test.OpenSearchIntegTestCase; + +import java.util.List; + +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REPLICATION_TYPE; +import static org.opensearch.node.remotestore.RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING; +import static org.opensearch.node.remotestore.RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING; +import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; + +@OpenSearchIntegTestCase.ClusterScope(scope = OpenSearchIntegTestCase.Scope.TEST, numDataNodes = 0, autoManageMasterNodes = false) +public class ResizeIndexMigrationTestCase extends MigrationBaseTestCase { + private static final String TEST_INDEX = "test_index"; + private final static String REMOTE_STORE_DIRECTION = "remote_store"; + private final static String DOC_REP_DIRECTION = "docrep"; + private final static String NONE_DIRECTION = "none"; + private final static String STRICT_MODE = "strict"; + private final static String MIXED_MODE = "mixed"; + + /* + * This test will verify the resize request failure, when cluster mode is mixed + * and index is on DocRep node, and migration to remote store is in progress. + * */ + public void testFailResizeIndexWhileDocRepToRemoteStoreMigration() throws Exception { + + internalCluster().setBootstrapClusterManagerNodeIndex(0); + List cmNodes = internalCluster().startNodes(1); + Client client = internalCluster().client(cmNodes.get(0)); + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put(REMOTE_STORE_COMPATIBILITY_MODE_SETTING.getKey(), MIXED_MODE)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + // Adding a non remote and a remote node + addRemote = false; + String nonRemoteNodeName = internalCluster().startNode(); + + addRemote = true; + String remoteNodeName = internalCluster().startNode(); + + logger.info("-->Create index on non-remote node and SETTING_REMOTE_STORE_ENABLED is false. 
Resize should not happen"); + Settings.Builder builder = Settings.builder().put(SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT); + client.admin() + .indices() + .prepareCreate(TEST_INDEX) + .setSettings( + builder.put("index.number_of_shards", 10) + .put("index.number_of_replicas", 0) + .put("index.routing.allocation.include._name", nonRemoteNodeName) + .put("index.routing.allocation.exclude._name", remoteNodeName) + ) + .setWaitForActiveShards(ActiveShardCount.ALL) + .execute() + .actionGet(); + + updateSettingsRequest.persistentSettings(Settings.builder().put(MIGRATION_DIRECTION_SETTING.getKey(), REMOTE_STORE_DIRECTION)); + assertAcked(client.admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + ResizeType resizeType; + int resizeShardsNum; + String cause; + switch (randomIntBetween(0, 2)) { + case 0: + resizeType = ResizeType.SHRINK; + resizeShardsNum = 5; + cause = "shrink_index"; + break; + case 1: + resizeType = ResizeType.SPLIT; + resizeShardsNum = 20; + cause = "split_index"; + break; + default: + resizeType = ResizeType.CLONE; + resizeShardsNum = 10; + cause = "clone_index"; + } + + client.admin() + .indices() + .prepareUpdateSettings(TEST_INDEX) + .setSettings(Settings.builder().put("index.blocks.write", true)) + .execute() + .actionGet(); + + ensureGreen(TEST_INDEX); + + Settings.Builder resizeSettingsBuilder = Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", resizeShardsNum) + .putNull("index.blocks.write"); + + IllegalStateException ex = expectThrows( + IllegalStateException.class, + () -> client().admin() + .indices() + .prepareResizeIndex(TEST_INDEX, "first_split") + .setResizeType(resizeType) + .setSettings(resizeSettingsBuilder.build()) + .get() + ); + assertEquals( + ex.getMessage(), + "Index " + resizeType + " is not allowed as remote migration mode is mixed" + " and index is remote store disabled" + ); + } + + /* + * This test will verify the resize request failure, when cluster mode is mixed + * and index is on Remote Store node, and migration to DocRep node is in progress. + * */ + public void testFailResizeIndexWhileRemoteStoreToDocRepMigration() throws Exception { + + addRemote = true; + internalCluster().setBootstrapClusterManagerNodeIndex(0); + List cmNodes = internalCluster().startNodes(1); + Client client = internalCluster().client(cmNodes.get(0)); + ClusterUpdateSettingsRequest updateSettingsRequest = new ClusterUpdateSettingsRequest(); + updateSettingsRequest.persistentSettings(Settings.builder().put(REMOTE_STORE_COMPATIBILITY_MODE_SETTING.getKey(), MIXED_MODE)); + assertAcked(client().admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + // Adding a non remote and a remote node + String remoteNodeName = internalCluster().startNode(); + + addRemote = false; + String nonRemoteNodeName = internalCluster().startNode(); + + logger.info("-->Create index on remote node and SETTING_REMOTE_STORE_ENABLED is true. 
Resize should not happen"); + Settings.Builder builder = Settings.builder().put(SETTING_REPLICATION_TYPE, ReplicationType.SEGMENT); + client.admin() + .indices() + .prepareCreate(TEST_INDEX) + .setSettings( + builder.put("index.number_of_shards", 10) + .put("index.number_of_replicas", 0) + .put("index.routing.allocation.include._name", remoteNodeName) + .put("index.routing.allocation.exclude._name", nonRemoteNodeName) + ) + .setWaitForActiveShards(ActiveShardCount.ALL) + .execute() + .actionGet(); + + updateSettingsRequest.persistentSettings(Settings.builder().put(MIGRATION_DIRECTION_SETTING.getKey(), DOC_REP_DIRECTION)); + assertAcked(client.admin().cluster().updateSettings(updateSettingsRequest).actionGet()); + + ResizeType resizeType; + int resizeShardsNum; + String cause; + switch (randomIntBetween(0, 2)) { + case 0: + resizeType = ResizeType.SHRINK; + resizeShardsNum = 5; + cause = "shrink_index"; + break; + case 1: + resizeType = ResizeType.SPLIT; + resizeShardsNum = 20; + cause = "split_index"; + break; + default: + resizeType = ResizeType.CLONE; + resizeShardsNum = 10; + cause = "clone_index"; + } + + client.admin() + .indices() + .prepareUpdateSettings(TEST_INDEX) + .setSettings(Settings.builder().put("index.blocks.write", true)) + .execute() + .actionGet(); + + ensureGreen(TEST_INDEX); + + Settings.Builder resizeSettingsBuilder = Settings.builder() + .put("index.number_of_replicas", 0) + .put("index.number_of_shards", resizeShardsNum) + .putNull("index.blocks.write"); + + IllegalStateException ex = expectThrows( + IllegalStateException.class, + () -> client().admin() + .indices() + .prepareResizeIndex(TEST_INDEX, "first_split") + .setResizeType(resizeType) + .setSettings(resizeSettingsBuilder.build()) + .get() + ); + assertEquals( + ex.getMessage(), + "Index " + resizeType + " is not allowed as remote migration mode is mixed" + " and index is remote store enabled" + ); + } +} diff --git a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java index ca4c16935c2b9..cb41325c18a22 100644 --- a/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java +++ b/server/src/main/java/org/opensearch/action/admin/indices/shrink/TransportResizeAction.java @@ -48,6 +48,7 @@ import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.core.action.ActionListener; import org.opensearch.core.common.io.stream.StreamInput; @@ -57,6 +58,9 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.shard.DocsStats; import org.opensearch.index.store.StoreStats; +import org.opensearch.node.remotestore.RemoteStoreNodeService; +import org.opensearch.node.remotestore.RemoteStoreNodeService.CompatibilityMode; +import org.opensearch.node.remotestore.RemoteStoreNodeService.Direction; import org.opensearch.threadpool.ThreadPool; import org.opensearch.transport.TransportService; @@ -67,6 +71,7 @@ import java.util.function.IntFunction; import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_NUMBER_OF_REPLICAS; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; /** * Main class to initiate resizing (shrink / split) an index into a new index @@ -140,8 +145,8 @@ protected void 
clusterManagerOperation( // there is no need to fetch docs stats for split but we keep it simple and do it anyway for simplicity of the code final String sourceIndex = indexNameExpressionResolver.resolveDateMathExpression(resizeRequest.getSourceIndex()); final String targetIndex = indexNameExpressionResolver.resolveDateMathExpression(resizeRequest.getTargetIndexRequest().index()); - IndexMetadata indexMetadata = state.metadata().index(sourceIndex); + ClusterSettings clusterSettings = clusterService.getClusterSettings(); if (resizeRequest.getResizeType().equals(ResizeType.SHRINK) && state.metadata().isSegmentReplicationEnabled(sourceIndex) && indexMetadata != null @@ -161,7 +166,7 @@ protected void clusterManagerOperation( CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(resizeRequest, state, i -> { IndexShardStats shard = indicesStatsResponse.getIndex(sourceIndex).getIndexShards().get(i); return shard == null ? null : shard.getPrimary().getDocs(); - }, indicesStatsResponse.getPrimaries().store, sourceIndex, targetIndex); + }, indicesStatsResponse.getPrimaries().store, clusterSettings, sourceIndex, targetIndex); if (indicesStatsResponse.getIndex(sourceIndex) .getTotal() @@ -200,7 +205,7 @@ protected void clusterManagerOperation( CreateIndexClusterStateUpdateRequest updateRequest = prepareCreateIndexRequest(resizeRequest, state, i -> { IndexShardStats shard = indicesStatsResponse.getIndex(sourceIndex).getIndexShards().get(i); return shard == null ? null : shard.getPrimary().getDocs(); - }, indicesStatsResponse.getPrimaries().store, sourceIndex, targetIndex); + }, indicesStatsResponse.getPrimaries().store, clusterSettings, sourceIndex, targetIndex); createIndexService.createIndex( updateRequest, ActionListener.map( @@ -223,6 +228,7 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest( final ClusterState state, final IntFunction perShardDocStats, final StoreStats primaryShardsStoreStats, + final ClusterSettings clusterSettings, String sourceIndexName, String targetIndexName ) { @@ -231,6 +237,7 @@ static CreateIndexClusterStateUpdateRequest prepareCreateIndexRequest( if (metadata == null) { throw new IndexNotFoundException(sourceIndexName); } + validateRemoteMigrationModeSettings(resizeRequest.getResizeType(), metadata, clusterSettings); final Settings.Builder targetIndexSettingsBuilder = Settings.builder() .put(targetIndex.settings()) .normalizePrefix(IndexMetadata.INDEX_SETTING_PREFIX); @@ -368,4 +375,39 @@ protected static int calculateTargetIndexShardsNum( protected String getClusterManagerActionName(DiscoveryNode node) { return super.getClusterManagerActionName(node); } + + /** + * Reject resize request if cluster mode is [Mixed] and migration direction is [RemoteStore] and index is not on + * REMOTE_STORE_ENABLED node or [DocRep] and index is on REMOTE_STORE_ENABLED node. + * @param type resize type + * @param sourceIndexMetadata source index's metadata + * @param clusterSettings cluster settings + * @throws IllegalStateException if cluster mode is [Mixed] and migration direction is [RemoteStore] or [DocRep] and + * index's SETTING_REMOTE_STORE_ENABLED is not equal to the migration direction's value. + * For example, if migration direction is [RemoteStore] and index's SETTING_REMOTE_STORE_ENABLED + * is false, then throw IllegalStateException. If migration direction is [DocRep] and + * index's SETTING_REMOTE_STORE_ENABLED is true, then throw IllegalStateException. 
+ */ + private static void validateRemoteMigrationModeSettings( + final ResizeType type, + IndexMetadata sourceIndexMetadata, + ClusterSettings clusterSettings + ) { + CompatibilityMode compatibilityMode = clusterSettings.get(RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING); + if (compatibilityMode == CompatibilityMode.MIXED) { + boolean isRemoteStoreEnabled = sourceIndexMetadata.getSettings().getAsBoolean(SETTING_REMOTE_STORE_ENABLED, false); + Direction migrationDirection = clusterSettings.get(RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING); + boolean invalidConfiguration = (migrationDirection == Direction.REMOTE_STORE && isRemoteStoreEnabled == false) + || (migrationDirection == Direction.DOCREP && isRemoteStoreEnabled); + if (invalidConfiguration) { + throw new IllegalStateException( + "Index " + + type + + " is not allowed as remote migration mode is mixed" + + " and index is remote store " + + (isRemoteStoreEnabled ? "enabled" : "disabled") + ); + } + } + } } diff --git a/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java index 848df5f8e4979..5bab2ceca0988 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/shrink/TransportResizeActionTests.java @@ -51,10 +51,13 @@ import org.opensearch.cluster.routing.allocation.allocator.BalancedShardsAllocator; import org.opensearch.cluster.routing.allocation.decider.AllocationDeciders; import org.opensearch.cluster.routing.allocation.decider.MaxRetryAllocationDecider; +import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; +import org.opensearch.common.util.FeatureFlags; import org.opensearch.core.common.unit.ByteSizeValue; import org.opensearch.index.shard.DocsStats; import org.opensearch.index.store.StoreStats; +import org.opensearch.node.remotestore.RemoteStoreNodeService; import org.opensearch.snapshots.EmptySnapshotsInfoService; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.test.gateway.TestGatewayAllocator; @@ -65,7 +68,12 @@ import java.util.Set; import static java.util.Collections.emptyMap; -import static org.hamcrest.Matchers.equalTo; +import static org.opensearch.cluster.metadata.IndexMetadata.SETTING_REMOTE_STORE_ENABLED; +import static org.opensearch.common.util.FeatureFlags.REMOTE_STORE_MIGRATION_EXPERIMENTAL; +import static org.opensearch.node.remotestore.RemoteStoreNodeService.CompatibilityMode; +import static org.opensearch.node.remotestore.RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING; +import static org.opensearch.node.remotestore.RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING; +import static org.hamcrest.CoreMatchers.equalTo; public class TransportResizeActionTests extends OpenSearchTestCase { @@ -95,6 +103,19 @@ private ClusterState createClusterState(String name, int numShards, int numRepli return clusterState; } + private ClusterSettings createClusterSettings( + CompatibilityMode compatibilityMode, + RemoteStoreNodeService.Direction migrationDirection + ) { + ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); + clusterSettings.applySettings( + (Settings.builder() + .put(REMOTE_STORE_COMPATIBILITY_MODE_SETTING.getKey(), compatibilityMode) + .put(MIGRATION_DIRECTION_SETTING.getKey(), 
migrationDirection)).build() + ); + return clusterSettings; + } + public void testErrorCondition() { ClusterState state = createClusterState( "source", @@ -102,6 +123,7 @@ public void testErrorCondition() { randomIntBetween(0, 10), Settings.builder().put("index.blocks.write", true).build() ); + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); assertTrue( expectThrows( IllegalStateException.class, @@ -110,6 +132,7 @@ public void testErrorCondition() { state, (i) -> new DocsStats(Integer.MAX_VALUE, between(1, 1000), between(1, 100)), new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ) @@ -125,6 +148,7 @@ public void testErrorCondition() { clusterState, (i) -> i == 2 || i == 3 ? new DocsStats(Integer.MAX_VALUE / 2, between(1, 1000), between(1, 10000)) : null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -144,6 +168,7 @@ public void testErrorCondition() { clusterState, (i) -> new DocsStats(between(10, 1000), between(1, 10), between(1, 10000)), new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -173,6 +198,7 @@ public void testErrorCondition() { clusterState, (i) -> new DocsStats(between(1, 1000), between(1, 1000), between(0, 10000)), new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -189,7 +215,7 @@ public void testPassNumRoutingShards() { EmptyClusterInfoService.INSTANCE, EmptySnapshotsInfoService.INSTANCE ); - + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); // now we start the shard @@ -204,6 +230,7 @@ public void testPassNumRoutingShards() { clusterState, null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -217,6 +244,7 @@ public void testPassNumRoutingShards() { clusterState, null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -235,6 +263,7 @@ public void testPassNumRoutingShardsAndFail() { EmptySnapshotsInfoService.INSTANCE ); + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); // now we start the shard @@ -249,6 +278,7 @@ public void testPassNumRoutingShardsAndFail() { clusterState, null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ); @@ -265,6 +295,7 @@ public void testPassNumRoutingShardsAndFail() { finalState, null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, "source", "target" ) @@ -286,6 +317,7 @@ public void testShrinkIndexSettings() { EmptySnapshotsInfoService.INSTANCE ); + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); // now we start the shard @@ -301,6 +333,7 @@ public void testShrinkIndexSettings() { clusterState, (i) 
-> stats, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, indexName, "target" ); @@ -325,6 +358,8 @@ public void testShrinkWithMaxShardSize() { EmptyClusterInfoService.INSTANCE, EmptySnapshotsInfoService.INSTANCE ); + + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); // now we start the shard @@ -345,6 +380,7 @@ public void testShrinkWithMaxShardSize() { clusterState, (i) -> stats, new StoreStats(100, between(1, 10000)), + clusterSettings, indexName, "target" ); @@ -366,6 +402,7 @@ public void testShrinkWithMaxShardSize() { clusterState, (i) -> stats, new StoreStats(100, between(1, 10000)), + clusterSettings, indexName, "target" ); @@ -387,6 +424,7 @@ public void testShrinkWithMaxShardSize() { clusterState, (i) -> stats, new StoreStats(100, between(1, 10000)), + clusterSettings, indexName, "target" ); @@ -477,6 +515,7 @@ public void testIndexBlocks() { createClusterState(indexName, 10, 0, 40, Settings.builder().put("index.blocks.read_only", true).build()) ).nodes(DiscoveryNodes.builder().add(newNode("node1"))).build(); + ClusterSettings clusterSettings = createClusterSettings(CompatibilityMode.STRICT, RemoteStoreNodeService.Direction.NONE); // Target index will be blocked by [index.blocks.read_only=true] copied from the source index ResizeRequest resizeRequest = new ResizeRequest("target", indexName); ResizeType resizeType; @@ -500,6 +539,7 @@ public void testIndexBlocks() { finalState, null, new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, indexName, "target" ) @@ -551,6 +591,7 @@ public void testIndexBlocks() { clusterState, (i) -> stats, new StoreStats(100, between(1, 10000)), + clusterSettings, indexName, "target" ); @@ -561,6 +602,127 @@ public void testIndexBlocks() { assertEquals(request.waitForActiveShards(), activeShardCount); } + public void testResizeFailuresDuringMigration() { + // We will keep all other settings correct for resize request, + // So we only need to test for the failures due to cluster setting validation while migration + final Settings directionEnabledNodeSettings = Settings.builder().put(REMOTE_STORE_MIGRATION_EXPERIMENTAL, "true").build(); + FeatureFlags.initializeFeatureFlags(directionEnabledNodeSettings); + boolean isRemoteStoreEnabled = randomBoolean(); + CompatibilityMode compatibilityMode = randomFrom(CompatibilityMode.values()); + RemoteStoreNodeService.Direction migrationDirection = randomFrom(RemoteStoreNodeService.Direction.values()); + // If not mixed mode, then migration direction is NONE. 
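
One reading aid before the branching below: across the randomized mode, direction, and index settings, the outcome under test reduces to a single predicate lifted from validateRemoteMigrationModeSettings in this patch. A hedged sketch follows; the standalone helper is illustrative only, not part of the change, and the imported enums are the same ones this patch already uses:

    import org.opensearch.node.remotestore.RemoteStoreNodeService.CompatibilityMode;
    import org.opensearch.node.remotestore.RemoteStoreNodeService.Direction;

    // Rejected: MIXED + REMOTE_STORE direction on a remote-store-disabled index,
    //           MIXED + DOCREP direction on a remote-store-enabled index.
    // Allowed:  every other combination, including anything under STRICT mode.
    static boolean resizeRejected(CompatibilityMode mode, Direction direction, boolean isRemoteStoreEnabled) {
        return mode == CompatibilityMode.MIXED
            && ((direction == Direction.REMOTE_STORE && isRemoteStoreEnabled == false)
                || (direction == Direction.DOCREP && isRemoteStoreEnabled));
    }
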
+ if (!compatibilityMode.equals(CompatibilityMode.MIXED)) { + migrationDirection = RemoteStoreNodeService.Direction.NONE; + } + ClusterSettings clusterSettings = createClusterSettings(compatibilityMode, migrationDirection); + + ClusterState clusterState = ClusterState.builder( + createClusterState( + "source", + 10, + 0, + 40, + Settings.builder().put("index.blocks.write", true).put(SETTING_REMOTE_STORE_ENABLED, isRemoteStoreEnabled).build() + ) + ).nodes(DiscoveryNodes.builder().add(newNode("node1"))).build(); + AllocationService service = new AllocationService( + new AllocationDeciders(Collections.singleton(new MaxRetryAllocationDecider())), + new TestGatewayAllocator(), + new BalancedShardsAllocator(Settings.EMPTY), + EmptyClusterInfoService.INSTANCE, + EmptySnapshotsInfoService.INSTANCE + ); + + RoutingTable routingTable = service.reroute(clusterState, "reroute").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + // now we start the shard + routingTable = OpenSearchAllocationTestCase.startInitializingShardsAndReroute(service, clusterState, "source").routingTable(); + clusterState = ClusterState.builder(clusterState).routingTable(routingTable).build(); + DocsStats stats = new DocsStats(between(0, (IndexWriter.MAX_DOCS) / 10), between(1, 1000), between(1, 10000)); + ResizeRequest resizeRequest = new ResizeRequest("target", "source"); + ResizeType resizeType; + int expectedShardsNum; + String cause; + switch (randomIntBetween(0, 2)) { + case 0: + resizeType = ResizeType.SHRINK; + expectedShardsNum = 5; + cause = "shrink_index"; + break; + case 1: + resizeType = ResizeType.SPLIT; + expectedShardsNum = 20; + cause = "split_index"; + break; + default: + resizeType = ResizeType.CLONE; + expectedShardsNum = 10; + cause = "clone_index"; + } + resizeRequest.setResizeType(resizeType); + resizeRequest.getTargetIndexRequest() + .settings(Settings.builder().put("index.number_of_shards", expectedShardsNum).put("index.blocks.read_only", false).build()); + final ActiveShardCount activeShardCount = randomBoolean() ? ActiveShardCount.ALL : ActiveShardCount.ONE; + resizeRequest.setWaitForActiveShards(activeShardCount); + + if (compatibilityMode == CompatibilityMode.MIXED) { + if ((migrationDirection == RemoteStoreNodeService.Direction.REMOTE_STORE && isRemoteStoreEnabled == false) + || migrationDirection == RemoteStoreNodeService.Direction.DOCREP && isRemoteStoreEnabled == true) { + ClusterState finalState = clusterState; + IllegalStateException ise = expectThrows( + IllegalStateException.class, + () -> TransportResizeAction.prepareCreateIndexRequest( + resizeRequest, + finalState, + (i) -> stats, + new StoreStats(between(1, 10000), between(1, 10000)), + clusterSettings, + "source", + "target" + ) + ); + assertEquals( + ise.getMessage(), + "Index " + + resizeType + + " is not allowed as remote migration mode is mixed" + + " and index is remote store " + + (isRemoteStoreEnabled ? 
"enabled" : "disabled") + ); + } else { + CreateIndexClusterStateUpdateRequest request = TransportResizeAction.prepareCreateIndexRequest( + resizeRequest, + clusterState, + (i) -> stats, + new StoreStats(100, between(1, 10000)), + clusterSettings, + "source", + "target" + ); + assertNotNull(request.recoverFrom()); + assertEquals("source", request.recoverFrom().getName()); + assertEquals(String.valueOf(expectedShardsNum), request.settings().get("index.number_of_shards")); + assertEquals(cause, request.cause()); + assertEquals(request.waitForActiveShards(), activeShardCount); + } + } else { + CreateIndexClusterStateUpdateRequest request = TransportResizeAction.prepareCreateIndexRequest( + resizeRequest, + clusterState, + (i) -> stats, + new StoreStats(100, between(1, 10000)), + clusterSettings, + "source", + "target" + ); + assertNotNull(request.recoverFrom()); + assertEquals("source", request.recoverFrom().getName()); + assertEquals(String.valueOf(expectedShardsNum), request.settings().get("index.number_of_shards")); + assertEquals(cause, request.cause()); + assertEquals(request.waitForActiveShards(), activeShardCount); + } + } + private DiscoveryNode newNode(String nodeId) { final Set roles = Collections.unmodifiableSet( new HashSet<>(Arrays.asList(DiscoveryNodeRole.CLUSTER_MANAGER_ROLE, DiscoveryNodeRole.DATA_ROLE))