From a43e6cf4245d097f7dba96e7519f3a163db8791e Mon Sep 17 00:00:00 2001 From: Minal Shah <87717056+minalsha@users.noreply.github.com> Date: Tue, 31 Aug 2021 09:55:28 -0700 Subject: [PATCH 01/20] Fixed copyright to OpenSearch (#1175) --- NOTICE.txt | 2 +- README.md | 2 +- .../InternalDistributionArchiveCheckPluginFuncTest.groovy | 2 +- .../gradle/internal/InternalDistributionArchiveCheckPlugin.java | 2 +- distribution/packages/build.gradle | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/NOTICE.txt b/NOTICE.txt index 82463e622f27e..6c7dc983f8c7a 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ OpenSearch (https://opensearch.org/) -Copyright 2021 OpenSearch Contributors +Copyright OpenSearch Contributors This product includes software developed by Elasticsearch (http://www.elastic.co). diff --git a/README.md b/README.md index c4c60360b9152..6274d25c0f70f 100644 --- a/README.md +++ b/README.md @@ -34,4 +34,4 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt). ## Copyright -Copyright 2020-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. +Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details. diff --git a/buildSrc/src/integTest/groovy/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy b/buildSrc/src/integTest/groovy/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy index f21e34087b131..2230b367e6ca2 100644 --- a/buildSrc/src/integTest/groovy/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy +++ b/buildSrc/src/integTest/groovy/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPluginFuncTest.groovy @@ -108,7 +108,7 @@ Copyright 2009-2018 Acme Coorp""" result.task(":darwin-tar:checkNotice").outcome == TaskOutcome.FAILED normalizedOutput(result.output).contains("> expected line [2] in " + "[./darwin-tar/build/tar-extracted/opensearch-${VersionProperties.getOpenSearch()}/NOTICE.txt] " + - "to be [Copyright 2021 OpenSearch Contributors] but was [Copyright 2009-2018 Acme Coorp]") + "to be [Copyright OpenSearch Contributors] but was [Copyright 2009-2018 Acme Coorp]") } void license(File file = file("licenses/APACHE-LICENSE-2.0.txt")) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java index 7e3087a30ddae..2a162e5f12d7b 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java @@ -105,7 +105,7 @@ private TaskProvider registerCheckNoticeTask(Project project, TaskProvider public void execute(Task task) { final List noticeLines = Arrays.asList( "OpenSearch (https://opensearch.org/)", - "Copyright 2021 OpenSearch Contributors" + "Copyright OpenSearch Contributors" ); final Path noticePath = checkExtraction.get() .getDestinationDir() diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index 2891cd634d028..5b6b41db3ceab 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -500,7 +500,7 @@ subprojects { (project.name.contains('deb') && dpkgExists.call(it)) || (project.name.contains('rpm') && rpmExists.call(it)) } doLast { - final List noticeLines = Arrays.asList("OpenSearch (https://opensearch.org/)", "Copyright 2021 OpenSearch 
Contributors") + final List noticeLines = Arrays.asList("OpenSearch (https://opensearch.org/)", "Copyright OpenSearch Contributors") final Path noticePath = packageExtractionDir.toPath().resolve("usr/share/opensearch/NOTICE.txt") assertLinesInFile(noticePath, noticeLines) } From f60d093c63129b320cc0ad6289937249b7dcad18 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Tue, 31 Aug 2021 14:18:37 -0400 Subject: [PATCH 02/20] Drop mocksocket & securemock dependencies from sniffer and rest client (no needed) (#1174) * Drop mocksocket & securemock dependencies from sniffer and rest client (not needed) Signed-off-by: Andriy Redko * Removing .gitignore Signed-off-by: Andriy Redko --- buildSrc/version.properties | 2 ++ client/rest/build.gradle | 4 ++-- .../org/opensearch/client/RestClientBuilderIntegTests.java | 3 +-- .../org/opensearch/client/RestClientGzipCompressionTests.java | 3 +-- .../opensearch/client/RestClientMultipleHostsIntegTests.java | 3 +-- .../org/opensearch/client/RestClientSingleHostIntegTests.java | 3 +-- client/sniffer/build.gradle | 4 ++-- .../opensearch/client/sniff/OpenSearchNodesSnifferTests.java | 3 +-- 8 files changed, 11 insertions(+), 14 deletions(-) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index a7f46a545cf1c..51b9255891667 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -38,6 +38,8 @@ commonscodec = 1.13 hamcrest = 2.1 securemock = 1.2 mocksocket = 1.2 +mockito = 1.9.5 +objenesis = 1.0 # benchmark dependencies jmh = 1.19 diff --git a/client/rest/build.gradle b/client/rest/build.gradle index 0d995bc05e344..e296ccf9d9f15 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -51,8 +51,8 @@ dependencies { testImplementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testImplementation "junit:junit:${versions.junit}" testImplementation "org.hamcrest:hamcrest:${versions.hamcrest}" - testImplementation "org.elasticsearch:securemock:${versions.securemock}" - testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}" + testImplementation "org.mockito:mockito-core:${versions.mockito}" + testImplementation "org.objenesis:objenesis:${versions.objenesis}" } tasks.withType(CheckForbiddenApis).configureEach { diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java index c95db3d922df3..421536be79561 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java @@ -37,7 +37,6 @@ import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; import org.apache.http.HttpHost; -import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -73,7 +72,7 @@ public class RestClientBuilderIntegTests extends RestClientTestCase { @BeforeClass public static void startHttpServer() throws Exception { - httpsServer = MockHttpServer.createHttps(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpsServer = HttpsServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpsServer.setHttpsConfigurator(new HttpsConfigurator(getSslContext())); httpsServer.createContext("/", new ResponseHandler()); httpsServer.start(); diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java 
b/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java index 85b21933e835e..8c4d993517fee 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java @@ -39,7 +39,6 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -61,7 +60,7 @@ public class RestClientGzipCompressionTests extends RestClientTestCase { @BeforeClass public static void startHttpServer() throws Exception { - httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.createContext("/", new GzipResponseHandler()); httpServer.start(); } diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java index c747e08f9319e..cfacadb1650fb 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java @@ -36,7 +36,6 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; import org.apache.http.HttpHost; -import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -107,7 +106,7 @@ private static RestClient buildRestClient(NodeSelector nodeSelector) { } private static HttpServer createHttpServer() throws Exception { - HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.start(); //returns a different status code depending on the path for (int statusCode : getAllStatusCodes()) { diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java index c3f1391454e8f..b412130be77c4 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java @@ -52,7 +52,6 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.entity.NStringEntity; import org.apache.http.util.EntityUtils; -import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.After; import org.junit.Before; @@ -107,7 +106,7 @@ public void startHttpServer() throws Exception { } private HttpServer createHttpServer() throws Exception { - HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.start(); //returns a different status code depending on the path for (int statusCode : getAllStatusCodes()) { diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index 220835d00977b..057446c981834 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -47,8 +47,8 @@ 
dependencies { testImplementation project(":client:test") testImplementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testImplementation "junit:junit:${versions.junit}" - testImplementation "org.elasticsearch:securemock:${versions.securemock}" - testImplementation "org.elasticsearch:mocksocket:${versions.mocksocket}" + testImplementation "org.mockito:mockito-core:${versions.mockito}" + testImplementation "org.objenesis:objenesis:${versions.objenesis}" } tasks.named('forbiddenApisMain').configure { diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java index 97fd3d852ed2f..38ef722d5c383 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java @@ -48,7 +48,6 @@ import org.opensearch.client.ResponseException; import org.opensearch.client.RestClient; import org.opensearch.client.RestClientTestCase; -import org.elasticsearch.mocksocket.MockHttpServer; import org.junit.After; import org.junit.Before; @@ -153,7 +152,7 @@ public void testSniffNodes() throws IOException { } private static HttpServer createHttpServer(final SniffResponse sniffResponse, final int sniffTimeoutMillis) throws IOException { - HttpServer httpServer = MockHttpServer.createHttp(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); + HttpServer httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 0); httpServer.createContext("/_nodes/http", new ResponseHandler(sniffTimeoutMillis, sniffResponse)); return httpServer; } From f298a41e4beeecb0ca6154326ef6480521ddde9e Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Tue, 31 Aug 2021 18:20:48 -0400 Subject: [PATCH 03/20] Extract excludes into a file, fix the link checker by adding http://site.icu-project.org/. 
(#1189) --- .github/workflows/links.yml | 7 ++++++- .lycheeexclude | 10 ++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 .lycheeexclude diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index dec7435f63fc3..1ecf5ee84bc18 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -12,11 +12,16 @@ jobs: steps: - uses: actions/checkout@v2 + - name: Load Excludes + run: | + LYCHEE_EXCLUDE=$(sed -e :a -e 'N;s/\n/ --exclude /;ta' .lycheeexclude) + echo "LYCHEE_EXCLUDE=$LYCHEE_EXCLUDE" >> $GITHUB_ENV + - name: lychee Link Checker id: lychee uses: lycheeverse/lychee-action@master with: - args: --accept=200,403,429 --exclude=http://www.unicode.org/Public/PROGRAMS/CVTUTF --exclude=http://www.eclipse.org/jetty/downloads.php --exclude=http://project.carrot2.org/license.html --exclude=http://bitbucket.org/jpbarrette/moman/overview/ --exclude=http://opensource.adobe.com/wiki/display/cmap/Downloads --exclude=http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ --exclude=http://eid-applet.googlecode.com/ --exclude=http://www.ecma-international.org/publications/files/ECMA-ST/Ecma%20PATENT/Patent%20statements%20ok/ECMA-376%20Edition%202%20Microsoft%20Patent%20Declaration.pdf --exclude=http://www.ecma-international.org/publications/files/ECMA-ST/Ecma%20PATENT/Patent%20statements%20ok/ECMA-376%20Adobe%20Patent%20Declaration.pdf --exclude=http://snapshot/ --exclude=http://www.darwinsys.com/file/ --exclude=https://www.sqlite.org/copyright.html --exclude=http://www.bouncycastle.org/ --exclude=http://www.icu-project.org --exclude=http://www.sjp.pl/slownik/en/ --exclude=http://www.brics.dk/automaton/ --exclude=http://jaspell.sourceforge.net/ --exclude=http://www.opensource.org/licenses/bsd-license.php --exclude=http://www.jcraft.com/jzlib/ --exclude=http://www.slf4j.org/ --exclude=http://www.python.org/download/releases/2.4.2/license/ --exclude=http://www.python.org/download/releases/3.1.2/license/ --exclude=http://www.jcip.net --exclude=http://www.7-zip.org/sdk.html --exclude=http://www.mozilla.org/MPL/ --exclude=http://www.ecma-international.org/publications/standards/Ecma-376.htm --exclude=http://www.ecma-international.org/memento/Ecmabylaws.htm --exclude=http://www.microsoft.com/openspecifications/en/us/programs/osp/default.aspx --exclude=http://www.bea.com/ --exclude=http://www.unidata.ucar.edu/software/netcdf-java/ --exclude=http://www.clker.com/clipart-13653.html --exclude=http://www.joda.org/ --exclude=http://www.eclipse.org/licenses/edl-v10.html --exclude=http://www.eclipse.org/legal/cpl-v10.html --exclude=http://www.mozilla.org/MPL/MPL-1.1.html --exclude=http://source.icu-project.org/repos/icu/icu/trunk/license.html --exclude=http://unicode.org/copyright.html --exclude-mail "**/*.html" "**/*.md" "**/*.txt" "**/*.json" + args: --accept=200,403,429 --exclude ${{ env.LYCHEE_EXCLUDE }} --exclude-mail "**/*.html" "**/*.md" "**/*.txt" "**/*.json" env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - name: Fail if there were link errors diff --git a/.lycheeexclude b/.lycheeexclude new file mode 100644 index 0000000000000..b43db13f4c87d --- /dev/null +++ b/.lycheeexclude @@ -0,0 +1,10 @@ +http://bitbucket.org/jpbarrette/moman/overview/ +http://eid-applet.googlecode.com/ +http://opensource.adobe.com/wiki/display/cmap/Downloads +http://project.carrot2.org/license.html +http://site.icu-project.org/ +http://snapshot/ +http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ 
+http://www.eclipse.org/jetty/downloads.php +http://www.ecma-international.org/publications/files/ECMA-ST/Ecma%20PATENT/Patent%20statements%20ok/ECMA-376%20* +http://www.unicode.org/Public/PROGRAMS/CVTUTF \ No newline at end of file From 3a7f0762ac35272a4935e70e3586748b79f03d9f Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 1 Sep 2021 14:45:46 -0500 Subject: [PATCH 04/20] [Bug] Fix mixed cluster support for OpenSearch 2+ (#1191) The version framework only added support for OpenSearch 1.x bwc with legacy clusters. This commit adds support for v2.0 which will be the last version with bwc support for legacy clusters (v7.10) Signed-off-by: Nicholas Walter Knize --- .../java/org/opensearch/LegacyESVersion.java | 2 +- .../src/main/java/org/opensearch/Version.java | 24 +++++++++-- .../coordination/JoinTaskExecutor.java | 3 +- .../java/org/opensearch/VersionTests.java | 42 +++++++++++++++++++ .../coordination/JoinTaskExecutorTests.java | 3 +- .../org/opensearch/test/VersionUtils.java | 40 ++++++++++++++++++ 6 files changed, 105 insertions(+), 9 deletions(-) diff --git a/server/src/main/java/org/opensearch/LegacyESVersion.java b/server/src/main/java/org/opensearch/LegacyESVersion.java index e535d209e595e..ab1b4400906da 100644 --- a/server/src/main/java/org/opensearch/LegacyESVersion.java +++ b/server/src/main/java/org/opensearch/LegacyESVersion.java @@ -167,7 +167,7 @@ public class LegacyESVersion extends Version { final int minor = Integer.valueOf(fields[2]) * 10000; final int revision = Integer.valueOf(fields[3]) * 100; final int expectedId; - if (fields[1].equals("1")) { + if (major > 0 && major < 6000000) { expectedId = 0x08000000 ^ (major + minor + revision + 99); } else { expectedId = (major + minor + revision + 99); diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 142fabf5ad079..999c39cabbc45 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -238,8 +238,8 @@ public boolean onOrBefore(Version version) { // LegacyESVersion major 7 is equivalent to Version major 1 public int compareMajor(Version other) { - int m = major == 1 ? 7 : major; - int om = other.major == 1 ? 7 : other.major; + int m = major == 1 ? 7 : major == 2 ? 8 : major; + int om = other.major == 1 ? 7 : other.major == 2 ? 8 : other.major; return Integer.compare(m, om); } @@ -293,6 +293,8 @@ protected Version computeMinCompatVersion() { } else if (major == 6) { // force the minimum compatibility for version 6 to 5.6 since we don't reference version 5 anymore return Version.fromId(5060099); + } else if (major == 2) { + return LegacyESVersion.V_7_10_0; } else if (major >= 7) { // all major versions from 7 onwards are compatible with last minor series of the previous major Version bwcVersion = null; @@ -339,6 +341,8 @@ private Version computeMinIndexCompatVersion() { bwcMajor = 2; // we jumped from 2 to 5 } else if (major == 7 || major == 1) { return LegacyESVersion.V_6_0_0_beta1; + } else if (major == 2) { + return LegacyESVersion.V_7_0_0; } else { bwcMajor = major - 1; } @@ -354,8 +358,20 @@ public boolean isCompatible(Version version) { && version.onOrAfter(minimumCompatibilityVersion()); // OpenSearch version 1 is the functional equivalent of predecessor version 7 - int a = major == 1 ? 7 : major; - int b = version.major == 1 ? 
7 : version.major; + // OpenSearch version 2 is the functional equivalent of predecessor unreleased version "8" + // todo refactor this logic after removing deprecated features + int a = major; + if (major == 1) { + a = 7; + } else if (major == 2) { + a = 8; + } + int b = version.major; + if (version.major == 1) { + b = 7; + } else if (version.major == 2) { + b = 8; + } assert compatible == false || Math.max(a, b) - Math.min(a, b) <= 1; return compatible; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java index ac725e613e0bd..7cf8caf22bccf 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/JoinTaskExecutor.java @@ -367,9 +367,8 @@ public static void ensureNodesCompatibility(Version joiningNodeVersion, Version * version mode **/ public static void ensureMajorVersionBarrier(Version joiningNodeVersion, Version minClusterNodeVersion) { - final byte jnMajor = joiningNodeVersion.major == 1 ? 7 : joiningNodeVersion.major; final byte clusterMajor = minClusterNodeVersion.major == 1? 7: minClusterNodeVersion.major; - if (jnMajor < clusterMajor) { + if (joiningNodeVersion.compareMajor(minClusterNodeVersion) < 0) { throw new IllegalStateException("node version [" + joiningNodeVersion + "] is not supported. " + "All nodes in the cluster are of a higher major [" + clusterMajor + "]."); } diff --git a/server/src/test/java/org/opensearch/VersionTests.java b/server/src/test/java/org/opensearch/VersionTests.java index 36bdbd2e1b7ba..6d8f774fa3f7e 100644 --- a/server/src/test/java/org/opensearch/VersionTests.java +++ b/server/src/test/java/org/opensearch/VersionTests.java @@ -211,6 +211,48 @@ public void testMinCompatVersion() { assertEquals(0, LegacyESVersion.V_7_0_0.minimumCompatibilityVersion().revision); } + /** test opensearch min wire compatibility */ + public void testOpenSearchMinCompatVersion() { + Version opensearchVersion = VersionUtils.randomOpenSearchVersion(random()); + // opensearch 1.x minCompat is Legacy 6.8.0 + // opensearch 2.x minCompat is Legacy 7.10.0 + // opensearch 3.x minCompat is 1.{last minor version}.0 + // until 3.0 is staged the following line will only return legacy versions + List candidates = opensearchVersion.major >= 3 ? VersionUtils.allOpenSearchVersions() : VersionUtils.allLegacyVersions(); + int opensearchMajor = opensearchVersion.major; + int major = opensearchMajor - 1; + if (opensearchMajor == 1) { + major = 7; + } else if (opensearchMajor == 2) { + major = 8; + } + assertEquals(VersionUtils.lastFirstReleasedMinorFromMajor(candidates, major - 1), + opensearchVersion.minimumCompatibilityVersion()); + } + + /** test opensearch min index compatibility */ + public void testOpenSearchMinIndexCompatVersion() { + Version opensearchVersion = VersionUtils.randomOpenSearchVersion(random()); + // opensearch 1.x minIndexCompat is Legacy 6.8.0 + // opensearch 2.x minCompat is Legacy 7.10.0 + // opensearch 3.x minCompat is 1.{last minor version}.0 + // until 3.0 is staged the following line will only return legacy versions + List candidates = opensearchVersion.major >= 3 ? 
VersionUtils.allOpenSearchVersions() : VersionUtils.allLegacyVersions(); + int opensearchMajor = opensearchVersion.major; + int major = opensearchMajor - 1; + if (opensearchMajor == 1) { + major = 7; + } else if (opensearchMajor == 2) { + major = 8; + } + Version expected = VersionUtils.getFirstVersionOfMajor(candidates, major - 1); + Version actual = opensearchVersion.minimumIndexCompatibilityVersion(); + // since some legacy versions still support build (alpha, beta, RC) we check major minor revision only + assertEquals(expected.major, actual.major); + assertEquals(expected.minor, actual.minor); + assertEquals(expected.revision, actual.revision); + } + public void testToString() { // with 2.0.beta we lowercase assertEquals("2.0.0-beta1", LegacyESVersion.fromString("2.0.0-beta1").toString()); diff --git a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java index a7998d95a524e..a24bed1c762d0 100644 --- a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java @@ -88,8 +88,7 @@ public void testPreventJoinClusterWithUnsupportedIndices() { Settings.builder().build(); Metadata.Builder metaBuilder = Metadata.builder(); IndexMetadata indexMetadata = IndexMetadata.builder("test") - .settings(settings(VersionUtils.getPreviousVersion(Version.CURRENT - .minimumIndexCompatibilityVersion()))) + .settings(settings(Version.fromString("5.8.0"))) .numberOfShards(1) .numberOfReplicas(1).build(); metaBuilder.put(indexMetadata, false); diff --git a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java index 747190cbd5bf2..efeae6c6909c0 100644 --- a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java @@ -39,6 +39,7 @@ import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Optional; @@ -149,6 +150,8 @@ private static Version moveLastToUnreleased(List> versions, List RELEASED_VERSIONS; private static final List UNRELEASED_VERSIONS; private static final List ALL_VERSIONS; + private static final List ALL_OPENSEARCH_VERSIONS; + private static final List ALL_LEGACY_VERSIONS; static { Tuple, List> versions = resolveReleasedVersions(Version.CURRENT, LegacyESVersion.class); @@ -159,6 +162,9 @@ private static Version moveLastToUnreleased(List> versions, List v.major < 6).collect(Collectors.toList()); + ALL_LEGACY_VERSIONS = ALL_VERSIONS.stream().filter(v -> v.major >= 6).collect(Collectors.toList()); } /** @@ -182,6 +188,16 @@ public static List allVersions() { return ALL_VERSIONS; } + /** Returns an immutable, sorted list containing all opensearch versions; released and unreleased */ + public static List allOpenSearchVersions() { + return ALL_OPENSEARCH_VERSIONS; + } + + /** Returns an immutable, sorted list containing all legacy versions; released and unreleased */ + public static List allLegacyVersions() { + return ALL_LEGACY_VERSIONS; + } + /** * Get the released version before {@code version}. 
*/ @@ -224,11 +240,35 @@ public static Version getFirstVersion() { return RELEASED_VERSIONS.get(0); } + public static Version getFirstVersionOfMajor(List versions, int major) { + Map> majorVersions = versions.stream().collect(Collectors.groupingBy(v -> (int)v.major)); + return majorVersions.get(major).get(0); + } + /** Returns a random {@link Version} from all available versions. */ public static Version randomVersion(Random random) { return ALL_VERSIONS.get(random.nextInt(ALL_VERSIONS.size())); } + /** + * Return a random {@link Version} from all available opensearch versions. + **/ + public static Version randomOpenSearchVersion(Random random) { + return ALL_OPENSEARCH_VERSIONS.get(random.nextInt(ALL_OPENSEARCH_VERSIONS.size())); + } + + /** Returns the first released (e.g., patch version 0) {@link Version} of the last minor from the requested major version + * e.g., for version 1.0.0 this would be legacy version (7.10.0); the first release (patch 0), of the last + * minor (for 7.x that is minor version 10) for the desired major version (7) + **/ + public static Version lastFirstReleasedMinorFromMajor(List allVersions, int major) { + Map> majorVersions = allVersions.stream().collect(Collectors.groupingBy(v -> (int)v.major)); + Map> groupedByMinor = majorVersions.get(major).stream().collect( + Collectors.groupingBy(v -> (int)v.minor)); + List candidates = Collections.max(groupedByMinor.entrySet(), Comparator.comparing(Map.Entry::getKey)).getValue(); + return candidates.get(0); + } + /** Returns a random {@link Version} from all available versions, that is compatible with the given version. */ public static Version randomCompatibleVersion(Random random, Version version) { final List compatible = ALL_VERSIONS.stream().filter(version::isCompatible).collect(Collectors.toList()); From 652cdbd5af9b2787ab364dfe40709db5c5fe7da8 Mon Sep 17 00:00:00 2001 From: Tianli Feng Date: Wed, 1 Sep 2021 14:15:22 -0700 Subject: [PATCH 05/20] Clarify JDK requirement in the developer guide (#1153) * Explicitly point out the JDK 8 requirement is for runtime, but not for compiling. * Clarify the JAVAx_HOME env variables are for the "backwards compatibility test". * Add explanation on how the backwards compatibility tests get the OpenSearch distributions for a specific version. Signed-off-by: Tianli Feng --- DEVELOPER_GUIDE.md | 4 ++-- TESTING.md | 10 +++++++--- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 14b219ea07ed8..c628a9abae8a8 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -49,9 +49,9 @@ Fork [opensearch-project/OpenSearch](https://github.com/opensearch-project/OpenS OpenSearch builds using Java 11 at a minimum. This means you must have a JDK 11 installed with the environment variable `JAVA_HOME` referencing the path to Java home for your JDK 11 installation, e.g. `JAVA_HOME=/usr/lib/jvm/jdk-11`. -By default, tests use the same runtime as `JAVA_HOME`. However, since OpenSearch supports JDK 8, the build supports compiling with JDK 11 and testing on a different version of JDK runtime. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. `RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-8`. +By default, tests use the same runtime as `JAVA_HOME`. However, since OpenSearch also supports JDK 8 as the runtime, the build supports compiling with JDK 11 and testing on a different version of JDK runtime. To do this, set `RUNTIME_JAVA_HOME` pointing to the Java home of another JDK installation, e.g. 
`RUNTIME_JAVA_HOME=/usr/lib/jvm/jdk-8`.
 
-To run the full suite of tests you will also need `JAVA8_HOME`, `JAVA9_HOME`, `JAVA10_HOME`, `JAVA11_HOME`, and `JAVA12_HOME`.
+To run the full suite of tests you will also need `JAVA8_HOME`, `JAVA11_HOME`, and `JAVA14_HOME`. They are required by the [backwards compatibility test](./TESTING.md#testing-backwards-compatibility).
 
 #### Docker
diff --git a/TESTING.md b/TESTING.md
index 6e72ceb063ac6..7a57f5197a05b 100644
--- a/TESTING.md
+++ b/TESTING.md
@@ -363,7 +363,13 @@ These test tasks can use the `--tests`, `--info`, and `--debug` parameters just
 
 # Testing backwards compatibility
 
-Backwards compatibility tests exist to test upgrading from each supported version to the current version. To run them all use:
+Backwards compatibility tests exist to test upgrading from each supported version to the current version.
+
+The tests can be run for any version that the current version is compatible with. Tests for released versions download the distributions from the artifact repository; see [DistributionDownloadPlugin](./buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java) for the repository location. Tests for versions that are not yet released automatically check out the branch and build the distributions from source; see [BwcVersions](./buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java) and [distribution/bwc/build.gradle](./distribution/bwc/build.gradle) for more information.
+
+The minimum JDK versions for runtime and compiling need to be installed, and the environment variables `JAVAx_HOME` (such as `JAVA8_HOME`) pointing to those JDK installations are required to run the tests against unreleased versions, since those distributions are built from source. The required JDK versions for each branch are listed in [.ci/java-versions.properties](.ci/java-versions.properties); see [BwcSetupExtension](./buildSrc/src/main/java/org/opensearch/gradle/internal/BwcSetupExtension.java) for more information.
+
+To run all the backwards compatibility tests use:
 
     ./gradlew bwcTest
 
@@ -377,8 +383,6 @@ Use -Dtest.class and -Dtests.method to run a specific bwcTest test. For example
       -Dtests.class=org.opensearch.upgrades.RecoveryIT \
       -Dtests.method=testHistoryUUIDIsGenerated
 
-Tests are run for versions that are not yet released but with which the current version will be compatible with. These are automatically checked out and built from source. See [BwcVersions](./buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java) and [distribution/bwc/build.gradle](./distribution/bwc/build.gradle) for more information.
-
 When running `./gradlew check`, minimal bwc checks are also run against compatible versions that are not yet released.
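The documentation change above describes two paths for obtaining a backwards-compatibility distribution: released versions are downloaded from the artifact repository, while unreleased versions are checked out and built from source. A minimal, hypothetical sketch of that decision follows; the class and method names are illustrative only and are not part of the real Gradle build (the actual logic lives in DistributionDownloadPlugin and BwcVersions).

```java
import java.util.List;

/**
 * Illustrative sketch of the decision described in the TESTING.md text above.
 * Names here are hypothetical; the real implementation is in the Gradle build.
 */
public class BwcDistributionSourceSketch {

    enum Source { DOWNLOAD_FROM_ARTIFACT_REPOSITORY, CHECK_OUT_AND_BUILD_FROM_SOURCE }

    private final List<String> releasedVersions;

    BwcDistributionSourceSketch(List<String> releasedVersions) {
        this.releasedVersions = releasedVersions;
    }

    /** Released versions can simply be fetched; anything else has to be built locally. */
    Source sourceFor(String version) {
        return releasedVersions.contains(version)
            ? Source.DOWNLOAD_FROM_ARTIFACT_REPOSITORY
            : Source.CHECK_OUT_AND_BUILD_FROM_SOURCE;
    }

    public static void main(String[] args) {
        BwcDistributionSourceSketch resolver =
            new BwcDistributionSourceSketch(List.of("7.10.2", "1.0.0", "1.0.1"));
        System.out.println(resolver.sourceFor("1.0.1")); // DOWNLOAD_FROM_ARTIFACT_REPOSITORY
        System.out.println(resolver.sourceFor("1.1.0")); // CHECK_OUT_AND_BUILD_FROM_SOURCE (unreleased at the time)
    }
}
```

Building from source is also why the per-branch `JAVAx_HOME` variables are needed: the checked-out branch may require a different compile-time JDK than the current one.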
## BWC Testing against a specific remote/branch From 6e199d223b3fb89e546e831c2aa9f7d67815518f Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 1 Sep 2021 19:22:20 -0400 Subject: [PATCH 06/20] Restoring alpha/beta/rc version semantics (#1112) Signed-off-by: Andriy Redko --- .../java/org/opensearch/LegacyESVersion.java | 25 ++++++++++++ .../src/main/java/org/opensearch/Version.java | 38 +++++++++++-------- .../java/org/opensearch/VersionTests.java | 37 +++++++++++++++--- 3 files changed, 79 insertions(+), 21 deletions(-) diff --git a/server/src/main/java/org/opensearch/LegacyESVersion.java b/server/src/main/java/org/opensearch/LegacyESVersion.java index ab1b4400906da..3ba2195d4e6db 100644 --- a/server/src/main/java/org/opensearch/LegacyESVersion.java +++ b/server/src/main/java/org/opensearch/LegacyESVersion.java @@ -288,4 +288,29 @@ private static Version fromStringSlow(String version) { protected int maskId(final int id) { return id; } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(major).append('.').append(minor).append('.').append(revision); + if (isAlpha()) { + sb.append("-alpha"); + sb.append(build); + } else if (isBeta()) { + if (major >= 2) { + sb.append("-beta"); + } else { + sb.append(".Beta"); + } + sb.append(major < 5 ? build : build-25); + } else if (build < 99) { + if (major >= 2) { + sb.append("-rc"); + } else { + sb.append(".RC"); + } + sb.append(build - 50); + } + return sb.toString(); + } } diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index 999c39cabbc45..c640f3863e213 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -177,19 +177,35 @@ private static Version fromStringSlow(String version) { throw new IllegalArgumentException("illegal version format - snapshot labels are not supported"); } String[] parts = version.split("[.-]"); - // todo: add back optional build number - if (parts.length != 3) { - throw new IllegalArgumentException("the version needs to contain major, minor, and revision: " + version); + if (parts.length < 3 || parts.length > 4) { + throw new IllegalArgumentException( + "the version needs to contain major, minor, and revision, and optionally the build: " + version); } try { final int rawMajor = Integer.parseInt(parts[0]); - + final int betaOffset = 25; // 0 - 24 is taking by alpha builds + //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo final int major = rawMajor * 1000000; final int minor = Integer.parseInt(parts[1]) * 10000; final int revision = Integer.parseInt(parts[2]) * 100; int build = 99; + if (parts.length == 4) { + String buildStr = parts[3]; + if (buildStr.startsWith("alpha")) { + build = Integer.parseInt(buildStr.substring(5)); + assert build < 25 : "expected a alpha build but " + build + " >= 25"; + } else if (buildStr.startsWith("Beta") || buildStr.startsWith("beta")) { + build = betaOffset + Integer.parseInt(buildStr.substring(4)); + assert build < 50 : "expected a beta build but " + build + " >= 50"; + } else if (buildStr.startsWith("RC") || buildStr.startsWith("rc")) { + build = Integer.parseInt(buildStr.substring(2)) + 50; + } else { + throw new IllegalArgumentException("unable to parse version " + version); + } + } + return fromId((major + minor + revision + build) ^ MASK); } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to parse version " + version, e); @@ 
-398,18 +414,10 @@ public String toString() { sb.append("-alpha"); sb.append(build); } else if (isBeta()) { - if (major >= 2) { - sb.append("-beta"); - } else { - sb.append(".Beta"); - } - sb.append(major < 5 ? build : build-25); + sb.append("-beta"); + sb.append(build - 25); } else if (build < 99) { - if (major >= 2) { - sb.append("-rc"); - } else { - sb.append(".RC"); - } + sb.append("-rc"); sb.append(build - 50); } return sb.toString(); diff --git a/server/src/test/java/org/opensearch/VersionTests.java b/server/src/test/java/org/opensearch/VersionTests.java index 6d8f774fa3f7e..14ba2fa2d73ec 100644 --- a/server/src/test/java/org/opensearch/VersionTests.java +++ b/server/src/test/java/org/opensearch/VersionTests.java @@ -265,12 +265,36 @@ public void testToString() { assertEquals("5.0.0-alpha1", LegacyESVersion.fromString("5.0.0-alpha1").toString()); } + public void testIsRc() { + assertTrue(LegacyESVersion.fromString("2.0.0-rc1").isRC()); + assertTrue(LegacyESVersion.fromString("1.0.0.RC1").isRC()); + assertTrue(Version.fromString("1.0.0-rc1").isRC()); + assertTrue(Version.fromString("2.0.0.RC1").isRC()); + + for (int i = 0 ; i < 25; i++) { + assertEquals(LegacyESVersion.fromString("5.0.0-rc" + i).id, LegacyESVersion.fromId(5000000 + i + 50).id); + assertEquals("5.0.0-rc" + i, LegacyESVersion.fromId(5000000 + i + 50).toString()); + + assertEquals(Version.fromString("1.0.0-rc" + i).id, Version.fromId(135217728 + i + 50).id); + assertEquals("1.0.0-rc" + i, Version.fromId(135217728 + i + 50).toString()); + } + } + public void testIsBeta() { assertTrue(LegacyESVersion.fromString("2.0.0-beta1").isBeta()); assertTrue(LegacyESVersion.fromString("1.0.0.Beta1").isBeta()); assertTrue(LegacyESVersion.fromString("0.90.0.Beta1").isBeta()); - } + assertTrue(Version.fromString("1.0.0.Beta1").isBeta()); + assertTrue(Version.fromString("2.0.0.beta1").isBeta()); + for (int i = 0 ; i < 25; i++) { + assertEquals(LegacyESVersion.fromString("5.0.0-beta" + i).id, LegacyESVersion.fromId(5000000 + i + 25).id); + assertEquals("5.0.0-beta" + i, LegacyESVersion.fromId(5000000 + i + 25).toString()); + + assertEquals(Version.fromString("1.0.0-beta" + i).id, Version.fromId(135217728 + i + 25).id); + assertEquals("1.0.0-beta" + i, Version.fromId(135217728 + i + 25).toString()); + } + } public void testIsAlpha() { assertTrue(new LegacyESVersion(5000001, org.apache.lucene.util.Version.LUCENE_7_0_0).isAlpha()); @@ -279,15 +303,16 @@ public void testIsAlpha() { assertTrue(LegacyESVersion.fromString("5.0.0-alpha14").isAlpha()); assertEquals(5000014, LegacyESVersion.fromString("5.0.0-alpha14").id); assertTrue(LegacyESVersion.fromId(5000015).isAlpha()); + + assertEquals(135217742, Version.fromString("1.0.0-alpha14").id); + assertTrue(Version.fromString("1.0.0-alpha14").isAlpha()); for (int i = 0 ; i < 25; i++) { assertEquals(LegacyESVersion.fromString("5.0.0-alpha" + i).id, LegacyESVersion.fromId(5000000 + i).id); assertEquals("5.0.0-alpha" + i, LegacyESVersion.fromId(5000000 + i).toString()); - } - - for (int i = 0 ; i < 25; i++) { - assertEquals(LegacyESVersion.fromString("5.0.0-beta" + i).id, LegacyESVersion.fromId(5000000 + i + 25).id); - assertEquals("5.0.0-beta" + i, LegacyESVersion.fromId(5000000 + i + 25).toString()); + + assertEquals(Version.fromString("1.0.0-alpha" + i).id, Version.fromId(135217728 + i).id); + assertEquals("1.0.0-alpha" + i, Version.fromId(135217728 + i).toString()); } } From 20c6f786ac931a457a3f59599c3621cb7db3de3f Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Wed, 1 Sep 2021 
21:13:48 -0400 Subject: [PATCH 07/20] Added all icu-project.org websites to the link checker exclusions. (#1201) Signed-off-by: dblock --- .github/workflows/links.yml | 2 +- .lycheeexclude | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 1ecf5ee84bc18..5e25e0f48b927 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -14,7 +14,7 @@ jobs: - uses: actions/checkout@v2 - name: Load Excludes run: | - LYCHEE_EXCLUDE=$(sed -e :a -e 'N;s/\n/ --exclude /;ta' .lycheeexclude) + LYCHEE_EXCLUDE=$(sed -e :a -e 'N;s/\n/ /;ta' .lycheeexclude) echo "LYCHEE_EXCLUDE=$LYCHEE_EXCLUDE" >> $GITHUB_ENV - name: lychee Link Checker diff --git a/.lycheeexclude b/.lycheeexclude index b43db13f4c87d..830cd6ee72403 100644 --- a/.lycheeexclude +++ b/.lycheeexclude @@ -2,9 +2,11 @@ http://bitbucket.org/jpbarrette/moman/overview/ http://eid-applet.googlecode.com/ http://opensource.adobe.com/wiki/display/cmap/Downloads http://project.carrot2.org/license.html +http://source.icu-project.org/ http://site.icu-project.org/ +http://www.icu-project.org/ http://snapshot/ http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ http://www.eclipse.org/jetty/downloads.php http://www.ecma-international.org/publications/files/ECMA-ST/Ecma%20PATENT/Patent%20statements%20ok/ECMA-376%20* -http://www.unicode.org/Public/PROGRAMS/CVTUTF \ No newline at end of file +http://www.unicode.org/Public/PROGRAMS/CVTUTF From ecd9875e7cf6cf27e14f350fbf77ce1ffb9f7b07 Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Wed, 1 Sep 2021 21:54:41 -0400 Subject: [PATCH 08/20] Clarify opensearch.version to not include -SNAPSHOT. (#1186) Signed-off-by: dblock --- .../src/main/resources/plugin-descriptor.properties | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/buildSrc/src/main/resources/plugin-descriptor.properties b/buildSrc/src/main/resources/plugin-descriptor.properties index ce1803bdb676b..885eb99ee3acb 100644 --- a/buildSrc/src/main/resources/plugin-descriptor.properties +++ b/buildSrc/src/main/resources/plugin-descriptor.properties @@ -36,7 +36,7 @@ version=${version} # 'name': the plugin name name=${name} # -# 'classname': the name of the class to load, fully-qualified. +# 'classname': the name of the class to load, fully-qualified classname=${classname} # # 'java.version': version of java the code is built against @@ -45,14 +45,16 @@ classname=${classname} # separated by "."'s and may have leading zeros java.version=${javaVersion} # -# 'opensearch.version': version of opensearch compiled against +# 'opensearch.version': semantic version of opensearch the plugin is compatible with +# does not include -SNAPSHOT if compiled against a snapshot build opensearch.version=${opensearchVersion} +# ### optional elements for plugins: # -# 'custom.foldername': the custom name of the folder in which the plugin is installed. 
+# 'custom.foldername': the custom name of the folder in which the plugin is installed custom.foldername=${customFolderName} # -# 'extended.plugins': other plugins this plugin extends through SPI +# 'extended.plugins': other plugins this plugin extends through SPI extended.plugins=${extendedPlugins} # # 'has.native.controller': whether or not the plugin has a native controller From fa8126004ce4ac15cdadb169c2bb5f21050fd948 Mon Sep 17 00:00:00 2001 From: Abbas Hussain Date: Wed, 1 Sep 2021 20:05:42 -0700 Subject: [PATCH 09/20] Upgrade apache commons-compress to 1.21 (#1197) Signed-off-by: Abbas Hussain --- buildSrc/build.gradle | 2 +- plugins/ingest-attachment/build.gradle | 2 +- .../ingest-attachment/licenses/commons-compress-1.19.jar.sha1 | 1 - .../ingest-attachment/licenses/commons-compress-1.21.jar.sha1 | 1 + 4 files changed, 3 insertions(+), 3 deletions(-) delete mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.19.jar.sha1 create mode 100644 plugins/ingest-attachment/licenses/commons-compress-1.21.jar.sha1 diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 2b3eae929168d..e01be9796da1d 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -103,7 +103,7 @@ dependencies { api localGroovy() api 'commons-codec:commons-codec:1.13' - api 'org.apache.commons:commons-compress:1.19' + api 'org.apache.commons:commons-compress:1.21' api 'org.apache.ant:ant:1.10.9' api 'com.netflix.nebula:gradle-extra-configurations-plugin:3.0.3' api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' diff --git a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index a31deb24fbbec..231ce40f3f38c 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -77,7 +77,7 @@ dependencies { // MS Office api "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork - api 'org.apache.commons:commons-compress:1.19' + api 'org.apache.commons:commons-compress:1.21' // Outlook documents api "org.apache.james:apache-mime4j-core:${versions.mime4j}" api "org.apache.james:apache-mime4j-dom:${versions.mime4j}" diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.19.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.19.jar.sha1 deleted file mode 100644 index 045fc5c1a22e2..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-compress-1.19.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7e65777fb451ddab6a9c054beb879e521b7eab78 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.21.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.21.jar.sha1 new file mode 100644 index 0000000000000..81ac609a1aa26 --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-compress-1.21.jar.sha1 @@ -0,0 +1 @@ +4ec95b60d4e86b5c95a0e919cb172a0af98011ef \ No newline at end of file From 4b31e1ba04c44005de33fb5bb3f1564d34c6ee8a Mon Sep 17 00:00:00 2001 From: Nick Knize Date: Wed, 1 Sep 2021 22:35:53 -0500 Subject: [PATCH 10/20] [Version] Increment main to 2.0 (#1192) Increment version on main to 2.0.0. 
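Bumping main to 2.0.0 also changes how the build encodes version ids: the buildSrc Version diff that follows extends the id mask to major 2 so that OpenSearch 2.x ids, like 1.x ids, are distinguished from legacy Elasticsearch ids. A standalone sketch of that encoding is shown below; the class name is hypothetical, and the 0x08000000 mask value is taken from the LegacyESVersion computation earlier in this series.

```java
/**
 * Standalone sketch of the single-int version id used by the build logic in the
 * diff below; not the real org.opensearch.gradle.Version class.
 */
public class VersionIdSketch {

    // Mask value mirrors the one used in the LegacyESVersion computation in this patch series.
    private static final int MASK = 0x08000000;

    /** Packs major/minor/revision and flips the mask bit for OpenSearch 1.x and 2.x. */
    static int id(int major, int minor, int revision) {
        int id = major * 10000000 + minor * 100000 + revision * 1000;
        return (major == 1 || major == 2) ? id ^ MASK : id;
    }

    public static void main(String[] args) {
        System.out.printf("7.10.2 -> %d%n", id(7, 10, 2)); // plain legacy id
        System.out.printf("1.1.0  -> %d%n", id(1, 1, 0));  // masked: marked as an OpenSearch id
        System.out.printf("2.0.0  -> %d%n", id(2, 0, 0));  // masked as of this commit
    }
}
```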
Signed-off-by: Nicholas Walter Knize Co-authored-by: dblock --- .../org/opensearch/gradle/BwcVersions.java | 12 ++++++- .../java/org/opensearch/gradle/Version.java | 2 +- .../gradle/BwcOpenSearchVersionsTests.java | 4 +++ .../DistributionDownloadPluginTests.java | 10 +++--- .../org/opensearch/gradle/VersionTests.java | 2 +- buildSrc/version.properties | 2 +- .../org/opensearch/upgrades/IndexingIT.java | 23 ------------- .../src/main/java/org/opensearch/Version.java | 11 ++++--- .../coordination/DiscoveryUpgradeService.java | 2 +- .../metadata/MetadataCreateIndexService.java | 2 +- .../opensearch/transport/InboundDecoder.java | 3 +- .../transport/TransportHandshaker.java | 6 ++-- .../java/org/opensearch/VersionTests.java | 4 +-- .../action/support/IndicesOptionsTests.java | 33 ------------------- .../coordination/JoinTaskExecutorTests.java | 22 ++++++------- .../zen/NodeJoinControllerTests.java | 3 +- .../transport/TransportHandshakerTests.java | 4 ++- .../org/opensearch/test/VersionUtils.java | 21 ++++++++---- .../AbstractSimpleTransportTestCase.java | 29 ++++++++-------- .../AbstractFullClusterRestartTestCase.java | 29 ---------------- .../opensearch/test/VersionUtilsTests.java | 30 ++++++++--------- 21 files changed, 98 insertions(+), 156 deletions(-) diff --git a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java index 0d4f389f0eb5a..67581a35cdc04 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java @@ -379,6 +379,9 @@ public List getIndexCompatible() { if (currentMajor == 1) { // add 6.x compatible for OpenSearch 1.0.0 return unmodifiableList(Stream.concat(groupByMajor.get(prevMajor - 1).stream(), result.stream()).collect(Collectors.toList())); + } else if (currentMajor == 2) { + // add 7.x compatible for OpenSearch 2.0.0 + return unmodifiableList(Stream.concat(groupByMajor.get(7).stream(), result.stream()).collect(Collectors.toList())); } return unmodifiableList(result); } @@ -386,7 +389,7 @@ public List getIndexCompatible() { public List getWireCompatible() { List wireCompat = new ArrayList<>(); int currentMajor = currentVersion.getMajor(); - int lastMajor = currentMajor == 1 ? 6 : currentMajor - 1; + int lastMajor = currentMajor == 1 ? 6 : currentMajor == 2 ? 7 : currentMajor - 1; List lastMajorList = groupByMajor.get(lastMajor); if (lastMajorList == null) { throw new IllegalStateException("Expected to find a list of versions for version: " + lastMajor); @@ -402,7 +405,14 @@ public List getWireCompatible() { for (Version v : previousMajor) { wireCompat.add(v); } + } else if (currentMajor == 2) { + // add all of the 1.x line: + List previousMajor = groupByMajor.get(1); + for (Version v : previousMajor) { + wireCompat.add(v); + } } + wireCompat.addAll(groupByMajor.get(currentMajor)); wireCompat.remove(currentVersion); wireCompat.sort(Version::compareTo); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Version.java b/buildSrc/src/main/java/org/opensearch/gradle/Version.java index 8e00b4419f5f3..3012488381729 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/Version.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/Version.java @@ -77,7 +77,7 @@ public Version(int major, int minor, int revision) { // currently snapshot is not taken into account int id = major * 10000000 + minor * 100000 + revision * 1000; // identify if new OpenSearch version 1 - this.id = major == 1 ? 
id ^ MASK : id; + this.id = major == 1 || major == 2 ? id ^ MASK : id; } private static int parseSuffixNumber(String substring) { diff --git a/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java b/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java index 28f116710e408..14931c83ba29b 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java @@ -37,6 +37,10 @@ public class BwcOpenSearchVersionsTests extends GradleUnitTestCase { static { sampleVersions.put("1.0.0", asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0")); sampleVersions.put("1.1.0", asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0", "1_1_0")); + sampleVersions.put( + "2.0.0", + asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0", "1_1_0", "2_0_0") + ); } public void testWireCompatible() { diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index 54660c4c69e68..98feb3ef2ac93 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -52,11 +52,11 @@ public class DistributionDownloadPluginTests extends GradleUnitTestCase { private static Project packagesProject; private static Project bwcProject; - private static final Version BWC_MAJOR_VERSION = Version.fromString("4.0.0"); - private static final Version BWC_MINOR_VERSION = Version.fromString("3.1.0"); - private static final Version BWC_STAGED_VERSION = Version.fromString("3.0.0"); - private static final Version BWC_BUGFIX_VERSION = Version.fromString("3.0.1"); - private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("2.90.1"); + private static final Version BWC_MAJOR_VERSION = Version.fromString("5.0.0"); + private static final Version BWC_MINOR_VERSION = Version.fromString("4.1.0"); + private static final Version BWC_STAGED_VERSION = Version.fromString("4.0.0"); + private static final Version BWC_BUGFIX_VERSION = Version.fromString("4.0.1"); + private static final Version BWC_MAINTENANCE_VERSION = Version.fromString("3.90.1"); private static final BwcVersions BWC_MINOR = new BwcVersions( new TreeSet<>(Arrays.asList(BWC_BUGFIX_VERSION, BWC_MINOR_VERSION, BWC_MAJOR_VERSION)), BWC_MAJOR_VERSION diff --git a/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java b/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java index 40f9ffe7dfa8d..a9f32886f7927 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/VersionTests.java @@ -65,7 +65,7 @@ public void testRelaxedVersionParsing() { public void testCompareWithStringVersions() { // 1.10.2 is now rebased to OpenSearch version; so this needs to report - assertTrue("OpenSearch 1.10.20 is not interpreted as after Legacy 2.0.0", Version.fromString("1.10.20").after("2.0.0")); + assertTrue("OpenSearch 1.10.20 is not interpreted as after Legacy 3.0.0", Version.fromString("1.10.20").after("3.0.0")); assertTrue( "7.0.0-alpha1 should be equal to 7.0.0-alpha1", Version.fromString("7.0.0-alpha1").equals(Version.fromString("7.0.0-alpha1")) diff --git a/buildSrc/version.properties b/buildSrc/version.properties 
index 51b9255891667..396c13b2c7981 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,4 +1,4 @@ -opensearch = 1.1.0 +opensearch = 2.0.0 lucene = 8.9.0 bundled_jdk_vendor = adoptopenjdk diff --git a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java index 1f875158932ee..79745b1cc2f95 100644 --- a/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java +++ b/qa/rolling-upgrade/src/test/java/org/opensearch/upgrades/IndexingIT.java @@ -39,20 +39,16 @@ import org.opensearch.client.ResponseException; import org.opensearch.cluster.metadata.IndexMetadata; import org.opensearch.common.Booleans; -import org.opensearch.common.Strings; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.rest.action.document.RestBulkAction; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.Map; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; import static org.opensearch.rest.action.search.RestSearchAction.TOTAL_HITS_AS_INT_PARAM; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.either; -import static org.hamcrest.Matchers.equalTo; /** * Basic test that indexed documents survive the rolling restart. See @@ -89,25 +85,6 @@ public void testIndexing() throws IOException { } if (CLUSTER_TYPE == ClusterType.OLD) { - { - Version minimumIndexCompatibilityVersion = Version.CURRENT.minimumIndexCompatibilityVersion(); - assertThat("this branch is not needed if we aren't compatible with 6.0", - minimumIndexCompatibilityVersion.onOrBefore(LegacyESVersion.V_6_0_0), equalTo(true)); - if (minimumIndexCompatibilityVersion.before(LegacyESVersion.V_7_0_0)) { - XContentBuilder template = jsonBuilder(); - template.startObject(); - { - template.array("index_patterns", "test_index", "index_with_replicas", "empty_index"); - template.startObject("settings"); - template.field("number_of_shards", 5); - template.endObject(); - } - template.endObject(); - Request createTemplate = new Request("PUT", "/_template/prevent-bwc-deprecation-template"); - createTemplate.setJsonEntity(Strings.toString(template)); - client().performRequest(createTemplate); - } - } Request createTestIndex = new Request("PUT", "/test_index"); createTestIndex.setJsonEntity("{\"settings\": {\"index.number_of_replicas\": 0}}"); useIgnoreMultipleMatchingTemplatesWarningsHandler(createTestIndex); diff --git a/server/src/main/java/org/opensearch/Version.java b/server/src/main/java/org/opensearch/Version.java index c640f3863e213..0495bff3a8fb3 100644 --- a/server/src/main/java/org/opensearch/Version.java +++ b/server/src/main/java/org/opensearch/Version.java @@ -73,7 +73,8 @@ public class Version implements Comparable, ToXContentFragment { public static final Version V_1_0_0 = new Version(1000099, org.apache.lucene.util.Version.LUCENE_8_8_2); public static final Version V_1_0_1 = new Version(1000199, org.apache.lucene.util.Version.LUCENE_8_8_2); public static final Version V_1_1_0 = new Version(1010099, org.apache.lucene.util.Version.LUCENE_8_9_0); - public static final Version CURRENT = V_1_1_0; + public static final Version V_2_0_0 = new Version(2000099, org.apache.lucene.util.Version.LUCENE_8_9_0); + public static final Version CURRENT = V_2_0_0; public static Version readVersion(StreamInput in) throws IOException { return fromId(in.readVInt()); @@ -185,7 +186,7 @@ 
private static Version fromStringSlow(String version) { try { final int rawMajor = Integer.parseInt(parts[0]); final int betaOffset = 25; // 0 - 24 is taking by alpha builds - + //we reverse the version id calculation based on some assumption as we can't reliably reverse the modulo final int major = rawMajor * 1000000; final int minor = Integer.parseInt(parts[1]) * 10000; @@ -205,7 +206,7 @@ private static Version fromStringSlow(String version) { throw new IllegalArgumentException("unable to parse version " + version); } } - + return fromId((major + minor + revision + build) ^ MASK); } catch (NumberFormatException e) { throw new IllegalArgumentException("unable to parse version " + version, e); @@ -306,11 +307,11 @@ public Version minimumCompatibilityVersion() { protected Version computeMinCompatVersion() { if (major == 1) { return LegacyESVersion.V_6_8_0; + } else if (major == 2) { + return LegacyESVersion.V_7_10_0; } else if (major == 6) { // force the minimum compatibility for version 6 to 5.6 since we don't reference version 5 anymore return Version.fromId(5060099); - } else if (major == 2) { - return LegacyESVersion.V_7_10_0; } else if (major >= 7) { // all major versions from 7 onwards are compatible with last minor series of the previous major Version bwcVersion = null; diff --git a/server/src/main/java/org/opensearch/cluster/coordination/DiscoveryUpgradeService.java b/server/src/main/java/org/opensearch/cluster/coordination/DiscoveryUpgradeService.java index 87f7acc9e3c1b..f4f14938ff93d 100644 --- a/server/src/main/java/org/opensearch/cluster/coordination/DiscoveryUpgradeService.java +++ b/server/src/main/java/org/opensearch/cluster/coordination/DiscoveryUpgradeService.java @@ -118,7 +118,7 @@ public DiscoveryUpgradeService(Settings settings, TransportService transportServ BooleanSupplier isBootstrappedSupplier, JoinHelper joinHelper, Supplier> peersSupplier, Consumer initialConfigurationConsumer) { - assert Version.CURRENT.major == 1 : "remove this service once unsafe upgrades are no longer needed"; + assert Version.CURRENT.major == 1 || Version.CURRENT.major == 2 : "remove this service once unsafe upgrades are no longer needed"; this.transportService = transportService; this.isBootstrappedSupplier = isBootstrappedSupplier; this.joinHelper = joinHelper; diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index e40361cde6af4..9a4b4f2fee508 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -800,7 +800,7 @@ public static void validateStoreTypeSettings(Settings settings) { static int getNumberOfShards(final Settings.Builder indexSettingsBuilder) { // TODO: this logic can be removed when the current major version is 8 - assert Version.CURRENT.major == 1; + assert Version.CURRENT.major == 1 || Version.CURRENT.major == 2; final int numberOfShards; final Version indexVersionCreated = Version.fromId(Integer.parseInt(indexSettingsBuilder.get(IndexMetadata.SETTING_INDEX_VERSION_CREATED.getKey()))); diff --git a/server/src/main/java/org/opensearch/transport/InboundDecoder.java b/server/src/main/java/org/opensearch/transport/InboundDecoder.java index 057794d7bdabc..5bf2bfcb99cd8 100644 --- a/server/src/main/java/org/opensearch/transport/InboundDecoder.java +++ 
b/server/src/main/java/org/opensearch/transport/InboundDecoder.java @@ -217,7 +217,8 @@ static IllegalStateException ensureVersionCompatibility(Version remoteVersion, V // handshake. This looks odd but it's required to establish the connection correctly we check for real compatibility // once the connection is established final Version compatibilityVersion = isHandshake ? currentVersion.minimumCompatibilityVersion() : currentVersion; - if (remoteVersion.isCompatible(compatibilityVersion) == false) { + if ((currentVersion.equals(Version.V_2_0_0) && remoteVersion.equals(Version.fromId(6079999))) == false + && remoteVersion.isCompatible(compatibilityVersion) == false) { final Version minCompatibilityVersion = isHandshake ? compatibilityVersion : compatibilityVersion.minimumCompatibilityVersion(); String msg = "Received " + (isHandshake ? "handshake " : "") + "message from unsupported version: ["; return new IllegalStateException(msg + remoteVersion + "] minimal compatible version is: [" + minCompatibilityVersion + "]"); diff --git a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java index 645a687c00ffb..70d29411877a2 100644 --- a/server/src/main/java/org/opensearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/opensearch/transport/TransportHandshaker.java @@ -81,7 +81,7 @@ void sendHandshake(long requestId, DiscoveryNode node, TcpChannel channel, TimeV // we also have no payload on the request but the response will contain the actual version of the node we talk // to as the payload. Version minCompatVersion = version.minimumCompatibilityVersion(); - if(version.onOrAfter(Version.V_1_0_0)) { + if(version.onOrAfter(Version.V_1_0_0) && version.before(Version.V_2_0_0)) { // the minCompatibleVersion for OpenSearch 1.x is sent as 6.7.99 instead of 6.8.0 // as this helps in (indirectly) identifying the remote node version during handle HandshakeRequest itself // and then send appropriate version (7.10.2/ OpenSearch 1.x version) in response. @@ -91,6 +91,8 @@ void sendHandshake(long requestId, DiscoveryNode node, TcpChannel channel, TimeV // Sending only BC version to ElasticSearch node provide easy deprecation path for this BC version logic // in OpenSearch 2.0.0. minCompatVersion = Version.fromId(6079999); + } else if (version.onOrAfter(Version.V_2_0_0)) { + minCompatVersion = Version.fromId(7099999); } handshakeRequestSender.sendRequest(node, channel, requestId, minCompatVersion); @@ -120,7 +122,7 @@ void handleHandshake(TransportChannel channel, long requestId, StreamInput strea // 1. if remote node is 7.x, then StreamInput version would be 6.8.0 // 2. if remote node is 6.8 then it would be 5.6.0 // 3. 
if remote node is OpenSearch 1.x then it would be 6.7.99 - if(this.version.onOrAfter(Version.V_1_0_0) && + if((this.version.onOrAfter(Version.V_1_0_0) && this.version.before(Version.V_2_0_0)) && (stream.getVersion().equals(LegacyESVersion.V_6_8_0) || stream.getVersion().equals(Version.fromId(5060099)))) { channel.sendResponse(new HandshakeResponse(LegacyESVersion.V_7_10_2)); diff --git a/server/src/test/java/org/opensearch/VersionTests.java b/server/src/test/java/org/opensearch/VersionTests.java index 14ba2fa2d73ec..d8109216c7d44 100644 --- a/server/src/test/java/org/opensearch/VersionTests.java +++ b/server/src/test/java/org/opensearch/VersionTests.java @@ -448,7 +448,7 @@ public void testIsCompatible() { } final Version lastMinorFromPreviousMajor = VersionUtils - .allVersions() + .allReleasedVersions() .stream() .filter(v -> v.major == (currentOrNextMajorVersion.major == 1 ? 7 : currentOrNextMajorVersion.major - 1)) .max(Version::compareTo) @@ -464,7 +464,7 @@ public void testIsCompatible() { Locale.ROOT, "[%s] should %s be compatible with [%s]", previousMinorVersion, - isCompatible ? "" : " not", + isCompatible ? "" : "not", currentOrNextMajorVersion); assertThat( message, diff --git a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java index b2e6cef2eb094..15a9e89e2eb95 100644 --- a/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java +++ b/server/src/test/java/org/opensearch/action/support/IndicesOptionsTests.java @@ -57,7 +57,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.OptionalInt; import static org.opensearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.CoreMatchers.equalTo; @@ -98,38 +97,6 @@ public void testSerialization() throws Exception { } } - public void testSerializationPre70() throws Exception { - int iterations = randomIntBetween(5, 20); - List declaredVersions = Version.getDeclaredVersions(LegacyESVersion.class); - OptionalInt maxV6Id = declaredVersions.stream().filter(v -> v.major == 6).mapToInt(v -> v.id).max(); - assertTrue(maxV6Id.isPresent()); - final Version maxVersion = Version.fromId(maxV6Id.getAsInt()); - for (int i = 0; i < iterations; i++) { - Version version = randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), maxVersion); - IndicesOptions indicesOptions = IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean(), - randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean()); - - BytesStreamOutput output = new BytesStreamOutput(); - output.setVersion(version); - indicesOptions.writeIndicesOptions(output); - - StreamInput streamInput = output.bytes().streamInput(); - streamInput.setVersion(version); - IndicesOptions indicesOptions2 = IndicesOptions.readIndicesOptions(streamInput); - - assertThat(indicesOptions2.ignoreUnavailable(), equalTo(indicesOptions.ignoreUnavailable())); - assertThat(indicesOptions2.allowNoIndices(), equalTo(indicesOptions.allowNoIndices())); - assertThat(indicesOptions2.expandWildcardsOpen(), equalTo(indicesOptions.expandWildcardsOpen())); - assertThat(indicesOptions2.expandWildcardsClosed(), equalTo(indicesOptions.expandWildcardsClosed())); - - assertThat(indicesOptions2.forbidClosedIndices(), equalTo(indicesOptions.forbidClosedIndices())); - assertThat(indicesOptions2.allowAliasesToMultipleIndices(), equalTo(indicesOptions.allowAliasesToMultipleIndices())); - - 
assertEquals(indicesOptions2.ignoreAliases(), indicesOptions.ignoreAliases()); - assertEquals(indicesOptions2.ignoreThrottled(), indicesOptions.ignoreThrottled()); - } - } - public void testFromOptions() { final boolean ignoreUnavailable = randomBoolean(); final boolean allowNoIndices = randomBoolean(); diff --git a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java index a24bed1c762d0..fc2ffd6a516f3 100644 --- a/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java +++ b/server/src/test/java/org/opensearch/cluster/coordination/JoinTaskExecutorTests.java @@ -218,10 +218,10 @@ public void testUpdatesNodeWithOpenSearchVersionForExistingAndNewNodes() throws String node_5 = UUIDs.base64UUID(); // ES node 7.10.2 in cluster but missing channel version String node_6 = UUIDs.base64UUID(); // ES node 7.9.0 String node_7 = UUIDs.base64UUID(); // ES node 7.9.0 in cluster but missing channel version - channelVersions.put(node_1, LegacyESVersion.CURRENT); - channelVersions.put(node_2, LegacyESVersion.CURRENT); + channelVersions.put(node_1, Version.CURRENT); + channelVersions.put(node_2, Version.CURRENT); channelVersions.put(node_4, LegacyESVersion.V_7_10_2); - channelVersions.put(node_6, LegacyESVersion.V_7_9_0); + channelVersions.put(node_6, LegacyESVersion.V_7_10_0); final TransportService transportService = mock(TransportService.class); when(transportService.getChannelVersion(any())).thenReturn(channelVersions); @@ -231,8 +231,8 @@ public void testUpdatesNodeWithOpenSearchVersionForExistingAndNewNodes() throws nodes.add(new DiscoveryNode(node_3, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2)); nodes.add(new DiscoveryNode(node_4, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2)); nodes.add(new DiscoveryNode(node_5, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_2)); - nodes.add(new DiscoveryNode(node_6, buildNewFakeTransportAddress(), LegacyESVersion.V_7_9_0)); - nodes.add(new DiscoveryNode(node_7, buildNewFakeTransportAddress(), LegacyESVersion.V_7_9_0)); + nodes.add(new DiscoveryNode(node_6, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_1)); + nodes.add(new DiscoveryNode(node_7, buildNewFakeTransportAddress(), LegacyESVersion.V_7_10_0)); final ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).nodes(nodes).build(); final JoinTaskExecutor joinTaskExecutor = new JoinTaskExecutor(Settings.EMPTY, allocationService, logger, rerouteService, transportService); @@ -247,13 +247,13 @@ public void testUpdatesNodeWithOpenSearchVersionForExistingAndNewNodes() throws final ClusterStateTaskExecutor.TaskResult taskResult = result.executionResults.values().iterator().next(); assertTrue(taskResult.isSuccess()); DiscoveryNodes resultNodes = result.resultingState.getNodes(); - assertEquals(resultNodes.get(node_1).getVersion(), Version.CURRENT); - assertEquals(resultNodes.get(node_2).getVersion(), Version.CURRENT); - assertEquals(resultNodes.get(node_3).getVersion(), Version.CURRENT); // 7.10.2 in old state but sent new join and processed - assertEquals(resultNodes.get(node_4).getVersion(), LegacyESVersion.V_7_10_2); + assertEquals(Version.CURRENT, resultNodes.get(node_1).getVersion()); + assertEquals(Version.CURRENT, resultNodes.get(node_2).getVersion()); + assertEquals(Version.CURRENT, resultNodes.get(node_3).getVersion()); // 7.10.2 in old state but sent new join and processed + 
assertEquals(LegacyESVersion.V_7_10_2, resultNodes.get(node_4).getVersion()); assertFalse(resultNodes.nodeExists(node_5)); // 7.10.2 node without active channel will be removed and should rejoin - assertEquals(resultNodes.get(node_6).getVersion(), LegacyESVersion.V_7_9_0); + assertEquals(LegacyESVersion.V_7_10_0, resultNodes.get(node_6).getVersion()); // 7.9.0 node without active channel but shouldn't get removed - assertEquals(resultNodes.get(node_7).getVersion(), LegacyESVersion.V_7_9_0); + assertEquals(LegacyESVersion.V_7_10_0, resultNodes.get(node_7).getVersion()); } } diff --git a/server/src/test/java/org/opensearch/discovery/zen/NodeJoinControllerTests.java b/server/src/test/java/org/opensearch/discovery/zen/NodeJoinControllerTests.java index 1f8759e57a7fb..6d34f02a51934 100644 --- a/server/src/test/java/org/opensearch/discovery/zen/NodeJoinControllerTests.java +++ b/server/src/test/java/org/opensearch/discovery/zen/NodeJoinControllerTests.java @@ -97,6 +97,7 @@ import static org.opensearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; import static org.opensearch.test.ClusterServiceUtils.setState; import static org.opensearch.test.VersionUtils.allVersions; +import static org.opensearch.test.VersionUtils.allOpenSearchVersions; import static org.opensearch.test.VersionUtils.getPreviousVersion; import static org.opensearch.test.VersionUtils.randomCompatibleVersion; import static org.hamcrest.Matchers.allOf; @@ -617,7 +618,7 @@ private void assertRejectingJoinWithIncompatibleVersion(final Version badVersion new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES)), badVersion); final Version goodVersion = - randomFrom(allVersions().stream().filter(v -> v.compareMajor(Version.CURRENT) >= 0).collect(Collectors.toList())); + randomFrom(allOpenSearchVersions().stream().filter(v -> v.compareMajor(Version.CURRENT) >= 0).collect(Collectors.toList())); final DiscoveryNode goodNode = new DiscoveryNode("goodNode", buildNewFakeTransportAddress(), emptyMap(), new HashSet<>(randomSubsetOf(DiscoveryNodeRole.BUILT_IN_ROLES)), goodVersion); diff --git a/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java index f14c4d4afa13e..426f7302a808e 100644 --- a/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/opensearch/transport/TransportHandshakerTests.java @@ -180,8 +180,10 @@ public void testHandshakeTimeout() throws IOException { } private Version getMinCompatibilityVersionForHandshakeRequest() { - if(Version.CURRENT.onOrAfter(Version.V_1_0_0) && Version.CURRENT.major == 1) { + if (Version.CURRENT.onOrAfter(Version.V_1_0_0) && Version.CURRENT.major == 1) { return Version.fromId(6079999); + } else if (Version.CURRENT.onOrAfter(Version.V_2_0_0) && Version.CURRENT.major == 2) { + return Version.fromId(7099999); } return Version.CURRENT.minimumCompatibilityVersion(); } diff --git a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java index efeae6c6909c0..77cd6a560d62f 100644 --- a/test/framework/src/main/java/org/opensearch/test/VersionUtils.java +++ b/test/framework/src/main/java/org/opensearch/test/VersionUtils.java @@ -66,11 +66,19 @@ static Tuple, List> resolveReleasedVersions(Version curre .collect(Collectors.groupingBy(v -> (int)v.major)); // this breaks b/c 5.x is still in version list but master doesn't care about it! 
//assert majorVersions.size() == 2; + List> oldVersions = new ArrayList<>(0); + List> previousMajor = new ArrayList<>(0); + if (current.major == 2) { + // add legacy first + oldVersions.addAll(splitByMinor(majorVersions.getOrDefault(6, Collections.emptyList()))); + previousMajor.addAll(splitByMinor(majorVersions.getOrDefault(7, Collections.emptyList()))); + } // TODO: remove oldVersions, we should only ever have 2 majors in Version + // rebasing OpenSearch to 1.0.0 means the previous major version was Legacy 7.0.0 int previousMajorID = current.major == 1 ? 7 : current.major - 1; - List> oldVersions = splitByMinor(majorVersions.getOrDefault(previousMajorID - 1, Collections.emptyList())); - // rebasing OpenSearch to 1.0.0 means the previous major version was 7.0.0 - List> previousMajor = splitByMinor(majorVersions.get(previousMajorID)); + oldVersions.addAll(splitByMinor(majorVersions.getOrDefault(previousMajorID - 1, Collections.emptyList()))); + previousMajor.addAll(splitByMinor(majorVersions.getOrDefault(previousMajorID, Collections.emptyList()))); + List> currentMajor = splitByMinor(majorVersions.get((int)current.major)); List unreleasedVersions = new ArrayList<>(); @@ -80,7 +88,7 @@ static Tuple, List> resolveReleasedVersions(Version curre stableVersions = previousMajor; // remove current moveLastToUnreleased(currentMajor, unreleasedVersions); - } else if (current.major != 1) { + } else if (current.major != 1 && current.major != 2) { // on a stable or release branch, ie N.x stableVersions = currentMajor; // remove the next maintenance bugfix @@ -93,7 +101,7 @@ static Tuple, List> resolveReleasedVersions(Version curre stableVersions = currentMajor; } - // remove last minor unless the it's the first OpenSearch version. + // remove last minor unless it's the first OpenSearch version. // all Legacy ES versions are released, so we don't exclude any. if (current.equals(Version.V_1_0_0) == false) { List lastMinorLine = stableVersions.get(stableVersions.size() - 1); @@ -227,8 +235,7 @@ public static Version getPreviousVersion() { public static Version getPreviousMinorVersion() { for (int i = RELEASED_VERSIONS.size() - 1; i >= 0; i--) { Version v = RELEASED_VERSIONS.get(i); - if (v.minor < Version.CURRENT.minor - || (v.major != 1 && v.major < (Version.CURRENT.major != 1 ? 
Version.CURRENT.major : 8))) { + if (v.minor < Version.CURRENT.minor || v.major < Version.CURRENT.major) { return v; } } diff --git a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java index d55754e30fa0c..b3bf875a7e3cd 100644 --- a/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/opensearch/transport/AbstractSimpleTransportTestCase.java @@ -1979,7 +1979,7 @@ public void testTimeoutPerConnection() throws IOException { public void testHandshakeWithIncompatVersion() { assumeTrue("only tcp transport has a handshake method", serviceA.getOriginalTransport() instanceof TcpTransport); - Version version = Version.fromString("2.0.0"); + Version version = LegacyESVersion.fromString("6.0.0"); try (MockTransportService service = buildService("TS_C", version, Settings.EMPTY)) { service.start(); service.acceptIncomingRequests(); @@ -2013,8 +2013,9 @@ public void testHandshakeUpdatesVersion() throws IOException { TransportRequestOptions.Type.REG, TransportRequestOptions.Type.STATE); try (Transport.Connection connection = serviceA.openConnection(node, builder.build())) { - // OpenSearch 1.0+ in bwc mode should only "upgrade" to Legacy v7.10.2 - assertEquals(connection.getVersion(), version.onOrAfter(Version.V_1_0_0) ? LegacyESVersion.V_7_10_2 : version); + // OpenSearch [1.0:2.0) in bwc mode should only "upgrade" to Legacy v7.10.2 + assertEquals(connection.getVersion(), + version.onOrAfter(Version.V_1_0_0) && version.before(Version.V_2_0_0) ? LegacyESVersion.V_7_10_2 : version); } } } @@ -2050,9 +2051,7 @@ public void testTcpHandshake() { PlainActionFuture future = PlainActionFuture.newFuture(); serviceA.getOriginalTransport().openConnection(node, connectionProfile, future); try (Transport.Connection connection = future.actionGet()) { - // OpenSearch sends a handshake version spoofed as Legacy version 7_10_2 - // todo change for OpenSearch 2.0.0 - assertEquals(LegacyESVersion.V_7_10_2, connection.getVersion()); + assertEquals(Version.V_2_0_0, connection.getVersion()); } } } @@ -2371,7 +2370,7 @@ public String executor() { assertEquals(1, transportStats.getRxCount()); assertEquals(1, transportStats.getTxCount()); assertEquals(25, transportStats.getRxSize().getBytes()); - assertEquals(50, transportStats.getTxSize().getBytes()); + assertEquals(51, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest(connection, "internal:action", new TestRequest("hello world"), TransportRequestOptions.EMPTY, transportResponseHandler); @@ -2381,15 +2380,15 @@ public String executor() { assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); assertEquals(25, transportStats.getRxSize().getBytes()); - assertEquals(106, transportStats.getTxSize().getBytes()); + assertEquals(111, transportStats.getTxSize().getBytes()); }); sendResponseLatch.countDown(); responseLatch.await(); stats = serviceC.transport.getStats(); // response has been received assertEquals(2, stats.getRxCount()); assertEquals(2, stats.getTxCount()); - assertEquals(46, stats.getRxSize().getBytes()); - assertEquals(106, stats.getTxSize().getBytes()); + assertEquals(50, stats.getRxSize().getBytes()); + assertEquals(111, stats.getTxSize().getBytes()); } finally { serviceC.close(); } @@ -2486,7 +2485,7 @@ public String executor() { assertEquals(1, transportStats.getRxCount()); assertEquals(1, 
transportStats.getTxCount()); assertEquals(25, transportStats.getRxSize().getBytes()); - assertEquals(50, transportStats.getTxSize().getBytes()); + assertEquals(51, transportStats.getTxSize().getBytes()); }); serviceC.sendRequest(connection, "internal:action", new TestRequest("hello world"), TransportRequestOptions.EMPTY, transportResponseHandler); @@ -2496,7 +2495,7 @@ public String executor() { assertEquals(1, transportStats.getRxCount()); assertEquals(2, transportStats.getTxCount()); assertEquals(25, transportStats.getRxSize().getBytes()); - assertEquals(106, transportStats.getTxSize().getBytes()); + assertEquals(111, transportStats.getTxSize().getBytes()); }); sendResponseLatch.countDown(); responseLatch.await(); @@ -2508,10 +2507,10 @@ public String executor() { BytesStreamOutput streamOutput = new BytesStreamOutput(); exception.writeTo(streamOutput); String failedMessage = "Unexpected read bytes size. The transport exception that was received=" + exception; - // 49 bytes are the non-exception message bytes that have been received. It should include the initial + // 53 bytes are the non-exception message bytes that have been received. It should include the initial // handshake message and the header, version, etc bytes in the exception message. - assertEquals(failedMessage, 49 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); - assertEquals(106, stats.getTxSize().getBytes()); + assertEquals(failedMessage, 53 + streamOutput.bytes().length(), stats.getRxSize().getBytes()); + assertEquals(111, stats.getTxSize().getBytes()); } finally { serviceC.close(); } diff --git a/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java b/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java index bb7e3438243e6..ff4fa54769287 100644 --- a/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java +++ b/test/framework/src/main/java/org/opensearch/upgrades/AbstractFullClusterRestartTestCase.java @@ -34,45 +34,16 @@ import org.opensearch.LegacyESVersion; import org.opensearch.Version; -import org.opensearch.client.Request; import org.opensearch.common.Booleans; -import org.opensearch.common.Strings; -import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.support.XContentMapValues; import org.opensearch.test.rest.OpenSearchRestTestCase; -import org.junit.Before; -import java.io.IOException; import java.util.Map; -import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.equalTo; - public abstract class AbstractFullClusterRestartTestCase extends OpenSearchRestTestCase { private static final boolean runningAgainstOldCluster = Booleans.parseBoolean(System.getProperty("tests.is_old_cluster")); - @Before - public void init() throws IOException { - assertThat("we don't need this branch if we aren't compatible with 6.0", - Version.CURRENT.minimumIndexCompatibilityVersion().onOrBefore(LegacyESVersion.V_6_0_0), equalTo(true)); - if (isRunningAgainstOldCluster() && getOldClusterVersion().before(LegacyESVersion.V_7_0_0)) { - XContentBuilder template = jsonBuilder(); - template.startObject(); - { - template.field("index_patterns", "*"); - template.field("order", "0"); - template.startObject("settings"); - template.field("number_of_shards", 5); - template.endObject(); - } - template.endObject(); - Request createTemplate = new Request("PUT", "/_template/template"); - 
createTemplate.setJsonEntity(Strings.toString(template)); - client().performRequest(createTemplate); - } - } - public static boolean isRunningAgainstOldCluster() { return runningAgainstOldCluster; } diff --git a/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java b/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java index a1b0517584517..7999dd99bbc6c 100644 --- a/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java +++ b/test/framework/src/test/java/org/opensearch/test/VersionUtilsTests.java @@ -224,13 +224,13 @@ public void testResolveReleasedVersionsAtNewMajorRelease() { } public static class TestVersionBumpIn2x { - public static final Version V_1_6_0 = Version.fromString("1.6.0"); - public static final Version V_1_6_1 = Version.fromString("1.6.1"); - public static final Version V_1_6_2 = Version.fromString("1.6.2"); - public static final Version V_2_0_0 = Version.fromString("2.0.0"); - public static final Version V_2_0_1 = Version.fromString("2.0.1"); - public static final Version V_2_1_0 = Version.fromString("2.1.0"); - public static final Version CURRENT = V_2_1_0; + public static final Version V_2_6_0 = Version.fromString("2.6.0"); + public static final Version V_2_6_1 = Version.fromString("2.6.1"); + public static final Version V_2_6_2 = Version.fromString("2.6.2"); + public static final Version V_3_0_0 = Version.fromString("3.0.0"); + public static final Version V_3_0_1 = Version.fromString("3.0.1"); + public static final Version V_3_1_0 = Version.fromString("3.1.0"); + public static final Version CURRENT = V_3_1_0; } public void testResolveReleasedVersionsAtVersionBumpIn2x() { @@ -240,13 +240,13 @@ public void testResolveReleasedVersionsAtVersionBumpIn2x() { List unreleased = t.v2(); assertThat(released, equalTo(Arrays.asList( - TestVersionBumpIn2x.V_1_6_0, - TestVersionBumpIn2x.V_1_6_1, - TestVersionBumpIn2x.V_2_0_0))); + TestVersionBumpIn2x.V_2_6_0, + TestVersionBumpIn2x.V_2_6_1, + TestVersionBumpIn2x.V_3_0_0))); assertThat(unreleased, equalTo(Arrays.asList( - TestVersionBumpIn2x.V_1_6_2, - TestVersionBumpIn2x.V_2_0_1, - TestVersionBumpIn2x.V_2_1_0))); + TestVersionBumpIn2x.V_2_6_2, + TestVersionBumpIn2x.V_3_0_1, + TestVersionBumpIn2x.V_3_1_0))); } public static class TestNewMinorBranchIn6x { @@ -262,7 +262,7 @@ public static class TestNewMinorBranchIn6x { public static final Version CURRENT = V_2_2_0; } - public void testResolveReleasedVersionsAtNewMinorBranchIn6x() { + public void testResolveReleasedVersionsAtNewMinorBranchIn2x() { Tuple, List> t = VersionUtils.resolveReleasedVersions(TestNewMinorBranchIn6x.CURRENT, TestNewMinorBranchIn6x.class); List released = t.v1(); @@ -271,12 +271,12 @@ public void testResolveReleasedVersionsAtNewMinorBranchIn6x() { assertThat(released, equalTo(Arrays.asList( TestNewMinorBranchIn6x.V_1_6_0, TestNewMinorBranchIn6x.V_1_6_1, + TestNewMinorBranchIn6x.V_1_6_2, TestNewMinorBranchIn6x.V_2_0_0, TestNewMinorBranchIn6x.V_2_0_1, TestNewMinorBranchIn6x.V_2_1_0, TestNewMinorBranchIn6x.V_2_1_1))); assertThat(unreleased, equalTo(Arrays.asList( - TestNewMinorBranchIn6x.V_1_6_2, TestNewMinorBranchIn6x.V_2_1_2, TestNewMinorBranchIn6x.V_2_2_0))); } From f37f29c996d8dce0f63b3aea29dc4c45714c61ca Mon Sep 17 00:00:00 2001 From: Anas Alkouz Date: Thu, 2 Sep 2021 14:24:12 -0700 Subject: [PATCH 11/20] Add more instructions how to install/configure git secrets (#1202) Signed-off-by: Anas Alkouz Co-authored-by: Anas Alkouz --- DEVELOPER_GUIDE.md | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 
deletion(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index c628a9abae8a8..c81cdcec06bfe 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -282,14 +282,30 @@ of this is `junit`. ### git-secrets -Security is our top priority. Avoid checking in credentials, install [awslabs/git-secrets](https://github.com/awslabs/git-secrets). +Security is our top priority. Avoid checking in credentials. +#### Installation +Install [awslabs/git-secrets](https://github.com/awslabs/git-secrets) by running the following commands. ``` git clone https://github.com/awslabs/git-secrets.git cd git-secrets make install ``` +#### Configuration +You can configure git-secrets per repository. Change to the root of the repository and run the following command. +``` +git secrets --install +✓ Installed commit-msg hook to .git/hooks/commit-msg +✓ Installed pre-commit hook to .git/hooks/pre-commit +✓ Installed prepare-commit-msg hook to .git/hooks/prepare-commit-msg +``` +Then apply patterns for git-secrets. You can install the standard AWS patterns by running the following command. +``` +git secrets --register-aws +``` + + ## Submitting Changes See [CONTRIBUTING](CONTRIBUTING.md). From 83332c8ab6f8c134b09b9a3dc57ddf39a507859e Mon Sep 17 00:00:00 2001 From: Vacha Date: Thu, 2 Sep 2021 18:09:15 -0700 Subject: [PATCH 12/20] Kept the original constructor for PluginInfo to maintain bwc (#1206) --- .../org/opensearch/plugins/PluginInfo.java | 18 +++++++ .../opensearch/plugins/PluginInfoTests.java | 12 ++--- .../plugins/PluginsServiceTests.java | 54 +++++++++---------- 3 files changed, 51 insertions(+), 33 deletions(-) diff --git a/server/src/main/java/org/opensearch/plugins/PluginInfo.java b/server/src/main/java/org/opensearch/plugins/PluginInfo.java index 18082b9dbeb7f..a0edda4e801c9 100644 --- a/server/src/main/java/org/opensearch/plugins/PluginInfo.java +++ b/server/src/main/java/org/opensearch/plugins/PluginInfo.java @@ -99,6 +99,24 @@ public PluginInfo(String name, String description, String version, Version opens this.hasNativeController = hasNativeController; } + /** + * Construct plugin info. + * + * @param name the name of the plugin + * @param description a description of the plugin + * @param version an opaque version identifier for the plugin + * @param opensearchVersion the version of OpenSearch the plugin was built for + * @param javaVersion the version of Java the plugin was built with + * @param classname the entry point to the plugin + * @param extendedPlugins other plugins this plugin extends through SPI + * @param hasNativeController whether or not the plugin has a native controller + */ + public PluginInfo(String name, String description, String version, Version opensearchVersion, String javaVersion, + String classname, List extendedPlugins, boolean hasNativeController) { + this(name, description, version, opensearchVersion, javaVersion, classname, null /* customFolderName */, + extendedPlugins, hasNativeController); + } + /** * Construct plugin info from a stream.
* diff --git a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java index 3dda3dad3a82f..f46ea36073428 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginInfoTests.java @@ -237,7 +237,7 @@ public void testExtendedPluginsEmpty() throws Exception { public void testSerialize() throws Exception { PluginInfo info = new PluginInfo("c", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - "c", Collections.singletonList("foo"), randomBoolean()); + "c", Collections.singletonList("foo"), randomBoolean()); BytesStreamOutput output = new BytesStreamOutput(); info.writeTo(output); ByteBuffer buffer = ByteBuffer.wrap(output.bytes().toBytesRef().bytes); @@ -250,15 +250,15 @@ public void testSerialize() throws Exception { public void testPluginListSorted() { List plugins = new ArrayList<>(); plugins.add(new PluginInfo("c", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - null, Collections.emptyList(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("b", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - null, Collections.emptyList(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo( "e", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - null, Collections.emptyList(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("a", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - null, Collections.emptyList(), randomBoolean())); + Collections.emptyList(), randomBoolean())); plugins.add(new PluginInfo("d", "foo", "dummy", Version.CURRENT, "1.8", "dummyclass", - null, Collections.emptyList(), randomBoolean())); + Collections.emptyList(), randomBoolean())); PluginsAndModules pluginsInfo = new PluginsAndModules(plugins, Collections.emptyList()); final List infos = pluginsInfo.getPluginInfos(); diff --git a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java index eef8e50a20836..66f273ac65f8d 100644 --- a/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/opensearch/plugins/PluginsServiceTests.java @@ -323,7 +323,7 @@ public OneParameterIncorrectType(Object object) { public void testSortBundlesCycleSelfReference() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("foo"), false); + "MyPlugin", Collections.singletonList("foo"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.sortBundles(Collections.singleton(bundle)) @@ -335,16 +335,16 @@ public void testSortBundlesCycle() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order, so we get know the beginning of the cycle PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Arrays.asList("bar", "other"), false); + "MyPlugin", Arrays.asList("bar", "other"), false); bundles.add(new PluginsService.Bundle(info, pluginDir)); PluginInfo info2 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("baz"), false); + "MyPlugin", 
Collections.singletonList("baz"), false); bundles.add(new PluginsService.Bundle(info2, pluginDir)); PluginInfo info3 = new PluginInfo("baz", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("foo"), false); + "MyPlugin", Collections.singletonList("foo"), false); bundles.add(new PluginsService.Bundle(info3, pluginDir)); PluginInfo info4 = new PluginInfo("other", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); bundles.add(new PluginsService.Bundle(info4, pluginDir)); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.sortBundles(bundles)); @@ -354,7 +354,7 @@ public void testSortBundlesCycle() throws Exception { public void testSortBundlesSingle() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); List sortedBundles = PluginsService.sortBundles(Collections.singleton(bundle)); assertThat(sortedBundles, Matchers.contains(bundle)); @@ -364,15 +364,15 @@ public void testSortBundlesNoDeps() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); PluginInfo info3 = new PluginInfo("baz", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle3 = new PluginsService.Bundle(info3, pluginDir); bundles.add(bundle3); List sortedBundles = PluginsService.sortBundles(bundles); @@ -382,7 +382,7 @@ public void testSortBundlesNoDeps() throws Exception { public void testSortBundlesMissingDep() throws Exception { Path pluginDir = createTempDir(); PluginInfo info = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", "", Collections.singletonList("dne"), false); + "MyPlugin", Collections.singletonList("dne"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info, pluginDir); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.sortBundles(Collections.singleton(bundle)) @@ -394,19 +394,19 @@ public void testSortBundlesCommonDep() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("grandparent", "desc", "1.0",Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new PluginInfo("foo", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("common"), false); + "MyPlugin", 
Collections.singletonList("common"), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); PluginInfo info3 = new PluginInfo("bar", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("common"), false); + "MyPlugin", Collections.singletonList("common"), false); PluginsService.Bundle bundle3 = new PluginsService.Bundle(info3, pluginDir); bundles.add(bundle3); PluginInfo info4 = new PluginInfo("common", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("grandparent"), false); + "MyPlugin", Collections.singletonList("grandparent"), false); PluginsService.Bundle bundle4 = new PluginsService.Bundle(info4, pluginDir); bundles.add(bundle4); List sortedBundles = PluginsService.sortBundles(bundles); @@ -417,11 +417,11 @@ public void testSortBundlesAlreadyOrdered() throws Exception { Path pluginDir = createTempDir(); Set bundles = new LinkedHashSet<>(); // control iteration order PluginInfo info1 = new PluginInfo("dep", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle1 = new PluginsService.Bundle(info1, pluginDir); bundles.add(bundle1); PluginInfo info2 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("dep"), false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle2 = new PluginsService.Bundle(info2, pluginDir); bundles.add(bundle2); List sortedBundles = PluginsService.sortBundles(bundles); @@ -480,7 +480,7 @@ public void testJarHellDuplicateCodebaseWithDep() throws Exception { Map> transitiveDeps = new HashMap<>(); transitiveDeps.put("dep", Collections.singleton(dupJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("dep"), false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps)); @@ -499,7 +499,7 @@ public void testJarHellDuplicateCodebaseAcrossDeps() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dupJar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dupJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Arrays.asList("dep1", "dep2"), false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps)); @@ -516,7 +516,7 @@ public void testJarHellDuplicateClassWithCore() throws Exception { Path pluginJar = pluginDir.resolve("plugin.jar"); makeJar(pluginJar, Level.class); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.emptyList(), false); + "MyPlugin", Collections.emptyList(), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, new HashMap<>())); @@ 
-535,7 +535,7 @@ public void testJarHellDuplicateClassWithDep() throws Exception { Map> transitiveDeps = new HashMap<>(); transitiveDeps.put("dep", Collections.singleton(depJar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Collections.singletonList("dep"), false); + "MyPlugin", Collections.singletonList("dep"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps)); @@ -558,7 +558,7 @@ public void testJarHellDuplicateClassAcrossDeps() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dep1Jar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dep2Jar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Arrays.asList("dep1", "dep2"), false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps)); @@ -581,7 +581,7 @@ public void testJarHellTransitiveMap() throws Exception { transitiveDeps.put("dep1", Collections.singleton(dep1Jar.toUri().toURL())); transitiveDeps.put("dep2", Collections.singleton(dep2Jar.toUri().toURL())); PluginInfo info1 = new PluginInfo("myplugin", "desc", "1.0", Version.CURRENT, "1.8", - "MyPlugin", null, Arrays.asList("dep1", "dep2"), false); + "MyPlugin", Arrays.asList("dep1", "dep2"), false); PluginsService.Bundle bundle = new PluginsService.Bundle(info1, pluginDir); PluginsService.checkBundleJarHell(JarHell.parseClassPath(), bundle, transitiveDeps); Set deps = transitiveDeps.get("myplugin"); @@ -630,14 +630,14 @@ public void testNonExtensibleDep() throws Exception { public void testIncompatibleOpenSearchVersion() throws Exception { PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", LegacyESVersion.V_6_0_0, - "1.8", "FakePlugin", null, Collections.emptyList(), false); + "1.8", "FakePlugin", Collections.emptyList(), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.verifyCompatibility(info)); assertThat(e.getMessage(), containsString("was built for OpenSearch version 6.0.0")); } public void testIncompatibleJavaVersion() throws Exception { PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.CURRENT, - "1000000.0", "FakePlugin", null, Collections.emptyList(), false); + "1000000.0", "FakePlugin", Collections.emptyList(), false); IllegalStateException e = expectThrows(IllegalStateException.class, () -> PluginsService.verifyCompatibility(info)); assertThat(e.getMessage(), containsString("my_plugin requires Java")); } @@ -761,7 +761,7 @@ public void testPluginLoadFailure() throws IOException { public void testExtensiblePlugin() { TestExtensiblePlugin extensiblePlugin = new TestExtensiblePlugin(); PluginsService.loadExtensions(Collections.singletonList( - Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin) + Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin) )); assertThat(extensiblePlugin.extensions, notNullValue()); @@ -770,8 +770,8 @@ public void 
testExtensiblePlugin() { extensiblePlugin = new TestExtensiblePlugin(); TestPlugin testPlugin = new TestPlugin(); PluginsService.loadExtensions(Arrays.asList( - Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin), - Tuple.tuple(new PluginInfo("test", null, null, null, null, null, null, Collections.singletonList("extensible"), false), + Tuple.tuple(new PluginInfo("extensible", null, null, null, null, null, Collections.emptyList(), false), extensiblePlugin), + Tuple.tuple(new PluginInfo("test", null, null, null, null, null, Collections.singletonList("extensible"), false), testPlugin) )); From 268d93b3753dfa6c5aac380457530b62a3f37086 Mon Sep 17 00:00:00 2001 From: Sven R Date: Wed, 8 Sep 2021 00:17:58 +0200 Subject: [PATCH 13/20] FreeBSD Java support (#1014) Signed-off-by: hackacad --- distribution/src/bin/opensearch-env | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/distribution/src/bin/opensearch-env b/distribution/src/bin/opensearch-env index 8817cd1823414..99bded2ad0e52 100644 --- a/distribution/src/bin/opensearch-env +++ b/distribution/src/bin/opensearch-env @@ -6,6 +6,15 @@ CDPATH="" SCRIPT="$0" +UNAME=$(uname -s) +if [ $UNAME = "FreeBSD" ]; then + OS="freebsd" +elif [ $UNAME = "Darwin" ]; then + OS="darwin" +else + OS="other" +fi + # SCRIPT might be an arbitrarily deep series of symbolic links; loop until we # have the concrete path while [ -h "$SCRIPT" ] ; do @@ -40,9 +49,12 @@ if [ ! -z "$JAVA_HOME" ]; then JAVA="$JAVA_HOME/bin/java" JAVA_TYPE="JAVA_HOME" else - if [ "$(uname -s)" = "Darwin" ]; then - # macOS has a different structure + if [ $OS = "darwin" ]; then + # macOS bundled Java JAVA="$OPENSEARCH_HOME/jdk.app/Contents/Home/bin/java" + elif [ $OS = "freebsd" ]; then + # using FreeBSD default java from ports if JAVA_HOME is not set + JAVA="/usr/local/bin/java" else JAVA="$OPENSEARCH_HOME/jdk/bin/java" fi From 919c5e05aa6d0938d082791f755aa0c746ff6d0a Mon Sep 17 00:00:00 2001 From: Sai Date: Wed, 8 Sep 2021 13:39:53 +0530 Subject: [PATCH 14/20] Exclude failing links from plugins/modules (#1223) Signed-off-by: Sai Kumar --- .lycheeexclude | 1 + 1 file changed, 1 insertion(+) diff --git a/.lycheeexclude b/.lycheeexclude index 830cd6ee72403..679f2c37c81f6 100644 --- a/.lycheeexclude +++ b/.lycheeexclude @@ -10,3 +10,4 @@ http://viewvc.jboss.org/cgi-bin/viewvc.cgi/jbosscache/experimental/jsr166/ http://www.eclipse.org/jetty/downloads.php http://www.ecma-international.org/publications/files/ECMA-ST/Ecma%20PATENT/Patent%20statements%20ok/ECMA-376%20* http://www.unicode.org/Public/PROGRAMS/CVTUTF +http://sgjp.pl/morfeusz/ From a9e1913bd5c9ab4906e9cac2c189ed5f797a2a53 Mon Sep 17 00:00:00 2001 From: Sai Date: Wed, 8 Sep 2021 16:23:00 +0530 Subject: [PATCH 15/20] Support for translog pruning based on retention leases (#1038) * Support for translog pruning based on retention leases Signed-off-by: Sai Kumar * Addressed CR Comments Signed-off-by: Sai Kumar * Addressed test case issue Signed-off-by: Sai Kumar --- .../common/settings/IndexScopedSettings.java | 1 + .../org/opensearch/index/IndexSettings.java | 46 +++++++++- .../org/opensearch/index/engine/Engine.java | 5 ++ .../index/engine/InternalEngine.java | 7 +- .../opensearch/index/shard/IndexShard.java | 6 +- .../translog/TranslogDeletionPolicy.java | 45 +++++++++- .../opensearch/index/IndexSettingsTests.java | 31 +++++++ .../translog/TranslogDeletionPolicyTests.java | 90 ++++++++++++++++++- 8 files changed, 220 insertions(+), 
11 deletions(-) diff --git a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java index 9e73d6fdd9c4f..559d0fc62935b 100644 --- a/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/IndexScopedSettings.java @@ -156,6 +156,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { EnableAllocationDecider.INDEX_ROUTING_REBALANCE_ENABLE_SETTING, EnableAllocationDecider.INDEX_ROUTING_ALLOCATION_ENABLE_SETTING, IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING, + IndexSettings.INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING, IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING, IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING, IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING, diff --git a/server/src/main/java/org/opensearch/index/IndexSettings.java b/server/src/main/java/org/opensearch/index/IndexSettings.java index 10aaaf821cc15..f51acea5185be 100644 --- a/server/src/main/java/org/opensearch/index/IndexSettings.java +++ b/server/src/main/java/org/opensearch/index/IndexSettings.java @@ -260,6 +260,13 @@ public final class IndexSettings { settings -> Boolean.toString(IndexMetadata.SETTING_INDEX_VERSION_CREATED.get(settings).onOrAfter(LegacyESVersion.V_7_0_0)), Property.IndexScope, Property.Final); + /** + * Specifies if the index translog should prune based on retention leases. + */ + public static final Setting INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING = + Setting.boolSetting("index.translog.retention_lease.pruning.enabled", false, + Property.IndexScope, Property.Dynamic); + /** * Controls how many soft-deleted documents will be kept around before being merged away. Keeping more deleted * documents increases the chance of operation-based recoveries and allows querying a longer history of documents. @@ -286,9 +293,11 @@ public final class IndexSettings { * the chance of ops based recoveries for indices with soft-deletes disabled. * This setting will be ignored if soft-deletes is used in peer recoveries (default in 7.4). **/ + private static final ByteSizeValue DEFAULT_TRANSLOG_RETENTION_SIZE = new ByteSizeValue(512, ByteSizeUnit.MB); + public static final Setting INDEX_TRANSLOG_RETENTION_SIZE_SETTING = Setting.byteSizeSetting("index.translog.retention.size", - settings -> shouldDisableTranslogRetention(settings) ? 
"-1" : "512MB", + settings -> DEFAULT_TRANSLOG_RETENTION_SIZE.getStringRep(), Property.Dynamic, Property.IndexScope); /** @@ -389,6 +398,7 @@ public final class IndexSettings { private final IndexScopedSettings scopedSettings; private long gcDeletesInMillis = DEFAULT_GC_DELETES.millis(); private final boolean softDeleteEnabled; + private volatile boolean translogPruningByRetentionLease; private volatile long softDeleteRetentionOperations; private volatile long retentionLeaseMillis; @@ -525,6 +535,9 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti mergeSchedulerConfig = new MergeSchedulerConfig(this); gcDeletesInMillis = scopedSettings.get(INDEX_GC_DELETES_SETTING).getMillis(); softDeleteEnabled = version.onOrAfter(LegacyESVersion.V_6_5_0) && scopedSettings.get(INDEX_SOFT_DELETES_SETTING); + translogPruningByRetentionLease = version.onOrAfter(Version.V_1_1_0) && + softDeleteEnabled && + scopedSettings.get(INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING); softDeleteRetentionOperations = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING); retentionLeaseMillis = scopedSettings.get(INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING).millis(); warmerEnabled = scopedSettings.get(INDEX_WARMER_ENABLED_SETTING); @@ -593,6 +606,8 @@ public IndexSettings(final IndexMetadata indexMetadata, final Settings nodeSetti this::setGenerationThresholdSize); scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_RETENTION_AGE_SETTING, this::setTranslogRetentionAge); scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_RETENTION_SIZE_SETTING, this::setTranslogRetentionSize); + scopedSettings.addSettingsUpdateConsumer(INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING, + this::setTranslogPruningByRetentionLease); scopedSettings.addSettingsUpdateConsumer(INDEX_REFRESH_INTERVAL_SETTING, this::setRefreshInterval); scopedSettings.addSettingsUpdateConsumer(MAX_REFRESH_LISTENERS_PER_SHARD, this::setMaxRefreshListeners); scopedSettings.addSettingsUpdateConsumer(MAX_ANALYZED_OFFSET_SETTING, this::setHighlightMaxAnalyzedOffset); @@ -623,8 +638,14 @@ private void setFlushAfterMergeThresholdSize(ByteSizeValue byteSizeValue) { this.flushAfterMergeThresholdSize = byteSizeValue; } + private void setTranslogPruningByRetentionLease(boolean enabled) { + this.translogPruningByRetentionLease = INDEX_SOFT_DELETES_SETTING.get(settings) && enabled; + } + private void setTranslogRetentionSize(ByteSizeValue byteSizeValue) { - if (shouldDisableTranslogRetention(settings) && byteSizeValue.getBytes() >= 0) { + if (shouldDisableTranslogRetention(settings) && + !shouldPruneTranslogByRetentionLease(settings) && + byteSizeValue.getBytes() >= 0) { // ignore the translog retention settings if soft-deletes enabled this.translogRetentionSize = new ByteSizeValue(-1); } else { @@ -826,7 +847,12 @@ public TimeValue getRefreshInterval() { * Returns the transaction log retention size which controls how much of the translog is kept around to allow for ops based recoveries */ public ByteSizeValue getTranslogRetentionSize() { - assert shouldDisableTranslogRetention(settings) == false || translogRetentionSize.getBytes() == -1L : translogRetentionSize; + if(shouldDisableTranslogRetention(settings) && !shouldPruneTranslogByRetentionLease(settings)) { + return new ByteSizeValue(-1); + } + else if(shouldPruneTranslogByRetentionLease(settings) && translogRetentionSize.getBytes() == -1) { + return DEFAULT_TRANSLOG_RETENTION_SIZE; + } return translogRetentionSize; } @@ -1071,6 +1097,20 @@ public 
void setRequiredPipeline(final String requiredPipeline) { this.requiredPipeline = requiredPipeline; } + /** + * Returns true if translog ops should be pruned based on retention lease + */ + public boolean shouldPruneTranslogByRetentionLease() { + return translogPruningByRetentionLease; + } + + /** + * Returns true if translog ops should be pruned based on retention lease + */ + public static boolean shouldPruneTranslogByRetentionLease(Settings settings) { + return INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING.get(settings); + } + /** * Returns true if soft-delete is enabled. */ diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index c4be05b779d42..a6322b5cca008 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -1849,6 +1849,11 @@ public IndexCommit getIndexCommit() { } public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, long softDeletesRetentionOps) { + onSettingsChanged(translogRetentionAge, translogRetentionSize, softDeletesRetentionOps, false); + } + + public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, + long softDeletesRetentionOps, boolean translogPruningByRetentionLease) { } diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 4a6b8e21715ab..50ad79ae27c87 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -227,7 +227,8 @@ public InternalEngine(EngineConfig engineConfig) { final TranslogDeletionPolicy translogDeletionPolicy = new TranslogDeletionPolicy( engineConfig.getIndexSettings().getTranslogRetentionSize().getBytes(), engineConfig.getIndexSettings().getTranslogRetentionAge().getMillis(), - engineConfig.getIndexSettings().getTranslogRetentionTotalFiles() + engineConfig.getIndexSettings().getTranslogRetentionTotalFiles(), + engineConfig.retentionLeasesSupplier() ); store.incRef(); IndexWriter writer = null; @@ -2572,7 +2573,8 @@ final void ensureCanFlush() { } @Override - public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, long softDeletesRetentionOps) { + public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue translogRetentionSize, + long softDeletesRetentionOps, boolean translogPruningByRetentionLease) { mergeScheduler.refreshConfig(); // config().isEnableGcDeletes() or config.getGcDeletesInMillis() may have changed: maybePruneDeletes(); @@ -2585,6 +2587,7 @@ public void onSettingsChanged(TimeValue translogRetentionAge, ByteSizeValue tran final TranslogDeletionPolicy translogDeletionPolicy = translog.getDeletionPolicy(); translogDeletionPolicy.setRetentionAgeInMillis(translogRetentionAge.millis()); translogDeletionPolicy.setRetentionSizeInBytes(translogRetentionSize.getBytes()); + translogDeletionPolicy.shouldPruneTranslogByRetentionLease(translogPruningByRetentionLease); softDeletesPolicy.setRetentionOperations(softDeletesRetentionOps); } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 75cf919185c13..0b3311f872c63 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ 
b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -1961,8 +1961,10 @@ public void onSettingsChanged() { final boolean disableTranslogRetention = indexSettings.isSoftDeleteEnabled() && useRetentionLeasesInPeerRecovery; engineOrNull.onSettingsChanged( disableTranslogRetention ? TimeValue.MINUS_ONE : indexSettings.getTranslogRetentionAge(), - disableTranslogRetention ? new ByteSizeValue(-1) : indexSettings.getTranslogRetentionSize(), - indexSettings.getSoftDeleteRetentionOperations() + disableTranslogRetention && !indexSettings.shouldPruneTranslogByRetentionLease() ? + new ByteSizeValue(-1) : indexSettings.getTranslogRetentionSize(), + indexSettings.getSoftDeleteRetentionOperations(), + indexSettings.shouldPruneTranslogByRetentionLease() ); } } diff --git a/server/src/main/java/org/opensearch/index/translog/TranslogDeletionPolicy.java b/server/src/main/java/org/opensearch/index/translog/TranslogDeletionPolicy.java index 42f3893fd98ad..02d12f69f2201 100644 --- a/server/src/main/java/org/opensearch/index/translog/TranslogDeletionPolicy.java +++ b/server/src/main/java/org/opensearch/index/translog/TranslogDeletionPolicy.java @@ -35,6 +35,8 @@ import org.apache.lucene.util.Counter; import org.opensearch.Assertions; import org.opensearch.common.lease.Releasable; +import org.opensearch.index.seqno.RetentionLease; +import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.seqno.SequenceNumbers; import java.io.IOException; @@ -43,10 +45,12 @@ import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.Supplier; public class TranslogDeletionPolicy { private final Map openTranslogRef; + private Supplier retentionLeasesSupplier; public void assertNoOpenTranslogRefs() { if (openTranslogRef.isEmpty() == false) { @@ -69,6 +73,8 @@ public void assertNoOpenTranslogRefs() { private int retentionTotalFiles; + private boolean shouldPruneTranslogByRetentionLease; + public TranslogDeletionPolicy(long retentionSizeInBytes, long retentionAgeInMillis, int retentionTotalFiles) { this.retentionSizeInBytes = retentionSizeInBytes; this.retentionAgeInMillis = retentionAgeInMillis; @@ -80,6 +86,12 @@ public TranslogDeletionPolicy(long retentionSizeInBytes, long retentionAgeInMill } } + public TranslogDeletionPolicy(long retentionSizeInBytes, long retentionAgeInMillis, int retentionTotalFiles, + Supplier retentionLeasesSupplier) { + this(retentionSizeInBytes, retentionAgeInMillis, retentionTotalFiles); + this.retentionLeasesSupplier = retentionLeasesSupplier; + } + public synchronized void setLocalCheckpointOfSafeCommit(long newCheckpoint) { if (newCheckpoint < this.localCheckpointOfSafeCommit) { throw new IllegalArgumentException("local checkpoint of the safe commit can't go backwards: " + @@ -100,6 +112,10 @@ synchronized void setRetentionTotalFiles(int retentionTotalFiles) { this.retentionTotalFiles = retentionTotalFiles; } + public synchronized void shouldPruneTranslogByRetentionLease(boolean translogPruneByRetentionLease) { + this.shouldPruneTranslogByRetentionLease = translogPruneByRetentionLease; + } + /** * acquires the basis generation for a new snapshot. Any translog generation above, and including, the returned generation * will not be deleted until the returned {@link Releasable} is closed. 
@@ -157,6 +173,12 @@ synchronized long minTranslogGenRequired(List readers, TranslogW long minByLocks = getMinTranslogGenRequiredByLocks(); long minByAge = getMinTranslogGenByAge(readers, writer, retentionAgeInMillis, currentTime()); long minBySize = getMinTranslogGenBySize(readers, writer, retentionSizeInBytes); + long minByRetentionLeasesAndSize = Long.MAX_VALUE; + if(shouldPruneTranslogByRetentionLease) { + // If retention size is specified, size takes precedence. + long minByRetentionLeases = getMinTranslogGenByRetentionLease(readers, writer, retentionLeasesSupplier); + minByRetentionLeasesAndSize = Math.max(minBySize, minByRetentionLeases); + } final long minByAgeAndSize; if (minBySize == Long.MIN_VALUE && minByAge == Long.MIN_VALUE) { // both size and age are disabled; @@ -165,7 +187,28 @@ synchronized long minTranslogGenRequired(List readers, TranslogW minByAgeAndSize = Math.max(minByAge, minBySize); } long minByNumFiles = getMinTranslogGenByTotalFiles(readers, writer, retentionTotalFiles); - return Math.min(Math.max(minByAgeAndSize, minByNumFiles), minByLocks); + long minByTranslogGenSettings = Math.min(Math.max(minByAgeAndSize, minByNumFiles), minByLocks); + return Math.min(minByTranslogGenSettings, minByRetentionLeasesAndSize); + } + + static long getMinTranslogGenByRetentionLease(List readers, TranslogWriter writer, + Supplier retentionLeasesSupplier) { + long minGen = writer.getGeneration(); + final long minimumRetainingSequenceNumber = retentionLeasesSupplier.get() + .leases() + .stream() + .mapToLong(RetentionLease::retainingSequenceNumber) + .min() + .orElse(Long.MAX_VALUE); + + for (int i = readers.size() - 1; i >= 0; i--) { + final TranslogReader reader = readers.get(i); + if(reader.getCheckpoint().minSeqNo <= minimumRetainingSequenceNumber && + reader.getCheckpoint().maxSeqNo >= minimumRetainingSequenceNumber) { + minGen = Math.min(minGen, reader.getGeneration()); + } + } + return minGen; } static long getMinTranslogGenBySize(List readers, TranslogWriter writer, long retentionSizeInBytes) { diff --git a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java index d2c285c06a5dc..97d85300bb3c2 100644 --- a/server/src/test/java/org/opensearch/index/IndexSettingsTests.java +++ b/server/src/test/java/org/opensearch/index/IndexSettingsTests.java @@ -655,4 +655,35 @@ public void testUpdateTranslogRetentionSettingsWithSoftDeletesDisabled() { assertThat(indexSettings.getTranslogRetentionAge(), equalTo(ageSetting)); assertThat(indexSettings.getTranslogRetentionSize(), equalTo(sizeSetting)); } + + public void testTranslogPruningSettingsWithSoftDeletesEnabled() { + Settings.Builder settings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.V_1_1_0); + + ByteSizeValue retentionSize = new ByteSizeValue(512, ByteSizeUnit.MB); + boolean translogPruningEnabled = randomBoolean(); + settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING.getKey(), translogPruningEnabled); + IndexMetadata metadata = newIndexMeta("index", settings.build()); + IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY); + if(translogPruningEnabled) { + assertTrue(indexSettings.shouldPruneTranslogByRetentionLease()); + assertThat(indexSettings.getTranslogRetentionSize().getBytes(), equalTo(retentionSize.getBytes())); + } else { + assertFalse(indexSettings.shouldPruneTranslogByRetentionLease()); + assertThat(indexSettings.getTranslogRetentionSize().getBytes(), 
equalTo(-1L)); + } + } + + public void testTranslogPruningSettingsWithSoftDeletesDisabled() { + Settings.Builder settings = Settings.builder() + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false) + .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT); + boolean translogPruningEnabled = randomBoolean(); + ByteSizeValue retentionSize = new ByteSizeValue(512, ByteSizeUnit.MB); + settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_LEASE_PRUNING_ENABLED_SETTING.getKey(), translogPruningEnabled); + IndexMetadata metadata = newIndexMeta("index", settings.build()); + IndexSettings indexSettings = new IndexSettings(metadata, Settings.EMPTY); + assertFalse(indexSettings.shouldPruneTranslogByRetentionLease()); + assertThat(indexSettings.getTranslogRetentionSize().getBytes(), equalTo(retentionSize.getBytes())); + } } diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogDeletionPolicyTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogDeletionPolicyTests.java index 44aceab0445ad..75bae17a2fe6a 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogDeletionPolicyTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogDeletionPolicyTests.java @@ -40,6 +40,8 @@ import org.opensearch.common.lease.Releasable; import org.opensearch.common.util.BigArrays; import org.opensearch.core.internal.io.IOUtils; +import org.opensearch.index.seqno.RetentionLease; +import org.opensearch.index.seqno.RetentionLeases; import org.opensearch.index.shard.ShardId; import org.opensearch.test.OpenSearchTestCase; import org.mockito.Mockito; @@ -49,7 +51,9 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; +import java.util.LinkedList; import java.util.List; +import java.util.function.Supplier; import static java.lang.Math.min; import static org.hamcrest.Matchers.equalTo; @@ -57,6 +61,8 @@ public class TranslogDeletionPolicyTests extends OpenSearchTestCase { + private static long TOTAL_OPS_IN_GEN = 10L; + public void testNoRetention() throws IOException { long now = System.currentTimeMillis(); Tuple, TranslogWriter> readersAndWriter = createReadersAndWriter(now); @@ -90,6 +96,33 @@ public void testBytesRetention() throws IOException { } } + public void testWithRetentionLease() throws IOException { + long now = System.currentTimeMillis(); + Tuple, TranslogWriter> readersAndWriter = createReadersAndWriter(now); + List allGens = new ArrayList<>(readersAndWriter.v1()); + allGens.add(readersAndWriter.v2()); + Supplier retentionLeasesSupplier = createRetentionLeases(now, 0L, + readersAndWriter.v1().size() * TOTAL_OPS_IN_GEN - 1); + try { + final long minimumRetainingSequenceNumber = retentionLeasesSupplier.get() + .leases() + .stream() + .mapToLong(RetentionLease::retainingSequenceNumber) + .min() + .orElse(Long.MAX_VALUE); + + final long selectedReader = (minimumRetainingSequenceNumber/TOTAL_OPS_IN_GEN); + final long selectedGen = allGens.get((int) selectedReader).generation; + assertThat(TranslogDeletionPolicy + .getMinTranslogGenByRetentionLease(readersAndWriter.v1(), readersAndWriter.v2(), retentionLeasesSupplier), + equalTo(selectedGen)); + + } finally { + IOUtils.close(readersAndWriter.v1()); + IOUtils.close(readersAndWriter.v2()); + } + } + public void testAgeRetention() throws IOException { long now = System.currentTimeMillis(); Tuple, TranslogWriter> readersAndWriter = createReadersAndWriter(now); @@ -128,6 +161,38 @@ public void testTotalFilesRetention() throws Exception { } } + public void 
testBySizeAndRetentionLease() throws Exception { + long now = System.currentTimeMillis(); + Tuple, TranslogWriter> readersAndWriter = createReadersAndWriter(now); + List allGens = new ArrayList<>(readersAndWriter.v1()); + allGens.add(readersAndWriter.v2()); + try { + int selectedReader = randomIntBetween(0, allGens.size() - 1); + final long selectedGeneration = allGens.get(selectedReader).generation; + // Retaining seqno is part of lower gen + long size = allGens.stream().skip(selectedReader).map(BaseTranslogReader::sizeInBytes).reduce(Long::sum).get(); + Supplier retentionLeasesSupplier = createRetentionLeases(now, 0L, + selectedGeneration * TOTAL_OPS_IN_GEN - 1); + TranslogDeletionPolicy deletionPolicy = new MockDeletionPolicy(now, size, Integer.MAX_VALUE, + Integer.MAX_VALUE, retentionLeasesSupplier); + assertThat(deletionPolicy + .minTranslogGenRequired(readersAndWriter.v1(), readersAndWriter.v2()), equalTo(selectedGeneration)); + assertThat(TranslogDeletionPolicy + .getMinTranslogGenByAge(readersAndWriter.v1(), readersAndWriter.v2(), 100L, System.currentTimeMillis()), + equalTo(readersAndWriter.v2().generation)); + + // Retention lease is part of higher gen + retentionLeasesSupplier = createRetentionLeases(now, selectedGeneration * TOTAL_OPS_IN_GEN, + allGens.size() * TOTAL_OPS_IN_GEN + TOTAL_OPS_IN_GEN - 1); + deletionPolicy = new MockDeletionPolicy(now, size, Long.MIN_VALUE, + Integer.MAX_VALUE, retentionLeasesSupplier); + assertThat(deletionPolicy.minTranslogGenRequired(readersAndWriter.v1(), readersAndWriter.v2()), equalTo(selectedGeneration)); + } finally { + IOUtils.close(readersAndWriter.v1()); + IOUtils.close(readersAndWriter.v2()); + } + } + /** * Tests that age trumps size but recovery trumps both. */ @@ -207,19 +272,32 @@ private Tuple, TranslogWriter> createReadersAndWriter(final () -> 1L, randomNonNegativeLong(), new TragicExceptionHolder(), seqNo -> {}, BigArrays.NON_RECYCLING_INSTANCE); writer = Mockito.spy(writer); Mockito.doReturn(now - (numberOfReaders - gen + 1) * 1000).when(writer).getLastModifiedTime(); - byte[] bytes = new byte[4]; ByteArrayDataOutput out = new ByteArrayDataOutput(bytes); - for (int ops = randomIntBetween(0, 20); ops > 0; ops--) { + final long startSeqNo = (gen - 1) * TOTAL_OPS_IN_GEN; + final long endSeqNo = startSeqNo + TOTAL_OPS_IN_GEN - 1; + for (long ops = endSeqNo; ops >= startSeqNo; ops--) { out.reset(bytes); - out.writeInt(ops); + out.writeInt((int) ops); writer.add(ReleasableBytesReference.wrap(new BytesArray(bytes)), ops); } } return new Tuple<>(readers, writer); } + private Supplier createRetentionLeases(final Long now, final Long lowestSeqNo, + final Long highestSeqNo) throws IOException { + LinkedList leases = new LinkedList<>(); + final int numberOfLeases = randomIntBetween(1, 5); + for(int i=0 ;i new RetentionLeases(1L, 1L, leases); + } + private static class MockDeletionPolicy extends TranslogDeletionPolicy { long now; @@ -229,6 +307,12 @@ private static class MockDeletionPolicy extends TranslogDeletionPolicy { this.now = now; } + MockDeletionPolicy(long now, long retentionSizeInBytes, long maxRetentionAgeInMillis, + int maxRetentionTotalFiles, Supplier retentionLeasesSupplier) { + super(retentionSizeInBytes, maxRetentionAgeInMillis, maxRetentionTotalFiles, retentionLeasesSupplier); + this.now = now; + } + @Override protected long currentTime() { return now; From ddb37dea329b72d3b3c6653eb9fc4e183d4e62d7 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 8 Sep 2021 09:05:02 -0400 Subject: [PATCH 16/20] Added Eclipse import 
instructions to DEVELOPER_GUIDE.md (#1215) Signed-off-by: Andriy Redko --- DEVELOPER_GUIDE.md | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index c81cdcec06bfe..7b83c1b5d94ea 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -147,7 +147,18 @@ Follow links in the [Java Tutorial](https://code.visualstudio.com/docs/java/java ### Eclipse -We would like to support Eclipse, but few of us use it and has fallen into disrepair. Please [contribute](CONTRIBUTING.md). +When importing into Eclipse, you need to have the [Eclipse Buildship](https://projects.eclipse.org/projects/tools.buildship) plugin installed and, preferably, JDK 11 set as the default JRE in **Preferences -> Java -> Installed JREs**. Once this is done, generate the Eclipse projects using the Gradle wrapper: + + ./gradlew eclipse + +You can now import the OpenSearch project into Eclipse as follows. + +1. Select **File > Import -> Existing Gradle Project** +2. In the subsequent dialog, navigate to the directory containing the root `build.gradle` file +3. In the subsequent dialog, if JDK 11 is not set as the default JRE, make sure to check **[Override workspace settings]**, keep **[Gradle Wrapper]**, and provide the correct path to JDK 11 using the **[Java Home]** property under **[Advanced Options]**. Otherwise, you may run into cryptic import failures and only the top-level project will be imported. +4. In the subsequent dialog, you should see **[Gradle project structure]** populated; click **[Finish]** to complete the import + +**Note:** it may seem counter-intuitive that you need to run the Gradle wrapper and then import an existing Gradle project (in general, **File > Import -> Existing Gradle Project** should be enough). Practically, as it stands now, the Eclipse Buildship plugin does not import OpenSearch project dependencies correctly on its own, but it does work in conjunction with the Gradle wrapper. 
## Project Layout From 0b773c2ceccbb54d5f7bdfa8e3dc0d84659626bf Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 8 Sep 2021 09:08:05 -0400 Subject: [PATCH 17/20] Fix org.opensearch.index.reindex.ReindexRestClientSslTests#testClientSucceedsWithCertificateAuthorities - javax.net.ssl.SSLPeerUnverifiedException (#1212) Signed-off-by: Andriy Redko --- .../opensearch/index/reindex/ReindexRestClientSslTests.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java index 645517d8ad2da..0e944907b4f8f 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexRestClientSslTests.java @@ -62,7 +62,6 @@ import javax.net.ssl.X509ExtendedKeyManager; import javax.net.ssl.X509ExtendedTrustManager; import java.io.IOException; -import java.net.InetAddress; import java.net.InetSocketAddress; import java.nio.file.Path; import java.security.cert.Certificate; @@ -90,7 +89,7 @@ public class ReindexRestClientSslTests extends OpenSearchTestCase { @BeforeClass public static void setupHttpServer() throws Exception { - InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0); + InetSocketAddress address = new InetSocketAddress("localhost", 0); SSLContext sslContext = buildServerSslContext(); server = MockHttpServer.createHttps(address, 0); server.setHttpsConfigurator(new ClientAuthHttpsConfigurator(sslContext)); @@ -213,7 +212,7 @@ public void testClientPassesClientCertificate() throws IOException { } private RemoteInfo getRemoteInfo() { - return new RemoteInfo("https", server.getAddress().getHostName(), server.getAddress().getPort(), "/", + return new RemoteInfo("https", "localhost", server.getAddress().getPort(), "/", new BytesArray("{\"match_all\":{}}"), "user", "password", Collections.emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT); } From 7e440af9e4effc74e398ad77c2f5b00d514eee0f Mon Sep 17 00:00:00 2001 From: Sai Date: Wed, 8 Sep 2021 20:22:24 +0530 Subject: [PATCH 18/20] Changes to support retrieval of operations from translog based on specified range (#1210) * Changes to support retrieval of operations from translog based on specified range Signed-off-by: Sai Kumar * Addressed CR comments Signed-off-by: Sai Kumar * Added testcases for internal engine Signed-off-by: Sai Kumar --- .../org/opensearch/index/engine/Engine.java | 9 +++ .../index/engine/InternalEngine.java | 11 +++ .../MissingHistoryOperationsException.java | 2 +- .../opensearch/index/shard/IndexShard.java | 11 +++ .../opensearch/index/translog/Translog.java | 23 +++++-- .../index/engine/InternalEngineTests.java | 42 ++++++++++++ .../index/translog/TranslogTests.java | 68 +++++++++++++++++++ .../index/engine/EngineTestCase.java | 16 +++++ 8 files changed, 176 insertions(+), 6 deletions(-) diff --git a/server/src/main/java/org/opensearch/index/engine/Engine.java b/server/src/main/java/org/opensearch/index/engine/Engine.java index a6322b5cca008..53f077c44d5a9 100644 --- a/server/src/main/java/org/opensearch/index/engine/Engine.java +++ b/server/src/main/java/org/opensearch/index/engine/Engine.java @@ -742,6 +742,15 @@ public enum SearcherScope { public abstract Translog.Snapshot newChangesSnapshot(String source, MapperService mapperService, long fromSeqNo, 
long toSeqNo, boolean requiredFullRange) throws IOException; + /** + * Creates a new history snapshot from either Lucene/Translog for reading operations whose seqno in the requesting + * seqno range (both inclusive). + */ + public Translog.Snapshot newChangesSnapshot(String source, HistorySource historySource, MapperService mapperService, + long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { + return newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); + } + /** * Creates a new history snapshot for reading operations since {@code startingSeqNo} (inclusive). * The returned snapshot can be retrieved from either Lucene index or translog files. diff --git a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java index 50ad79ae27c87..0df1747156672 100644 --- a/server/src/main/java/org/opensearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/opensearch/index/engine/InternalEngine.java @@ -2702,6 +2702,17 @@ private void ensureSoftDeletesEnabled() { } } + @Override + public Translog.Snapshot newChangesSnapshot(String source, HistorySource historySource, MapperService mapperService, + long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { + if(historySource == HistorySource.INDEX) { + return newChangesSnapshot(source, mapperService, fromSeqNo, toSeqNo, requiredFullRange); + } else { + return getTranslog().newSnapshot(fromSeqNo, toSeqNo, requiredFullRange); + } + } + + @Override public Translog.Snapshot newChangesSnapshot(String source, MapperService mapperService, long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { diff --git a/server/src/main/java/org/opensearch/index/engine/MissingHistoryOperationsException.java b/server/src/main/java/org/opensearch/index/engine/MissingHistoryOperationsException.java index 39defce5b1e59..7c7a32a57bf9a 100644 --- a/server/src/main/java/org/opensearch/index/engine/MissingHistoryOperationsException.java +++ b/server/src/main/java/org/opensearch/index/engine/MissingHistoryOperationsException.java @@ -38,7 +38,7 @@ */ public final class MissingHistoryOperationsException extends IllegalStateException { - MissingHistoryOperationsException(String message) { + public MissingHistoryOperationsException(String message) { super(message); } } diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 0b3311f872c63..53977ca2c600d 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -2011,6 +2011,17 @@ public Translog.Snapshot getHistoryOperations(String reason, Engine.HistorySourc return getEngine().readHistoryOperations(reason, source, mapperService, startingSeqNo); } + /** + * + * Creates a new history snapshot for reading operations since + * the provided starting seqno (inclusive) and ending seqno (inclusive) + * The returned snapshot can be retrieved from either Lucene index or translog files. + */ + public Translog.Snapshot getHistoryOperations(String reason, Engine.HistorySource source, + long startingSeqNo, long endSeqNo) throws IOException { + return getEngine().newChangesSnapshot(reason, source, mapperService, startingSeqNo, endSeqNo, true); + } + /** * Checks if we have a completed history of operations since the given starting seqno (inclusive). 
* This method should be called after acquiring the retention lock; See {@link #acquireHistoryRetentionLock(Engine.HistorySource)} diff --git a/server/src/main/java/org/opensearch/index/translog/Translog.java b/server/src/main/java/org/opensearch/index/translog/Translog.java index 15104a4fc32be..3151e36785342 100644 --- a/server/src/main/java/org/opensearch/index/translog/Translog.java +++ b/server/src/main/java/org/opensearch/index/translog/Translog.java @@ -53,6 +53,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; +import org.opensearch.index.engine.MissingHistoryOperationsException; import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.index.shard.AbstractIndexShardComponent; import org.opensearch.index.shard.IndexShardComponent; @@ -614,7 +615,11 @@ final Checkpoint getLastSyncedCheckpoint() { // for testing public Snapshot newSnapshot() throws IOException { - return newSnapshot(0, Long.MAX_VALUE); + return newSnapshot(0, Long.MAX_VALUE, false); + } + + public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { + return newSnapshot(fromSeqNo, toSeqNo, false); } /** @@ -624,7 +629,7 @@ public Snapshot newSnapshot() throws IOException { * @param toSeqNo the upper bound of the range (inclusive) * @return the new snapshot */ - public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { + public Snapshot newSnapshot(long fromSeqNo, long toSeqNo, boolean requiredFullRange) throws IOException { assert fromSeqNo <= toSeqNo : fromSeqNo + " > " + toSeqNo; assert fromSeqNo >= 0 : "from_seq_no must be non-negative " + fromSeqNo; try (ReleasableLock ignored = readLock.acquire()) { @@ -633,7 +638,7 @@ public Snapshot newSnapshot(long fromSeqNo, long toSeqNo) throws IOException { .filter(reader -> reader.getCheckpoint().minSeqNo <= toSeqNo && fromSeqNo <= reader.getCheckpoint().maxEffectiveSeqNo()) .map(BaseTranslogReader::newSnapshot).toArray(TranslogSnapshot[]::new); final Snapshot snapshot = newMultiSnapshot(snapshots); - return new SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo); + return new SeqNoFilterSnapshot(snapshot, fromSeqNo, toSeqNo, requiredFullRange); } } @@ -959,14 +964,17 @@ default int skippedOperations() { private static final class SeqNoFilterSnapshot implements Snapshot { private final Snapshot delegate; private int filteredOpsCount; + private int opsCount; + private boolean requiredFullRange; private final long fromSeqNo; // inclusive private final long toSeqNo; // inclusive - SeqNoFilterSnapshot(Snapshot delegate, long fromSeqNo, long toSeqNo) { + SeqNoFilterSnapshot(Snapshot delegate, long fromSeqNo, long toSeqNo, boolean requiredFullRange) { assert fromSeqNo <= toSeqNo : "from_seq_no[" + fromSeqNo + "] > to_seq_no[" + toSeqNo + "]"; this.delegate = delegate; this.fromSeqNo = fromSeqNo; this.toSeqNo = toSeqNo; + this.requiredFullRange = requiredFullRange; } @Override @@ -980,15 +988,20 @@ public int skippedOperations() { } @Override - public Operation next() throws IOException { + public Operation next() throws IOException, MissingHistoryOperationsException { Translog.Operation op; while ((op = delegate.next()) != null) { if (fromSeqNo <= op.seqNo() && op.seqNo() <= toSeqNo) { + opsCount++; return op; } else { filteredOpsCount++; } } + if(requiredFullRange && (toSeqNo - fromSeqNo +1) != opsCount) { + throw new MissingHistoryOperationsException("Not all operations between from_seqno [" + fromSeqNo + "] " + + "and to_seqno [" + toSeqNo + "] 
found"); + } return null; } diff --git a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java index 01b8481e3ce8e..b6e24c2f1ac8a 100644 --- a/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/opensearch/index/engine/InternalEngineTests.java @@ -5417,6 +5417,48 @@ public void testTrimUnsafeCommits() throws Exception { } } + public void testHistoryBasedOnSource() throws Exception { + final List operations = generateSingleDocHistory(false, + randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), 2, 10, 300, "1"); + final MergePolicy keepSoftDeleteDocsMP = new SoftDeletesRetentionMergePolicy( + Lucene.SOFT_DELETES_FIELD, () -> new MatchAllDocsQuery(), engine.config().getMergePolicy()); + Settings.Builder settings = Settings.builder() + .put(defaultSettings.getSettings()) + .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) + .put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10)); + final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build(); + final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata); + Set expectedSeqNos = new HashSet<>(); + try (Store store = createStore(); + Engine engine = createEngine(config(indexSettings, store, createTempDir(), keepSoftDeleteDocsMP, null))) { + for (Engine.Operation op : operations) { + if (op instanceof Engine.Index) { + Engine.IndexResult indexResult = engine.index((Engine.Index) op); + assertThat(indexResult.getFailure(), nullValue()); + expectedSeqNos.add(indexResult.getSeqNo()); + } else { + Engine.DeleteResult deleteResult = engine.delete((Engine.Delete) op); + assertThat(deleteResult.getFailure(), nullValue()); + expectedSeqNos.add(deleteResult.getSeqNo()); + } + if (rarely()) { + engine.refresh("test"); + } + if (rarely()) { + engine.flush(); + } + if (rarely()) { + engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID()); + } + } + MapperService mapperService = createMapperService("test"); + List luceneOps = readAllOperationsBasedOnSource(engine, Engine.HistorySource.INDEX, mapperService); + List translogOps = readAllOperationsBasedOnSource(engine, Engine.HistorySource.TRANSLOG, mapperService); + assertThat(luceneOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); + assertThat(translogOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray())); + } + } + public void testLuceneHistoryOnPrimary() throws Exception { final List operations = generateSingleDocHistory(false, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), 2, 10, 300, "1"); diff --git a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java index 0c6bbceff80d8..221a2e65bfba7 100644 --- a/server/src/test/java/org/opensearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/opensearch/index/translog/TranslogTests.java @@ -75,6 +75,7 @@ import org.opensearch.index.VersionType; import org.opensearch.index.engine.Engine; import org.opensearch.index.engine.Engine.Operation.Origin; +import org.opensearch.index.engine.MissingHistoryOperationsException; import org.opensearch.index.mapper.IdFieldMapper; import org.opensearch.index.mapper.ParseContext.Document; import 
org.opensearch.index.mapper.ParsedDocument; @@ -120,6 +121,7 @@ import java.util.List; import java.util.Map; import java.util.Queue; +import java.util.Random; import java.util.Set; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; @@ -737,6 +739,72 @@ public void testRangeSnapshot() throws Exception { } } + private Long populateTranslogOps(boolean withMissingOps) throws IOException { + long minSeqNo = SequenceNumbers.NO_OPS_PERFORMED; + long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED; + final int generations = between(2, 20); + long currentSeqNo = 0L; + List firstGenOps = null; + Map> operationsByGen = new HashMap<>(); + for (int gen = 0; gen < generations; gen++) { + List seqNos = new ArrayList<>(); + int numOps = randomIntBetween(4, 10); + for (int i = 0; i < numOps; i++, currentSeqNo++) { + minSeqNo = SequenceNumbers.min(minSeqNo, currentSeqNo); + maxSeqNo = SequenceNumbers.max(maxSeqNo, currentSeqNo); + seqNos.add(currentSeqNo); + } + Collections.shuffle(seqNos, new Random(100)); + List ops = new ArrayList<>(seqNos.size()); + for (long seqNo : seqNos) { + Translog.Index op = new Translog.Index("_doc", randomAlphaOfLength(10), seqNo, primaryTerm.get(), new byte[]{randomByte()}); + boolean shouldAdd = !withMissingOps || seqNo % 4 != 0; + if(shouldAdd) { + translog.add(op); + ops.add(op); + } + } + operationsByGen.put(translog.currentFileGeneration(), ops); + if(firstGenOps == null) { + firstGenOps = ops; + } + translog.rollGeneration(); + if (rarely()) { + translog.rollGeneration(); // empty generation + } + } + return currentSeqNo; + } + + public void testFullRangeSnapshot() throws Exception { + // Successful snapshot + long nextSeqNo = populateTranslogOps(false); + long fromSeqNo = 0L; + long toSeqNo = Math.max(nextSeqNo - 1, fromSeqNo + 15); + try (Translog.Snapshot snapshot = translog.newSnapshot(fromSeqNo, toSeqNo, true)) { + int totOps = 0; + for (Translog.Operation op = snapshot.next(); op != null; op = snapshot.next()) { + totOps++; + } + assertEquals(totOps, toSeqNo - fromSeqNo + 1); + } + } + + public void testFullRangeSnapshotWithFailures() throws Exception { + long nextSeqNo = populateTranslogOps(true); + long fromSeqNo = 0L; + long toSeqNo = Math.max(nextSeqNo-1, fromSeqNo + 15); + try (Translog.Snapshot snapshot = translog.newSnapshot(fromSeqNo, toSeqNo, true)) { + int totOps = 0; + for (Translog.Operation op = snapshot.next(); op != null; op = snapshot.next()) { + totOps++; + } + fail("Should throw exception for missing operations"); + } catch(MissingHistoryOperationsException e) { + assertTrue(e.getMessage().contains("Not all operations between from_seqno")); + } + } + public void assertFileIsPresent(Translog translog, long id) { if (Files.exists(translog.location().resolve(Translog.getFilename(id)))) { return; diff --git a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java index c4689363bdea1..ec223dc962d4b 100644 --- a/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/opensearch/index/engine/EngineTestCase.java @@ -1081,6 +1081,22 @@ public static List readAllOperationsInLucene(Engine engine, return operations; } + /** + * Reads all engine operations that have been processed by the engine from Lucene index/Translog based on source. 
+ */ + public static List readAllOperationsBasedOnSource(Engine engine, Engine.HistorySource historySource, + MapperService mapper) throws IOException { + final List operations = new ArrayList<>(); + try (Translog.Snapshot snapshot = engine.newChangesSnapshot("test", historySource, mapper, + 0, Long.MAX_VALUE, false)) { + Translog.Operation op; + while ((op = snapshot.next()) != null){ + operations.add(op); + } + } + return operations; + } + /** * Asserts the provided engine has a consistent document history between translog and Lucene index. */ From 3870522228650ed242a29d5dc29d1a10a1270d4a Mon Sep 17 00:00:00 2001 From: Rabi Panda Date: Wed, 8 Sep 2021 11:24:19 -0700 Subject: [PATCH 19/20] Run link checker GitHub action on schedule. (#1221) Most PRs do not add/update links, however sites go down often. This change makes sure that we catch any broken link in the repository and fix it, but at the same time we do not want to block PRs because of some unrelated broken links. This PR updates the workflow to run everyday at midnight UTC. Signed-off-by: Rabi Panda --- .github/workflows/links.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index 5e25e0f48b927..fb9752f58bd4b 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -1,10 +1,7 @@ name: Link Checker on: - push: - branches: [ main ] - pull_request: - branches: [ main ] - + schedule: + - cron: '0 0 * * *' jobs: linkchecker: From 65abe4a45c03fcbd5bc2fad6d813a23f3221568c Mon Sep 17 00:00:00 2001 From: Abbas Hussain Date: Wed, 8 Sep 2021 12:21:53 -0700 Subject: [PATCH 20/20] Generate release notes for 1.1 (#1230) Signed-off-by: Abbas Hussain --- .../opensearch.release-notes-1.1.0.md | 309 ++++++++++++++++++ 1 file changed, 309 insertions(+) create mode 100644 release-notes/opensearch.release-notes-1.1.0.md diff --git a/release-notes/opensearch.release-notes-1.1.0.md b/release-notes/opensearch.release-notes-1.1.0.md new file mode 100644 index 0000000000000..9a895598621e7 --- /dev/null +++ b/release-notes/opensearch.release-notes-1.1.0.md @@ -0,0 +1,309 @@ +## Version 1.1.0 Release Notes + +* __Kept the original constructor for PluginInfo to maintain bwc (#1206) (#1209)__ + + [Vacha](mailto:vachshah@amazon.com) - Thu, 2 Sep 2021 22:05:44 -0400 + + efs/remotes/upstream/1.x, refs/remotes/origin/1.x, refs/heads/1.x + Signed-off-by: Vacha <vachshah@amazon.com> + +* __Clarify JDK requirement in the developer guide (#1153) (#1208)__ + + [Tianli Feng](mailto:ftianli@amazon.com) - Thu, 2 Sep 2021 17:02:59 -0700 + + + * Explicitly point out the JDK 8 requirement is for runtime, but not for + compiling. + * Clarify the JAVAx_HOME env variables are for the "backwards compatibility + test". + * Add explanation on how the backwards compatibility tests get the OpenSearch + distributions for a specific version. 
+ Signed-off-by: Tianli Feng + <ftianli@amazon.com> + +* __Upgrade apache commons-compress to 1.21 (#1197) (#1203)__ + + [Abbas Hussain](mailto:abbashus@amazon.com) - Fri, 3 Sep 2021 01:47:57 +0530 + + + Signed-off-by: Abbas Hussain <abbas_10690@yahoo.com> + +* __Restoring alpha/beta/rc version semantics (#1112) (#1204)__ + + [Andriy Redko](mailto:andriy.redko@aiven.io) - Thu, 2 Sep 2021 08:01:46 -0500 + + + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + +* __[Bug] Fix mixed cluster support for OpenSearch 2+ (#1191) (#1195)__ + + [Nick Knize](mailto:nknize@apache.org) - Wed, 1 Sep 2021 17:04:40 -0500 + + + The version framework only added support for OpenSearch 1.x bwc with legacy + + clusters. This commit adds support for v2.0 which will be the last version with + bwc support for legacy clusters (v7.10) + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + +* __Drop mocksocket & securemock dependencies from sniffer and rest client (no needed) (#1174) (#1187)__ + + [Andriy Redko](mailto:drreta@gmail.com) - Tue, 31 Aug 2021 19:44:42 -0400 + + + * Drop mocksocket & securemock dependencies from sniffer and rest client (not + needed) + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + + * Removing .gitignore + Signed-off-by: Andriy Redko <andriy.redko@aiven.io> + +* __Backporting the fix to 1.x for fixing Copyright licensing (#1188)__ + + [Minal Shah](mailto:87717056+minalsha@users.noreply.github.com) - Tue, 31 Aug 2021 19:38:56 -0400 + + + Signed-off-by: Minal Shah <minalsha@amazon.com> + +* __Reduce iterations to improve test run time (#1168) (#1177)__ + + [Abbas Hussain](mailto:abbashus@amazon.com) - Tue, 31 Aug 2021 01:06:29 +0530 + + + Signed-off-by: Abbas Hussain <abbas_10690@yahoo.com> + +* __Tune datanode count and shards count to improve test run time (#1170) (#1176)__ + + [Abbas Hussain](mailto:abbashus@amazon.com) - Tue, 31 Aug 2021 00:14:38 +0530 + + + Signed-off-by: Abbas Hussain <abbas_10690@yahoo.com> + +* __Add 1.0.1 revision (#1152) (#1160)__ + + [Nick Knize](mailto:nknize@gmail.com) - Thu, 26 Aug 2021 07:10:24 -0500 + + + This commit stages the branch to the next 1.0.1 patch release. BWC testing + needs + this even if the next revision is never actually released. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + +* __[Bug] Change 1.0.0 version check in PluginInfo (#1159)__ + + [Nick Knize](mailto:nknize@gmail.com) - Wed, 25 Aug 2021 23:58:30 -0500 + + + PluginInfo should use .onOrAfter(Version.V_1_1_0) instead of + .after(Version.V_1_0_0) for the new custom folder name for plugin feature. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + +* __Include sources and javadoc artifacts while publishing to a Maven repository (#1049) (#1139)__ + + [Rabi Panda](mailto:adnapibar@gmail.com) - Mon, 23 Aug 2021 17:12:20 -0700 + + + This change fixes the issue where the sources and javadoc artifacts were not + built and included with the publish. 
+ Signed-off-by: Rabi Panda <adnapibar@gmail.com> + +* __Allowing custom folder name for plugin installation (#848) (#1116)__ + + [Vacha](mailto:vachshah@amazon.com) - Mon, 23 Aug 2021 14:39:27 -0700 + + + Signed-off-by: Vacha Shah <vachshah@amazon.com> + +* __Upgrade to Lucene 8.9 (#1080) (#1115)__ + + [Nick Knize](mailto:nknize@gmail.com) - Mon, 23 Aug 2021 10:06:34 -0700 + + + This commit upgrades to the official lucene 8.9 release + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + +* __[DEPRECATE] SimpleFS in favor of NIOFS (#1073) (#1114)__ + + [Nick Knize](mailto:nknize@gmail.com) - Fri, 20 Aug 2021 11:26:22 -0500 + + + Lucene 9 removes support for SimpleFS File System format. This commit + deprecates + the SimpleFS format in favor of NIOFS. + Signed-off-by: Nicholas Walter Knize <nknize@apache.org> + +* __Fix failure in SearchCancellationIT.testMSearchChildReqCancellationWithHybridTimeout (#1105)__ + + [Sorabh](mailto:sohami.apache@gmail.com) - Tue, 17 Aug 2021 16:23:55 -0400 + + + In some cases as one shared with issue #1099, the maxConcurrentSearchRequests + was chosen as 0 which + will compute the final value during execution of the + request based on processor counts. When this + computed value is less than + number of search request in msearch request, it will execute all the + requests + in multiple iterations causing the failure since test will only wait for one + such + iteration. Hence setting the maxConcurrentSearchRequests explicitly to + number of search requests + being added in the test to ensure correct behavior + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + +* __Support for bwc tests for plugins (#1051) (#1090)__ + + [Vacha](mailto:vachshah@amazon.com) - Sun, 15 Aug 2021 08:07:55 -0700 + + + * Support for bwc tests for plugins + Signed-off-by: Vacha <vachshah@amazon.com> + + * Adding support for restart upgrades for plugins bwc + Signed-off-by: Vacha <vachshah@amazon.com> + +* __Improving the Grok circular reference check to prevent stack overflow (#1079) (#1087)__ + + [kartg](mailto:85275476+kartg@users.noreply.github.com) - Thu, 12 Aug 2021 18:47:56 -0400 + + + This change refactors the circular reference check in the Grok processor class + + to use a formal depth-first traversal. It also includes a logic update to + + prevent a stack overflow in one scenario and a check for malformed patterns. + + This bugfix addresses CVE-2021-22144. + Signed-off-by: Kartik Ganesh <85275476+kartg@users.noreply.github.com> + +* __Part 1: Support for cancel_after_timeinterval parameter in search and msearch request (#986) (#1085)__ + + [Sorabh](mailto:sorabh@apache.org) - Thu, 12 Aug 2021 13:52:28 -0400 + + + * Part 1: Support for cancel_after_timeinterval parameter in search and msearch + request + This commit introduces the new request level parameter to configure the + timeout interval after which + a search request will be cancelled. For msearch + request the parameter is supported both at parent + request and at sub child + search requests. If it is provided at parent level and child search request + + doesn't have it then the parent level value is set at such child request. 
The + parent level msearch + is not used to cancel the parent request as it may be + tricky to come up with correct value in cases + when child search request can + have different runtimes + TEST: Added test for ser/de with new parameter + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + + * Part 2: Support for cancel_after_timeinterval parameter in search and msearch + request + This commit adds the handling of the new request level parameter and schedule + cancellation task. It + also adds a cluster setting to set a global cancellation + timeout for search request which will be + used in absence of request level + timeout. + TEST: Added new tests in SearchCancellationIT + Signed-off-by: Sorabh + Hamirwasia <sohami.apache@gmail.com> + + * Address Review feedback for Part 1 + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + + * Address review feedback for Part 2 + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + + * Update CancellableTask to remove the cancelOnTimeout boolean flag + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + + * Replace search.cancellation.timeout cluster setting with + search.enforce_server.timeout.cancellation to control if cluster level + cancel_after_time_interval should take precedence over request level + cancel_after_time_interval value + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + + * Removing the search.enforce_server.timeout.cancellation cluster setting and + just keeping search.cancel_after_time_interval setting with request level + parameter taking the precedence. + Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com> + Co-authored-by: Sorabh Hamirwasia <hsorabh@amazon.com> + Co-authored-by: Sorabh Hamirwasia <hsorabh@amazon.com> + +* __Avoid crashing on using the index.lifecycle.name in the API body (#1060) (#1070)__ + + [frotsch](mailto:86320880+frotsch@users.noreply.github.com) - Tue, 10 Aug 2021 14:16:44 -0400 + + + * Avoid crashing on using the index.lifecycle.name in the API body + Signed-off-by: frotsch <frotsch@mailbox.org> + +* __Introduce RestHandler.Wrapper to help with delegate implementations (#1004) (#1031)__ + + [Vlad Rozov](mailto:vrozov@users.noreply.github.com) - Tue, 3 Aug 2021 09:02:40 -0400 + + + Signed-off-by: Vlad Rozov <vrozov@users.noreply.github.com> + +* __Rank feature - unknown field linear (#983) (#1025)__ + + [Yevhen Tienkaiev](mailto:hronom@gmail.com) - Fri, 30 Jul 2021 15:17:47 -0400 + + + Signed-off-by: Yevhen Tienkaiev <hronom@gmail.com> + +* __Replace Elasticsearch docs links in scripts (#994) (#1001)__ + + [Poojita Raj](mailto:poojiraj@amazon.com) - Fri, 23 Jul 2021 14:21:31 -0700 + + + Replace the docs links In scripts bin/opensearch-env and config/jvm.options, + with OpenSearch docs links. 
+ Signed-off-by: Poojita-Raj <poojiraj@amazon.com> + (cherry picked from commit 6bc4ce017ad654cc2c8d7d37553c82d61c61b964) + + Signed-off-by: Poojita-Raj <poojiraj@amazon.com> + +* __Introduce replaceRoutes() method and 2 new constructors to RestHandler.java (#947) (#998)__ + + [Chang Liu](mailto:lc12251109@gmail.com) - Thu, 22 Jul 2021 14:26:16 -0400 + + + * Add addRoutesPrefix() method to RestHandler.java + Signed-off-by: Azar Fazel <azar.fazel@gmail.com> + Signed-off-by: cliu123 + <lc12251109@gmail.com> + Co-authored-by: afazel <afazel@users.noreply.github.com> + +* __Avoid override of routes() in BaseRestHandler to respect the default behavior defined in RestHandler (#889) (#991)__ + + [Chang Liu](mailto:lc12251109@gmail.com) - Thu, 22 Jul 2021 10:57:18 -0400 + + + Signed-off-by: cliu123 <lc12251109@gmail.com> + +* __Cleanup TESTING and DEVELOPER_GUIDE markdowns (#946) (#954)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Tue, 13 Jul 2021 14:13:26 -0500 + + + + +* __Updated READMEs on releasing, maintaining, admins and security. (#853) (#950)__ + + [Daniel Doubrovkine (dB.)](mailto:dblock@dblock.org) - Mon, 12 Jul 2021 15:06:20 -0500 + + + Signed-off-by: dblock <dblock@amazon.com> + +* __Pass interceptor to super constructor (#876) (#937)__ + + [Sooraj Sinha](mailto:81695996+soosinha@users.noreply.github.com) - Mon, 12 Jul 2021 11:48:09 -0700 + + + Signed-off-by: Sooraj Sinha <soosinha@amazon.com> + +