diff --git a/.ci/bwcVersions b/.ci/bwcVersions index 1dc8dc955f7c6..65fd9e7281ad1 100644 --- a/.ci/bwcVersions +++ b/.ci/bwcVersions @@ -1,47 +1,4 @@ BWC_VERSION: - - "7.0.0" - - "7.0.1" - - "7.1.0" - - "7.1.1" - - "7.2.0" - - "7.2.1" - - "7.3.0" - - "7.3.1" - - "7.3.2" - - "7.4.0" - - "7.4.1" - - "7.4.2" - - "7.5.0" - - "7.5.1" - - "7.5.2" - - "7.6.0" - - "7.6.1" - - "7.6.2" - - "7.7.0" - - "7.7.1" - - "7.8.0" - - "7.8.1" - - "7.9.0" - - "7.9.1" - - "7.9.2" - - "7.9.3" - - "7.10.0" - - "7.10.1" - - "7.10.2" - - "1.0.0" - - "1.1.0" - - "1.2.0" - - "1.2.1" - - "1.2.2" - - "1.2.3" - - "1.2.4" - - "1.2.5" - - "1.3.0" - - "1.3.1" - - "1.3.2" - - "1.3.3" - - "1.3.4" - - "1.3.5" - "2.0.0" - "2.0.1" - "2.0.2" @@ -51,4 +8,7 @@ BWC_VERSION: - "2.2.1" - "2.2.2" - "2.3.0" + - "2.3.1" - "2.4.0" + - "2.4.1" + - "2.5.0" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8b63b291a8a54..3affbbd820774 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,3 +1 @@ -# This should match the owning team set up in https://github.com/orgs/opensearch-project/teams -* @opensearch-project/opensearch-core @reta - +* @reta @anasalkouz @andrross @reta @Bukhtawar @CEHENKLE @dblock @setiah @kartg @kotwanikunal @mch2 @nknize @owaiskazi19 @adnapibar @Rishikesh1159 @ryanbogan @saratvemulapalli @shwetathareja @dreamer-89 @tlfeng @VachaShah @xuezhou25 diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 4537cadf71074..f54aa394cf83f 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,3 +1,10 @@ + + ### Description [Describe what this change achieves] diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index e47d8d88c0243..5a75d2c877992 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -7,6 +7,7 @@ on: jobs: backport: + if: ${{ contains(github.event.label.name, 'backport') }} runs-on: ubuntu-latest permissions: contents: write @@ -22,7 +23,7 @@ jobs: installation_id: 22958780 - name: Backport - uses: VachaShah/backport@v1.1.4 + uses: VachaShah/backport@v2.1.0 with: github_token: ${{ steps.github_app_token.outputs.token }} - branch_name: backport/backport-${{ github.event.number }} + head_template: backport/backport-<%= number %>-to-<%= base %> diff --git a/.github/workflows/changelog_verifier.yml b/.github/workflows/changelog_verifier.yml index 96f99f17b016e..992a38b624d7a 100644 --- a/.github/workflows/changelog_verifier.yml +++ b/.github/workflows/changelog_verifier.yml @@ -15,4 +15,4 @@ jobs: - uses: dangoslen/changelog-enforcer@v3 with: - skipLabels: "autocut" + skipLabels: "autocut, skip-changelog" diff --git a/.github/workflows/gradle-check.yml b/.github/workflows/gradle-check.yml index cbaa7fa10fbb6..9567bcd63bc2e 100644 --- a/.github/workflows/gradle-check.yml +++ b/.github/workflows/gradle-check.yml @@ -8,8 +8,15 @@ on: pull_request_target: types: [opened, synchronize, reopened] +permissions: + contents: read # to fetch code (actions/checkout) + jobs: gradle-check: + permissions: + contents: read # to fetch code (actions/checkout) + pull-requests: write # to create or update comment (peter-evans/create-or-update-comment) + runs-on: ubuntu-latest timeout-minutes: 130 steps: @@ -67,7 +74,7 @@ jobs: files: ./codeCoverage.xml - name: Create Comment Success - if: ${{ github.event_name == 'pull_request_target' && success() }} + if: ${{ github.event_name == 'pull_request_target' && success() && env.result == 'SUCCESS' }} uses: peter-evans/create-or-update-comment@v2 with: 
issue-number: ${{ env.pr_number }}
@@ -77,6 +84,33 @@ jobs:
             * **URL:** ${{ env.workflow_url }}
             * **CommitID:** ${{ env.pr_from_sha }}
+
+      - name: Extract Test Failure
+        if: ${{ github.event_name == 'pull_request_target' && env.result != 'SUCCESS' }}
+        run: |
+          TEST_FAILURES=`curl -s "${{ env.workflow_url }}/testReport/api/json?tree=suites\[cases\[status,className,name\]\]" | jq -r '.. | objects | select(.status=="FAILED",.status=="REGRESSION") | (.className + "." + .name)' | uniq -c | sort -n -r | head -n 10`
+          if [[ "$TEST_FAILURES" != "" ]]
+          then
+            echo "test_failures<<EOF" >> $GITHUB_ENV
+            echo "" >> $GITHUB_ENV
+            echo "* **TEST FAILURES:**" >> $GITHUB_ENV
+            echo '```' >> $GITHUB_ENV
+            echo "$TEST_FAILURES" >> $GITHUB_ENV
+            echo '```' >> $GITHUB_ENV
+            echo "EOF" >> $GITHUB_ENV
+          fi
+
+      - name: Create Comment Flaky
+        if: ${{ github.event_name == 'pull_request_target' && success() && env.result != 'SUCCESS' }}
+        uses: peter-evans/create-or-update-comment@v2
+        with:
+          issue-number: ${{ env.pr_number }}
+          body: |
+            ### Gradle Check (Jenkins) Run Completed with:
+            * **RESULT:** ${{ env.result }} :grey_exclamation: ${{ env.test_failures }}
+            * **URL:** ${{ env.workflow_url }}
+            * **CommitID:** ${{ env.pr_from_sha }}
+            Please review all [flaky tests](https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#flaky-tests) that succeeded after retry and create an issue if one does not already exist to track the flaky failure.
+
       - name: Create Comment Failure
         if: ${{ github.event_name == 'pull_request_target' && failure() }}
         uses: peter-evans/create-or-update-comment@v2
@@ -84,6 +118,8 @@ jobs:
           issue-number: ${{ env.pr_number }}
           body: |
             ### Gradle Check (Jenkins) Run Completed with:
-            * **RESULT:** ${{ env.result }} :x:
+            * **RESULT:** ${{ env.result }} :x: ${{ env.test_failures }}
             * **URL:** ${{ env.workflow_url }}
             * **CommitID:** ${{ env.pr_from_sha }}
+            Please examine the workflow log, locate, and copy-paste the failure(s) below, then iterate to green.
+            Is the failure [a flaky test](https://github.com/opensearch-project/OpenSearch/blob/main/DEVELOPER_GUIDE.md#flaky-tests) unrelated to your change?
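A note on the `test_failures<<EOF` / `EOF` pair restored in the Extract Test Failure step above: the Actions runner treats a `name<<DELIMITER` line written to `$GITHUB_ENV` as opening a multi-line value and keeps reading until it sees the bare delimiter line. A minimal standalone sketch of the same technique, with an illustrative variable name and contents:

```bash
# Sketch: exporting a multi-line value to later workflow steps via $GITHUB_ENV.
failures='org.example.TestOne.testFoo
org.example.TestTwo.testBar'

{
  echo "test_failures<<EOF"   # opens a multi-line value named test_failures
  echo "$failures"            # the value itself, newlines preserved
  echo "EOF"                  # bare delimiter line closes the value
} >> "$GITHUB_ENV"
# Later steps can then interpolate it as ${{ env.test_failures }}.
```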
diff --git a/.github/workflows/links.yml b/.github/workflows/links.yml index ca05aee8be378..ac94f5ef5ec5e 100644 --- a/.github/workflows/links.yml +++ b/.github/workflows/links.yml @@ -2,6 +2,8 @@ name: Link Checker on: schedule: - cron: '0 0 * * *' +permissions: + contents: read # to fetch code (actions/checkout) jobs: linkchecker: if: github.repository == 'opensearch-project/OpenSearch' diff --git a/.github/workflows/precommit.yml b/.github/workflows/precommit.yml index 9860be4159b83..e264d65cdf191 100644 --- a/.github/workflows/precommit.yml +++ b/.github/workflows/precommit.yml @@ -1,9 +1,12 @@ name: Gradle Precommit on: [pull_request] - + jobs: precommit: - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] steps: - uses: actions/checkout@v2 - name: Set up JDK 11 @@ -13,4 +16,4 @@ jobs: distribution: adopt - name: Run Gradle run: | - ./gradlew precommit --parallel + ./gradlew javadoc precommit --parallel diff --git a/.github/workflows/version.yml b/.github/workflows/version.yml index 42c2d21d106ce..d1b5e90484ec4 100644 --- a/.github/workflows/version.yml +++ b/.github/workflows/version.yml @@ -5,6 +5,7 @@ on: tags: - '*.*.*' +permissions: {} jobs: build: runs-on: ubuntu-latest diff --git a/.gitignore b/.gitignore index 8ea328ce2f1e9..a0dabfb8798f9 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ out/ !.idea/inspectionProfiles/Project_Default.xml !.idea/runConfigurations/Debug_OpenSearch.xml !.idea/vcs.xml +!.idea/icon.svg # These files are generated in the main tree by IntelliJ benchmarks/src/main/generated/* diff --git a/.idea/icon.svg b/.idea/icon.svg new file mode 100644 index 0000000000000..4f8132d18a2e0 --- /dev/null +++ b/.idea/icon.svg @@ -0,0 +1,5 @@ + + + + + diff --git a/CHANGELOG.md b/CHANGELOG.md index a10824a56af05..04f3fbeb4b068 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,85 +1,109 @@ # CHANGELOG -Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) -## [Unreleased] +All notable changes to this project are documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). See the [CONTRIBUTING guide](./CONTRIBUTING.md#Changelog) for instructions on how to add changelog entries. 
+
+## [Unreleased 3.0]
 ### Added
-- Add support for s390x architecture ([#4001](https://github.com/opensearch-project/OpenSearch/pull/4001))
-- Github workflow for changelog verification ([#4085](https://github.com/opensearch-project/OpenSearch/pull/4085))
-- Point in time rest layer changes for create and delete PIT API ([#4064](https://github.com/opensearch-project/OpenSearch/pull/4064))
-- Added @dreamer-89 as an Opensearch maintainer ([#4342](https://github.com/opensearch-project/OpenSearch/pull/4342))
-- Added release notes for 1.3.5 ([#4343](https://github.com/opensearch-project/OpenSearch/pull/4343))
-- Added release notes for 2.2.1 ([#4344](https://github.com/opensearch-project/OpenSearch/pull/4344))
-- Label configuration for dependabot PRs ([#4348](https://github.com/opensearch-project/OpenSearch/pull/4348))
+- Hardened token permissions in GitHub workflows ([#4587](https://github.com/opensearch-project/OpenSearch/pull/4587))
 - Support for HTTP/2 (server-side) ([#3847](https://github.com/opensearch-project/OpenSearch/pull/3847))
-- BWC version 2.2.2 ([#4383](https://github.com/opensearch-project/OpenSearch/pull/4383))
-- Support for labels on version bump PRs, skip label support for changelog verifier ([#4391](https://github.com/opensearch-project/OpenSearch/pull/4391))
-- Update previous release bwc version to 2.4.0 ([#4455](https://github.com/opensearch-project/OpenSearch/pull/4455))
-- 2.3.0 release notes ([#4457](https://github.com/opensearch-project/OpenSearch/pull/4457))
+- Add getter for path field in NestedQueryBuilder ([#4636](https://github.com/opensearch-project/OpenSearch/pull/4636))
+- Apply reproducible builds configuration for OpenSearch plugins through gradle plugin ([#4746](https://github.com/opensearch-project/OpenSearch/pull/4746))
+- Add project health badges to the README.md ([#4843](https://github.com/opensearch-project/OpenSearch/pull/4843))
+- [Test] Add IAE test for deprecated edgeNGram analyzer name ([#5040](https://github.com/opensearch-project/OpenSearch/pull/5040))
+- Allow mmap to use new JDK-19 preview APIs in Apache Lucene 9.4+ ([#5151](https://github.com/opensearch-project/OpenSearch/pull/5151))
+- Add feature flag for extensions ([#5211](https://github.com/opensearch-project/OpenSearch/pull/5211))
+- Added jackson dependency to server ([#5366](https://github.com/opensearch-project/OpenSearch/pull/5366))
+- Added experimental extensions to main ([#5347](https://github.com/opensearch-project/OpenSearch/pull/5347))
 ### Dependencies
-- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1
-- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22
-
-### Dependencies
-- Bumps `com.diffplug.spotless` from 6.9.1 to 6.10.0
-- Bumps `xmlbeans` from 5.1.0 to 5.1.1
-- Bumps azure-core-http-netty from 1.12.0 to 1.12.4([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
-- Bumps azure-core from 1.27.0 to 1.31.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
-- Bumps azure-storage-common from 12.16.0 to 12.18.0([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
+- Bumps `log4j-core` from 2.18.0 to 2.19.0
+- Bumps `reactor-netty-http` from 1.0.18 to 1.0.23
+- Bumps `jettison` from 1.5.0 to 1.5.1
+- Bumps `azure-storage-common` from 12.18.0 to 12.18.1
+- Bumps `forbiddenapis` from 3.3 to 3.4
+- Bumps `gson` from 2.9.0 to 2.10
+- Bumps `protobuf-java` from 3.21.2 to 3.21.9
+- Bumps `azure-core` from 1.31.0 to 1.33.0
+- Bumps `avro` from 1.11.0 to 1.11.1
+- Bumps `woodstox-core` from 6.3.0 to 6.3.1
+- Bumps `xmlbeans` from 5.1.0 to 5.1.1 ([#4354](https://github.com/opensearch-project/OpenSearch/pull/4354))
+- Bumps `azure-core-http-netty` from 1.12.0 to 1.12.4 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
+- Bumps `azure-core` from 1.27.0 to 1.31.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
+- Bumps `azure-storage-common` from 12.16.0 to 12.18.0 ([#4160](https://github.com/opensearch-project/OpenSearch/pull/4160))
+- Bumps `org.gradle.test-retry` from 1.4.0 to 1.4.1 ([#4411](https://github.com/opensearch-project/OpenSearch/pull/4411))
+- Bumps `reactor-netty-core` from 1.0.19 to 1.0.22 ([#4447](https://github.com/opensearch-project/OpenSearch/pull/4447))
+- Bumps `reactive-streams` from 1.0.3 to 1.0.4 ([#4488](https://github.com/opensearch-project/OpenSearch/pull/4488))
+- Bumps `com.diffplug.spotless` from 6.10.0 to 6.11.0 ([#4547](https://github.com/opensearch-project/OpenSearch/pull/4547))
+- Bumps `reactor-core` from 3.4.18 to 3.4.23 ([#4548](https://github.com/opensearch-project/OpenSearch/pull/4548))
+- Bumps `jempbox` from 1.8.16 to 1.8.17 ([#4550](https://github.com/opensearch-project/OpenSearch/pull/4550))
+- Bumps `commons-compress` from 1.21 to 1.22
+- Bumps `jcodings` from 1.0.57 to 1.0.58 ([#5233](https://github.com/opensearch-project/OpenSearch/pull/5233))
+- Bumps `google-http-client-jackson2` from 1.35.0 to 1.42.3 ([#5234](https://github.com/opensearch-project/OpenSearch/pull/5234))
+- Bumps `maxmind-db` from 2.0.0 to 2.1.0 ([#5236](https://github.com/opensearch-project/OpenSearch/pull/5236))
+- Bumps `azure-core` from 1.33.0 to 1.34.0 ([#5235](https://github.com/opensearch-project/OpenSearch/pull/5235))
+- Bumps `azure-core-http-netty` from 1.12.4 to 1.12.7 ([#5235](https://github.com/opensearch-project/OpenSearch/pull/5235))
+- Bumps `spock-core` from 2.1-groovy-3.0 to 2.3-groovy-3.0 ([#5315](https://github.com/opensearch-project/OpenSearch/pull/5315))
+- Bumps `json-schema-validator` from 1.0.69 to 1.0.73 ([#5316](https://github.com/opensearch-project/OpenSearch/pull/5316))
+- Bumps `proto-google-common-protos` from 2.8.0 to 2.10.0 ([#5318](https://github.com/opensearch-project/OpenSearch/pull/5318))
+- Bumps `protobuf-java` from 3.21.7 to 3.21.9 ([#5319](https://github.com/opensearch-project/OpenSearch/pull/5319))
+- Update Apache Lucene to 9.5.0-snapshot-a4ef70f ([#4979](https://github.com/opensearch-project/OpenSearch/pull/4979))
+- Update to Gradle 7.6 and JDK-19 ([#4973](https://github.com/opensearch-project/OpenSearch/pull/4973))
 ### Changed
-- Dependency updates (httpcore, mockito, slf4j, httpasyncclient, commons-codec) ([#4308](https://github.com/opensearch-project/OpenSearch/pull/4308))
-- Use RemoteSegmentStoreDirectory instead of RemoteDirectory ([#4240](https://github.com/opensearch-project/OpenSearch/pull/4240))
-- Plugin ZIP publication groupId value is configurable ([#4156](https://github.com/opensearch-project/OpenSearch/pull/4156))
-- Add index specific setting for remote repository ([#4253](https://github.com/opensearch-project/OpenSearch/pull/4253))
-- [Segment Replication] Update replicas to commit SegmentInfos instead of relying on SIS files from primary shards. ([#4402](https://github.com/opensearch-project/OpenSearch/pull/4402))
+- [CCR] Add getHistoryOperationsFromTranslog method to fetch the history snapshot from translogs ([#3948](https://github.com/opensearch-project/OpenSearch/pull/3948))
+- Relax visibility of the HTTP_CHANNEL_KEY and HTTP_SERVER_CHANNEL_KEY to make it possible for the plugins to access associated Netty4HttpChannel / Netty4HttpServerChannel instance ([#4638](https://github.com/opensearch-project/OpenSearch/pull/4638))
+- Use ReplicationFailedException instead of OpensearchException in ReplicationTarget ([#4725](https://github.com/opensearch-project/OpenSearch/pull/4725))
+- Migrate client transports to Apache HttpClient / Core 5.x ([#4459](https://github.com/opensearch-project/OpenSearch/pull/4459))
+- Support remote translog transfer for request level durability ([#4480](https://github.com/opensearch-project/OpenSearch/pull/4480))
+- Changed http code on create index API with bad input raising NotXContentException from 500 to 400 ([#4773](https://github.com/opensearch-project/OpenSearch/pull/4773))
+- Change http code for DecommissioningFailedException from 500 to 400 ([#5283](https://github.com/opensearch-project/OpenSearch/pull/5283))
 ### Deprecated
 ### Removed
+- Remove deprecated code to add node name into log pattern of log4j property file ([#4568](https://github.com/opensearch-project/OpenSearch/pull/4568))
+- Unused object and import within TransportClusterAllocationExplainAction ([#4639](https://github.com/opensearch-project/OpenSearch/pull/4639))
+- Remove LegacyESVersion.V_7_0_* and V_7_1_* Constants ([#2768](https://github.com/opensearch-project/OpenSearch/pull/2768))
+- Remove LegacyESVersion.V_7_2_ and V_7_3_ Constants ([#4702](https://github.com/opensearch-project/OpenSearch/pull/4702))
+- Always auto release the flood stage block ([#4703](https://github.com/opensearch-project/OpenSearch/pull/4703))
+- Remove LegacyESVersion.V_7_4_ and V_7_5_ Constants ([#4704](https://github.com/opensearch-project/OpenSearch/pull/4704))
+- Remove Legacy Version support from Snapshot/Restore Service ([#4728](https://github.com/opensearch-project/OpenSearch/pull/4728))
+- Remove deprecated serialization logic from pipeline aggs ([#4847](https://github.com/opensearch-project/OpenSearch/pull/4847))
+- Remove unused private methods ([#4926](https://github.com/opensearch-project/OpenSearch/pull/4926))
+- Remove LegacyESVersion.V_7_8_ and V_7_9_ Constants ([#4855](https://github.com/opensearch-project/OpenSearch/pull/4855))
+- Remove LegacyESVersion.V_7_6_ and V_7_7_ Constants ([#4837](https://github.com/opensearch-project/OpenSearch/pull/4837))
+- Remove LegacyESVersion.V_7_10_ Constants ([#5018](https://github.com/opensearch-project/OpenSearch/pull/5018))
+- Remove Version.V_1_ Constants ([#5021](https://github.com/opensearch-project/OpenSearch/pull/5021))
 ### Fixed
-- `opensearch-service.bat start` and `opensearch-service.bat manager` failing to run ([#4289](https://github.com/opensearch-project/OpenSearch/pull/4289))
-- PR reference to checkout code for changelog verifier ([#4296](https://github.com/opensearch-project/OpenSearch/pull/4296))
-- `opensearch.bat` and `opensearch-service.bat install` failing to run, missing logs directory ([#4305](https://github.com/opensearch-project/OpenSearch/pull/4305))
-- Restore using the class ClusterInfoRequest and ClusterInfoRequestBuilder from package 'org.opensearch.action.support.master.info' for subclasses ([#4307](https://github.com/opensearch-project/OpenSearch/pull/4307))
-
Do not fail replica shard due to primary closure ([#4133](https://github.com/opensearch-project/OpenSearch/pull/4133)) -- Add timeout on Mockito.verify to reduce flakyness in testReplicationOnDone test([#4314](https://github.com/opensearch-project/OpenSearch/pull/4314)) -- Commit workflow for dependabot changelog helper ([#4331](https://github.com/opensearch-project/OpenSearch/pull/4331)) -- Fixed cancellation of segment replication events ([#4225](https://github.com/opensearch-project/OpenSearch/pull/4225)) -- [Segment Replication] Bump segment infos counter before commit during replica promotion ([#4365](https://github.com/opensearch-project/OpenSearch/pull/4365)) -- Bugs for dependabot changelog verifier workflow ([#4364](https://github.com/opensearch-project/OpenSearch/pull/4364)) -- Fix flaky random test `NRTReplicationEngineTests.testUpdateSegments` ([#4352](https://github.com/opensearch-project/OpenSearch/pull/4352)) -- [Segment Replication] Extend FileChunkWriter to allow cancel on transport client ([#4386](https://github.com/opensearch-project/OpenSearch/pull/4386)) -- [Segment Replication] Add check to cancel ongoing replication with old primary on onNewCheckpoint on replica ([#4363](https://github.com/opensearch-project/OpenSearch/pull/4363)) -- Fix NoSuchFileExceptions with segment replication when computing primary metadata snapshots ([#4366](https://github.com/opensearch-project/OpenSearch/pull/4366)) -- [Segment Replication] Update flaky testOnNewCheckpointFromNewPrimaryCancelOngoingReplication unit test ([#4414](https://github.com/opensearch-project/OpenSearch/pull/4414)) -- Fixed the `_cat/shards/10_basic.yml` test cases fix. -- [Segment Replication] Fix timeout issue by calculating time needed to process getSegmentFiles ([#4426](https://github.com/opensearch-project/OpenSearch/pull/4426)) -- [Bug]: gradle check failing with java heap OutOfMemoryError (([#4328](https://github.com/opensearch-project/OpenSearch/ -- `opensearch.bat` fails to execute when install path includes spaces ([#4362](https://github.com/opensearch-project/OpenSearch/pull/4362)) +- Fix 'org.apache.hc.core5.http.ParseException: Invalid protocol version' under JDK 16+ ([#4827](https://github.com/opensearch-project/OpenSearch/pull/4827)) +- Fixed compression support for h2c protocol ([#4944](https://github.com/opensearch-project/OpenSearch/pull/4944)) +- Reject bulk requests with invalid actions ([#5299](https://github.com/opensearch-project/OpenSearch/issues/5299)) +- Support OpenSSL Provider with default Netty allocator ([#5460](https://github.com/opensearch-project/OpenSearch/pull/5460)) ### Security -- CVE-2022-25857 org.yaml:snakeyaml DOS vulnerability ([#4341](https://github.com/opensearch-project/OpenSearch/pull/4341)) -## [2.x] +## [Unreleased 2.x] ### Added -- Github workflow for changelog verification ([#4085](https://github.com/opensearch-project/OpenSearch/pull/4085)) -- Label configuration for dependabot PRs ([#4348](https://github.com/opensearch-project/OpenSearch/pull/4348)) -- Added RestLayer Changes for PIT stats ([#4217](https://github.com/opensearch-project/OpenSearch/pull/4217)) +- Prevent deletion of snapshots that are backing searchable snapshot indexes ([#5069](https://github.com/opensearch-project/OpenSearch/pull/5069)) +- Add max_shard_size parameter for shrink API ([#5229](https://github.com/opensearch-project/OpenSearch/pull/5229)) +### Dependencies +- Bumps `bcpg-fips` from 1.0.5.1 to 1.0.7.1 +- Bumps `azure-storage-blob` from 12.16.1 to 12.20.0 
([#4995](https://github.com/opensearch-project/OpenSearch/pull/4995))
+- Bumps `commons-compress` from 1.21 to 1.22 ([#5104](https://github.com/opensearch-project/OpenSearch/pull/5104))
+- Bump `opencensus-contrib-http-util` from 0.18.0 to 0.31.1 ([#3633](https://github.com/opensearch-project/OpenSearch/pull/3633))
+- Bump `geoip2` from 3.0.1 to 3.0.2 ([#5103](https://github.com/opensearch-project/OpenSearch/pull/5103))
+- Bump gradle-extra-configurations-plugin from 7.0.0 to 8.0.0 ([#4808](https://github.com/opensearch-project/OpenSearch/pull/4808))
 ### Changed
-
 ### Deprecated
-
 ### Removed
-
 ### Fixed
-- PR reference to checkout code for changelog verifier ([#4296](https://github.com/opensearch-project/OpenSearch/pull/4296))
-- Commit workflow for dependabot changelog helper ([#4331](https://github.com/opensearch-project/OpenSearch/pull/4331))
-
+- Fix 1.x compatibility bug with stored Tasks ([#5412](https://github.com/opensearch-project/OpenSearch/pull/5412))
+- Fix case sensitivity for wildcard queries ([#5462](https://github.com/opensearch-project/OpenSearch/pull/5462))
 ### Security
-
-[Unreleased]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...HEAD
-[2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.2.0...2.x
+[Unreleased 3.0]: https://github.com/opensearch-project/OpenSearch/compare/2.4...HEAD
+[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.4...2.x
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index fc02d52f0bc3b..d379d78829318 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -119,20 +119,39 @@ You may type this line on your own when writing your commit messages. However, i
 ## Changelog
-OpenSearch maintains version specific changelog by enforcing a change to the ongoing [CHANGELOG](CHANGELOG.md) file adhering to the [Keep A Changelog](https://keepachangelog.com/en/1.0.0/) format.
+OpenSearch maintains a version-specific changelog by enforcing a change to the ongoing [CHANGELOG](CHANGELOG.md) file adhering to the [Keep A Changelog](https://keepachangelog.com/en/1.0.0/) format. The purpose of the changelog is for the contributors and maintainers to incrementally build the release notes throughout the development process to avoid a painful and error-prone process of attempting to compile the release notes at release time. On each release the "unreleased" entries of the changelog are moved to the appropriate release notes document in the `./release-notes` folder. Also, incrementally building the changelog provides a concise, human-readable list of significant features that have been added to the unreleased version under development.
-Briefly, the changes are curated by version, with the changes to the main branch added chronologically to `Unreleased` version. Further, each version has corresponding sections which list out the category of the change - `Added`, `Changed`, `Deprecated`, `Removed`, `Fixed`, `Security`.
+### Which changes require a CHANGELOG entry?
+Changelogs are intended for operators/administrators, developers integrating with libraries and APIs, and end-users interacting with OpenSearch Dashboards and/or the REST API (collectively referred to as "user"). In short, any change that a user of OpenSearch might want to be aware of should be included in the changelog. The changelog is _not_ intended to replace the git commit log that developers of OpenSearch itself rely upon.
The following are some examples of changes that should be in the changelog:
+- A newly added feature
+- A fix for a user-facing bug
+- Dependency updates
+- Fixes for security issues

-### How to add my changes to [CHANGELOG](CHANGELOG.md)?
+The following are some examples where a changelog entry is not necessary:

-As a contributor, you must ensure that every pull request has the changes listed out within the corresponding version and appropriate section of [CHANGELOG](CHANGELOG.md) file.
+- Adding, modifying, or fixing tests
+- An incremental PR for a larger feature (such features should include _one_ changelog entry for the feature)
+- Documentation changes or code refactoring
+- Build-related changes

-Adding in the change is two step process -
-1. Add your changes to the corresponding section within the CHANGELOG file with dummy pull request information, publish the PR
+Any PR that does not include a changelog entry will result in a failure of the validation workflow in GitHub. If the contributor and maintainers agree that no changelog entry is required, then the `skip-changelog` label can be applied to the PR which will result in the workflow passing.
+
+### How to add my changes to [CHANGELOG](CHANGELOG.md)?
+Adding in the change is a two-step process:
+1. Add your changes to the corresponding section within the CHANGELOG file with dummy pull request information, publish the PR
 2. Update the entry for your change in [`CHANGELOG.md`](CHANGELOG.md) and make sure that you reference the pull request there.
+### Where should I put my CHANGELOG entry?
+Please review the [branching strategy](https://github.com/opensearch-project/.github/blob/main/RELEASING.md#opensearch-branching) document. The changelog on the `main` branch will contain sections for the _next major_ and _next minor_ releases. Your entry should go into the section it is intended to be released in. In practice, most changes to `main` will be backported to the next minor release so most entries will likely be in that section.
+
+The following examples assume the _next major_ release on main is 3.0, the _next minor_ release is 2.5, and the _current_ release is 2.4.
+
+- **Add a new feature to release in next minor:** Add a changelog entry to `[Unreleased 2.x]` on main, then backport to 2.x (including the changelog entry).
+- **Introduce a breaking API change to release in next major:** Add a changelog entry to `[Unreleased 3.0]` on main, do not backport.
+- **Upgrade a dependency to fix a CVE:** Add a changelog entry to `[Unreleased 2.x]` on main, then backport to 2.x (including the changelog entry), then backport to 2.4 and ensure the changelog entry is added to `[Unreleased 2.4.1]`.
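To make the two-step changelog flow above concrete, here is a hedged sketch of the contributor loop; the branch name, entry text, and PR number 9999 are placeholders rather than references to a real PR:

```bash
# Step 1: add a dummy entry under the intended section of CHANGELOG.md, e.g.
# under "## [Unreleased 2.x]" / "### Added":
#   - My user-facing feature ([#9999](https://github.com/opensearch-project/OpenSearch/pull/9999))
git checkout -b my-feature
git add CHANGELOG.md
git commit -m "Add my feature and its changelog entry"
git push origin my-feature        # then open the PR from this branch

# Step 2: once the real PR number is known, replace the dummy #9999 reference
# and push again so the changelog verifier workflow can pass.

# If maintainers agree no entry is needed, the skip-changelog label makes the
# verifier pass instead (this assumes the GitHub CLI is installed):
gh pr edit 9999 --add-label "skip-changelog"
```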
## Review Process diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 8c2a6b4889122..313aecd62f5f9 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -38,21 +38,22 @@ - [Gradle Plugins](#gradle-plugins) - [Distribution Download Plugin](#distribution-download-plugin) - [Creating fat-JAR of a Module](#creating-fat-jar-of-a-module) - - [Misc](#misc) - - [git-secrets](#git-secrets) - - [Installation](#installation) - - [Configuration](#configuration) - [Components](#components) - [Build libraries & interfaces](#build-libraries--interfaces) - [Clients & Libraries](#clients--libraries) - [Plugins](#plugins-1) - - [Indexing & search](#indexing--search) + - [Indexing & Search](#indexing--search) - [Aggregations](#aggregations) - [Distributed Framework](#distributed-framework) - - [Submitting Changes](#submitting-changes) - - [Backports](#backports) - - [LineLint](#linelint) -- [Lucene Snapshots](#lucene-snapshots) + - [Misc](#misc) + - [Git Secrets](#git-secrets) + - [Installation](#installation) + - [Configuration](#configuration) + - [Submitting Changes](#submitting-changes) + - [Backports](#backports) + - [LineLint](#linelint) + - [Lucene Snapshots](#lucene-snapshots) + - [Flaky Tests](#flaky-tests) # Developer Guide @@ -414,37 +415,12 @@ Refer the installed JAR as any other maven artifact, e.g. ``` -## Misc - -### git-secrets - -Security is our top priority. Avoid checking in credentials. - -#### Installation -Install [awslabs/git-secrets](https://github.com/awslabs/git-secrets) by running the following commands. -``` -git clone https://github.com/awslabs/git-secrets.git -cd git-secrets -make install -``` - -#### Configuration -You can configure git secrets per repository, you need to change the directory to the root of the repository and run the following command. -``` -git secrets --install -✓ Installed commit-msg hook to .git/hooks/commit-msg -✓ Installed pre-commit hook to .git/hooks/pre-commit -✓ Installed prepare-commit-msg hook to .git/hooks/prepare-commit-msg -``` -Then, you need to apply patterns for git-secrets, you can install the AWS standard patterns by running the following command. -``` -git secrets --register-aws -``` - ## Components + As you work in the OpenSearch repo you may notice issues getting labeled with component labels. It's a housekeeping task to help group together similar pieces of work. You can pretty much ignore it, but if you're curious, here's what the different labels mean: ### Build libraries & interfaces + Tasks to make sure the build tasks are useful and packaging and distribution are easy. Includes: @@ -458,6 +434,7 @@ Includes: ### Clients & Libraries + APIs and communication mechanisms for external connections to OpenSearch. This includes the “library” directory in OpenSearch (a set of common functions). Includes: @@ -467,6 +444,7 @@ Includes: - CLI ### Plugins + Anything touching the plugin infrastructure within core OpenSearch. Includes: @@ -476,7 +454,8 @@ Includes: - Plugin interfaces -### Indexing & search +### Indexing & Search + The critical path of indexing and search, including: Measure index and search, performance, Improving the performance of indexing and search, ensure synchronization OpenSearch APIs with upstream Lucene change (e.g. new field types, changing doc values and codex). Includes: @@ -487,6 +466,7 @@ Includes: - DocValues ### Aggregations + Making sure OpenSearch can be used as a compute engine. 
 Includes:
@@ -495,6 +475,7 @@ Includes:
 - Framework
 ### Distributed Framework
+
 Work to make sure that OpenSearch can scale in a distributed manner.
 Includes:
@@ -506,15 +487,43 @@ Includes:
 - Shard Strategies
 - Circuit Breakers
-## Submitting Changes
+## Misc
+
+### Git Secrets
+
+Security is our top priority. Avoid checking in credentials.
+
+#### Installation
+Install [awslabs/git-secrets](https://github.com/awslabs/git-secrets) by running the following commands.
+```
+git clone https://github.com/awslabs/git-secrets.git
+cd git-secrets
+make install
+```
+
+#### Configuration
+You can configure git secrets per repository, you need to change the directory to the root of the repository and run the following command.
+```
+git secrets --install
+✓ Installed commit-msg hook to .git/hooks/commit-msg
+✓ Installed pre-commit hook to .git/hooks/pre-commit
+✓ Installed prepare-commit-msg hook to .git/hooks/prepare-commit-msg
+```
+Then, you need to apply patterns for git-secrets, you can install the AWS standard patterns by running the following command.
+```
+git secrets --register-aws
+```
+
+### Submitting Changes
 See [CONTRIBUTING](CONTRIBUTING.md).
-## Backports
+### Backports
 The Github workflow in [`backport.yml`](.github/workflows/backport.yml) creates backport PRs automatically when the original PR with an appropriate label `backport <backport-branch-name>` is merged to main with the backport workflow run successfully on the PR. For example, if a PR on main needs to be backported to `1.x` branch, add a label `backport 1.x` to the PR and make sure the backport workflow runs on the PR along with other checks. Once this PR is merged to main, the workflow will create a backport PR to the `1.x` branch.
-## LineLint
+### LineLint
+
 A linter in [`code-hygiene.yml`](.github/workflows/code-hygiene.yml) that validates simple newline and whitespace rules in all sorts of files. It can:
 - Recursively check a directory tree for files that do not end in a newline
 - Automatically fix these files by adding a newline or trimming extra newlines.
@@ -529,7 +538,20 @@ Pass a list of files or directories to limit your search.
 linelint README.md LICENSE
-# Lucene Snapshots
+### Lucene Snapshots
+
 The Github workflow in [lucene-snapshots.yml](.github/workflows/lucene-snapshots.yml) is a Github workflow executable by maintainers to build a top-down snapshot build of lucene.
-These snapshots are available to test compatibility with upcoming changes to Lucene by updating the version at [version.properties](buildsrc/version.properties) with the `version-snapshot-sha` version.
-Example: `lucene = 10.0.0-snapshot-2e941fc`.
+These snapshots are available to test compatibility with upcoming changes to Lucene by updating the version at [version.properties](buildsrc/version.properties) with the `version-snapshot-sha` version. Example: `lucene = 10.0.0-snapshot-2e941fc`.
+
+### Flaky Tests
+
+OpenSearch has a very large test suite with long running, often failing (flaky), integration tests. Such individual tests are labelled as [Flaky Random Test Failure](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aopen+is%3Aissue+label%3A%22flaky-test%22). Your help is wanted fixing these!
+
+If you encounter a build/test failure in CI that is unrelated to the change in your pull request, it may be a known flaky test, or a new test failure.
+
+1. Follow failed CI links, and locate the failing test(s).
+2. Copy-paste the failure into a comment of your PR.
+3. Search through [issues](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aopen+is%3Aissue+label%3A%22flaky-test%22) using the name of the failed test for whether this is a known flaky test.
+4. If an existing issue is found, paste a link to the known issue in a comment to your PR.
+5. If no existing issue is found, open one.
+6. Retry CI via the GitHub UX or by pushing an update to your PR.
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
index 2f54656b2ab59..789e250e10d19 100644
--- a/MAINTAINERS.md
+++ b/MAINTAINERS.md
@@ -5,7 +5,6 @@
 | Maintainer | GitHub ID | Affiliation |
 | --------------- | --------- | ----------- |
-| Abbas Hussain | [abbashus](https://github.com/abbashus) | Amazon |
 | Anas Alkouz | [anasalkouz](https://github.com/anasalkouz) | Amazon |
 | Andrew Ross | [andrross](https://github.com/andrross)| Amazon |
 | Andriy Redko | [reta](https://github.com/reta) | Aiven |
@@ -22,8 +21,8 @@
 | Rishikesh Pasham | [Rishikesh1159](https://github.com/Rishikesh1159) | Amazon|
 | Ryan Bogan | [ryanbogan](https://github.com/ryanbogan) | Amazon |
 | Sarat Vemulapalli | [saratvemulapalli](https://github.com/saratvemulapalli) | Amazon |
-| Shweta Thareja |[shwetathareja](https://github.com/shwetathareja) | Amazon |
-| Suraj Singh |[dreamer-89](https://github.com/dreamer-89) | Amazon |
+| Shweta Thareja | [shwetathareja](https://github.com/shwetathareja) | Amazon |
+| Suraj Singh | [dreamer-89](https://github.com/dreamer-89) | Amazon |
 | Tianli Feng | [tlfeng](https://github.com/tlfeng) | Amazon |
 | Vacha Shah | [VachaShah](https://github.com/VachaShah) | Amazon |
 | Xue Zhou | [xuezhou25](https://github.com/xuezhou25) | Amazon |
@@ -32,6 +31,7 @@
 | Maintainer | GitHub ID | Affiliation |
 | --------------- | --------- | ----------- |
+| Abbas Hussain | [abbashus](https://github.com/abbashus) | Amazon |
 | Megha Sai Kavikondala | [meghasaik](https://github.com/meghasaik) | Amazon |
 [This document](https://github.com/opensearch-project/.github/blob/main/MAINTAINERS.md) explains what maintainers do in this repo, and how they should be doing it. If you're interested in contributing, see [CONTRIBUTING](CONTRIBUTING.md).
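The flaky-test triage steps above can also be scripted; a small sketch assuming the GitHub CLI (`gh`) is installed, with the test name and issue/PR numbers purely illustrative:

```bash
# Step 3: search existing flaky-test issues for the failing test's name.
gh issue list --repo opensearch-project/OpenSearch \
  --label "flaky-test" --search "testReplicationOnDone" --state all

# Step 5: if nothing matches, open a tracking issue.
gh issue create --repo opensearch-project/OpenSearch \
  --title "[Flaky Test] org.example.MyIT.testReplicationOnDone" \
  --label "flaky-test"

# Related: the backport workflow described earlier is label-driven too, e.g.
gh pr edit 1234 --add-label "backport 2.x"
```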
diff --git a/README.md b/README.md index a7abedeefde8e..45d0a624ae0aa 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,13 @@ [![Chat](https://img.shields.io/badge/chat-on%20forums-blue)](https://forum.opensearch.org/c/opensearch/) [![Documentation](https://img.shields.io/badge/documentation-reference-blue)](https://opensearch.org/docs/latest/opensearch/index/) -[![codecov](https://codecov.io/gh/opensearch-project/OpenSearch/branch/main/graph/badge.svg)](https://codecov.io/gh/opensearch-project/OpenSearch) +[![Code Coverage](https://codecov.io/gh/opensearch-project/OpenSearch/branch/main/graph/badge.svg)](https://codecov.io/gh/opensearch-project/OpenSearch) +[![Untriaged Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/untriaged?labelColor=red)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"untriaged") +[![Security Vulnerabilities](https://img.shields.io/github/issues/opensearch-project/OpenSearch/security%20vulnerability?labelColor=red)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"security%20vulnerability") +[![Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch)](https://github.com/opensearch-project/OpenSearch/issues) +[![Open Pull Requests](https://img.shields.io/github/issues-pr/opensearch-project/OpenSearch)](https://github.com/opensearch-project/OpenSearch/pulls) +[![2.4 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v2.4.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v2.4.0") +[![3.0 Open Issues](https://img.shields.io/github/issues/opensearch-project/OpenSearch/v3.0.0)](https://github.com/opensearch-project/OpenSearch/issues?q=is%3Aissue+is%3Aopen+label%3A"v3.0.0") [![GHA gradle check](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/gradle-check.yml) [![GHA validate pull request](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/wrapper.yml) [![GHA precommit](https://github.com/opensearch-project/OpenSearch/actions/workflows/precommit.yml/badge.svg)](https://github.com/opensearch-project/OpenSearch/actions/workflows/precommit.yml) diff --git a/build.gradle b/build.gradle index bcae5bc3884a7..2211da85345b5 100644 --- a/build.gradle +++ b/build.gradle @@ -55,7 +55,7 @@ plugins { id 'lifecycle-base' id 'opensearch.docker-support' id 'opensearch.global-build-info' - id "com.diffplug.spotless" version "6.10.0" apply false + id "com.diffplug.spotless" version "6.11.0" apply false id "org.gradle.test-retry" version "1.4.1" apply false id "test-report-aggregation" id 'jacoco-report-aggregation' @@ -316,6 +316,7 @@ allprojects { javadoc.options.encoding = 'UTF8' javadoc.options.addStringOption('Xdoclint:all,-missing', '-quiet') javadoc.options.tags = ["opensearch.internal", "opensearch.api", "opensearch.experimental"] + javadoc.options.addStringOption("-release", targetCompatibility.majorVersion) } // support for reproducible builds @@ -412,6 +413,9 @@ gradle.projectsEvaluated { if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_17) { task.jvmArgs += ["-Djava.security.manager=allow"] } + if (BuildParams.runtimeJavaVersion >= JavaVersion.VERSION_19) { + task.jvmArgs += ["--enable-preview"] + } } } @@ -450,9 +454,11 @@ subprojects { apply plugin: 
"org.gradle.test-retry" tasks.withType(Test).configureEach { retry { + if (BuildParams.isCi()) { + maxRetries = 3 + maxFailures = 10 + } failOnPassedAfterRetry = false - maxRetries = 3 - maxFailures = 10 } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3ef3c6f9faf49..adf69a533fcc9 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -105,19 +105,20 @@ dependencies { api 'commons-codec:commons-codec:1.15' api 'org.apache.commons:commons-compress:1.21' api 'org.apache.ant:ant:1.10.12' - api 'com.netflix.nebula:gradle-extra-configurations-plugin:7.0.0' - api 'com.netflix.nebula:nebula-publishing-plugin:4.4.4' + api 'com.netflix.nebula:gradle-extra-configurations-plugin:8.0.0' + api 'com.netflix.nebula:nebula-publishing-plugin:4.6.0' api 'com.netflix.nebula:gradle-info-plugin:11.3.3' api 'org.apache.rat:apache-rat:0.13' api 'commons-io:commons-io:2.7' api "net.java.dev.jna:jna:5.11.0" api 'gradle.plugin.com.github.johnrengelman:shadow:7.1.2' api 'org.jdom:jdom2:2.0.6.1' - api 'org.jetbrains.kotlin:kotlin-stdlib-jdk8:1.7.10' - api 'de.thetaphi:forbiddenapis:3.3' + api "org.jetbrains.kotlin:kotlin-stdlib-jdk8:${props.getProperty('kotlin')}" + api 'de.thetaphi:forbiddenapis:3.4' api 'com.avast.gradle:gradle-docker-compose-plugin:0.15.2' + api "org.yaml:snakeyaml:${props.getProperty('snakeyaml')}" api 'org.apache.maven:maven-model:3.6.2' - api 'com.networknt:json-schema-validator:1.0.69' + api 'com.networknt:json-schema-validator:1.0.73' api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson_databind')}" testFixturesApi "junit:junit:${props.getProperty('junit')}" @@ -126,7 +127,7 @@ dependencies { testFixturesApi gradleTestKit() testImplementation 'com.github.tomakehurst:wiremock-jre8-standalone:2.33.2' testImplementation "org.mockito:mockito-core:${props.getProperty('mockito')}" - integTestImplementation('org.spockframework:spock-core:2.1-groovy-3.0') { + integTestImplementation('org.spockframework:spock-core:2.3-groovy-3.0') { exclude module: "groovy" } } diff --git a/buildSrc/reaper/src/main/java/org/elasticsearch/gradle/reaper/package-info.java b/buildSrc/reaper/src/main/java/org/elasticsearch/gradle/reaper/package-info.java deleted file mode 100644 index 40c4b60f6deb0..0000000000000 --- a/buildSrc/reaper/src/main/java/org/elasticsearch/gradle/reaper/package-info.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Licensed to OpenSearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. OpenSearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * - * SPDX-License-Identifier: Apache-2.0 - */ -/** - * Adding a sample package level javadoc to pass javadoc validation - * on reaper package. 
- * TODO - Need to add package description
- */
-package org.elasticsearch.gradle.reaper;
diff --git a/buildSrc/src/integTest/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTarIT.java b/buildSrc/src/integTest/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTarIT.java
index b70574c507f70..61aa55b9c6b53 100644
--- a/buildSrc/src/integTest/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTarIT.java
+++ b/buildSrc/src/integTest/java/org/opensearch/gradle/tar/SymbolicLinkPreservingTarIT.java
@@ -35,6 +35,7 @@ import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
 import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
 import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
+import org.apache.tools.ant.taskdefs.condition.Os;
 import org.opensearch.gradle.test.GradleIntegrationTestCase;
 import org.gradle.api.GradleException;
 import org.gradle.testkit.runner.GradleRunner;
@@ -52,6 +53,7 @@ import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.equalTo;
+import static org.junit.Assume.assumeFalse;
 public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
@@ -60,6 +62,7 @@ public class SymbolicLinkPreservingTarIT extends GradleIntegrationTestCase {
 @Before
 public void before() throws IOException {
+ assumeFalse("Skip tar tests on windows.", Os.isFamily(Os.FAMILY_WINDOWS));
 final Path realFolder = temporaryFolder.getRoot().toPath().resolve("real-folder");
 Files.createDirectory(realFolder);
 Files.createFile(realFolder.resolve("file"));
diff --git a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
index 31677965ab0d3..b7c78991a0da3 100644
--- a/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
+++ b/buildSrc/src/main/groovy/org/opensearch/gradle/plugin/PluginBuildPlugin.groovy
@@ -29,13 +29,13 @@ package org.opensearch.gradle.plugin
 import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin
+import org.gradle.api.tasks.bundling.AbstractArchiveTask
 import org.opensearch.gradle.BuildPlugin
 import org.opensearch.gradle.NoticeTask
 import org.opensearch.gradle.Version
 import org.opensearch.gradle.VersionProperties
 import org.opensearch.gradle.dependencies.CompileOnlyResolvePlugin
 import org.opensearch.gradle.info.BuildParams
-import org.opensearch.gradle.plugin.PluginPropertiesExtension
 import org.opensearch.gradle.test.RestTestBasePlugin
 import org.opensearch.gradle.testclusters.RunTask
 import org.opensearch.gradle.util.Util
@@ -134,6 +134,12 @@ class PluginBuildPlugin implements Plugin<Project> {
 }
 project.configurations.getByName('default')
 .extendsFrom(project.configurations.getByName('runtimeClasspath'))
+ project.tasks.withType(AbstractArchiveTask.class).configureEach { task ->
+ // ignore file timestamps
+ // be consistent in archive file order
+ task.preserveFileTimestamps = false
+ task.reproducibleFileOrder = true
+ }
 // allow running ES with this plugin in the foreground of a build
 project.tasks.register('run', RunTask) {
 dependsOn(project.tasks.bundlePlugin)
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
index 3f65abcc25d17..cddd03ccc2019 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/BwcVersions.java
@@ -105,7 +105,7 @@ public class BwcVersions {
 private final Map<Integer, List<Version>> groupByMajor;
 private final Map<Version, UnreleasedVersionInfo> unreleased;
- public class UnreleasedVersionInfo {
+ public static class UnreleasedVersionInfo {
 public final Version version;
 public final String branch;
 public final String gradleProjectPath;
@@ -149,13 +149,7 @@ public BwcVersions(SortedSet<Version> allVersions, Version currentVersionPropert
 groupByMajor = allVersions.stream()
 // We only care about the last 2 majors when it comes to BWC.
- // It might take us time to remove the older ones from versionLines, so we allow them to exist.
- // Adjust the major number since OpenSearch 1.x is released after predecessor version 7.x
- .filter(
- version -> (version.getMajor() == 1 ? 7 : version.getMajor()) > (currentVersion.getMajor() == 1
- ? 7
- : currentVersion.getMajor()) - 2
- )
+ .filter(version -> version.getMajor() > currentVersion.getMajor() - 2)
 .collect(Collectors.groupingBy(Version::getMajor, Collectors.toList()));
 assertCurrentVersionMatchesParsed(currentVersionProperty);
@@ -174,9 +168,7 @@ public BwcVersions(SortedSet<Version> allVersions, Version currentVersionPropert
 private void assertNoOlderThanTwoMajors() {
 Set<Integer> majors = groupByMajor.keySet();
- // until OpenSearch 3.0 we will need to carry three major support
- // (1, 7, 6) && (2, 1, 7) since OpenSearch 1.0 === Legacy 7.x
- int numSupportedMajors = (currentVersion.getMajor() < 3) ? 3 : 2;
+ int numSupportedMajors = 2;
 if (majors.size() != numSupportedMajors && currentVersion.getMinor() != 0 && currentVersion.getRevision() != 0) {
 throw new IllegalStateException("Expected exactly 2 majors in parsed versions but found: " + majors);
 }
@@ -207,7 +199,7 @@ public void forPreviousUnreleased(Consumer<UnreleasedVersionInfo> consumer) {
 .map(version -> new UnreleasedVersionInfo(version, getBranchFor(version), getGradleProjectPathFor(version)))
 .collect(Collectors.toList());
- collect.forEach(uvi -> consumer.accept(uvi));
+ collect.forEach(consumer);
 }
 private String getGradleProjectPathFor(Version version) {
@@ -271,18 +263,9 @@ public List<Version> getUnreleased() {
 // The current version is being worked, is always unreleased
 unreleased.add(currentVersion);
- // No unreleased versions for 1.0.0
- // todo remove this hack
- if (currentVersion.equals(Version.fromString("1.0.0"))) {
- return unmodifiableList(unreleased);
- }
-
 // the tip of the previous major is unreleased for sure, be it a minor or a bugfix
 if (currentVersion.getMajor() != 1) {
- final Version latestOfPreviousMajor = getLatestVersionByKey(
- this.groupByMajor,
- currentVersion.getMajor() == 1 ? 7 : currentVersion.getMajor() - 1
- );
+ final Version latestOfPreviousMajor = getLatestVersionByKey(this.groupByMajor, currentVersion.getMajor() - 1);
 unreleased.add(latestOfPreviousMajor);
 if (latestOfPreviousMajor.getRevision() == 0) {
 // if the previous major is a x.y.0 release, then the tip of the minor before that (y-1) is also unreleased
@@ -311,7 +294,7 @@ public List<Version> getUnreleased() {
 }
 }
- return unmodifiableList(unreleased.stream().sorted().distinct().collect(Collectors.toList()));
+ return unreleased.stream().sorted().distinct().collect(Collectors.toUnmodifiableList());
 }
 private Version getLatestInMinor(int major, int minor) {
@@ -342,7 +325,7 @@ private Map<Integer, List<Version>> getReleasedMajorGroupedByMinor() {
 public void compareToAuthoritative(List<Version> authoritativeReleasedVersions) {
 Set<Version> notReallyReleased = new HashSet<>(getReleased());
- notReallyReleased.removeAll(authoritativeReleasedVersions);
+ authoritativeReleasedVersions.forEach(notReallyReleased::remove);
 if (notReallyReleased.isEmpty() == false) {
 throw new IllegalStateException(
 "out-of-date released versions"
@@ -370,32 +353,21 @@ private List<Version> getReleased() {
 .stream()
 .flatMap(Collection::stream)
 .filter(each -> unreleased.contains(each) == false)
- // this is to make sure we only consider OpenSearch versions
- // TODO remove this filter once legacy ES versions are no longer supported
- .filter(v -> v.onOrAfter("1.0.0"))
 .collect(Collectors.toList());
 }
 public List<Version> getIndexCompatible() {
 int currentMajor = currentVersion.getMajor();
 int prevMajor = getPreviousMajor(currentMajor);
- List<Version> result = Stream.concat(groupByMajor.get(prevMajor).stream(), groupByMajor.get(currentMajor).stream())
+ return Stream.concat(groupByMajor.get(prevMajor).stream(), groupByMajor.get(currentMajor).stream())
 .filter(version -> version.equals(currentVersion) == false)
- .collect(Collectors.toList());
- if (currentMajor == 1) {
- // add 6.x compatible for OpenSearch 1.0.0
- return unmodifiableList(Stream.concat(groupByMajor.get(prevMajor - 1).stream(), result.stream()).collect(Collectors.toList()));
- } else if (currentMajor == 2) {
- // add 7.x compatible for OpenSearch 2.0.0
- return unmodifiableList(Stream.concat(groupByMajor.get(7).stream(), result.stream()).collect(Collectors.toList()));
- }
- return unmodifiableList(result);
+ .collect(Collectors.toUnmodifiableList());
 }
 public List<Version> getWireCompatible() {
 List<Version> wireCompat = new ArrayList<>();
 int currentMajor = currentVersion.getMajor();
- int lastMajor = currentMajor == 1 ? 6 : currentMajor == 2 ? 7 : currentMajor - 1;
+ int lastMajor = currentMajor - 1;
 List<Version> lastMajorList = groupByMajor.get(lastMajor);
 if (lastMajorList == null) {
 throw new IllegalStateException("Expected to find a list of versions for version: " + lastMajor);
@@ -405,20 +377,6 @@ public List<Version> getWireCompatible() {
 wireCompat.add(lastMajorList.get(i));
 }
- // if current is OpenSearch 1.0.0 add all of the 7.x line:
- if (currentMajor == 1) {
- List<Version> previousMajor = groupByMajor.get(7);
- for (Version v : previousMajor) {
- wireCompat.add(v);
- }
- } else if (currentMajor == 2) {
- // add all of the 1.x line:
- List<Version> previousMajor = groupByMajor.get(1);
- for (Version v : previousMajor) {
- wireCompat.add(v);
- }
- }
-
 wireCompat.addAll(groupByMajor.get(currentMajor));
 wireCompat.remove(currentVersion);
 wireCompat.sort(Version::compareTo);
@@ -438,7 +396,7 @@ public List<Version> getUnreleasedWireCompatible() {
 }
 private int getPreviousMajor(int currentMajor) {
- return currentMajor == 1 ? 7 : currentMajor - 1;
+ return currentMajor - 1;
 }
 }
diff --git a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java
index ae7b0d938e8ef..87a565e6f4431 100644
--- a/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java
+++ b/buildSrc/src/main/java/org/opensearch/gradle/DistributionDownloadPlugin.java
@@ -51,8 +51,7 @@
 import java.util.Arrays;
 import java.util.Comparator;
-import java.util.List;
-import java.util.stream.Collectors;
+import java.util.Objects;
 /**
 * A plugin to manage getting and extracting distributions of OpenSearch.
 */
@@ -71,12 +70,6 @@ public class DistributionDownloadPlugin implements Plugin<Project> {
 private static final String SNAPSHOT_REPO_NAME = "opensearch-snapshots";
 public static final String DISTRO_EXTRACTED_CONFIG_PREFIX = "opensearch_distro_extracted_";
- // for downloading Elasticsearch OSS distributions to run BWC
- private static final String FAKE_IVY_GROUP_ES = "elasticsearch-distribution";
- private static final String DOWNLOAD_REPO_NAME_ES = "elasticsearch-downloads";
- private static final String SNAPSHOT_REPO_NAME_ES = "elasticsearch-snapshots";
- private static final String FAKE_SNAPSHOT_IVY_GROUP_ES = "elasticsearch-distribution-snapshot";
-
 private static final String RELEASE_PATTERN_LAYOUT = "/core/opensearch/[revision]/[module]-min-[revision](-[classifier]).[ext]";
 private static final String SNAPSHOT_PATTERN_LAYOUT = "/snapshots/core/opensearch/[revision]/[module]-min-[revision](-[classifier])-latest.[ext]";
@@ -159,35 +152,20 @@ private DistributionDependency resolveDependencyNotation(Project p, OpenSearchDi
 return distributionsResolutionStrategiesContainer.stream()
 .sorted(Comparator.comparingInt(DistributionResolution::getPriority))
 .map(r -> r.getResolver().resolve(p, distribution))
- .filter(d -> d != null)
+ .filter(Objects::nonNull)
 .findFirst()
 .orElseGet(() -> DistributionDependency.of(dependencyNotation(distribution)));
 }
 private static void addIvyRepo(Project project, String name, String url, String group, String... patternLayout) {
- final List<IvyArtifactRepository> repos = Arrays.stream(patternLayout).map(pattern -> project.getRepositories().ivy(repo -> {
- repo.setName(name);
- repo.setUrl(url);
- repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
- repo.patternLayout(layout -> layout.artifact(pattern));
- })).collect(Collectors.toList());
-
 project.getRepositories().exclusiveContent(exclusiveContentRepository -> {
 exclusiveContentRepository.filter(config -> config.includeGroup(group));
- exclusiveContentRepository.forRepositories(repos.toArray(new IvyArtifactRepository[repos.size()]));
- });
- }
-
- private static void addIvyRepo2(Project project, String name, String url, String group) {
- IvyArtifactRepository ivyRepo = project.getRepositories().ivy(repo -> {
- repo.setName(name);
- repo.setUrl(url);
- repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
- repo.patternLayout(layout -> layout.artifact("/downloads/elasticsearch/elasticsearch-oss-[revision](-[classifier]).[ext]"));
- });
- project.getRepositories().exclusiveContent(exclusiveContentRepository -> {
- exclusiveContentRepository.filter(config -> config.includeGroup(group));
- exclusiveContentRepository.forRepositories(ivyRepo);
+ exclusiveContentRepository.forRepositories(Arrays.stream(patternLayout).map(pattern -> project.getRepositories().ivy(repo -> {
+ repo.setName(name);
+ repo.setUrl(url);
+ repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
+ repo.patternLayout(layout -> layout.artifact(pattern));
+ })).toArray(IvyArtifactRepository[]::new));
 });
 }
@@ -211,9 +189,6 @@ private static void setupDownloadServiceRepo(Project project) {
 );
 addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT);
 }
-
- addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES);
- addIvyRepo2(project, SNAPSHOT_REPO_NAME_ES, "https://snapshots-no-kpi.elastic.co", FAKE_SNAPSHOT_IVY_GROUP_ES);
 }
 /**
@@ -222,16 +197,12 @@ private static void setupDownloadServiceRepo(Project project) {
 * The returned object is suitable to be passed to {@link DependencyHandler}.
 * The concrete type of the object will be a set of maven coordinates as a {@link String}.
 * Maven coordinates point to either the integ-test-zip coordinates on maven central, or a set of artificial
- * coordinates that resolve to the Elastic download service through an ivy repository.
+ * coordinates that resolve to the OpenSearch download service through an ivy repository.
 */
 private String dependencyNotation(OpenSearchDistribution distribution) {
 Version distroVersion = Version.fromString(distribution.getVersion());
 if (distribution.getType() == Type.INTEG_TEST_ZIP) {
- if (distroVersion.onOrAfter("1.0.0")) {
- return "org.opensearch.distribution.integ-test-zip:opensearch:" + distribution.getVersion() + "@zip";
- } else {
- return "org.elasticsearch.distribution.integ-test-zip:elasticsearch:" + distribution.getVersion() + "@zip";
- }
+ return "org.opensearch.distribution.integ-test-zip:opensearch:" + distribution.getVersion() + "@zip";
 }
 String extension = distribution.getType().toString();
@@ -239,42 +210,24 @@ private String dependencyNotation(OpenSearchDistribution distribution) {
 if (distribution.getType() == Type.ARCHIVE) {
 extension = distribution.getPlatform() == Platform.WINDOWS ?
"zip" : "tar.gz"; - if (distroVersion.onOrAfter("1.0.0")) { - switch (distribution.getArchitecture()) { - case ARM64: - classifier = ":" + distribution.getPlatform() + "-arm64"; - break; - case X64: - classifier = ":" + distribution.getPlatform() + "-x64"; - break; - case S390X: - classifier = ":" + distribution.getPlatform() + "-s390x"; - break; - default: - throw new IllegalArgumentException("Unsupported architecture: " + distribution.getArchitecture()); - } - } else if (distroVersion.onOrAfter("7.0.0")) { - classifier = ":" + distribution.getPlatform() + "-x86_64"; - } else { - classifier = ""; + switch (distribution.getArchitecture()) { + case ARM64: + classifier = ":" + distribution.getPlatform() + "-arm64"; + break; + case X64: + classifier = ":" + distribution.getPlatform() + "-x64"; + break; + case S390X: + classifier = ":" + distribution.getPlatform() + "-s390x"; + break; + default: + throw new IllegalArgumentException("Unsupported architecture: " + distribution.getArchitecture()); } } else if (distribution.getType() == Type.DEB) { - if (distroVersion.onOrAfter("7.0.0")) { - classifier = ":amd64"; - } else { - classifier = ""; - } - } else if (distribution.getType() == Type.RPM && distroVersion.before("7.0.0")) { - classifier = ""; + classifier = ":amd64"; } - String group; - if (distroVersion.onOrAfter("1.0.0")) { - group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP; - return group + ":opensearch" + ":" + distribution.getVersion() + classifier + "@" + extension; - } else { - group = distribution.getVersion().endsWith("-SNAPSHOT") ? FAKE_SNAPSHOT_IVY_GROUP_ES : FAKE_IVY_GROUP_ES; - return group + ":elasticsearch-oss" + ":" + distribution.getVersion() + classifier + "@" + extension; - } + String group = distribution.getVersion().endsWith("-SNAPSHOT") ? 
FAKE_SNAPSHOT_IVY_GROUP : FAKE_IVY_GROUP; + return group + ":opensearch" + ":" + distribution.getVersion() + classifier + "@" + extension; } } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java index 4b289de3f0619..08b7054d7d53a 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/Jdk.java @@ -37,6 +37,7 @@ import org.gradle.api.model.ObjectFactory; import org.gradle.api.provider.Property; import org.gradle.api.tasks.TaskDependency; +import org.gradle.internal.os.OperatingSystem; import java.io.File; import java.util.Arrays; @@ -128,7 +129,7 @@ public void setArchitecture(final String architecture) { "unknown architecture [" + jdkArchitecture + "] for jdk [" + name + "], must be one of " + ALLOWED_ARCHITECTURES ); } - this.architecture.set(architecture); + this.architecture.set(jdkArchitecture); } public String getBaseVersion() { @@ -169,7 +170,7 @@ public Object getBinJavaPath() { return new Object() { @Override public String toString() { - return getHomeRoot() + "/bin/java"; + return OperatingSystem.current().getExecutableName(getHomeRoot() + "/bin/java"); } }; } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java index 9d6e78014916d..cdf22407f6076 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/OpenSearchTestBasePlugin.java @@ -109,7 +109,12 @@ public void execute(Task t) { test.systemProperty("java.locale.providers", "SPI,JRE"); } else { test.systemProperty("java.locale.providers", "SPI,COMPAT"); - test.jvmArgs("--illegal-access=warn"); + if (test.getJavaVersion().compareTo(JavaVersion.VERSION_17) < 0) { + test.jvmArgs("--illegal-access=warn"); + } + } + if (test.getJavaVersion().compareTo(JavaVersion.VERSION_17) > 0) { + test.jvmArgs("-Djava.security.manager=allow"); } } }); diff --git a/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java index 2bdef8e4cd244..be12fdd99c1df 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/PublishPlugin.java @@ -92,7 +92,7 @@ public String call() throws Exception { return String.format( "%s/distributions/%s-%s.pom", project.getBuildDir(), - getArchivesBaseName(project), + pomTask.getName().toLowerCase().contains("zip") ? 
project.getName() : getArchivesBaseName(project), project.getVersion() ); } @@ -130,7 +130,6 @@ public String call() throws Exception { publication.getPom().withXml(PublishPlugin::addScmInfo); if (!publication.getName().toLowerCase().contains("zip")) { - // have to defer this until archivesBaseName is set project.afterEvaluate(p -> publication.setArtifactId(getArchivesBaseName(project))); @@ -139,6 +138,8 @@ public String call() throws Exception { publication.artifact(project.getTasks().getByName("sourcesJar")); publication.artifact(project.getTasks().getByName("javadocJar")); } + } else { + project.afterEvaluate(p -> publication.setArtifactId(project.getName())); } generatePomTask.configure( diff --git a/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java index 166d8e3269d70..7708e4583ed7a 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/info/GlobalBuildInfoPlugin.java @@ -45,13 +45,15 @@ import org.gradle.internal.jvm.Jvm; import org.gradle.internal.jvm.inspection.JvmInstallationMetadata; import org.gradle.internal.jvm.inspection.JvmMetadataDetector; +import org.gradle.jvm.toolchain.internal.InstallationLocation; import org.gradle.util.GradleVersion; import javax.inject.Inject; import java.io.File; import java.io.FileInputStream; import java.io.IOException; -import java.io.UncheckedIOException; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; @@ -196,7 +198,29 @@ private JavaVersion determineJavaVersion(String description, File javaHome, Java } private JvmInstallationMetadata getJavaInstallation(File javaHome) { - return jvmMetadataDetector.getMetadata(javaHome); + final InstallationLocation location = new InstallationLocation(javaHome, "Java home"); + + try { + try { + // The getMetadata(File) is used by Gradle pre-7.6 + return (JvmInstallationMetadata) MethodHandles.publicLookup() + .findVirtual(JvmMetadataDetector.class, "getMetadata", MethodType.methodType(JvmInstallationMetadata.class, File.class)) + .bindTo(jvmMetadataDetector) + .invokeExact(location.getLocation()); + } catch (NoSuchMethodException | IllegalAccessException ex) { + // The getMetadata(InstallationLocation) is used by Gradle post-7.6 + return (JvmInstallationMetadata) MethodHandles.publicLookup() + .findVirtual( + JvmMetadataDetector.class, + "getMetadata", + MethodType.methodType(JvmInstallationMetadata.class, InstallationLocation.class) + ) + .bindTo(jvmMetadataDetector) + .invokeExact(location); + } + } catch (Throwable ex) { + throw new IllegalStateException("Unable to find suitable JvmMetadataDetector::getMetadata", ex); + } } private List getAvailableJavaVersions(JavaVersion minimumCompilerVersion) { @@ -206,7 +230,7 @@ private List getAvailableJavaVersions(JavaVersion minimumCompilerVersi String javaHomeEnvVarName = getJavaHomeEnvVarName(Integer.toString(version)); if (System.getenv(javaHomeEnvVarName) != null) { File javaHomeDirectory = new File(findJavaHome(Integer.toString(version))); - JvmInstallationMetadata javaInstallation = jvmMetadataDetector.getMetadata(javaHomeDirectory); + JvmInstallationMetadata javaInstallation = getJavaInstallation(javaHomeDirectory); JavaHome javaHome = JavaHome.of(version, providers.provider(() -> { int actualVersion = 
Integer.parseInt(javaInstallation.getLanguageVersion().getMajorVersion()); if (actualVersion != version) { @@ -220,14 +244,6 @@ private List getAvailableJavaVersions(JavaVersion minimumCompilerVersi return javaVersions; } - private static boolean isCurrentJavaHome(File javaHome) { - try { - return Files.isSameFile(javaHome.toPath(), Jvm.current().getJavaHome().toPath()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - private static String getTestSeed() { String testSeedProperty = System.getProperty("tests.seed"); final String testSeed; diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java index 2a162e5f12d7b..96a2928b6e71e 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveCheckPlugin.java @@ -73,12 +73,14 @@ public void apply(Project project) { .create("distributionArchiveCheck", DistributionArchiveCheckExtension.class); File archiveExtractionDir = calculateArchiveExtractionDir(project); - // sanity checks if archives can be extracted TaskProvider checkExtraction = registerCheckExtractionTask(project, buildDistTask, archiveExtractionDir); + checkExtraction.configure(InternalDistributionArchiveSetupPlugin.configure(buildTaskName)); TaskProvider checkLicense = registerCheckLicenseTask(project, checkExtraction); + checkLicense.configure(InternalDistributionArchiveSetupPlugin.configure(buildTaskName)); TaskProvider checkNotice = registerCheckNoticeTask(project, checkExtraction); + checkNotice.configure(InternalDistributionArchiveSetupPlugin.configure(buildTaskName)); TaskProvider checkTask = project.getTasks().named("check"); checkTask.configure(task -> { task.dependsOn(checkExtraction); @@ -118,7 +120,7 @@ public void execute(Task task) { } private TaskProvider registerCheckLicenseTask(Project project, TaskProvider checkExtraction) { - TaskProvider checkLicense = project.getTasks().register("checkLicense", task -> { + return project.getTasks().register("checkLicense", task -> { task.dependsOn(checkExtraction); task.doLast(new Action() { @Override @@ -138,7 +140,6 @@ public void execute(Task task) { } }); }); - return checkLicense; } private TaskProvider registerCheckExtractionTask(Project project, TaskProvider buildDistTask, File archiveExtractionDir) { diff --git a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java index 8adfbff424278..0944f3960467b 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/internal/InternalDistributionArchiveSetupPlugin.java @@ -87,7 +87,7 @@ public void apply(Project project) { configureTarDefaults(project); } - private Action configure(String name) { + static Action configure(String name) { return (Task task) -> task.onlyIf(s -> { if (OperatingSystem.current().isWindows()) { // On Windows, include only Windows distributions and integTestZip diff --git a/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java b/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java index 70c3737ba3674..6b581fcaa7774 100644 --- 
a/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/pluginzip/Publish.java @@ -9,30 +9,33 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; -import org.gradle.api.logging.Logger; -import org.gradle.api.logging.Logging; import org.gradle.api.publish.PublishingExtension; import org.gradle.api.publish.maven.MavenPublication; -import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; import java.nio.file.Path; +import java.util.Set; +import java.util.stream.Collectors; + import org.gradle.api.Task; +import org.gradle.api.publish.maven.plugins.MavenPublishPlugin; public class Publish implements Plugin { - private static final Logger LOGGER = Logging.getLogger(Publish.class); - - public final static String EXTENSION_NAME = "zipmavensettings"; public final static String PUBLICATION_NAME = "pluginZip"; public final static String STAGING_REPO = "zipStaging"; - public final static String PLUGIN_ZIP_PUBLISH_POM_TASK = "generatePomFileForPluginZipPublication"; - public final static String LOCALMAVEN = "publishToMavenLocal"; public final static String LOCAL_STAGING_REPO_PATH = "/build/local-staging-repo"; - public String zipDistributionLocation = "/build/distributions/"; + // TODO: Does the path ^^ need to use platform dependant file separators ? + + private boolean isZipPublicationPresent(Project project) { + PublishingExtension pe = project.getExtensions().findByType(PublishingExtension.class); + if (pe == null) { + return false; + } + return pe.getPublications().findByName(PUBLICATION_NAME) != null; + } - public static void configMaven(Project project) { + private void addLocalMavenRepo(Project project) { final Path buildDirectory = project.getRootDir().toPath(); - project.getPluginManager().apply(MavenPublishPlugin.class); project.getExtensions().configure(PublishingExtension.class, publishing -> { publishing.repositories(repositories -> { repositories.maven(maven -> { @@ -40,52 +43,45 @@ public static void configMaven(Project project) { maven.setUrl(buildDirectory.toString() + LOCAL_STAGING_REPO_PATH); }); }); + }); + } + + private void addZipArtifact(Project project) { + project.getExtensions().configure(PublishingExtension.class, publishing -> { publishing.publications(publications -> { MavenPublication mavenZip = (MavenPublication) publications.findByName(PUBLICATION_NAME); - - if (mavenZip == null) { - mavenZip = publications.create(PUBLICATION_NAME, MavenPublication.class); + if (mavenZip != null) { + mavenZip.artifact(project.getTasks().named("bundlePlugin")); } - - String groupId = mavenZip.getGroupId(); - if (groupId == null) { - // The groupId is not customized thus we get the value from "project.group". 
- // See https://docs.gradle.org/current/userguide/publishing_maven.html#sec:identity_values_in_the_generated_pom - groupId = getProperty("group", project); - } - - String artifactId = project.getName(); - String pluginVersion = getProperty("version", project); - mavenZip.artifact(project.getTasks().named("bundlePlugin")); - mavenZip.setGroupId(groupId); - mavenZip.setArtifactId(artifactId); - mavenZip.setVersion(pluginVersion); }); }); } - static String getProperty(String name, Project project) { - if (project.hasProperty(name)) { - Object property = project.property(name); - if (property != null) { - return property.toString(); - } - } - return null; - } - @Override public void apply(Project project) { + project.getPluginManager().apply("nebula.maven-base-publish"); + project.getPluginManager().apply(MavenPublishPlugin.class); project.afterEvaluate(evaluatedProject -> { - configMaven(project); - Task validatePluginZipPom = project.getTasks().findByName("validatePluginZipPom"); - if (validatePluginZipPom != null) { - project.getTasks().getByName("validatePluginZipPom").dependsOn("generatePomFileForNebulaPublication"); - } - Task publishPluginZipPublicationToZipStagingRepository = project.getTasks() - .findByName("publishPluginZipPublicationToZipStagingRepository"); - if (publishPluginZipPublicationToZipStagingRepository != null) { - publishPluginZipPublicationToZipStagingRepository.dependsOn("generatePomFileForNebulaPublication"); + if (isZipPublicationPresent(project)) { + addLocalMavenRepo(project); + addZipArtifact(project); + Task validatePluginZipPom = project.getTasks().findByName("validatePluginZipPom"); + if (validatePluginZipPom != null) { + validatePluginZipPom.dependsOn("generatePomFileForNebulaPublication"); + } + + // There are number of tasks prefixed by 'publishPluginZipPublication', f.e.: + // publishPluginZipPublicationToZipStagingRepository, publishPluginZipPublicationToMavenLocal + final Set publishPluginZipPublicationToTasks = project.getTasks() + .stream() + .filter(t -> t.getName().startsWith("publishPluginZipPublicationTo")) + .collect(Collectors.toSet()); + if (!publishPluginZipPublicationToTasks.isEmpty()) { + publishPluginZipPublicationToTasks.forEach(t -> t.dependsOn("generatePomFileForNebulaPublication")); + } + } else { + project.getLogger() + .warn(String.format("Plugin 'opensearch.pluginzip' is applied but no '%s' publication is defined.", PUBLICATION_NAME)); } }); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java index 5d707ce2b9f28..b2d12c4fc2f79 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/precommit/ThirdPartyAuditPrecommitPlugin.java @@ -51,7 +51,7 @@ public class ThirdPartyAuditPrecommitPlugin extends PrecommitPlugin { public TaskProvider createTask(Project project) { project.getPlugins().apply(CompileOnlyResolvePlugin.class); project.getConfigurations().create("forbiddenApisCliJar"); - project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.2"); + project.getDependencies().add("forbiddenApisCliJar", "de.thetaphi:forbiddenapis:3.4"); Configuration jdkJarHellConfig = project.getConfigurations().create(JDK_JAR_HELL_CONFIG_NAME); if (BuildParams.isInternal() && project.getPath().equals(":libs:opensearch-core") == false) { diff --git 
a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java index b14e93ecfd22d..e7c907dfdf000 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/test/DistroTestPlugin.java @@ -75,9 +75,9 @@ import java.util.stream.Stream; public class DistroTestPlugin implements Plugin { - private static final String SYSTEM_JDK_VERSION = "11.0.16+8"; + private static final String SYSTEM_JDK_VERSION = "11.0.17+8"; private static final String SYSTEM_JDK_VENDOR = "adoptium"; - private static final String GRADLE_JDK_VERSION = "17.0.4+8"; + private static final String GRADLE_JDK_VERSION = "17.0.5+8"; private static final String GRADLE_JDK_VENDOR = "adoptium"; // all distributions used by distro tests. this is temporary until tests are per distribution diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java index 0f5348d5a8dcf..86823b82a379f 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchCluster.java @@ -32,7 +32,6 @@ package org.opensearch.gradle.testclusters; import org.opensearch.gradle.FileSupplier; -import org.opensearch.gradle.Jdk; import org.opensearch.gradle.PropertyNormalization; import org.opensearch.gradle.ReaperService; import org.opensearch.gradle.http.WaitForHttpResource; @@ -75,7 +74,6 @@ public class OpenSearchCluster implements TestClusterConfiguration, Named { private final String path; private final String clusterName; private final NamedDomainObjectContainer nodes; - private final Jdk bwcJdk; private final File workingDirBase; private final LinkedHashMap> waitConditions = new LinkedHashMap<>(); private final Project project; @@ -92,8 +90,7 @@ public OpenSearchCluster( ReaperService reaper, File workingDirBase, FileSystemOperations fileSystemOperations, - ArchiveOperations archiveOperations, - Jdk bwcJdk + ArchiveOperations archiveOperations ) { this.path = project.getPath(); this.clusterName = clusterName; @@ -103,7 +100,6 @@ public OpenSearchCluster( this.archiveOperations = archiveOperations; this.workingDirBase = workingDirBase; this.nodes = project.container(OpenSearchNode.class); - this.bwcJdk = bwcJdk; // Always add the first node String zone = hasZoneProperty() ? 
"zone-1" : ""; @@ -167,7 +163,6 @@ private void addNode(String nodeName, String zoneName) { fileSystemOperations, archiveOperations, workingDirBase, - bwcJdk, zoneName ); // configure the cluster name eagerly diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java index ab765efde7885..bcf9a8ba4d780 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/OpenSearchNode.java @@ -37,7 +37,6 @@ import org.opensearch.gradle.DistributionDownloadPlugin; import org.opensearch.gradle.OpenSearchDistribution; import org.opensearch.gradle.FileSupplier; -import org.opensearch.gradle.Jdk; import org.opensearch.gradle.LazyPropertyList; import org.opensearch.gradle.LazyPropertyMap; import org.opensearch.gradle.LoggedExec; @@ -132,7 +131,6 @@ public class OpenSearchNode implements TestClusterConfiguration { private final String name; private final Project project; private final ReaperService reaper; - private final Jdk bwcJdk; private final FileSystemOperations fileSystemOperations; private final ArchiveOperations archiveOperations; @@ -163,7 +161,7 @@ public class OpenSearchNode implements TestClusterConfiguration { private int currentDistro = 0; private TestDistribution testDistribution; - private List distributions = new ArrayList<>(); + private final List distributions = new ArrayList<>(); private volatile Process opensearchProcess; private Function nameCustomization = Function.identity(); private boolean isWorkingDirConfigured = false; @@ -172,11 +170,12 @@ public class OpenSearchNode implements TestClusterConfiguration { private Path confPathData; private String keystorePassword = ""; private boolean preserveDataDir = false; - private final Config opensearchConfig; - private final Config legacyESConfig; - private Config currentConfig; - private String zone; + private final Path configFile; + private final Path stdoutFile; + private final Path stderrFile; + private final Path stdinFile; + private final String zone; OpenSearchNode( String path, @@ -186,7 +185,6 @@ public class OpenSearchNode implements TestClusterConfiguration { FileSystemOperations fileSystemOperations, ArchiveOperations archiveOperations, File workingDirBase, - Jdk bwcJdk, String zone ) { this.path = path; @@ -195,7 +193,6 @@ public class OpenSearchNode implements TestClusterConfiguration { this.reaper = reaper; this.fileSystemOperations = fileSystemOperations; this.archiveOperations = archiveOperations; - this.bwcJdk = bwcJdk; workingDir = workingDirBase.toPath().resolve(safeName(name)).toAbsolutePath(); confPathRepo = workingDir.resolve("repo"); confPathData = workingDir.resolve("data"); @@ -203,107 +200,16 @@ public class OpenSearchNode implements TestClusterConfiguration { transportPortFile = confPathLogs.resolve("transport.ports"); httpPortsFile = confPathLogs.resolve("http.ports"); tmpDir = workingDir.resolve("tmp"); + configFile = workingDir.resolve("config/opensearch.yml"); + stdoutFile = confPathLogs.resolve("opensearch.stdout.log"); + stderrFile = confPathLogs.resolve("opensearch.stderr.log"); + stdinFile = workingDir.resolve("opensearch.stdin"); waitConditions.put("ports files", this::checkPortsFilesExistWithDelay); setTestDistribution(TestDistribution.INTEG_TEST); setVersion(VersionProperties.getOpenSearch()); - opensearchConfig = Config.getOpenSearchConfig(workingDir); - legacyESConfig = 
Config.getLegacyESConfig(workingDir); - currentConfig = opensearchConfig; this.zone = zone; } - /* - * An object to contain the configuration needed to install - * either an OpenSearch or an elasticsearch distribution on - * this test node. - * - * This is added to be able to run BWC testing against a - * cluster running elasticsearch. - * - * legacyESConfig will be removed in a future release. - */ - private static class Config { - final String distroName; - final String command; - final String keystoreTool; - final String pluginTool; - final String envTempDir; - final String envJavaOpts; - final String envPathConf; - final Path configFile; - final Path stdoutFile; - final Path stderrFile; - final Path stdinFile; - - Config( - String distroName, - String command, - String keystoreTool, - String pluginTool, - String envTempDir, - String envJavaOpts, - String envPathConf, - Path configFile, - Path stdoutFile, - Path stderrFile, - Path stdinFile - ) { - this.distroName = distroName; - this.command = command; - this.keystoreTool = keystoreTool; - this.pluginTool = pluginTool; - this.envTempDir = envTempDir; - this.envJavaOpts = envJavaOpts; - this.envPathConf = envPathConf; - this.configFile = configFile; - this.stdoutFile = stdoutFile; - this.stderrFile = stderrFile; - this.stdinFile = stdinFile; - } - - static Config getOpenSearchConfig(Path workingDir) { - Path confPathLogs = workingDir.resolve("logs"); - return new Config( - "OpenSearch", - "opensearch", - "opensearch-keystore", - "opensearch-plugin", - "OPENSEARCH_TMPDIR", - "OPENSEARCH_JAVA_OPTS", - "OPENSEARCH_PATH_CONF", - workingDir.resolve("config/opensearch.yml"), - confPathLogs.resolve("opensearch.stdout.log"), - confPathLogs.resolve("opensearch.stderr.log"), - workingDir.resolve("opensearch.stdin") - ); - } - - static Config getLegacyESConfig(Path workingDir) { - Path confPathLogs = workingDir.resolve("logs"); - return new Config( - "Elasticsearch", - "elasticsearch", - "elasticsearch-keystore", - "elasticsearch-plugin", - "ES_TMPDIR", - "ES_JAVA_OPTS", - "ES_PATH_CONF", - workingDir.resolve("config/elasticsearch.yml"), - confPathLogs.resolve("es.stdout.log"), - confPathLogs.resolve("es.stderr.log"), - workingDir.resolve("es.stdin") - ); - } - } - - private void applyConfig() { - if (getVersion().onOrAfter("1.0.0")) { - currentConfig = opensearchConfig; - } else { - currentConfig = legacyESConfig; - } - } - @Input @Optional public String getName() { @@ -321,7 +227,6 @@ public void setVersion(String version) { checkFrozen(); distributions.clear(); doSetVersion(version); - applyConfig(); } @Override @@ -331,7 +236,6 @@ public void setVersions(List versions) { for (String version : versions) { doSetVersion(version); } - applyConfig(); } private void doSetVersion(String version) { @@ -528,7 +432,7 @@ public void jvmArgs(String... 
values) { @Internal public Path getConfigDir() { - return currentConfig.configFile.getParent(); + return configFile.getParent(); } @Override @@ -555,7 +459,7 @@ public void freeze() { * @return stream of log lines */ public Stream logLines() throws IOException { - return Files.lines(currentConfig.stdoutFile, StandardCharsets.UTF_8); + return Files.lines(stdoutFile, StandardCharsets.UTF_8); } @Override @@ -601,23 +505,17 @@ public synchronized void start() { } if (pluginsToInstall.isEmpty() == false) { - if (getVersion().onOrAfter("7.6.0")) { - logToProcessStdout("installing " + pluginsToInstall.size() + " plugins in a single transaction"); - final String[] arguments = Stream.concat(Stream.of("install", "--batch"), pluginsToInstall.stream()).toArray(String[]::new); - runOpenSearchBinScript(currentConfig.pluginTool, arguments); - logToProcessStdout("installed plugins"); - } else { - logToProcessStdout("installing " + pluginsToInstall.size() + " plugins sequentially"); - pluginsToInstall.forEach(plugin -> runOpenSearchBinScript(currentConfig.pluginTool, "install", "--batch", plugin)); - logToProcessStdout("installed plugins"); - } + logToProcessStdout("installing " + pluginsToInstall.size() + " plugins in a single transaction"); + final String[] arguments = Stream.concat(Stream.of("install", "--batch"), pluginsToInstall.stream()).toArray(String[]::new); + runOpenSearchBinScript("opensearch-plugin", arguments); + logToProcessStdout("installed plugins"); } - logToProcessStdout("Creating " + currentConfig.command + " keystore with password set to [" + keystorePassword + "]"); + logToProcessStdout("Creating opensearch keystore with password set to [" + keystorePassword + "]"); if (keystorePassword.length() > 0) { - runOpenSearchBinScriptWithInput(keystorePassword + "\n" + keystorePassword, currentConfig.keystoreTool, "create", "-p"); + runOpenSearchBinScriptWithInput(keystorePassword + "\n" + keystorePassword, "opensearch-keystore", "create", "-p"); } else { - runOpenSearchBinScript(currentConfig.keystoreTool, "-v", "create"); + runOpenSearchBinScript("opensearch-keystore", "-v", "create"); } if (keystoreSettings.isEmpty() == false || keystoreFiles.isEmpty() == false) { @@ -645,7 +543,7 @@ public synchronized void start() { } } - logToProcessStdout("Starting " + currentConfig.distroName + " process"); + logToProcessStdout("Starting OpenSearch process"); startOpenSearchProcess(); } @@ -657,11 +555,11 @@ private boolean canUseSharedDistribution() { private void logToProcessStdout(String message) { try { - if (Files.exists(currentConfig.stdoutFile.getParent()) == false) { - Files.createDirectories(currentConfig.stdoutFile.getParent()); + if (Files.exists(stdoutFile.getParent()) == false) { + Files.createDirectories(stdoutFile.getParent()); } Files.write( - currentConfig.stdoutFile, + stdoutFile, ("[" + Instant.now().toString() + "] [BUILD] " + message + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE, StandardOpenOption.APPEND @@ -684,7 +582,6 @@ void goToNextVersion() { } logToProcessStdout("Switch version from " + getVersion() + " to " + distributions.get(currentDistro + 1).getVersion()); currentDistro += 1; - applyConfig(); setting("node.attr.upgraded", "true"); } @@ -696,7 +593,7 @@ private void copyExtraConfigFiles() { if (Files.exists(from.toPath()) == false) { throw new TestClustersException("Can't create extra config file from " + from + " for " + this + " as it does not exist"); } - Path dst = currentConfig.configFile.getParent().resolve(destination); + Path dst = 
configFile.getParent().resolve(destination); try { Files.createDirectories(dst.getParent()); Files.copy(from.toPath(), dst, StandardCopyOption.REPLACE_EXISTING); @@ -721,7 +618,7 @@ private void copyExtraJars() { Files.copy(from.toPath(), destination, StandardCopyOption.REPLACE_EXISTING); LOGGER.info("Added extra jar {} to {}", from.getName(), destination); } catch (IOException e) { - throw new UncheckedIOException("Can't copy extra jar dependency " + from.getName() + " to " + destination.toString(), e); + throw new UncheckedIOException("Can't copy extra jar dependency " + from.getName() + " to " + destination, e); } }); } @@ -794,9 +691,7 @@ private void runOpenSearchBinScriptWithInput(String input, String tool, CharSequ ArrayList result = new ArrayList<>(); result.add("/c"); result.add("bin\\" + tool + ".bat"); - for (CharSequence arg : args) { - result.add(arg); - } + result.addAll(Arrays.asList(args)); return result; }).onUnix(() -> Arrays.asList(args)).supply()); spec.setStandardInput(byteArrayInputStream); @@ -809,7 +704,7 @@ private void runOpenSearchBinScriptWithInput(String input, String tool, CharSequ private void runKeystoreCommandWithPassword(String keystorePassword, String input, CharSequence... args) { final String actualInput = keystorePassword.length() > 0 ? keystorePassword + "\n" + input : input; - runOpenSearchBinScriptWithInput(actualInput, currentConfig.keystoreTool, args); + runOpenSearchBinScriptWithInput(actualInput, "opensearch-keystore", args); } private void runOpenSearchBinScript(String tool, CharSequence... args) { @@ -819,7 +714,7 @@ private void runOpenSearchBinScript(String tool, CharSequence... args) { private Map getOpenSearchEnvironment() { Map defaultEnv = new HashMap<>(); getRequiredJavaHome().ifPresent(javaHome -> defaultEnv.put("JAVA_HOME", javaHome)); - defaultEnv.put(currentConfig.envPathConf, currentConfig.configFile.getParent().toString()); + defaultEnv.put("OPENSEARCH_PATH_CONF", configFile.getParent().toString()); String systemPropertiesString = ""; if (systemProperties.isEmpty() == false) { systemPropertiesString = " " @@ -829,7 +724,7 @@ private Map getOpenSearchEnvironment() { // OPENSEARCH_PATH_CONF is also set as an environment variable and for a reference to ${OPENSEARCH_PATH_CONF} // to work OPENSEARCH_JAVA_OPTS, we need to make sure that OPENSEARCH_PATH_CONF before OPENSEARCH_JAVA_OPTS. 
Instead, // we replace the reference with the actual value in other environment variables - .map(p -> p.replace("${" + currentConfig.envPathConf + "}", currentConfig.configFile.getParent().toString())) + .map(p -> p.replace("${OPENSEARCH_PATH_CONF}", configFile.getParent().toString())) .collect(Collectors.joining(" ")); } String jvmArgsString = ""; @@ -844,12 +739,12 @@ private Map getOpenSearchEnvironment() { } String heapSize = System.getProperty("tests.heap.size", "512m"); defaultEnv.put( - currentConfig.envJavaOpts, + "OPENSEARCH_JAVA_OPTS", "-Xms" + heapSize + " -Xmx" + heapSize + " -ea -esa " + systemPropertiesString + " " + jvmArgsString + " " + // Support passing in additional JVM arguments System.getProperty("tests.jvm.argline", "") ); - defaultEnv.put(currentConfig.envTempDir, tmpDir.toString()); + defaultEnv.put("OPENSEARCH_TMPDIR", tmpDir.toString()); // Windows requires this as it defaults to `c:\windows` despite OPENSEARCH_TMPDIR defaultEnv.put("TMP", tmpDir.toString()); @@ -868,27 +763,20 @@ private Map getOpenSearchEnvironment() { } private java.util.Optional getRequiredJavaHome() { - // If we are testing the current version of Elasticsearch, use the configured runtime Java + // If we are testing the current version of OpenSearch, use the configured runtime Java if (getTestDistribution() == TestDistribution.INTEG_TEST || getVersion().equals(VersionProperties.getOpenSearchVersion())) { return java.util.Optional.of(BuildParams.getRuntimeJavaHome()).map(File::getAbsolutePath); - } else if (getVersion().before("7.0.0")) { - return java.util.Optional.of(bwcJdk.getJavaHomePath().toString()); } else { // otherwise use the bundled JDK return java.util.Optional.empty(); } } - @Internal - Jdk getBwcJdk() { - return getVersion().before("7.0.0") ? 
bwcJdk : null; - } - private void startOpenSearchProcess() { final ProcessBuilder processBuilder = new ProcessBuilder(); Path effectiveDistroDir = getDistroDir(); List command = OS.>conditional() - .onUnix(() -> Arrays.asList(effectiveDistroDir.resolve("./bin/" + currentConfig.command).toString())) - .onWindows(() -> Arrays.asList("cmd", "/c", effectiveDistroDir.resolve("bin\\" + currentConfig.command + ".bat").toString())) + .onUnix(() -> List.of(effectiveDistroDir.resolve("./bin/opensearch").toString())) + .onWindows(() -> Arrays.asList("cmd", "/c", effectiveDistroDir.resolve("bin\\opensearch.bat").toString())) .supply(); processBuilder.command(command); processBuilder.directory(workingDir.toFile()); @@ -898,13 +786,13 @@ private void startOpenSearchProcess() { environment.putAll(getOpenSearchEnvironment()); // don't buffer all in memory, make sure we don't block on the default pipes - processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(currentConfig.stderrFile.toFile())); - processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(currentConfig.stdoutFile.toFile())); + processBuilder.redirectError(ProcessBuilder.Redirect.appendTo(stderrFile.toFile())); + processBuilder.redirectOutput(ProcessBuilder.Redirect.appendTo(stdoutFile.toFile())); if (keystorePassword != null && keystorePassword.length() > 0) { try { - Files.write(currentConfig.stdinFile, (keystorePassword + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE); - processBuilder.redirectInput(currentConfig.stdinFile.toFile()); + Files.write(stdinFile, (keystorePassword + "\n").getBytes(StandardCharsets.UTF_8), StandardOpenOption.CREATE); + processBuilder.redirectInput(stdinFile.toFile()); } catch (IOException e) { throw new TestClustersException("Failed to set the keystore password for " + this, e); } @@ -913,7 +801,7 @@ private void startOpenSearchProcess() { try { opensearchProcess = processBuilder.start(); } catch (IOException e) { - throw new TestClustersException("Failed to start " + currentConfig.command + " process for " + this, e); + throw new TestClustersException("Failed to start opensearch process for " + this, e); } reaper.registerPid(toString(), opensearchProcess.pid()); } @@ -985,8 +873,8 @@ public synchronized void stop(boolean tailLogs) { stopProcess(opensearchProcess.toHandle(), true); reaper.unregister(toString()); if (tailLogs) { - logFileContents("Standard output of node", currentConfig.stdoutFile); - logFileContents("Standard error of node", currentConfig.stderrFile); + logFileContents("Standard output of node", stdoutFile); + logFileContents("Standard error of node", stderrFile); } opensearchProcess = null; // Clean up the ports file in case this is started again. @@ -1014,16 +902,13 @@ private void stopProcess(ProcessHandle processHandle, boolean forcibly) { return; } - // Stop all children last - if the ML processes are killed before the ES JVM then + // Stop all children last - if the ML processes are killed before the OpenSearch JVM then // they'll be recorded as having failed and won't restart when the cluster restarts. - // ES could actually be a child when there's some wrapper process like on Windows, + // OpenSearch could actually be a child when there's some wrapper process like on Windows, // and in that case the ML processes will be grandchildren of the wrapper. List children = processHandle.children().collect(Collectors.toList()); try { - logProcessInfo( - "Terminating " + currentConfig.command + " process" + (forcibly ? 
" forcibly " : "gracefully") + ":", - processHandle.info() - ); + logProcessInfo("Terminating opensearch process" + (forcibly ? " forcibly " : "gracefully") + ":", processHandle.info()); if (forcibly) { processHandle.destroyForcibly(); @@ -1043,7 +928,7 @@ private void stopProcess(ProcessHandle processHandle, boolean forcibly) { waitForProcessToExit(processHandle); if (processHandle.isAlive()) { - throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); + throw new TestClustersException("Was not able to terminate opensearch process for " + this); } } finally { children.forEach(each -> stopProcess(each, forcibly)); @@ -1051,7 +936,7 @@ private void stopProcess(ProcessHandle processHandle, boolean forcibly) { waitForProcessToExit(processHandle); if (processHandle.isAlive()) { - throw new TestClustersException("Was not able to terminate " + currentConfig.command + " process for " + this); + throw new TestClustersException("Was not able to terminate opensearch process for " + this); } } @@ -1135,7 +1020,7 @@ private void waitForProcessToExit(ProcessHandle processHandle) { try { processHandle.onExit().get(OPENSEARCH_DESTROY_TIMEOUT, OPENSEARCH_DESTROY_TIMEOUT_UNIT); } catch (InterruptedException e) { - LOGGER.info("Interrupted while waiting for {} process", currentConfig.command, e); + LOGGER.info("Interrupted while waiting for opensearch process", e); Thread.currentThread().interrupt(); } catch (ExecutionException e) { LOGGER.info("Failure while waiting for process to exist", e); @@ -1146,8 +1031,8 @@ private void waitForProcessToExit(ProcessHandle processHandle) { private void createWorkingDir() throws IOException { // Start configuration from scratch in case of a restart - fileSystemOperations.delete(d -> d.delete(currentConfig.configFile.getParent())); - Files.createDirectories(currentConfig.configFile.getParent()); + fileSystemOperations.delete(d -> d.delete(configFile.getParent())); + Files.createDirectories(configFile.getParent()); Files.createDirectories(confPathRepo); Files.createDirectories(confPathData); Files.createDirectories(confPathLogs); @@ -1250,42 +1135,27 @@ private void createConfiguration() { } baseConfig.put("node.portsfile", "true"); baseConfig.put("http.port", httpPort); - if (getVersion().onOrAfter(Version.fromString("6.7.0"))) { - baseConfig.put("transport.port", transportPort); - } else { - baseConfig.put("transport.tcp.port", transportPort); - } + baseConfig.put("transport.port", transportPort); // Default the watermarks to absurdly low to prevent the tests from failing on nodes without enough disk space baseConfig.put("cluster.routing.allocation.disk.watermark.low", "1b"); baseConfig.put("cluster.routing.allocation.disk.watermark.high", "1b"); // increase script compilation limit since tests can rapid-fire script compilations - if (getVersion().onOrAfter(Version.fromString("7.9.0"))) { - baseConfig.put("script.disable_max_compilations_rate", "true"); - } else { - baseConfig.put("script.max_compilations_rate", "2048/1m"); - } + baseConfig.put("script.disable_max_compilations_rate", "true"); baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b"); // Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control // over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client // can retry on circuit breaking exceptions, we can revert again to the default configuration. 
- if (getVersion().onOrAfter("7.0.0")) { - baseConfig.put("indices.breaker.total.use_real_memory", "false"); - } + baseConfig.put("indices.breaker.total.use_real_memory", "false"); // Don't wait for state, just start up quickly. This will also allow new and old nodes in the BWC case to become the master baseConfig.put("discovery.initial_state_timeout", "0s"); // TODO: Remove these once https://github.com/elastic/elasticsearch/issues/46091 is fixed - if (getVersion().onOrAfter("1.0.0")) { - baseConfig.put("logger.org.opensearch.action.support.master", "DEBUG"); - baseConfig.put("logger.org.opensearch.cluster.coordination", "DEBUG"); - } else { - baseConfig.put("logger.org.elasticsearch.action.support.master", "DEBUG"); - baseConfig.put("logger.org.elasticsearch.cluster.coordination", "DEBUG"); - } + baseConfig.put("logger.org.opensearch.action.support.master", "DEBUG"); + baseConfig.put("logger.org.opensearch.cluster.coordination", "DEBUG"); HashSet overriden = new HashSet<>(baseConfig.keySet()); overriden.retainAll(settings.keySet()); - overriden.removeAll(OVERRIDABLE_SETTINGS); + OVERRIDABLE_SETTINGS.forEach(overriden::remove); if (overriden.isEmpty() == false) { throw new IllegalArgumentException( "Testclusters does not allow the following settings to be changed:" + overriden + " for " + this @@ -1294,10 +1164,10 @@ private void createConfiguration() { // Make sure no duplicate config keys settings.keySet().stream().filter(OVERRIDABLE_SETTINGS::contains).forEach(baseConfig::remove); - final Path configFileRoot = currentConfig.configFile.getParent(); + final Path configFileRoot = configFile.getParent(); try { Files.write( - currentConfig.configFile, + configFile, Stream.concat(settings.entrySet().stream(), baseConfig.entrySet().stream()) .map(entry -> entry.getKey() + ": " + entry.getValue()) .collect(Collectors.joining("\n")) @@ -1312,17 +1182,17 @@ private void createConfiguration() { } logToProcessStdout("Copying additional config files from distro " + configFiles); for (Path file : configFiles) { - Path dest = currentConfig.configFile.getParent().resolve(file.getFileName()); + Path dest = configFile.getParent().resolve(file.getFileName()); if (Files.exists(dest) == false) { Files.copy(file, dest); } } } catch (IOException e) { - throw new UncheckedIOException("Could not write config file: " + currentConfig.configFile, e); + throw new UncheckedIOException("Could not write config file: " + configFile, e); } tweakJvmOptions(configFileRoot); - LOGGER.info("Written config file:{} for {}", currentConfig.configFile, this); + LOGGER.info("Written config file:{} for {}", configFile, this); } private void tweakJvmOptions(Path configFileRoot) { @@ -1346,18 +1216,11 @@ private void tweakJvmOptions(Path configFileRoot) { private Map jvmOptionExpansions() { Map expansions = new HashMap<>(); Version version = getVersion(); - String heapDumpOrigin = getVersion().onOrAfter("6.3.0") ? 
"-XX:HeapDumpPath=data" : "-XX:HeapDumpPath=/heap/dump/path"; + String heapDumpOrigin = "-XX:HeapDumpPath=data"; Path relativeLogPath = workingDir.relativize(confPathLogs); - expansions.put(heapDumpOrigin, "-XX:HeapDumpPath=" + relativeLogPath.toString()); - if (version.onOrAfter("6.2.0")) { - expansions.put("logs/gc.log", relativeLogPath.resolve("gc.log").toString()); - } - if (getVersion().onOrAfter("7.0.0")) { - expansions.put( - "-XX:ErrorFile=logs/hs_err_pid%p.log", - "-XX:ErrorFile=" + relativeLogPath.resolve("hs_err_pid%p.log").toString() - ); - } + expansions.put(heapDumpOrigin, "-XX:HeapDumpPath=" + relativeLogPath); + expansions.put("logs/gc.log", relativeLogPath.resolve("gc.log").toString()); + expansions.put("-XX:ErrorFile=logs/hs_err_pid%p.log", "-XX:ErrorFile=" + relativeLogPath.resolve("hs_err_pid%p.log")); return expansions; } @@ -1488,7 +1351,7 @@ void waitForAllConditions() { // Installing plugins at config time and loading them when nods start requires additional time we need to // account for ADDITIONAL_CONFIG_TIMEOUT_UNIT.toMillis( - ADDITIONAL_CONFIG_TIMEOUT * (plugins.size() + keystoreFiles.size() + keystoreSettings.size() + credentials.size()) + (long) ADDITIONAL_CONFIG_TIMEOUT * (plugins.size() + keystoreFiles.size() + keystoreSettings.size() + credentials.size()) ), TimeUnit.MILLISECONDS, this); } @@ -1546,17 +1409,17 @@ void setDataPath(Path dataPath) { @Internal Path getOpensearchStdoutFile() { - return currentConfig.stdoutFile; + return stdoutFile; } @Internal Path getOpensearchStderrFile() { - return currentConfig.stderrFile; + return stderrFile; } private static class FileEntry implements Named { - private String name; - private File file; + private final String name; + private final File file; FileEntry(String name, File file) { this.name = name; @@ -1577,8 +1440,8 @@ public File getFile() { } private static class CliEntry { - private String executable; - private CharSequence[] args; + private final String executable; + private final CharSequence[] args; CliEntry(String executable, CharSequence[] args) { this.executable = executable; diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersAware.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersAware.java index e5d264121b0aa..e5c413df00d0d 100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersAware.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersAware.java @@ -31,7 +31,6 @@ package org.opensearch.gradle.testclusters; -import org.opensearch.gradle.Jdk; import org.gradle.api.Task; import org.gradle.api.artifacts.Configuration; import org.gradle.api.tasks.Nested; @@ -52,9 +51,6 @@ default void useCluster(OpenSearchCluster cluster) { // Add configured distributions as task dependencies so they are built before starting the cluster cluster.getNodes().stream().flatMap(node -> node.getDistributions().stream()).forEach(distro -> dependsOn(distro.getExtracted())); - // Add legacy BWC JDK runtime as a dependency so it's downloaded before starting the cluster if necessary - cluster.getNodes().stream().map(node -> (Callable) node::getBwcJdk).forEach(this::dependsOn); - cluster.getNodes().forEach(node -> dependsOn((Callable>) node::getPluginAndModuleConfigurations)); getClusters().add(cluster); } diff --git a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersPlugin.java b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersPlugin.java index 2ef14a39b6669..8735970b0d65b 
100644 --- a/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersPlugin.java +++ b/buildSrc/src/main/java/org/opensearch/gradle/testclusters/TestClustersPlugin.java @@ -31,11 +31,8 @@ package org.opensearch.gradle.testclusters; -import org.opensearch.gradle.Architecture; import org.opensearch.gradle.DistributionDownloadPlugin; -import org.opensearch.gradle.Jdk; import org.opensearch.gradle.JdkDownloadPlugin; -import org.opensearch.gradle.OS; import org.opensearch.gradle.ReaperPlugin; import org.opensearch.gradle.ReaperService; import org.opensearch.gradle.info.BuildParams; @@ -68,8 +65,6 @@ public class TestClustersPlugin implements Plugin { private static final String LIST_TASK_NAME = "listTestClusters"; private static final String REGISTRY_SERVICE_NAME = "testClustersRegistry"; - private static final String LEGACY_JAVA_VENDOR = "adoptopenjdk"; - private static final String LEGACY_JAVA_VERSION = "8u242+b08"; private static final Logger logger = Logging.getLogger(TestClustersPlugin.class); @Inject @@ -95,16 +90,8 @@ public void apply(Project project) { ReaperService reaper = project.getRootProject().getExtensions().getByType(ReaperService.class); - // register legacy jdk distribution for testing pre-7.0 BWC clusters - Jdk bwcJdk = JdkDownloadPlugin.getContainer(project).create("bwc_jdk", jdk -> { - jdk.setVendor(LEGACY_JAVA_VENDOR); - jdk.setVersion(LEGACY_JAVA_VERSION); - jdk.setPlatform(OS.current().name().toLowerCase()); - jdk.setArchitecture(Architecture.current().name().toLowerCase()); - }); - // enable the DSL to describe clusters - NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaper, bwcJdk); + NamedDomainObjectContainer container = createTestClustersContainerExtension(project, reaper); // provide a task to be able to list defined clusters. createListClustersTask(project, container); @@ -125,11 +112,7 @@ public void apply(Project project) { project.getRootProject().getPluginManager().apply(TestClustersHookPlugin.class); } - private NamedDomainObjectContainer createTestClustersContainerExtension( - Project project, - ReaperService reaper, - Jdk bwcJdk - ) { + private NamedDomainObjectContainer createTestClustersContainerExtension(Project project, ReaperService reaper) { // Create an extensions that allows describing clusters NamedDomainObjectContainer container = project.container( OpenSearchCluster.class, @@ -139,8 +122,7 @@ private NamedDomainObjectContainer createTestClustersContaine reaper, new File(project.getBuildDir(), "testclusters"), getFileSystemOperations(), - getArchiveOperations(), - bwcJdk + getArchiveOperations() ) ); project.getExtensions().add(EXTENSION_NAME, container); diff --git a/buildSrc/src/main/resources/forbidden/http-signatures.txt b/buildSrc/src/main/resources/forbidden/http-signatures.txt index dcf20bbb09387..bfd81b3521a40 100644 --- a/buildSrc/src/main/resources/forbidden/http-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/http-signatures.txt @@ -15,31 +15,14 @@ # language governing permissions and limitations under the License. 
@defaultMessage Explicitly specify the ContentType of HTTP entities when creating -org.apache.http.entity.StringEntity#(java.lang.String) -org.apache.http.entity.StringEntity#(java.lang.String,java.lang.String) -org.apache.http.entity.StringEntity#(java.lang.String,java.nio.charset.Charset) -org.apache.http.entity.ByteArrayEntity#(byte[]) -org.apache.http.entity.ByteArrayEntity#(byte[],int,int) -org.apache.http.entity.FileEntity#(java.io.File) -org.apache.http.entity.InputStreamEntity#(java.io.InputStream) -org.apache.http.entity.InputStreamEntity#(java.io.InputStream,long) -org.apache.http.nio.entity.NByteArrayEntity#(byte[]) -org.apache.http.nio.entity.NByteArrayEntity#(byte[],int,int) -org.apache.http.nio.entity.NFileEntity#(java.io.File) -org.apache.http.nio.entity.NStringEntity#(java.lang.String) -org.apache.http.nio.entity.NStringEntity#(java.lang.String,java.lang.String) +org.apache.hc.core5.http.io.entity.StringEntity#(java.lang.String) +org.apache.hc.core5.http.io.entity.StringEntity#(java.lang.String,java.nio.charset.Charset) @defaultMessage Use non-deprecated constructors -org.apache.http.nio.entity.NFileEntity#(java.io.File,java.lang.String) -org.apache.http.nio.entity.NFileEntity#(java.io.File,java.lang.String,boolean) -org.apache.http.entity.FileEntity#(java.io.File,java.lang.String) -org.apache.http.entity.StringEntity#(java.lang.String,java.lang.String,java.lang.String) +org.apache.hc.core5.http.io.entity.FileEntity#(java.io.File,org.apache.hc.core5.http.ContentType) @defaultMessage BasicEntity is easy to mess up and forget to set content type -org.apache.http.entity.BasicHttpEntity#() - -@defaultMessage EntityTemplate is easy to mess up and forget to set content type -org.apache.http.entity.EntityTemplate#(org.apache.http.entity.ContentProducer) +org.apache.hc.core5.http.io.entity.BasicHttpEntity#(java.io.InputStream,org.apache.hc.core5.http.ContentType) @defaultMessage SerializableEntity uses java serialization and makes it easy to forget to set content type -org.apache.http.entity.SerializableEntity#(java.io.Serializable) +org.apache.hc.core5.http.io.entity.SerializableEntity#(java.io.Serializable,org.apache.hc.core5.http.ContentType) diff --git a/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java b/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java deleted file mode 100644 index 14931c83ba29b..0000000000000 --- a/buildSrc/src/test/java/org/opensearch/gradle/BwcOpenSearchVersionsTests.java +++ /dev/null @@ -1,95 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -package org.opensearch.gradle; - -import org.opensearch.gradle.test.GradleUnitTestCase; -import org.junit.Rule; -import org.junit.rules.ExpectedException; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - -import static java.util.Arrays.asList; - -/** - * Tests to specifically verify the OpenSearch version 1.x with Legacy ES versions. - * This supplements the tests in BwcVersionsTests. - * - * Currently the versioning logic doesn't work for OpenSearch 2.x as the masking - * is only applied specifically for 1.x. 
- */ -public class BwcOpenSearchVersionsTests extends GradleUnitTestCase { - - private static final Map> sampleVersions = new HashMap<>(); - - @Rule - public ExpectedException expectedEx = ExpectedException.none(); - - static { - sampleVersions.put("1.0.0", asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0")); - sampleVersions.put("1.1.0", asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0", "1_1_0")); - sampleVersions.put( - "2.0.0", - asList("5_6_13", "6_6_1", "6_8_15", "7_0_0", "7_9_1", "7_10_0", "7_10_1", "7_10_2", "1_0_0", "1_1_0", "2_0_0") - ); - } - - public void testWireCompatible() { - assertVersionsEquals( - asList("6.8.15", "7.0.0", "7.9.1", "7.10.0", "7.10.1", "7.10.2"), - getVersionCollection("1.0.0").getWireCompatible() - ); - assertVersionsEquals( - asList("6.8.15", "7.0.0", "7.9.1", "7.10.0", "7.10.1", "7.10.2", "1.0.0"), - getVersionCollection("1.1.0").getWireCompatible() - ); - } - - public void testWireCompatibleUnreleased() { - assertVersionsEquals(Collections.emptyList(), getVersionCollection("1.0.0").getUnreleasedWireCompatible()); - } - - public void testIndexCompatible() { - assertVersionsEquals( - asList("6.6.1", "6.8.15", "7.0.0", "7.9.1", "7.10.0", "7.10.1", "7.10.2"), - getVersionCollection("1.0.0").getIndexCompatible() - ); - assertVersionsEquals( - asList("6.6.1", "6.8.15", "7.0.0", "7.9.1", "7.10.0", "7.10.1", "7.10.2", "1.0.0"), - getVersionCollection("1.1.0").getIndexCompatible() - ); - } - - public void testIndexCompatibleUnreleased() { - assertVersionsEquals(Collections.emptyList(), getVersionCollection("1.0.0").getUnreleasedIndexCompatible()); - } - - public void testGetUnreleased() { - assertVersionsEquals(Collections.singletonList("1.0.0"), getVersionCollection("1.0.0").getUnreleased()); - } - - private String formatVersionToLine(final String version) { - return " public static final Version V_" + version.replaceAll("\\.", "_") + " "; - } - - private void assertVersionsEquals(List expected, List actual) { - assertEquals(expected.stream().map(Version::fromString).collect(Collectors.toList()), actual); - } - - private BwcVersions getVersionCollection(String versionString) { - List versionMap = sampleVersions.get(versionString); - assertNotNull(versionMap); - Version version = Version.fromString(versionString); - assertNotNull(version); - return new BwcVersions(versionMap.stream().map(this::formatVersionToLine).collect(Collectors.toList()), version); - } -} diff --git a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java index d7798ef5040bb..1a9647573f948 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/DistributionDownloadPluginTests.java @@ -86,7 +86,7 @@ public void testCustomDistributionUrlWithUrl() { project.getExtensions().getExtraProperties().set("customDistributionUrl", customUrl); DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); plugin.apply(project); - assertEquals(4, project.getRepositories().size()); + assertEquals(2, project.getRepositories().size()); assertEquals( ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), customUrl @@ -95,22 +95,13 @@ public void testCustomDistributionUrlWithUrl() { ((DefaultIvyArtifactRepository) 
project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), customUrl ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), - "https://artifacts-no-kpi.elastic.co" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), - "https://snapshots-no-kpi.elastic.co" - ); - } public void testCustomDistributionUrlWithoutUrl() { Project project = ProjectBuilder.builder().build(); DistributionDownloadPlugin plugin = new DistributionDownloadPlugin(); plugin.apply(project); - assertEquals(5, project.getRepositories().size()); + assertEquals(3, project.getRepositories().size()); assertEquals( ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(), "https://artifacts.opensearch.org" @@ -123,14 +114,6 @@ public void testCustomDistributionUrlWithoutUrl() { ((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(), "https://artifacts.opensearch.org" ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(), - "https://artifacts-no-kpi.elastic.co" - ); - assertEquals( - ((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(), - "https://snapshots-no-kpi.elastic.co" - ); } public void testBadVersionFormat() { @@ -332,7 +315,8 @@ private void checkBwc( Project archiveProject = ProjectBuilder.builder().withParent(bwcProject).withName(projectName).build(); archiveProject.getConfigurations().create(config); archiveProject.getArtifacts().add(config, new File("doesnotmatter")); - createDistro(project, "distro", version.toString(), type, platform, true); + final OpenSearchDistribution distro = createDistro(project, "distro", version.toString(), type, platform, true); + distro.setArchitecture(Architecture.current()); checkPlugin(project); } diff --git a/buildSrc/src/test/java/org/opensearch/gradle/plugin/PluginBuildPluginTests.java b/buildSrc/src/test/java/org/opensearch/gradle/plugin/PluginBuildPluginTests.java index 9ed0e3e494992..8772a9fbd65ee 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/plugin/PluginBuildPluginTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/plugin/PluginBuildPluginTests.java @@ -31,6 +31,7 @@ package org.opensearch.gradle.plugin; +import org.gradle.api.tasks.bundling.AbstractArchiveTask; import org.opensearch.gradle.BwcVersions; import org.opensearch.gradle.test.GradleUnitTestCase; import org.gradle.api.Project; @@ -64,6 +65,10 @@ public void testApply() { assertNotNull("plugin extensions has the right type", project.getExtensions().findByType(PluginPropertiesExtension.class)); assertNull("plugin should not create the integTest task", project.getTasks().findByName("integTest")); + project.getTasks().withType(AbstractArchiveTask.class).forEach(t -> { + assertFalse(String.format("task '%s' should not preserve timestamps", t.getName()), t.isPreserveFileTimestamps()); + assertTrue(String.format("task '%s' should have reproducible file order", t.getName()), t.isReproducibleFileOrder()); + }); } @Ignore("https://github.com/elastic/elasticsearch/issues/47123") diff --git a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java index 06632e2dfa476..148a836f32b41 100644 --- 
a/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/pluginzip/PublishTests.java @@ -8,6 +8,8 @@ package org.opensearch.gradle.pluginzip; +import org.gradle.api.Project; +import org.gradle.testfixtures.ProjectBuilder; import org.gradle.testkit.runner.BuildResult; import org.gradle.testkit.runner.GradleRunner; import org.gradle.testkit.runner.UnexpectedBuildFailure; @@ -54,20 +56,152 @@ public void tearDown() { projectDir.delete(); } + /** + * This test is used to verify that adding the 'opensearch.pluginzip' plugin to the project + * adds some other transitive plugins and tasks under the hood. This is basically + * a behavioral test of the {@link Publish#apply(Project)} method. + * + * This is the equivalent of having a build.gradle script with just the following section: + * <pre>
+     *     plugins {
+     *       id 'opensearch.pluginzip'
+     *     }
+     * </pre>
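+     *
+     * Complete build.gradle examples that combine this plugin with a {@code publishing} block
+     * can be found in the test resources under buildSrc/src/test/resources/pluginzip.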
+ */ + @Test + public void applyZipPublicationPluginNoConfig() { + // All we do here is create an empty project and apply the Publish plugin. + Project project = ProjectBuilder.builder().build(); + project.getPluginManager().apply(Publish.class); + + // WARNING: ===================================================================== + // All the following tests will work only before the gradle project is evaluated. + // There are some methods that will cause the project to be evaluated, such as: + // project.getTasksByName() + // After the project is evaluated there are more tasks found in the project, like + // the [assemble, build, ...] and other standard tasks. + // This can potentially break in future gradle versions (?) + // =============================================================================== + + assertEquals( + "The Publish plugin is applied which adds a total of five tasks from the Nebula and MavenPublishing plugins.", + 5, + project.getTasks().size() + ); + + // Tasks applied from "nebula.maven-base-publish" + assertTrue( + project.getTasks() + .findByName("generateMetadataFileForNebulaPublication") instanceof org.gradle.api.publish.tasks.GenerateModuleMetadata + ); + assertTrue( + project.getTasks() + .findByName("generatePomFileForNebulaPublication") instanceof org.gradle.api.publish.maven.tasks.GenerateMavenPom + ); + assertTrue( + project.getTasks() + .findByName("publishNebulaPublicationToMavenLocal") instanceof org.gradle.api.publish.maven.tasks.PublishToMavenLocal + ); + + // Tasks applied from MavenPublishPlugin + assertTrue(project.getTasks().findByName("publishToMavenLocal") instanceof org.gradle.api.DefaultTask); + assertTrue(project.getTasks().findByName("publish") instanceof org.gradle.api.DefaultTask); + + // And the pluginzip publication task is missing (because no publishing was defined for it) + assertNull(project.getTasks().findByName(ZIP_PUBLISH_TASK)); + + // We have the following publishing plugins + assertEquals(4, project.getPlugins().size()); + // ... of the following types: + assertNotNull( + "Project is expected to have OpenSearch pluginzip Publish plugin", + project.getPlugins().findPlugin(org.opensearch.gradle.pluginzip.Publish.class) + ); + assertNotNull( + "Project is expected to have MavenPublishPlugin (applied from OpenSearch pluginzip plugin)", + project.getPlugins().findPlugin(org.gradle.api.publish.maven.plugins.MavenPublishPlugin.class) + ); + assertNotNull( + "Project is expected to have Publishing plugin (applied from MavenPublishPlugin)", + project.getPlugins().findPlugin(org.gradle.api.publish.plugins.PublishingPlugin.class) + ); + assertNotNull( + "Project is expected to have nebula MavenBasePublishPlugin plugin (applied from OpenSearch pluginzip plugin)", + project.getPlugins().findPlugin(nebula.plugin.publishing.maven.MavenBasePublishPlugin.class) + ); + } + + /** + * Verify that if the zip publication is configured then the relevant tasks are chained correctly. + * This tests that the dependsOn() relationships are applied correctly. + */ + @Test + public void applyZipPublicationPluginWithConfig() throws IOException, URISyntaxException, InterruptedException { + + /* ------------------------------- + // The ideal approach would be to create a project (via ProjectBuilder) with the pluginzip plugin, + // have it evaluated (API call) and then check if there are tasks that the plugin hooks up into + // and how these tasks are chained. The problem is that there is a known gradle issue (#20301) that does + // not allow for it ATM.
If, however, it is fixed in the future, the following code can + // be used... + + Project project = ProjectBuilder.builder().build(); + project.getPluginManager().apply(Publish.class); + // add publications via API + + // evaluate the project + ((DefaultProject)project).evaluate(); + + // - Check that "validatePluginZipPom" and/or "publishPluginZipPublicationToZipStagingRepository" + // tasks have dependencies on "generatePomFileForNebulaPublication". + // - Check that there is the staging repository added. + + // However, due to a known issue(1): https://github.com/gradle/gradle/issues/20301 + // it is impossible to reach individual tasks and work with them. + // (1): https://docs.gradle.org/7.4/release-notes.html#known-issues + + // I.e.: The following code throws an exception; basically any access to individual tasks fails. + project.getTasks().getByName("validatePluginZipPom"); + ------------------------------- */ + + // Instead, we run the gradle project via GradleRunner (this way we get a fully evaluated project) + // and using the minimal possible configuration (missingPOMEntity) we test that as soon as the zip publication + // configuration is specified, all the necessary tasks are hooked up and executed correctly. + // However, this does not test the execution order of the tasks. + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle", ZIP_PUBLISH_TASK/*, "-m"*/); + BuildResult result = runner.build(); + + assertEquals(SUCCESS, result.task(":" + "bundlePlugin").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "generatePomFileForNebulaPublication").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "generatePomFileForPluginZipPublication").getOutcome()); + assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome()); + } + + /** + * If the plugin is used but the relevant publication is not defined, a message is printed. + */ + @Test + public void missingPublications() throws IOException, URISyntaxException { + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPublications.gradle", "build", "-m"); + BuildResult result = runner.build(); + + assertTrue(result.getOutput().contains("Plugin 'opensearch.pluginzip' is applied but no 'pluginZip' publication is defined.")); + } + @Test public void missingGroupValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("missingGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("missingGroupValue.gradle", "build", ZIP_PUBLISH_TASK); Exception e = assertThrows(UnexpectedBuildFailure.class, runner::build); assertTrue(e.getMessage().contains("Invalid publication 'pluginZip': groupId cannot be empty.")); } /** - * This would be the most common use case where user declares Maven publication entity with basic info - * and the resulting POM file will use groupId and version values from the Gradle project object. + * This would be the most common use case, where the user declares a Maven publication entity with minimal info + * and the resulting POM file will use artifactId, groupId and version values based on the Gradle project object.
*/ @Test - public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("groupAndVersionValue.gradle"); + public void useDefaultValues() throws IOException, URISyntaxException, XmlPullParserException { + GradleRunner runner = prepareGradleRunnerFromTemplate("useDefaultValues.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -108,7 +242,7 @@ public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPu ).exists() ); - // Parse the maven file and validate the groupID + // Parse the maven file and validate default values MavenXpp3Reader reader = new MavenXpp3Reader(); Model model = reader.read( new FileReader( @@ -130,16 +264,90 @@ public void groupAndVersionValue() throws IOException, URISyntaxException, XmlPu ); assertEquals(model.getVersion(), "2.0.0.0"); assertEquals(model.getGroupId(), "org.custom.group"); + assertEquals(model.getArtifactId(), PROJECT_NAME); + assertNull(model.getName()); + assertNull(model.getDescription()); + assertEquals(model.getUrl(), "https://github.com/doe/sample-plugin"); } + /** + * If the `group` is defined in gradle's allprojects section then it does not have to be defined in publications. + */ + @Test + public void allProjectsGroup() throws IOException, URISyntaxException, XmlPullParserException { + GradleRunner runner = prepareGradleRunnerFromTemplate("allProjectsGroup.gradle", "build", ZIP_PUBLISH_TASK); + BuildResult result = runner.build(); + + /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ + assertEquals(SUCCESS, result.task(":" + "build").getOutcome()); + assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome()); + + // Parse the maven file and validate default values + MavenXpp3Reader reader = new MavenXpp3Reader(); + Model model = reader.read( + new FileReader( + new File( + projectDir.getRoot(), + String.join( + File.separator, + "build", + "local-staging-repo", + "org", + "opensearch", + PROJECT_NAME, + "2.0.0.0", + PROJECT_NAME + "-2.0.0.0.pom" + ) + ) + ) + ); + assertEquals(model.getVersion(), "2.0.0.0"); + assertEquals(model.getGroupId(), "org.opensearch"); + } + + /** + * The groupId value can be defined on several levels. This tests that the innermost level takes precedence over the other levels.
+ */ + @Test + public void groupPriorityLevel() throws IOException, URISyntaxException, XmlPullParserException { + GradleRunner runner = prepareGradleRunnerFromTemplate("groupPriorityLevel.gradle", "build", ZIP_PUBLISH_TASK); + BuildResult result = runner.build(); + + /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ + assertEquals(SUCCESS, result.task(":" + "build").getOutcome()); + assertEquals(SUCCESS, result.task(":" + ZIP_PUBLISH_TASK).getOutcome()); + + // Parse the maven file and validate default values + MavenXpp3Reader reader = new MavenXpp3Reader(); + Model model = reader.read( + new FileReader( + new File( + projectDir.getRoot(), + String.join( + File.separator, + "build", + "local-staging-repo", + "level", + "3", + PROJECT_NAME, + "2.0.0.0", + PROJECT_NAME + "-2.0.0.0.pom" + ) + ) + ) + ); + assertEquals(model.getVersion(), "2.0.0.0"); + assertEquals(model.getGroupId(), "level.3"); + } + /** * In this case the Publication entity is completely missing but still the POM file is generated using the default * values including the groupId and version values obtained from the Gradle project object. */ @Test public void missingPOMEntity() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("missingPOMEntity.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -186,7 +394,7 @@ public void missingPOMEntity() throws IOException, URISyntaxException, XmlPullPa */ @Test public void customizedGroupValue() throws IOException, URISyntaxException, XmlPullParserException { - GradleRunner runner = prepareGradleRunnerFromTemplate("customizedGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("customizedGroupValue.gradle", "build", ZIP_PUBLISH_TASK); BuildResult result = runner.build(); /** Check if build and {@value ZIP_PUBLISH_TASK} tasks have run well */ @@ -223,21 +431,94 @@ public void customizedGroupValue() throws IOException, URISyntaxException, XmlPu */ @Test public void customizedInvalidGroupValue() throws IOException, URISyntaxException { - GradleRunner runner = prepareGradleRunnerFromTemplate("customizedInvalidGroupValue.gradle"); + GradleRunner runner = prepareGradleRunnerFromTemplate("customizedInvalidGroupValue.gradle", "build", ZIP_PUBLISH_TASK); Exception e = assertThrows(UnexpectedBuildFailure.class, runner::build); assertTrue( e.getMessage().contains("Invalid publication 'pluginZip': groupId ( ) is not a valid Maven identifier ([A-Za-z0-9_\\-.]+).") ); } - private GradleRunner prepareGradleRunnerFromTemplate(String templateName) throws IOException, URISyntaxException { + /** + * This test verifies that use of the pluginZip does not clash with other maven publication plugins. + * It covers the case when the user calls the "publishToMavenLocal" task. + */ + @Test + public void publishToMavenLocal() throws IOException, URISyntaxException, XmlPullParserException { + // By default, the "publishToMavenLocal" task publishes artifacts to a local m2 repo, typically + // found in `~/.m2/repository`. But this is not practical for this unit test at all. We need to point + // the 'maven-publish' plugin to a custom m2 repo located in a temporary directory associated with this + // test case instead.
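+        // For reference, a repository override of that kind would look roughly like the
+        // following (hypothetical sketch only; as described below, it did not work as expected here):
+        //   publishing {
+        //     repositories {
+        //       maven { url = layout.buildDirectory.dir('local-staging-repo') }
+        //     }
+        //   }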
+ // + // According to Gradle documentation this should be possible by proper configuration of the publishing + // task (https://docs.gradle.org/current/userguide/publishing_maven.html#publishing_maven:install). + // But for some reason this never worked as expected and artifacts created during this test case + // were always pushed into the default local m2 repository (i.e. `~/.m2/repository`). + // The only workaround that seems to work is to pass the "-Dmaven.repo.local" property via a runner argument. + // (Kudos to: https://stackoverflow.com/questions/72265294/gradle-publishtomavenlocal-specify-custom-directory) + // + // The temporary directory that is used as the local m2 repository is created via the task "prepareLocalMVNRepo". + GradleRunner runner = prepareGradleRunnerFromTemplate( + "publishToMavenLocal.gradle", + String.join(File.separator, "-Dmaven.repo.local=" + projectDir.getRoot(), "build", "local-staging-repo"), + "build", + "prepareLocalMVNRepo", + "publishToMavenLocal" + ); + BuildResult result = runner.build(); + + assertEquals(SUCCESS, result.task(":" + "build").getOutcome()); + assertEquals(SUCCESS, result.task(":" + "publishToMavenLocal").getOutcome()); + + // Parse the maven file and validate it + MavenXpp3Reader reader = new MavenXpp3Reader(); + Model model = reader.read( + new FileReader( + new File( + projectDir.getRoot(), + String.join( + File.separator, + "build", + "local-staging-repo", + "org", + "custom", + "group", + PROJECT_NAME, + "2.0.0.0", + PROJECT_NAME + "-2.0.0.0.pom" + ) + ) + ) + ); + + // The "publishToMavenLocal" task will run ALL maven publications, hence we can expect the ZIP publication + // present as well: https://docs.gradle.org/current/userguide/publishing_maven.html#publishing_maven:tasks + assertEquals(model.getArtifactId(), PROJECT_NAME); + assertEquals(model.getGroupId(), "org.custom.group"); + assertEquals(model.getVersion(), "2.0.0.0"); + assertEquals(model.getPackaging(), "zip"); + + // We have two publications in the build.gradle file, both of which are "MavenPublication" based. + // Both the mavenJava and pluginZip publications publish to the same location (coordinates) and + // artifacts (the POM file) overwrite each other. However, we can verify that the Zip plugin is + // the last one and "wins" over the mavenJava. + assertEquals(model.getDescription(), "pluginZip publication"); + } + + /** + * A helper method for preparing the GradleRunner used by the test cases above. + * + * @param templateName The name of the file (from "pluginzip" folder) to use as a build.gradle for the test + * @param gradleArguments Optional CLI parameters to pass into Gradle runner + */ + private GradleRunner prepareGradleRunnerFromTemplate(String templateName, String...
gradleArguments) throws IOException, + URISyntaxException { useTemplateFile(projectDir.newFile("build.gradle"), templateName); prepareGradleFilesAndSources(); GradleRunner runner = GradleRunner.create() .forwardOutput() .withPluginClasspath() - .withArguments("build", ZIP_PUBLISH_TASK) + .withArguments(gradleArguments) .withProjectDir(projectDir.getRoot()); return runner; @@ -246,7 +527,7 @@ private GradleRunner prepareGradleRunnerFromTemplate(String templateName) throws private void prepareGradleFilesAndSources() throws IOException { // A dummy "source" file that is processed with bundlePlugin and put into a ZIP artifact file File bundleFile = new File(projectDir.getRoot(), PROJECT_NAME + "-source.txt"); - Path zipFile = Files.createFile(bundleFile.toPath()); + Files.createFile(bundleFile.toPath()); // Setting a project name via settings.gradle file writeString(projectDir.newFile("settings.gradle"), "rootProject.name = '" + PROJECT_NAME + "'"); } diff --git a/buildSrc/src/test/java/org/opensearch/gradle/precommit/ForbiddenPatternsTaskTests.java b/buildSrc/src/test/java/org/opensearch/gradle/precommit/ForbiddenPatternsTaskTests.java index ea4db8954bca4..6ce2e70f68381 100644 --- a/buildSrc/src/test/java/org/opensearch/gradle/precommit/ForbiddenPatternsTaskTests.java +++ b/buildSrc/src/test/java/org/opensearch/gradle/precommit/ForbiddenPatternsTaskTests.java @@ -104,10 +104,6 @@ private ForbiddenPatternsTask createTask(Project project) { return project.getTasks().create("forbiddenPatterns", ForbiddenPatternsTask.class); } - private ForbiddenPatternsTask createTask(Project project, String taskName) { - return project.getTasks().create(taskName, ForbiddenPatternsTask.class); - } - private void writeSourceFile(Project project, String name, String... lines) throws IOException { File file = new File(project.getProjectDir(), name); file.getParentFile().mkdirs(); diff --git a/buildSrc/src/test/resources/pluginzip/allProjectsGroup.gradle b/buildSrc/src/test/resources/pluginzip/allProjectsGroup.gradle new file mode 100644 index 0000000000000..80638107c86e1 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/allProjectsGroup.gradle @@ -0,0 +1,28 @@ +plugins { + id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +allprojects { + group = 'org.opensearch' +} + +publishing { + publications { + pluginZip(MavenPublication) { publication -> + pom { + name = "sample-plugin" + description = "pluginDescription" + } + } + } +} diff --git a/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle index 1bde3edda2d91..94f03132faa80 100644 --- a/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/customizedGroupValue.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle index b6deeeb12ca6a..6f2abbdacd6d4 100644 --- a/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/customizedInvalidGroupValue.gradle @@ 
-1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/groupPriorityLevel.gradle b/buildSrc/src/test/resources/pluginzip/groupPriorityLevel.gradle new file mode 100644 index 0000000000000..4da02c9f191d8 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/groupPriorityLevel.gradle @@ -0,0 +1,30 @@ +plugins { + id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +allprojects { + group = 'level.1' +} + +publishing { + publications { + pluginZip(MavenPublication) { publication -> + groupId = "level.2" + pom { + name = "sample-plugin" + description = "pluginDescription" + groupId = "level.3" + } + } + } +} diff --git a/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle b/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle index 602c178ea1a5b..8fcd1d6600b5a 100644 --- a/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/missingGroupValue.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle b/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle index 2cc67c2e98954..394bc53622769 100644 --- a/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle +++ b/buildSrc/src/test/resources/pluginzip/missingPOMEntity.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } diff --git a/buildSrc/src/test/resources/pluginzip/missingPublications.gradle b/buildSrc/src/test/resources/pluginzip/missingPublications.gradle new file mode 100644 index 0000000000000..ba6b33ad86463 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/missingPublications.gradle @@ -0,0 +1,21 @@ +plugins { + id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +group="org.custom.group" +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +//publishing { +// publications { +// pluginZip(MavenPublication) { +// } +// } +//} diff --git a/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle b/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle new file mode 100644 index 0000000000000..8d248dbe08a42 --- /dev/null +++ b/buildSrc/src/test/resources/pluginzip/publishToMavenLocal.gradle @@ -0,0 +1,47 @@ +plugins { + // The java-gradle-plugin adds a new publication called 'pluginMaven' that causes some warnings because it + // clashes a bit with other publications defined in this file. If you are running at the --info level then you can + // expect some warning like the following: + // "Multiple publications with coordinates 'org.custom.group:sample-plugin:2.0.0.0' are published to repository 'mavenLocal'." 
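+    // Should those warnings ever become a problem, the 'pluginMaven' publication created by
+    // java-gradle-plugin can most likely be switched off through its extension, e.g.:
+    //   gradlePlugin { automatedPublishing = false }
+    // (a hypothetical tweak only; this build file simply keeps the defaults)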
+ id 'java-gradle-plugin' + id 'opensearch.pluginzip' +} + +group="org.custom.group" +version='2.0.0.0' + +// A bundlePlugin task mockup +tasks.register('bundlePlugin', Zip.class) { + archiveFileName = "sample-plugin-${version}.zip" + destinationDirectory = layout.buildDirectory.dir('distributions') + from layout.projectDirectory.file('sample-plugin-source.txt') +} + +// A task to prepare directory for a temporary maven local repository +tasks.register('prepareLocalMVNRepo') { + dependsOn ':bundlePlugin' + doFirst { + File localMVNRepo = new File (layout.buildDirectory.get().getAsFile().getPath(), 'local-staging-repo') + System.out.println('Creating temporary folder for mavenLocal repo: '+ localMVNRepo.toString()) + System.out.println("Success: " + localMVNRepo.mkdir()) + } +} + +publishing { + publications { + // Plugin zip publication + pluginZip(MavenPublication) { + pom { + url = 'http://www.example.com/library' + description = 'pluginZip publication' + } + } + // Standard maven publication + mavenJava(MavenPublication) { + pom { + url = 'http://www.example.com/library' + description = 'mavenJava publication' + } + } + } +} diff --git a/buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle b/buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle similarity index 90% rename from buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle rename to buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle index bdab385f6082c..52f1c042fd47c 100644 --- a/buildSrc/src/test/resources/pluginzip/groupAndVersionValue.gradle +++ b/buildSrc/src/test/resources/pluginzip/useDefaultValues.gradle @@ -1,6 +1,5 @@ plugins { id 'java-gradle-plugin' - id 'nebula.maven-base-publish' id 'opensearch.pluginzip' } @@ -18,8 +17,8 @@ publishing { publications { pluginZip(MavenPublication) { pom { - name = "sample-plugin" - description = "pluginDescription" +// name = "plugin name" +// description = "plugin description" licenses { license { name = "The Apache License, Version 2.0" diff --git a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java index 0c01b6d519d62..163a903d31832 100644 --- a/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java +++ b/buildSrc/src/testFixtures/java/org/opensearch/gradle/test/JUnit3MethodProvider.java @@ -59,7 +59,7 @@ public Collection getTestMethods(Class suiteClass, ClassModel classMo if (m.getName().startsWith("test") && Modifier.isPublic(m.getModifiers()) && !Modifier.isStatic(m.getModifiers()) - && m.getParameterTypes().length == 0) { + && m.getParameterCount() == 0) { result.add(m); } } diff --git a/buildSrc/src/testKit/thirdPartyAudit/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/build.gradle index 2c86d28cf0206..537bf3c1fad71 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/build.gradle @@ -40,7 +40,7 @@ repositories { } dependencies { - forbiddenApisCliJar 'de.thetaphi:forbiddenapis:3.3' + forbiddenApisCliJar 'de.thetaphi:forbiddenapis:3.4' jdkJarHell 'org.opensearch:opensearch-core:current' compileOnly "org.${project.properties.compileOnlyGroup}:${project.properties.compileOnlyVersion}" implementation "org.${project.properties.compileGroup}:${project.properties.compileVersion}" diff --git a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle index 
f0f9e74ba96a2..d67c33cb98193 100644 --- a/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle +++ b/buildSrc/src/testKit/thirdPartyAudit/sample_jars/build.gradle @@ -16,7 +16,7 @@ repositories { mavenCentral() } dependencies { - implementation 'org.apache.logging.log4j:log4j-core:2.18.0' + implementation 'org.apache.logging.log4j:log4j-core:2.19.0' } ["0.0.1", "0.0.2"].forEach { v -> diff --git a/buildSrc/version.properties b/buildSrc/version.properties index 072dcc4578977..d65ea2ad6dbf0 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,30 +1,35 @@ opensearch = 3.0.0 -lucene = 9.4.0-snapshot-ddf0d0a +lucene = 9.5.0-snapshot-a4ef70f bundled_jdk_vendor = adoptium -bundled_jdk = 17.0.4+8 - +bundled_jdk = 19.0.1+10 # optional dependencies spatial4j = 0.7 jts = 1.15.0 -jackson = 2.13.3 -jackson_databind = 2.13.3 -snakeyaml = 1.31 +jackson = 2.14.1 +jackson_databind = 2.14.1 +snakeyaml = 1.32 icu4j = 70.1 supercsv = 2.4.0 +# Update to 2.17.2+ is breaking OpenSearchJsonLayout (see https://issues.apache.org/jira/browse/LOG4J2-3562) log4j = 2.17.1 slf4j = 1.7.36 -asm = 9.3 +asm = 9.4 +jettison = 1.5.1 +woodstox = 6.4.0 +kotlin = 1.7.10 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.5.0 -netty = 4.1.79.Final +netty = 4.1.84.Final joda = 2.10.13 # client dependencies +httpclient5 = 5.1.4 +httpcore5 = 5.1.5 httpclient = 4.5.13 httpcore = 4.4.15 httpasyncclient = 4.1.5 @@ -42,9 +47,10 @@ bouncycastle=1.70 randomizedrunner = 2.7.1 junit = 4.13.2 hamcrest = 2.1 +# Update to 4.8.0 is using reflection without SecurityManager checks (fails with java.security.AccessControlException) mockito = 4.7.0 objenesis = 3.2 -bytebuddy = 1.12.12 +bytebuddy = 1.12.18 # benchmark dependencies jmh = 1.35 diff --git a/client/benchmark/src/main/java/org/opensearch/client/benchmark/rest/RestClientBenchmark.java b/client/benchmark/src/main/java/org/opensearch/client/benchmark/rest/RestClientBenchmark.java index d2d7163b8dee2..e8dcff814603d 100644 --- a/client/benchmark/src/main/java/org/opensearch/client/benchmark/rest/RestClientBenchmark.java +++ b/client/benchmark/src/main/java/org/opensearch/client/benchmark/rest/RestClientBenchmark.java @@ -31,10 +31,10 @@ package org.opensearch.client.benchmark.rest; -import org.apache.http.HttpHeaders; -import org.apache.http.HttpHost; -import org.apache.http.HttpStatus; -import org.apache.http.message.BasicHeader; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.HttpStatus; +import org.apache.hc.core5.http.message.BasicHeader; import org.opensearch.OpenSearchException; import org.opensearch.client.Request; import org.opensearch.client.Response; diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 07147ce81b72e..7fa2855d85487 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -104,3 +104,9 @@ testClusters.all { extraConfigFile nodeTrustStore.name, nodeTrustStore extraConfigFile pkiTrustCert.name, pkiTrustCert } + +thirdPartyAudit.ignoreMissingClasses( + 'org.conscrypt.Conscrypt', + 'org.slf4j.Logger', + 'org.slf4j.LoggerFactory' +) diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java index 37a1ab8812845..4ff8e75b521b6 100644 --- 
a/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/ClusterRequestConverters.java @@ -32,10 +32,10 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import org.apache.hc.client5.http.classic.methods.HttpPut; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.settings.ClusterGetSettingsRequest; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java index 3a5384f23b90e..ca9154340a660 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IndicesRequestConverters.java @@ -32,11 +32,11 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; import org.opensearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java index 2504dec3af36e..4c044413642ac 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/IngestRequestConverters.java @@ -32,10 +32,10 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; import org.opensearch.action.ingest.DeletePipelineRequest; import org.opensearch.action.ingest.GetPipelineRequest; import org.opensearch.action.ingest.PutPipelineRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java index eedc27d1d2ea7..88e3a3a904830 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java 
+++ b/client/rest-high-level/src/main/java/org/opensearch/client/RequestConverters.java @@ -32,14 +32,14 @@ package org.opensearch.client; -import org.apache.http.HttpEntity; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NByteArrayEntity; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.io.entity.ByteArrayEntity; import org.apache.lucene.util.BytesRef; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; @@ -269,7 +269,7 @@ static Request bulk(BulkRequest bulkRequest) throws IOException { } } request.addParameters(parameters.asMap()); - request.setEntity(new NByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType)); + request.setEntity(new ByteArrayEntity(content.toByteArray(), 0, content.size(), requestContentType)); return request; } @@ -358,7 +358,7 @@ static Request index(IndexRequest indexRequest) { BytesRef source = indexRequest.source().toBytesRef(); ContentType contentType = createContentType(indexRequest.getContentType()); request.addParameters(parameters.asMap()); - request.setEntity(new NByteArrayEntity(source.bytes, source.offset, source.length, contentType)); + request.setEntity(new ByteArrayEntity(source.bytes, source.offset, source.length, contentType)); return request; } @@ -498,6 +498,10 @@ static Request deleteAllPits() { return new Request(HttpDelete.METHOD_NAME, "/_search/point_in_time/_all"); } + static Request getAllPits() { + return new Request(HttpGet.METHOD_NAME, "/_search/point_in_time/_all"); + } + static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException { Request request = new Request(HttpPost.METHOD_NAME, "/_msearch"); @@ -510,7 +514,7 @@ static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOExcep XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent); request.addParameters(params.asMap()); - request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type()))); + request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type()))); return request; } @@ -545,7 +549,7 @@ static Request multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplat XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); byte[] source = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, xContent); - request.setEntity(new NByteArrayEntity(source, createContentType(xContent.type()))); + request.setEntity(new ByteArrayEntity(source, createContentType(xContent.type()))); return request; } @@ -813,7 +817,7 @@ static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType, ToXContent.Params toXContentParams) throws IOException { BytesRef source = 
XContentHelper.toXContent(toXContent, xContentType, toXContentParams, false).toBytesRef(); - return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); + return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); } static String endpoint(String index, String id) { diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java index 0c73c65f6175f..27f13fc3c00c4 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/RestHighLevelClient.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpEntity; +import org.apache.hc.core5.http.HttpEntity; import org.opensearch.OpenSearchException; import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; @@ -63,6 +63,7 @@ import org.opensearch.action.search.CreatePitResponse; import org.opensearch.action.search.DeletePitRequest; import org.opensearch.action.search.DeletePitResponse; +import org.opensearch.action.search.GetAllPitNodesResponse; import org.opensearch.action.search.MultiSearchRequest; import org.opensearch.action.search.MultiSearchResponse; import org.opensearch.action.search.SearchRequest; @@ -1368,6 +1369,40 @@ public final Cancellable deleteAllPitsAsync(RequestOptions options, ActionListen ); } + /** + * Get all point in time searches using the list all PITs API + * + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + */ + public final GetAllPitNodesResponse getAllPits(RequestOptions options) throws IOException { + return performRequestAndParseEntity( + new MainRequest(), + (request) -> RequestConverters.getAllPits(), + options, + GetAllPitNodesResponse::fromXContent, + emptySet() + ); + } + + /** + * Asynchronously get all point in time searches using the list all PITs API + * + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + * @return cancellable that may be used to cancel the request + */ + public final Cancellable getAllPitsAsync(RequestOptions options, ActionListener<GetAllPitNodesResponse> listener) { + return performRequestAsyncAndParseEntity( + new MainRequest(), + (request) -> RequestConverters.getAllPits(), + options, + GetAllPitNodesResponse::fromXContent, + listener, + emptySet() + ); + } + /** * Clears one or more scroll ids using the Clear Scroll API.
* @@ -2185,9 +2220,9 @@ protected final Resp parseEntity(final HttpEntity entity, final CheckedFu if (entity.getContentType() == null) { throw new IllegalStateException("OpenSearch didn't return the [Content-Type] header, unable to parse response body"); } - XContentType xContentType = XContentType.fromMediaType(entity.getContentType().getValue()); + XContentType xContentType = XContentType.fromMediaType(entity.getContentType()); if (xContentType == null) { - throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType().getValue()); + throw new IllegalStateException("Unsupported Content-Type: " + entity.getContentType()); } try (XContentParser parser = xContentType.xContent().createParser(registry, DEPRECATION_HANDLER, entity.getContent())) { return entityParser.apply(parser); diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java index 3d44820966608..263d7db82ba08 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/SnapshotRequestConverters.java @@ -32,10 +32,10 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpDelete; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; import org.opensearch.action.admin.cluster.repositories.cleanup.CleanupRepositoryRequest; import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest; import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/TasksRequestConverters.java b/client/rest-high-level/src/main/java/org/opensearch/client/TasksRequestConverters.java index ff89950f37cb9..78a74ca04ff9b 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/TasksRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/TasksRequestConverters.java @@ -32,8 +32,8 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPost; import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.opensearch.client.RequestConverters.EndpointBuilder; import org.opensearch.client.tasks.CancelTasksRequest; diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/core/CountResponse.java b/client/rest-high-level/src/main/java/org/opensearch/client/core/CountResponse.java index 1d67a50f68f40..ca4446258446b 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/core/CountResponse.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/core/CountResponse.java @@ -233,7 +233,7 @@ static ShardStats fromXContent(XContentParser parser) throws IOException { parser.skipChildren(); } } - return new ShardStats(successfulShards, totalShards, skippedShards, failures.toArray(new ShardSearchFailure[failures.size()])); + return new 
ShardStats(successfulShards, totalShards, skippedShards, failures.toArray(new ShardSearchFailure[0])); } @Override diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java index 5f43ec7f1d0fe..cd0eb8881ab0c 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/PutIndexTemplateRequest.java @@ -435,7 +435,7 @@ public PutIndexTemplateRequest alias(Alias alias) { @Override public String[] indices() { - return indexPatterns.toArray(new String[indexPatterns.size()]); + return indexPatterns.toArray(new String[0]); } @Override diff --git a/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java b/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java index 2a22c8d7d19e9..ebbd813c9fe15 100644 --- a/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java +++ b/client/rest-high-level/src/main/java/org/opensearch/client/indices/ResizeRequest.java @@ -39,6 +39,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.ToXContentObject; import org.opensearch.common.xcontent.XContentBuilder; +import org.opensearch.common.unit.ByteSizeValue; import java.io.IOException; import java.util.Collections; @@ -58,6 +59,7 @@ public class ResizeRequest extends TimedRequest implements Validatable, ToXConte private final String targetIndex; private Settings settings = Settings.EMPTY; private Set<Alias> aliases = new HashSet<>(); + private ByteSizeValue maxShardSize; /** * Creates a new resize request @@ -155,6 +157,24 @@ public ActiveShardCount getWaitForActiveShards() { return waitForActiveShards; } + /** + * Sets the maximum size of a primary shard in the new shrunken index. + * This parameter can be used to calculate the lowest factor of the source index's shard count + * that satisfies the maximum shard size requirement (for example, with an 8-shard source index + * holding 40gb in total and a 10gb maximum, the lowest such factor is 4 target shards of 10gb each). + * + * @param maxShardSize the maximum size of a primary shard in the new shrunken index + */ + public void setMaxShardSize(ByteSizeValue maxShardSize) { + this.maxShardSize = maxShardSize; + } + + /** + * Returns the maximum size of a primary shard in the new shrunken index. + */ + public ByteSizeValue getMaxShardSize() { + return maxShardSize; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt index 68dc509e5ff27..0d7749b39fb91 100644 --- a/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt +++ b/client/rest-high-level/src/main/resources/forbidden/rest-high-level-signatures.txt @@ -15,10 +15,9 @@ # language governing permissions and limitations under the License.
@defaultMessage Use Request#createContentType(XContentType) to be sure to pass the right MIME type -org.apache.http.entity.ContentType#create(java.lang.String) -org.apache.http.entity.ContentType#create(java.lang.String,java.lang.String) -org.apache.http.entity.ContentType#create(java.lang.String,java.nio.charset.Charset) -org.apache.http.entity.ContentType#create(java.lang.String,org.apache.http.NameValuePair[]) +org.apache.hc.core5.http.ContentType#create(java.lang.String) +org.apache.hc.core5.http.ContentType#create(java.lang.String,java.lang.String) +org.apache.hc.core5.http.ContentType#create(java.lang.String,java.nio.charset.Charset) @defaultMessage ES's logging infrastructure uses log4j2 which we don't want to force on high level rest client users org.opensearch.common.logging.DeprecationLogger @@ -30,7 +29,3 @@ org.opensearch.common.logging.PrefixLogger @defaultMessage We can't rely on log4j2 being on the classpath so don't log deprecations! org.opensearch.common.xcontent.LoggingDeprecationHandler - -@defaultMessage Use Nonblocking org.apache.http.nio.entity.NByteArrayEntity -org.apache.http.entity.ByteArrayEntity -org.apache.http.entity.StringEntity diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java index 71b869fb59e7b..82d2cbe9149ca 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterClientIT.java @@ -32,7 +32,8 @@ package org.opensearch.client; -import org.apache.http.util.EntityUtils; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.entity.EntityUtils; import org.opensearch.OpenSearchException; import org.opensearch.OpenSearchStatusException; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; @@ -220,7 +221,7 @@ public void testClusterHealthGreen() throws IOException { assertThat(response.getStatus(), equalTo(ClusterHealthStatus.GREEN)); } - public void testClusterHealthYellowClusterLevel() throws IOException { + public void testClusterHealthYellowClusterLevel() throws IOException, ParseException { createIndex("index", Settings.EMPTY); createIndex("index2", Settings.EMPTY); ClusterHealthRequest request = new ClusterHealthRequest(); @@ -231,7 +232,7 @@ public void testClusterHealthYellowClusterLevel() throws IOException { assertThat(response.getIndices().size(), equalTo(0)); } - public void testClusterHealthYellowIndicesLevel() throws IOException { + public void testClusterHealthYellowIndicesLevel() throws IOException, ParseException { String firstIndex = "index"; String secondIndex = "index2"; // including another index that we do not assert on, to ensure that we are not diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java index 27adc18fd37b8..f201599632969 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/ClusterRequestConvertersTests.java @@ -32,8 +32,6 @@ package org.opensearch.client; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPut; import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import 
org.opensearch.action.admin.cluster.settings.ClusterGetSettingsRequest; import org.opensearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; @@ -44,6 +42,8 @@ import org.opensearch.common.Priority; import org.opensearch.common.util.CollectionUtils; import org.opensearch.test.OpenSearchTestCase; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpPut; import org.hamcrest.CoreMatchers; import org.junit.Assert; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/CustomRestHighLevelClientTests.java index 1d94f190c611c..972c96999945f 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/CustomRestHighLevelClientTests.java @@ -32,15 +32,14 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.ProtocolVersion; -import org.apache.http.RequestLine; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.entity.ContentType; -import org.apache.http.message.BasicRequestLine; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.nio.entity.NByteArrayEntity; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.ProtocolVersion; +import org.apache.hc.core5.http.io.entity.ByteArrayEntity; +import org.apache.hc.core5.http.message.RequestLine; +import org.apache.hc.core5.http.message.StatusLine; import org.apache.lucene.util.BytesRef; import org.opensearch.Build; import org.opensearch.Version; @@ -172,13 +171,13 @@ private Response mockPerformRequest(Request request) throws IOException { when(mockResponse.getHost()).thenReturn(new HttpHost("localhost", 9200)); ProtocolVersion protocol = new ProtocolVersion("HTTP", 1, 1); - when(mockResponse.getStatusLine()).thenReturn(new BasicStatusLine(protocol, 200, "OK")); + when(mockResponse.getStatusLine()).thenReturn(new StatusLine(protocol, 200, "OK")); MainResponse response = new MainResponse(httpHeader.getValue(), Version.CURRENT, ClusterName.DEFAULT, "_na", Build.CURRENT); BytesRef bytesRef = XContentHelper.toXContent(response, XContentType.JSON, false).toBytesRef(); - when(mockResponse.getEntity()).thenReturn(new NByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); + when(mockResponse.getEntity()).thenReturn(new ByteArrayEntity(bytesRef.bytes, ContentType.APPLICATION_JSON)); - RequestLine requestLine = new BasicRequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol); + RequestLine requestLine = new RequestLine(HttpGet.METHOD_NAME, ENDPOINT, protocol); when(mockResponse.getRequestLine()).thenReturn(requestLine); return mockResponse; diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/HighLevelRestClientCompressionIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/HighLevelRestClientCompressionIT.java index c0c03ed1d0e7c..6985353806a01 100644 --- a/client/rest-high-level/src/test/java/org/opensearch/client/HighLevelRestClientCompressionIT.java +++ b/client/rest-high-level/src/test/java/org/opensearch/client/HighLevelRestClientCompressionIT.java @@ -31,13 +31,22 @@ package org.opensearch.client; -import org.apache.http.HttpHeaders; -import 
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
+import org.apache.hc.client5.http.entity.GzipCompressingEntity;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpClient;
+import org.apache.hc.client5.http.impl.classic.CloseableHttpResponse;
+import org.apache.hc.client5.http.impl.classic.HttpClients;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpHeaders;
+import org.apache.hc.core5.http.io.entity.StringEntity;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.action.search.SearchResponse;

 import java.io.IOException;
+import java.net.URI;
+import java.net.URISyntaxException;

 import static org.hamcrest.Matchers.equalTo;
@@ -62,4 +71,32 @@ public void testCompressesResponseIfRequested() throws IOException {
         assertEquals(SAMPLE_DOCUMENT, searchResponse.getHits().getHits()[0].getSourceAsString());
     }

+    /**
+     * The default CloseableHttpAsyncClient does not support compression out of the box (and the same applies to
+     * RestClient and RestHighLevelClient). To check that compression works on both sides, the request is crafted
+     * with CloseableHttpClient instead, which uses compression by default.
+     */
+    public void testCompressesRequest() throws IOException, URISyntaxException {
+        try (CloseableHttpClient client = HttpClients.custom().build()) {
+            final Node node = client().getNodes().iterator().next();
+            final URI baseUri = new URI(node.getHost().toURI());
+
+            final HttpPut index = new HttpPut(baseUri.resolve("/company/_doc/1"));
+            index.setEntity(new GzipCompressingEntity(new StringEntity(SAMPLE_DOCUMENT, ContentType.APPLICATION_JSON)));
+            try (CloseableHttpResponse response = client.execute(index)) {
+                assertThat(response.getCode(), equalTo(201));
+            }
+
+            final HttpGet refresh = new HttpGet(baseUri.resolve("/_refresh"));
+            try (CloseableHttpResponse response = client.execute(refresh)) {
+                assertThat(response.getCode(), equalTo(200));
+            }
+
+            final HttpPost search = new HttpPost(baseUri.resolve("/_search"));
+            search.setEntity(new GzipCompressingEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)));
+            try (CloseableHttpResponse response = client.execute(search)) {
+                assertThat(response.getCode(), equalTo(200));
+            }
+        }
+    }
 }
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
index f9c8851f8839e..750b0c15e9c14 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesClientIT.java
@@ -32,9 +32,9 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
 import org.opensearch.OpenSearchException;
 import org.opensearch.OpenSearchStatusException;
 import org.opensearch.action.admin.indices.alias.Alias;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
index fdb5f2843b44d..7ed06129dc893 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/IndicesRequestConvertersTests.java
@@ -32,11 +32,11 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
+import org.apache.hc.client5.http.classic.methods.HttpDelete;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpHead;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
 import org.apache.lucene.tests.util.LuceneTestCase;
 import org.opensearch.action.ActionRequestValidationException;
 import org.opensearch.action.admin.indices.alias.Alias;
@@ -79,6 +79,7 @@
 import org.opensearch.common.xcontent.XContentType;
 import org.opensearch.test.OpenSearchTestCase;
 import org.junit.Assert;
+import org.opensearch.common.unit.ByteSizeValue;

 import java.io.IOException;
 import java.util.Arrays;
@@ -701,6 +702,8 @@ private void resizeTest(ResizeType resizeType, CheckedFunction
-    protected static Map<String, Object> toMap(Response response) throws IOException {
+    protected static Map<String, Object> toMap(Response response) throws IOException, OpenSearchParseException, ParseException {
         return XContentHelper.convertToMap(JsonXContent.jsonXContent, EntityUtils.toString(response.getEntity()), false);
     }

diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java
index 09ef90cef144d..6f66a5279afa3 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/PingAndInfoIT.java
@@ -32,7 +32,7 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
 import org.opensearch.client.core.MainResponse;

 import java.io.IOException;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
index 395ec6e46a7b3..1f10deb400ecc 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/PitIT.java
@@ -8,8 +8,8 @@

 package org.opensearch.client;

-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
 import org.junit.Before;
 import org.opensearch.OpenSearchStatusException;
 import org.opensearch.action.ActionListener;
@@ -18,12 +18,15 @@
 import org.opensearch.action.search.DeletePitInfo;
 import org.opensearch.action.search.DeletePitRequest;
 import org.opensearch.action.search.DeletePitResponse;
+import org.opensearch.action.search.GetAllPitNodesResponse;
 import org.opensearch.common.unit.TimeValue;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 /**
  * Tests point in time API with rest high level client
  */
@@ -52,21 +55,24 @@ public void indexDocuments() throws IOException {

     public void testCreateAndDeletePit() throws IOException {
         CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index");
-        CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
-        assertTrue(pitResponse.getId() != null);
-        assertEquals(1, pitResponse.getTotalShards());
-        assertEquals(1, pitResponse.getSuccessfulShards());
-        assertEquals(0, pitResponse.getFailedShards());
-        assertEquals(0, pitResponse.getSkippedShards());
+        CreatePitResponse createPitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
+        assertTrue(createPitResponse.getId() != null);
+        assertEquals(1, createPitResponse.getTotalShards());
+        assertEquals(1, createPitResponse.getSuccessfulShards());
+        assertEquals(0, createPitResponse.getFailedShards());
+        assertEquals(0, createPitResponse.getSkippedShards());
+        GetAllPitNodesResponse getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT);
+        List<String> pits = getAllPitResponse.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList());
+        assertTrue(pits.contains(createPitResponse.getId()));
         List<String> pitIds = new ArrayList<>();
-        pitIds.add(pitResponse.getId());
+        pitIds.add(createPitResponse.getId());
         DeletePitRequest deletePitRequest = new DeletePitRequest(pitIds);
         DeletePitResponse deletePitResponse = execute(deletePitRequest, highLevelClient()::deletePit, highLevelClient()::deletePitAsync);
         assertTrue(deletePitResponse.getDeletePitResults().get(0).isSuccessful());
-        assertTrue(deletePitResponse.getDeletePitResults().get(0).getPitId().equals(pitResponse.getId()));
+        assertTrue(deletePitResponse.getDeletePitResults().get(0).getPitId().equals(createPitResponse.getId()));
     }

-    public void testDeleteAllPits() throws IOException {
+    public void testDeleteAllAndListAllPits() throws IOException, InterruptedException {
         CreatePitRequest pitRequest = new CreatePitRequest(new TimeValue(1, TimeUnit.DAYS), true, "index");
         CreatePitResponse pitResponse = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
         CreatePitResponse pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
@@ -80,9 +86,16 @@ public void testDeleteAllPits() throws IOException {
         pitResponse1 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
         assertTrue(pitResponse.getId() != null);
         assertTrue(pitResponse1.getId() != null);
+        GetAllPitNodesResponse getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT);
+
+        List<String> pits = getAllPitResponse.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList());
+        assertTrue(pits.contains(pitResponse.getId()));
+        assertTrue(pits.contains(pitResponse1.getId()));
+        CountDownLatch countDownLatch = new CountDownLatch(1);
         ActionListener<DeletePitResponse> deletePitListener = new ActionListener<>() {
             @Override
             public void onResponse(DeletePitResponse response) {
+                countDownLatch.countDown();
                 for (DeletePitInfo deletePitInfo : response.getDeletePitResults()) {
                     assertTrue(deletePitInfo.isSuccessful());
                 }
@@ -90,13 +103,34 @@ public void onResponse(DeletePitResponse response) {

             @Override
             public void onFailure(Exception e) {
+                countDownLatch.countDown();
                 if (!(e instanceof OpenSearchStatusException)) {
                     throw new AssertionError("Delete all failed");
                 }
             }
         };
+        final CreatePitResponse pitResponse3 = execute(pitRequest, highLevelClient()::createPit, highLevelClient()::createPitAsync);
+
+        ActionListener<GetAllPitNodesResponse> getPitsListener = new ActionListener<GetAllPitNodesResponse>() {
+            @Override
+            public void onResponse(GetAllPitNodesResponse response) {
+                List<String> pits = response.getPitInfos().stream().map(r -> r.getPitId()).collect(Collectors.toList());
+                assertTrue(pits.contains(pitResponse3.getId()));
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                if (!(e instanceof OpenSearchStatusException)) {
+                    throw new AssertionError("List all PITs failed", e);
+                }
+            }
+        };
+        highLevelClient().getAllPitsAsync(RequestOptions.DEFAULT, getPitsListener);
         highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
+        assertTrue(countDownLatch.await(10, TimeUnit.SECONDS));
         // validate no pits case
+        getAllPitResponse = highLevelClient().getAllPits(RequestOptions.DEFAULT);
+        assertTrue(getAllPitResponse.getPitInfos().size() == 0);
         highLevelClient().deleteAllPitsAsync(RequestOptions.DEFAULT, deletePitListener);
     }
 }
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
index ee5795deb165d..576fe02718ba3 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RequestConvertersTests.java
@@ -32,14 +32,6 @@
 package org.opensearch.client;

-import org.apache.http.HttpEntity;
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpHead;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
-import org.apache.http.nio.entity.NByteArrayEntity;
-import org.apache.http.util.EntityUtils;
 import org.opensearch.action.DocWriteRequest;
 import org.opensearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
 import org.opensearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
@@ -120,6 +112,14 @@
 import org.opensearch.tasks.TaskId;
 import org.opensearch.test.OpenSearchTestCase;
 import org.opensearch.test.RandomObjects;
+import org.apache.hc.client5.http.classic.methods.HttpDelete;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpHead;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.io.entity.ByteArrayEntity;
+import org.apache.hc.core5.http.io.entity.EntityUtils;
 import org.hamcrest.Matchers;

 import java.io.IOException;
@@ -733,8 +733,8 @@ public void testIndex() throws IOException {
         assertEquals(method, request.getMethod());

         HttpEntity entity = request.getEntity();
-        assertTrue(entity instanceof NByteArrayEntity);
-        assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType().getValue());
+        assertTrue(entity instanceof ByteArrayEntity);
+        assertEquals(indexRequest.getContentType().mediaTypeWithoutParameters(), entity.getContentType());
         try (XContentParser parser = createParser(xContentType.xContent(), entity.getContent())) {
             assertEquals(nbFields, parser.map().size());
         }
@@ -805,11 +805,11 @@ public void testUpdate() throws IOException {
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());

         HttpEntity entity = request.getEntity();
-        assertTrue(entity instanceof NByteArrayEntity);
+        assertTrue(entity instanceof ByteArrayEntity);

         UpdateRequest parsedUpdateRequest = new UpdateRequest();

-        XContentType entityContentType = XContentType.fromMediaType(entity.getContentType().getValue());
+        XContentType entityContentType = XContentType.fromMediaType(entity.getContentType());
         try (XContentParser parser = createParser(entityContentType.xContent(), entity.getContent())) {
             parsedUpdateRequest.fromXContent(parser);
         }
@@ -926,7 +926,7 @@ public void testBulk() throws IOException {
         assertEquals("/_bulk", request.getEndpoint());
         assertEquals(expectedParams, request.getParameters());
         assertEquals(HttpPost.METHOD_NAME, request.getMethod());
-        assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+        assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType());
         byte[] content = new byte[(int) request.getEntity().getContentLength()];
         try (InputStream inputStream = request.getEntity().getContent()) {
             Streams.readFully(inputStream, content);
@@ -979,7 +979,7 @@ public void testBulkWithDifferentContentTypes() throws IOException {
             bulkRequest.add(new DeleteRequest("index", "2"));

             Request request = RequestConverters.bulk(bulkRequest);
-            assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+            assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), request.getEntity().getContentType());
         }
         {
             XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
@@ -989,7 +989,7 @@ public void testBulkWithDifferentContentTypes() throws IOException {
             bulkRequest.add(new DeleteRequest("index", "2"));

             Request request = RequestConverters.bulk(bulkRequest);
-            assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+            assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType());
         }
         {
             XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE);
@@ -1001,7 +1001,7 @@ public void testBulkWithDifferentContentTypes() throws IOException {
             }
             Request request = RequestConverters.bulk(new BulkRequest().add(updateRequest));
-            assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+            assertEquals(xContentType.mediaTypeWithoutParameters(), request.getEntity().getContentType());
         }
         {
             BulkRequest bulkRequest = new BulkRequest();
@@ -1289,7 +1289,7 @@ public void testSearchScroll() throws IOException {
         assertEquals("/_search/scroll", request.getEndpoint());
         assertEquals(0, request.getParameters().size());
         assertToXContentBody(searchScrollRequest, request.getEntity());
-        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType());
     }

     public void testClearScroll() throws IOException {
@@ -1303,7 +1303,7 @@ public void testClearScroll() throws IOException {
         assertEquals("/_search/scroll", request.getEndpoint());
         assertEquals(0, request.getParameters().size());
         assertToXContentBody(clearScrollRequest, request.getEntity());
-        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType());
     }

     public void testCreatePit() throws IOException {
@@ -1324,7 +1324,7 @@ public void testCreatePit() throws IOException {
         assertEquals(endpoint.toString(), request.getEndpoint());
         assertEquals(expectedParams, request.getParameters());
         assertToXContentBody(createPitRequest, request.getEntity());
-        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType());
     }

     public void testDeletePit() throws IOException {
@@ -1337,7 +1337,7 @@ public void testDeletePit() throws IOException {
         assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
         assertEquals(endpoint, request.getEndpoint());
         assertToXContentBody(deletePitRequest, request.getEntity());
-        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
+        assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType());
     }

     public void testDeleteAllPits() {
@@ -1456,7 +1456,7 @@ public void testMultiSearchTemplate() throws Exception {

         HttpEntity actualEntity = multiRequest.getEntity();
         byte[] expectedBytes = MultiSearchTemplateRequest.writeMultiLineFormat(multiSearchTemplateRequest, XContentType.JSON.xContent());
-        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
+        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType());
         assertEquals(new BytesArray(expectedBytes), new BytesArray(EntityUtils.toByteArray(actualEntity)));
     }
@@ -1763,7 +1763,7 @@ public void testDeleteScriptRequest() {

     static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException {
         BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false);
-        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
+        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType());
         assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(actualEntity)));
     }

diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientExtTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientExtTests.java
index dbdf7eba3dca4..1499b006da410 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientExtTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientExtTests.java
@@ -32,9 +32,9 @@
 package org.opensearch.client;

-import org.apache.http.HttpEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.entity.NStringEntity;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.io.entity.StringEntity;
 import org.junit.Before;
 import org.opensearch.common.ParseField;
 import org.opensearch.common.xcontent.NamedXContentRegistry;
@@ -64,14 +64,14 @@ public void initClient() {

     public void testParseEntityCustomResponseSection() throws IOException {
         {
-            HttpEntity jsonEntity = new NStringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
+            HttpEntity jsonEntity = new StringEntity("{\"custom1\":{ \"field\":\"value\"}}", ContentType.APPLICATION_JSON);
             BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
             assertThat(customSection, instanceOf(CustomResponseSection1.class));
             CustomResponseSection1 customResponseSection1 = (CustomResponseSection1) customSection;
             assertEquals("value", customResponseSection1.value);
         }
         {
-            HttpEntity jsonEntity = new NStringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
+            HttpEntity jsonEntity = new StringEntity("{\"custom2\":{ \"array\": [\"item1\", \"item2\"]}}", ContentType.APPLICATION_JSON);
             BaseCustomResponseSection customSection = restHighLevelClient.parseEntity(jsonEntity, BaseCustomResponseSection::fromXContent);
             assertThat(customSection, instanceOf(CustomResponseSection2.class));
             CustomResponseSection2 customResponseSection2 = (CustomResponseSection2) customSection;
@@ -153,7 +153,7 @@ static CustomResponseSection2 fromXContent(XContentParser parser) throws IOExcep
                 values.add(parser.text());
             }
             assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
-            CustomResponseSection2 responseSection2 = new CustomResponseSection2(values.toArray(new String[values.size()]));
+            CustomResponseSection2 responseSection2 = new CustomResponseSection2(values.toArray(new String[0]));
             assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
             return responseSection2;
         }
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
index cdd63743f2644..dc89b605be689 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/RestHighLevelClientTests.java
@@ -33,19 +33,6 @@
 package org.opensearch.client;

 import com.fasterxml.jackson.core.JsonParseException;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.ProtocolVersion;
-import org.apache.http.RequestLine;
-import org.apache.http.StatusLine;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.entity.ContentType;
-import org.apache.http.message.BasicHttpResponse;
-import org.apache.http.message.BasicRequestLine;
-import org.apache.http.message.BasicStatusLine;
-import org.apache.http.nio.entity.NByteArrayEntity;
-import org.apache.http.nio.entity.NStringEntity;
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.ActionListener;
 import org.opensearch.action.ActionRequest;
@@ -87,6 +74,17 @@
 import org.opensearch.test.InternalAggregationTestCase;
 import org.opensearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
 import org.opensearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.core5.http.ClassicHttpResponse;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.hc.core5.http.ProtocolVersion;
+import org.apache.hc.core5.http.io.entity.ByteArrayEntity;
+import org.apache.hc.core5.http.io.entity.StringEntity;
+import org.apache.hc.core5.http.message.BasicClassicHttpResponse;
+import org.apache.hc.core5.http.message.RequestLine;
+import org.apache.hc.core5.http.message.StatusLine;
 import org.hamcrest.Matchers;
 import org.junit.Before;

@@ -123,7 +121,7 @@ public class RestHighLevelClientTests extends OpenSearchTestCase {
     private static final String SUBMIT_TASK_PREFIX = "submit_";
     private static final String SUBMIT_TASK_SUFFIX = "_task";
     private static final ProtocolVersion HTTP_PROTOCOL = new ProtocolVersion("http", 1, 1);
-    private static final RequestLine REQUEST_LINE = new BasicRequestLine(HttpGet.METHOD_NAME, "/", HTTP_PROTOCOL);
+    private static final RequestLine REQUEST_LINE = new RequestLine(HttpGet.METHOD_NAME, "/", HTTP_PROTOCOL);

     /**
      * These APIs do not use a Request object (because they don't have a body, or any request parameters).
@@ -135,6 +133,7 @@ public class RestHighLevelClientTests extends OpenSearchTestCase {
         "ping",
         "info",
         "delete_all_pits",
+        "get_all_pits",
         // security
         "security.get_ssl_certificates",
         "security.authenticate",
@@ -257,7 +256,7 @@ private void mockResponse(ToXContent toXContent) throws IOException {
         Response response = mock(Response.class);
         ContentType contentType = ContentType.parse(RequestConverters.REQUEST_BODY_CONTENT_TYPE.mediaType());
         String requestBody = toXContent(toXContent, RequestConverters.REQUEST_BODY_CONTENT_TYPE, false).utf8ToString();
-        when(response.getEntity()).thenReturn(new NStringEntity(requestBody, contentType));
+        when(response.getEntity()).thenReturn(new StringEntity(requestBody, contentType));
         when(restClient.performRequest(any(Request.class))).thenReturn(response);
     }
@@ -307,14 +306,14 @@ public void testParseEntity() throws IOException {
         {
             IllegalStateException ise = expectThrows(
                 IllegalStateException.class,
-                () -> restHighLevelClient.parseEntity(new NStringEntity("", (ContentType) null), null)
+                () -> restHighLevelClient.parseEntity(new StringEntity("", (ContentType) null), null)
             );
             assertEquals("OpenSearch didn't return the [Content-Type] header, unable to parse response body", ise.getMessage());
         }
         {
-            NStringEntity entity = new NStringEntity("", ContentType.APPLICATION_SVG_XML);
+            StringEntity entity = new StringEntity("", ContentType.APPLICATION_SVG_XML);
             IllegalStateException ise = expectThrows(IllegalStateException.class, () -> restHighLevelClient.parseEntity(entity, null));
-            assertEquals("Unsupported Content-Type: " + entity.getContentType().getValue(), ise.getMessage());
+            assertEquals("Unsupported Content-Type: " + entity.getContentType(), ise.getMessage());
         }
         {
             CheckedFunction<XContentParser, String, IOException> entityParser = parser -> {
@@ -325,9 +324,9 @@ public void testParseEntity() throws IOException {
                 assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
                 return value;
             };
-            HttpEntity jsonEntity = new NStringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
+            HttpEntity jsonEntity = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
             assertEquals("value", restHighLevelClient.parseEntity(jsonEntity, entityParser));
-            HttpEntity yamlEntity = new NStringEntity("---\nfield: value\n", ContentType.create("application/yaml"));
+            HttpEntity yamlEntity = new StringEntity("---\nfield: value\n", ContentType.create("application/yaml"));
             assertEquals("value", restHighLevelClient.parseEntity(yamlEntity, entityParser));
             HttpEntity smileEntity = createBinaryEntity(SmileXContent.contentBuilder(), ContentType.create("application/smile"));
             assertEquals("value", restHighLevelClient.parseEntity(smileEntity, entityParser));
@@ -341,13 +340,13 @@ private static HttpEntity createBinaryEntity(XContentBuilder xContentBuilder, Co
             builder.startObject();
             builder.field("field", "value");
             builder.endObject();
-            return new NByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
+            return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
         }
     }

     public void testConvertExistsResponse() {
         RestStatus restStatus = randomBoolean() ? RestStatus.OK : randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         boolean result = RestHighLevelClient.convertExistsResponse(response);
         assertEquals(restStatus == RestStatus.OK, result);
@@ -356,7 +355,7 @@ public void testParseResponseException() throws IOException {
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
-            HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+            ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
             OpenSearchException openSearchException = restHighLevelClient.parseResponseException(responseException);
@@ -366,9 +365,9 @@
         }
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
-            HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+            ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
             httpResponse.setEntity(
-                new NStringEntity(
+                new StringEntity(
                     "{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}",
                     ContentType.APPLICATION_JSON
                 )
@@ -382,8 +381,8 @@
         }
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
-            HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-            httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+            ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+            httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
             OpenSearchException openSearchException = restHighLevelClient.parseResponseException(responseException);
@@ -394,8 +393,8 @@
         }
         {
             RestStatus restStatus = randomFrom(RestStatus.values());
-            HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-            httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+            ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+            httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
             Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
             ResponseException responseException = new ResponseException(response);
             OpenSearchException openSearchException = restHighLevelClient.parseResponseException(responseException);
@@ -410,7 +409,7 @@ public void testPerformRequestOnSuccess() throws IOException {
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         when(restClient.performRequest(any(Request.class))).thenReturn(mockResponse);
         {
@@ -436,7 +435,7 @@ public void testPerformRequestOnSuccess() throws IOException {
             );
             assertEquals(
                 "Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, "
-                    + "response=http/1.1 "
+                    + "response=HTTP/1.1 "
                     + restStatus.getStatus()
                     + " "
                     + restStatus.name()
@@ -450,7 +449,7 @@ public void testPerformRequestOnResponseExceptionWithoutEntity() throws IOExcept
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -473,9 +472,9 @@ public void testPerformRequestOnResponseExceptionWithEntity() throws IOException
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         httpResponse.setEntity(
-            new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)
+            new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)
         );
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
@@ -499,8 +498,8 @@ public void testPerformRequestOnResponseExceptionWithBrokenEntity() throws IOExc
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+        httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -524,8 +523,8 @@ public void testPerformRequestOnResponseExceptionWithBrokenEntity2() throws IOEx
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+        httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -548,7 +547,7 @@ public void testPerformRequestOnResponseExceptionWithBrokenEntity2() throws IOEx
     public void testPerformRequestOnResponseExceptionWithIgnores() throws IOException {
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -568,7 +567,7 @@ public void testPerformRequestOnResponseExceptionWithIgnores() throws IOExceptio
    public void testPerformRequestOnResponseExceptionWithIgnoresErrorNoBody() throws IOException {
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -590,8 +589,8 @@ public void testPerformRequestOnResponseExceptionWithIgnoresErrorNoBody() throws
     public void testPerformRequestOnResponseExceptionWithIgnoresErrorValidBody() throws IOException {
         MainRequest mainRequest = new MainRequest();
         CheckedFunction<MainRequest, Request, IOException> requestConverter = request -> new Request(HttpGet.METHOD_NAME, "/");
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
-        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
+        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON));
         Response mockResponse = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(mockResponse);
         when(restClient.performRequest(any(Request.class))).thenThrow(responseException);
@@ -619,7 +618,7 @@ public void testWrapResponseListenerOnSuccess() {
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse));
         assertNull(trackingActionListener.exception.get());
         assertEquals(restStatus.getStatus(), trackingActionListener.statusCode.get());
@@ -632,13 +631,13 @@
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         responseListener.onSuccess(new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse));
         assertThat(trackingActionListener.exception.get(), instanceOf(IOException.class));
         IOException ioe = (IOException) trackingActionListener.exception.get();
         assertEquals(
             "Unable to parse response body for Response{requestLine=GET / http/1.1, host=http://localhost:9200, "
-                + "response=http/1.1 "
+                + "response=HTTP/1.1 "
                 + restStatus.getStatus()
                 + " "
                 + restStatus.name()
@@ -669,7 +668,7 @@ public void testWrapResponseListenerOnResponseExceptionWithoutEntity() throws IO
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -688,9 +687,9 @@ public void testWrapResponseListenerOnResponseExceptionWithEntity() throws IOExc
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
         httpResponse.setEntity(
-            new NStringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)
+            new StringEntity("{\"error\":\"test error message\",\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON)
         );
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
@@ -711,8 +710,8 @@ public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new NStringEntity("{\"error\":", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+        httpResponse.setEntity(new StringEntity("{\"error\":", ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -731,8 +730,8 @@ public void testWrapResponseListenerOnResponseExceptionWithBrokenEntity() throws
             Collections.emptySet()
         );
         RestStatus restStatus = randomFrom(RestStatus.values());
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(restStatus));
-        httpResponse.setEntity(new NStringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(restStatus.getStatus(), restStatus.name());
+        httpResponse.setEntity(new StringEntity("{\"status\":" + restStatus.getStatus() + "}", ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -752,7 +751,7 @@ public void testWrapResponseListenerOnResponseExceptionWithIgnores() throws IOEx
             trackingActionListener,
             Collections.singleton(404)
         );
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -770,7 +769,7 @@ public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorNoBody()
             trackingActionListener,
             Collections.singleton(404)
         );
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -790,8 +789,8 @@ public void testWrapResponseListenerOnResponseExceptionWithIgnoresErrorValidBody
             trackingActionListener,
             Collections.singleton(404)
         );
-        HttpResponse httpResponse = new BasicHttpResponse(newStatusLine(RestStatus.NOT_FOUND));
-        httpResponse.setEntity(new NStringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON));
+        ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.NOT_FOUND.getStatus(), RestStatus.NOT_FOUND.name());
+        httpResponse.setEntity(new StringEntity("{\"error\":\"test error message\",\"status\":404}", ContentType.APPLICATION_JSON));
         Response response = new Response(REQUEST_LINE, new HttpHost("localhost", 9200), httpResponse);
         ResponseException responseException = new ResponseException(response);
         responseListener.onFailure(responseException);
@@ -887,7 +886,13 @@ public void testApiNamingConventions() throws Exception {
             "nodes.usage",
             "nodes.reload_secure_settings",
             "search_shards",
-            "remote_store.restore", };
+            "remote_store.restore",
+            "cluster.put_weighted_routing",
+            "cluster.get_weighted_routing",
+            "cluster.delete_weighted_routing",
+            "cluster.put_decommission_awareness",
+            "cluster.get_decommission_awareness",
+            "cluster.delete_decommission_awareness", };
         List<String> booleanReturnMethods = Arrays.asList("security.enable_user", "security.disable_user", "security.change_password");
         Set<String> deprecatedMethods = new HashSet<>();
         deprecatedMethods.add("indices.force_merge");
@@ -1000,37 +1005,34 @@ private static void assertSyncMethod(Method method, String apiName, List
         }
         assertEquals("incorrect number of exceptions for method [" + method + "]", 1, method.getExceptionTypes().length);

+        final Class<?>[] parameterTypes = method.getParameterTypes();
         // a few methods don't accept a request object as argument
         if (APIS_WITHOUT_REQUEST_OBJECT.contains(apiName)) {
-            assertEquals("incorrect number of arguments for method [" + method + "]", 1, method.getParameterTypes().length);
-            assertThat(
-                "the parameter to method [" + method + "] is the wrong type",
-                method.getParameterTypes()[0],
-                equalTo(RequestOptions.class)
-            );
+            assertEquals("incorrect number of arguments for method [" + method + "]", 1, method.getParameterCount());
+            assertThat("the parameter to method [" + method + "] is the wrong type", parameterTypes[0], equalTo(RequestOptions.class));
         } else {
-            assertEquals("incorrect number of arguments for method [" + method + "]", 2, method.getParameterTypes().length);
+            assertEquals("incorrect number of arguments for method [" + method + "]", 2, method.getParameterCount());
             // This is no longer true for all methods. Some methods can contain these 2 args backwards because of deprecation
-            if (method.getParameterTypes()[0].equals(RequestOptions.class)) {
+            if (parameterTypes[0].equals(RequestOptions.class)) {
                 assertThat(
                     "the first parameter to method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[0],
+                    parameterTypes[0],
                     equalTo(RequestOptions.class)
                 );
                 assertThat(
                     "the second parameter to method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[1].getSimpleName(),
+                    parameterTypes[1].getSimpleName(),
                     endsWith("Request")
                 );
             } else {
                 assertThat(
                     "the first parameter to method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[0].getSimpleName(),
+                    parameterTypes[0].getSimpleName(),
                     endsWith("Request")
                 );
                 assertThat(
                     "the second parameter to method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[1],
+                    parameterTypes[1],
                     equalTo(RequestOptions.class)
                 );
             }
@@ -1044,39 +1046,40 @@ private static void assertAsyncMethod(Map<String, Set<Method>> methods, Method m
         );
         assertThat("async method [" + method + "] should return Cancellable", method.getReturnType(), equalTo(Cancellable.class));
         assertEquals("async method [" + method + "] should not throw any exceptions", 0, method.getExceptionTypes().length);
+        final Class<?>[] parameterTypes = method.getParameterTypes();
         if (APIS_WITHOUT_REQUEST_OBJECT.contains(apiName.replaceAll("_async$", ""))) {
-            assertEquals(2, method.getParameterTypes().length);
-            assertThat(method.getParameterTypes()[0], equalTo(RequestOptions.class));
-            assertThat(method.getParameterTypes()[1], equalTo(ActionListener.class));
+            assertEquals(2, parameterTypes.length);
+            assertThat(parameterTypes[0], equalTo(RequestOptions.class));
+            assertThat(parameterTypes[1], equalTo(ActionListener.class));
         } else {
-            assertEquals("async method [" + method + "] has the wrong number of arguments", 3, method.getParameterTypes().length);
+            assertEquals("async method [" + method + "] has the wrong number of arguments", 3, method.getParameterCount());
             // This is no longer true for all methods. Some methods can contain these 2 args backwards because of deprecation
-            if (method.getParameterTypes()[0].equals(RequestOptions.class)) {
+            if (parameterTypes[0].equals(RequestOptions.class)) {
                 assertThat(
                     "the first parameter to async method [" + method + "] should be a request type",
-                    method.getParameterTypes()[0],
+                    parameterTypes[0],
                     equalTo(RequestOptions.class)
                 );
                 assertThat(
                     "the second parameter to async method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[1].getSimpleName(),
+                    parameterTypes[1].getSimpleName(),
                     endsWith("Request")
                 );
             } else {
                 assertThat(
                     "the first parameter to async method [" + method + "] should be a request type",
-                    method.getParameterTypes()[0].getSimpleName(),
+                    parameterTypes[0].getSimpleName(),
                     endsWith("Request")
                 );
                 assertThat(
                     "the second parameter to async method [" + method + "] is the wrong type",
-                    method.getParameterTypes()[1],
+                    parameterTypes[1],
                     equalTo(RequestOptions.class)
                 );
             }
             assertThat(
                 "the third parameter to async method [" + method + "] is the wrong type",
-                method.getParameterTypes()[2],
+                parameterTypes[2],
                 equalTo(ActionListener.class)
             );
         }
@@ -1089,16 +1092,17 @@ private static void assertSubmitTaskMethod(
         ClientYamlSuiteRestSpec restSpec
     ) {
         String methodName = extractMethodName(apiName);
+        final Class<?>[] parameterTypes = method.getParameterTypes();
         assertTrue("submit task method [" + method.getName() + "] doesn't have corresponding sync method", methods.containsKey(methodName));
-        assertEquals("submit task method [" + method + "] has the wrong number of arguments", 2, method.getParameterTypes().length);
+        assertEquals("submit task method [" + method + "] has the wrong number of arguments", 2, method.getParameterCount());
         assertThat(
             "the first parameter to submit task method [" + method + "] is the wrong type",
-            method.getParameterTypes()[0].getSimpleName(),
+            parameterTypes[0].getSimpleName(),
             endsWith("Request")
         );
         assertThat(
             "the second parameter to submit task method [" + method + "] is the wrong type",
-            method.getParameterTypes()[1],
+            parameterTypes[1],
             equalTo(RequestOptions.class)
         );
@@ -1157,6 +1161,6 @@ public void onFailure(Exception e) {
     }

     private static StatusLine newStatusLine(RestStatus restStatus) {
-        return new BasicStatusLine(HTTP_PROTOCOL, restStatus.getStatus(), restStatus.name());
+        return new StatusLine(HTTP_PROTOCOL, restStatus.getStatus(), restStatus.name());
     }
 }
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
index 8b509e5d19e92..cc6f08217d057 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/SearchIT.java
@@ -32,8 +32,6 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
 import org.opensearch.OpenSearchException;
 import org.opensearch.OpenSearchStatusException;
 import org.opensearch.action.explain.ExplainRequest;
@@ -101,6 +99,8 @@
 import org.opensearch.search.suggest.Suggest;
 import org.opensearch.search.suggest.SuggestBuilder;
 import org.opensearch.search.suggest.phrase.PhraseSuggestionBuilder;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
 import org.hamcrest.Matchers;
 import org.junit.Before;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
index 10baaa2e53dd4..e86de6ba718f9 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/SnapshotRequestConvertersTests.java
@@ -32,10 +32,10 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpPut;
+import org.apache.hc.client5.http.classic.methods.HttpDelete;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpPut;
 import org.opensearch.action.admin.cluster.repositories.delete.DeleteRepositoryRequest;
 import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest;
 import org.opensearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/TasksRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/TasksRequestConvertersTests.java
index 64fec3c8fb810..a777bbc5d1868 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/TasksRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/TasksRequestConvertersTests.java
@@ -32,8 +32,8 @@
 package org.opensearch.client;

-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
+import org.apache.hc.client5.http.classic.methods.HttpGet;
+import org.apache.hc.client5.http.classic.methods.HttpPost;
 import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
 import org.opensearch.client.tasks.CancelTasksRequest;
 import org.opensearch.tasks.TaskId;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java b/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
index cd759aa62eaf1..b5985cb419506 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/core/MainResponseTests.java
@@ -46,21 +46,16 @@
 import static org.hamcrest.Matchers.equalTo;

 public class MainResponseTests extends AbstractResponseTestCase<org.opensearch.action.main.MainResponse, MainResponse> {
+    private static String DISTRIBUTION = "opensearch";
+
     @Override
     protected org.opensearch.action.main.MainResponse createServerTestInstance(XContentType xContentType) {
         String clusterUuid = randomAlphaOfLength(10);
         ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
         String nodeName = randomAlphaOfLength(10);
         final String date = new Date(randomNonNegativeLong()).toString();
-        Version version = VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT);
-        Build build = new Build(
-            Build.Type.UNKNOWN,
-            randomAlphaOfLength(8),
-            date,
-            randomBoolean(),
-            version.toString(),
-            version.before(Version.V_1_0_0) ? null : "opensearch"
-        );
+        Version version = VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT);
+        Build build = new Build(Build.Type.UNKNOWN, randomAlphaOfLength(8), date, randomBoolean(), version.toString(), DISTRIBUTION);
         return new org.opensearch.action.main.MainResponse(nodeName, version, clusterName, clusterUuid, build);
     }
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
index 959c5a827f143..c63b311feebc7 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/CRUDDocumentationIT.java
@@ -32,7 +32,7 @@
 package org.opensearch.client.documentation;

-import org.apache.http.HttpHost;
+import org.apache.hc.core5.http.HttpHost;
 import org.opensearch.OpenSearchException;
 import org.opensearch.action.ActionListener;
 import org.opensearch.action.DocWriteRequest;
diff --git a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/MiscellaneousDocumentationIT.java
index 0213441a0b6a7..3edf639da8867 100644
--- a/client/rest-high-level/src/test/java/org/opensearch/client/documentation/MiscellaneousDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/opensearch/client/documentation/MiscellaneousDocumentationIT.java
@@ -32,7 +32,7 @@
 package org.opensearch.client.documentation;

-import org.apache.http.HttpHost;
+import org.apache.hc.core5.http.HttpHost;
 import org.opensearch.client.OpenSearchRestHighLevelClientTestCase;
 import org.opensearch.client.RequestOptions;
 import org.opensearch.client.RestClient;
@@ -92,8 +92,8 @@ public void testInitializationFromClientBuilder() throws IOException {
         //tag::rest-high-level-client-init
         RestHighLevelClient client = new RestHighLevelClient(
             RestClient.builder(
-                new HttpHost("localhost", 9200, "http"),
-                new HttpHost("localhost", 9201, "http")));
+                new HttpHost("http", "localhost", 9200),
+                new HttpHost("http", "localhost", 9201)));
         //end::rest-high-level-client-init

         //tag::rest-high-level-client-close
diff --git a/client/rest/build.gradle b/client/rest/build.gradle
index 01c186ed83fc2..eacef14d17ce2 100644
--- a/client/rest/build.gradle
+++ b/client/rest/build.gradle
@@ -40,12 +40,12 @@ group = 'org.opensearch.client'
 archivesBaseName = 'opensearch-rest-client'

 dependencies {
-  api "org.apache.httpcomponents:httpclient:${versions.httpclient}"
-  api "org.apache.httpcomponents:httpcore:${versions.httpcore}"
-  api "org.apache.httpcomponents:httpasyncclient:${versions.httpasyncclient}"
-  api "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}"
+  api "org.apache.httpcomponents.client5:httpclient5:${versions.httpclient5}"
+  api "org.apache.httpcomponents.core5:httpcore5:${versions.httpcore5}"
+  api "org.apache.httpcomponents.core5:httpcore5-h2:${versions.httpcore5}"
   api "commons-codec:commons-codec:${versions.commonscodec}"
   api "commons-logging:commons-logging:${versions.commonslogging}"
+  api "org.slf4j:slf4j-api:${versions.slf4j}"

   testImplementation project(":client:test")
   testImplementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
@@ -54,6 +54,10 @@ dependencies {
   testImplementation "org.mockito:mockito-core:${versions.mockito}"
"org.objenesis:objenesis:${versions.objenesis}" testImplementation "net.bytebuddy:byte-buddy:${versions.bytebuddy}" + testImplementation "org.apache.logging.log4j:log4j-api:${versions.log4j}" + testImplementation "org.apache.logging.log4j:log4j-core:${versions.log4j}" + testImplementation "org.apache.logging.log4j:log4j-jul:${versions.log4j}" + testImplementation "org.apache.logging.log4j:log4j-slf4j-impl:${versions.log4j}" } tasks.withType(CheckForbiddenApis).configureEach { @@ -85,6 +89,10 @@ testingConventions { } thirdPartyAudit.ignoreMissingClasses( + 'org.conscrypt.Conscrypt', + 'org.slf4j.impl.StaticLoggerBinder', + 'org.slf4j.impl.StaticMDCBinder', + 'org.slf4j.impl.StaticMarkerBinder', //commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', diff --git a/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1 b/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1 deleted file mode 100644 index 366a9e31069a6..0000000000000 --- a/client/rest/licenses/httpasyncclient-4.1.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cd18227f1eb8e9a263286c1d7362ceb24f6f9b32 \ No newline at end of file diff --git a/client/rest/licenses/httpasyncclient-LICENSE.txt b/client/rest/licenses/httpasyncclient-LICENSE.txt deleted file mode 100644 index 2c41ec88f61cf..0000000000000 --- a/client/rest/licenses/httpasyncclient-LICENSE.txt +++ /dev/null @@ -1,182 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - -This project contains annotations derived from JCIP-ANNOTATIONS -Copyright (c) 2005 Brian Goetz and Tim Peierls. -See http://www.jcip.net and the Creative Commons Attribution License -(http://creativecommons.org/licenses/by/2.5) - diff --git a/client/rest/licenses/httpasyncclient-NOTICE.txt b/client/rest/licenses/httpasyncclient-NOTICE.txt deleted file mode 100644 index b45be98d168a4..0000000000000 --- a/client/rest/licenses/httpasyncclient-NOTICE.txt +++ /dev/null @@ -1,5 +0,0 @@ -Apache HttpComponents AsyncClient -Copyright 2010-2016 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). 
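The diffs above migrate the REST client from Apache HttpComponents 4.x (httpclient, httpcore, httpasyncclient) to 5.x (httpclient5, httpcore5, httpcore5-h2). The most visible API change for callers is `HttpHost`: the class moves from `org.apache.http` to `org.apache.hc.core5.http`, and the scheme argument moves from the last position to the first, as the MiscellaneousDocumentationIT hunk shows. A minimal sketch of client initialization under the new signature, assuming only what the diff itself uses (the class name `ClientInitExample` is illustrative):

```java
import java.io.IOException;

import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestHighLevelClient;

public class ClientInitExample {
    public static void main(String[] args) throws IOException {
        // HttpClient 4.x:  new HttpHost("localhost", 9200, "http")  -- scheme last
        // HttpCore 5.x:    new HttpHost("http", "localhost", 9200)  -- scheme first
        HttpHost primary = new HttpHost("http", "localhost", 9200);
        HttpHost secondary = new HttpHost("http", "localhost", 9201);

        // RestHighLevelClient is Closeable; try-with-resources releases the
        // underlying connection pool when the client is no longer needed.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(primary, secondary))) {
            // issue requests against the cluster here
        }
    }
}
```

The scheme-first ordering in HttpCore 5 mirrors URI ordering (scheme://host:port), which is presumably why the parameters were reordered upstream; code compiled against the 4.x three-argument form fails to compile rather than silently misbehaving, since the old signature took the port as the second argument.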
diff --git a/client/rest/licenses/httpclient-4.5.13.jar.sha1 b/client/rest/licenses/httpclient-4.5.13.jar.sha1 deleted file mode 100644 index 3281e21595b39..0000000000000 --- a/client/rest/licenses/httpclient-4.5.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e5f6cae5ca7ecaac1ec2827a9e2d65ae2869cada \ No newline at end of file diff --git a/client/rest/licenses/httpclient5-5.1.4.jar.sha1 b/client/rest/licenses/httpclient5-5.1.4.jar.sha1 new file mode 100644 index 0000000000000..3c0cb1335fb88 --- /dev/null +++ b/client/rest/licenses/httpclient5-5.1.4.jar.sha1 @@ -0,0 +1 @@ +208f9eed6d6ab709e2ae7a75b457ef60c0baefa5 \ No newline at end of file diff --git a/client/rest/licenses/httpclient-LICENSE.txt b/client/rest/licenses/httpclient5-LICENSE.txt similarity index 100% rename from client/rest/licenses/httpclient-LICENSE.txt rename to client/rest/licenses/httpclient5-LICENSE.txt diff --git a/client/rest/licenses/httpclient-NOTICE.txt b/client/rest/licenses/httpclient5-NOTICE.txt similarity index 72% rename from client/rest/licenses/httpclient-NOTICE.txt rename to client/rest/licenses/httpclient5-NOTICE.txt index 91e5c40c4c6d3..afee7c6e6880b 100644 --- a/client/rest/licenses/httpclient-NOTICE.txt +++ b/client/rest/licenses/httpclient5-NOTICE.txt @@ -1,5 +1,5 @@ Apache HttpComponents Client -Copyright 1999-2016 The Apache Software Foundation +Copyright 1999-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). diff --git a/client/rest/licenses/httpcore-4.4.15.jar.sha1 b/client/rest/licenses/httpcore-4.4.15.jar.sha1 deleted file mode 100644 index 42a03b5d7a376..0000000000000 --- a/client/rest/licenses/httpcore-4.4.15.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f2e0c573eaa7a74bac2e89b359e1f73d92a0a1d \ No newline at end of file diff --git a/client/rest/licenses/httpcore-LICENSE.txt b/client/rest/licenses/httpcore-LICENSE.txt deleted file mode 100644 index e454a52586f29..0000000000000 --- a/client/rest/licenses/httpcore-LICENSE.txt +++ /dev/null @@ -1,178 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - diff --git a/client/rest/licenses/httpcore-nio-4.4.15.jar.sha1 b/client/rest/licenses/httpcore-nio-4.4.15.jar.sha1 deleted file mode 100644 index 251b35ab6a1a5..0000000000000 --- a/client/rest/licenses/httpcore-nio-4.4.15.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -85d2b6825d42db909a1474f0ffbd6328429b7a32 \ No newline at end of file diff --git a/client/rest/licenses/httpcore5-5.1.5.jar.sha1 b/client/rest/licenses/httpcore5-5.1.5.jar.sha1 new file mode 100644 index 0000000000000..8da253152e970 --- /dev/null +++ b/client/rest/licenses/httpcore5-5.1.5.jar.sha1 @@ -0,0 +1 @@ +df9da3a1fa2351c4790245400ed28d78a8ddd3fc \ No newline at end of file diff --git a/client/rest/licenses/httpcore5-LICENSE.txt b/client/rest/licenses/httpcore5-LICENSE.txt new file mode 100644 index 0000000000000..32f01eda18fe9 --- /dev/null +++ b/client/rest/licenses/httpcore5-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + +========================================================================= + +This project includes Public Suffix List copied from + +licensed under the terms of the Mozilla Public License, v. 2.0 + +Full license text: + +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. "Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. 
"Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. 
+ +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. 
Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. * +* * +************************************************************************ + +************************************************************************ +* * +* 7. 
Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. + +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/client/rest/licenses/httpcore-nio-NOTICE.txt b/client/rest/licenses/httpcore5-NOTICE.txt similarity index 56% rename from client/rest/licenses/httpcore-nio-NOTICE.txt rename to client/rest/licenses/httpcore5-NOTICE.txt index a2e17bb60009f..afee7c6e6880b 100644 --- a/client/rest/licenses/httpcore-nio-NOTICE.txt +++ b/client/rest/licenses/httpcore5-NOTICE.txt @@ -1,8 +1,6 @@ - -Apache HttpCore NIO -Copyright 2005-2016 The Apache Software Foundation +Apache HttpComponents Client +Copyright 1999-2022 The Apache Software Foundation This product includes software developed at The Apache Software Foundation (http://www.apache.org/). - diff --git a/client/rest/licenses/httpcore5-h2-5.1.5.jar.sha1 b/client/rest/licenses/httpcore5-h2-5.1.5.jar.sha1 new file mode 100644 index 0000000000000..097e6cc2a3be8 --- /dev/null +++ b/client/rest/licenses/httpcore5-h2-5.1.5.jar.sha1 @@ -0,0 +1 @@ +624660339afd5006d427457e6b10b10b32fd86f1 \ No newline at end of file diff --git a/client/rest/licenses/httpcore5-h2-LICENSE.txt b/client/rest/licenses/httpcore5-h2-LICENSE.txt new file mode 100644 index 0000000000000..32f01eda18fe9 --- /dev/null +++ b/client/rest/licenses/httpcore5-h2-LICENSE.txt @@ -0,0 +1,558 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
+      In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+=========================================================================
+
+This project includes Public Suffix List copied from
+<https://publicsuffix.org/list/public_suffix_list.dat>
+licensed under the terms of the Mozilla Public License, v. 2.0
+
+Full license text:
+
+Mozilla Public License Version 2.0
+==================================
+
+1. Definitions
+--------------
+
+1.1. "Contributor"
+    means each individual or legal entity that creates, contributes to
+    the creation of, or owns Covered Software.
+
+1.2. "Contributor Version"
+    means the combination of the Contributions of others (if any) used
+    by a Contributor and that particular Contributor's Contribution.
+
+1.3. "Contribution"
+    means Covered Software of a particular Contributor.
+
+1.4. "Covered Software"
+    means Source Code Form to which the initial Contributor has attached
+    the notice in Exhibit A, the Executable Form of such Source Code
+    Form, and Modifications of such Source Code Form, in each case
+    including portions thereof.
+
+1.5. "Incompatible With Secondary Licenses"
+    means
+
+    (a) that the initial Contributor has attached the notice described
+        in Exhibit B to the Covered Software; or
+
+    (b) that the Covered Software was made available under the terms of
+        version 1.1 or earlier of the License, but not also under the
+        terms of a Secondary License.
+
+1.6. "Executable Form"
+    means any form of the work other than Source Code Form.
+
+1.7. "Larger Work"
+    means a work that combines Covered Software with other material, in
+    a separate file or files, that is not Covered Software.
+
+1.8. "License"
+    means this document.
+
+1.9. "Licensable"
+    means having the right to grant, to the maximum extent possible,
+    whether at the time of the initial grant or subsequently, any and
+    all of the rights conveyed by this License.
+
+1.10. "Modifications"
+    means any of the following:
+
+    (a) any file in Source Code Form that results from an addition to,
+        deletion from, or modification of the contents of Covered
+        Software; or
+
+    (b) any new file in Source Code Form that contains any Covered
+        Software.
+
+1.11. "Patent Claims" of a Contributor
+    means any patent claim(s), including without limitation, method,
+    process, and apparatus claims, in any patent Licensable by such
+    Contributor that would be infringed, but for the grant of the
+    License, by the making, using, selling, offering for sale, having
+    made, import, or transfer of either its Contributions or its
+    Contributor Version.
+
+1.12. "Secondary License"
+    means either the GNU General Public License, Version 2.0, the GNU
+    Lesser General Public License, Version 2.1, the GNU Affero General
+    Public License, Version 3.0, or any later versions of those
+    licenses.
+
+1.13. "Source Code Form"
+    means the form of the work preferred for making modifications.
+
+1.14. "You" (or "Your")
+    means an individual or a legal entity exercising rights under this
+    License. For legal entities, "You" includes any entity that
+    controls, is controlled by, or is under common control with You. For
+    purposes of this definition, "control" means (a) the power, direct
+    or indirect, to cause the direction or management of such entity,
+    whether by contract or otherwise, or (b) ownership of more than
+    fifty percent (50%) of the outstanding shares or beneficial
+    ownership of such entity.
+
+2. License Grants and Conditions
+--------------------------------
+
+2.1. Grants
+
+Each Contributor hereby grants You a world-wide, royalty-free,
+non-exclusive license:
+
+(a) under intellectual property rights (other than patent or trademark)
+    Licensable by such Contributor to use, reproduce, make available,
+    modify, display, perform, distribute, and otherwise exploit its
+    Contributions, either on an unmodified basis, with Modifications, or
+    as part of a Larger Work; and
+
+(b) under Patent Claims of such Contributor to make, use, sell, offer
+    for sale, have made, import, and otherwise transfer either its
+    Contributions or its Contributor Version.
+
+2.2. Effective Date
+
+The licenses granted in Section 2.1 with respect to any Contribution
+become effective for each Contribution on the date the Contributor first
+distributes such Contribution.
+
+2.3. Limitations on Grant Scope
+
+The licenses granted in this Section 2 are the only rights granted under
+this License. No additional rights or licenses will be implied from the
+distribution or licensing of Covered Software under this License.
+Notwithstanding Section 2.1(b) above, no patent license is granted by a
+Contributor:
+
+(a) for any code that a Contributor has removed from Covered Software;
+    or
+
+(b) for infringements caused by: (i) Your and any other third party's
+    modifications of Covered Software, or (ii) the combination of its
+    Contributions with other software (except as part of its Contributor
+    Version); or
+
+(c) under Patent Claims infringed by Covered Software in the absence of
+    its Contributions.
+
+This License does not grant any rights in the trademarks, service marks,
+or logos of any Contributor (except as may be necessary to comply with
+the notice requirements in Section 3.4).
+
+2.4. Subsequent Licenses
+
+No Contributor makes additional grants as a result of Your choice to
+distribute the Covered Software under a subsequent version of this
+License (see Section 10.2) or under the terms of a Secondary License (if
+permitted under the terms of Section 3.3).
+
+2.5. Representation
+
+Each Contributor represents that the Contributor believes its
+Contributions are its original creation(s) or it has sufficient rights
+to grant the rights to its Contributions conveyed by this License.
+
+2.6. Fair Use
+
+This License is not intended to limit any rights You have under
+applicable copyright doctrines of fair use, fair dealing, or other
+equivalents.
+
+2.7. Conditions
+
+Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
+in Section 2.1.
+
+3. Responsibilities
+-------------------
+
+3.1. Distribution of Source Form
+
+All distribution of Covered Software in Source Code Form, including any
+Modifications that You create or to which You contribute, must be under
+the terms of this License. You must inform recipients that the Source
+Code Form of the Covered Software is governed by the terms of this
+License, and how they can obtain a copy of this License. You may not
+attempt to alter or restrict the recipients' rights in the Source Code
+Form.
+
+3.2. Distribution of Executable Form
+
+If You distribute Covered Software in Executable Form then:
+
+(a) such Covered Software must also be made available in Source Code
+    Form, as described in Section 3.1, and You must inform recipients of
+    the Executable Form how they can obtain a copy of such Source Code
+    Form by reasonable means in a timely manner, at a charge no more
+    than the cost of distribution to the recipient; and
+
+(b) You may distribute such Executable Form under the terms of this
+    License, or sublicense it under different terms, provided that the
+    license for the Executable Form does not attempt to limit or alter
+    the recipients' rights in the Source Code Form under this License.
+
+3.3. Distribution of a Larger Work
+
+You may create and distribute a Larger Work under terms of Your choice,
+provided that You also comply with the requirements of this License for
+the Covered Software. If the Larger Work is a combination of Covered
+Software with a work governed by one or more Secondary Licenses, and the
+Covered Software is not Incompatible With Secondary Licenses, this
+License permits You to additionally distribute such Covered Software
+under the terms of such Secondary License(s), so that the recipient of
+the Larger Work may, at their option, further distribute the Covered
+Software under the terms of either this License or such Secondary
+License(s).
+
+3.4. Notices
+
+You may not remove or alter the substance of any license notices
+(including copyright notices, patent notices, disclaimers of warranty,
+or limitations of liability) contained within the Source Code Form of
+the Covered Software, except that You may alter any license notices to
+the extent required to remedy known factual inaccuracies.
+
+3.5. Application of Additional Terms
+
+You may choose to offer, and to charge a fee for, warranty, support,
+indemnity or liability obligations to one or more recipients of Covered
+Software. However, You may do so only on Your own behalf, and not on
+behalf of any Contributor. You must make it absolutely clear that any
+such warranty, support, indemnity, or liability obligation is offered by
+You alone, and You hereby agree to indemnify every Contributor for any
+liability incurred by such Contributor as a result of warranty, support,
+indemnity or liability terms You offer. You may include additional
+disclaimers of warranty and limitations of liability specific to any
+jurisdiction.
+
+4. Inability to Comply Due to Statute or Regulation
+---------------------------------------------------
+
+If it is impossible for You to comply with any of the terms of this
+License with respect to some or all of the Covered Software due to
+statute, judicial order, or regulation then You must: (a) comply with
+the terms of this License to the maximum extent possible; and (b)
+describe the limitations and the code they affect. Such description must
+be placed in a text file included with all distributions of the Covered
+Software under this License. Except to the extent prohibited by statute
+or regulation, such description must be sufficiently detailed for a
+recipient of ordinary skill to be able to understand it.
+
+5. Termination
+--------------
+
+5.1. The rights granted under this License will terminate automatically
+if You fail to comply with any of its terms. However, if You become
+compliant, then the rights granted under this License from a particular
+Contributor are reinstated (a) provisionally, unless and until such
+Contributor explicitly and finally terminates Your grants, and (b) on an
+ongoing basis, if such Contributor fails to notify You of the
+non-compliance by some reasonable means prior to 60 days after You have
+come back into compliance. Moreover, Your grants from a particular
+Contributor are reinstated on an ongoing basis if such Contributor
+notifies You of the non-compliance by some reasonable means, this is the
+first time You have received notice of non-compliance with this License
+from such Contributor, and You become compliant prior to 30 days after
+Your receipt of the notice.
+
+5.2. If You initiate litigation against any entity by asserting a patent
+infringement claim (excluding declaratory judgment actions,
+counter-claims, and cross-claims) alleging that a Contributor Version
+directly or indirectly infringes any patent, then the rights granted to
+You by any and all Contributors for the Covered Software under Section
+2.1 of this License shall terminate.
+
+5.3. In the event of termination under Sections 5.1 or 5.2 above, all
+end user license agreements (excluding distributors and resellers) which
+have been validly granted by You or Your distributors under this License
+prior to termination shall survive termination.
+
+************************************************************************
+*                                                                      *
+*  6. Disclaimer of Warranty                                           *
+*  -------------------------                                           *
+*                                                                      *
+*  Covered Software is provided under this License on an "as is"       *
+*  basis, without warranty of any kind, either expressed, implied, or  *
+*  statutory, including, without limitation, warranties that the       *
+*  Covered Software is free of defects, merchantable, fit for a        *
+*  particular purpose or non-infringing. The entire risk as to the     *
+*  quality and performance of the Covered Software is with You.        *
+*  Should any Covered Software prove defective in any respect, You     *
+*  (not any Contributor) assume the cost of any necessary servicing,   *
+*  repair, or correction. This disclaimer of warranty constitutes an   *
+*  essential part of this License. No use of any Covered Software is   *
+*  authorized under this License except under this disclaimer.         *
+*                                                                      *
+************************************************************************
+
+************************************************************************
+*                                                                      *
+*  7. Limitation of Liability                                          *
+*  --------------------------                                          *
+*                                                                      *
+*  Under no circumstances and under no legal theory, whether tort      *
+*  (including negligence), contract, or otherwise, shall any           *
+*  Contributor, or anyone who distributes Covered Software as          *
+*  permitted above, be liable to You for any direct, indirect,         *
+*  special, incidental, or consequential damages of any character      *
+*  including, without limitation, damages for lost profits, loss of    *
+*  goodwill, work stoppage, computer failure or malfunction, or any    *
+*  and all other commercial damages or losses, even if such party      *
+*  shall have been informed of the possibility of such damages. This   *
+*  limitation of liability shall not apply to liability for death or   *
+*  personal injury resulting from such party's negligence to the       *
+*  extent applicable law prohibits such limitation. Some               *
+*  jurisdictions do not allow the exclusion or limitation of           *
+*  incidental or consequential damages, so this exclusion and          *
+*  limitation may not apply to You.                                    *
+*                                                                      *
+************************************************************************
+
+8. Litigation
+-------------
+
+Any litigation relating to this License may be brought only in the
+courts of a jurisdiction where the defendant maintains its principal
+place of business and such litigation shall be governed by laws of that
+jurisdiction, without reference to its conflict-of-law provisions.
+Nothing in this Section shall prevent a party's ability to bring
+cross-claims or counter-claims.
+
+9. Miscellaneous
+----------------
+
+This License represents the complete agreement concerning the subject
+matter hereof. If any provision of this License is held to be
+unenforceable, such provision shall be reformed only to the extent
+necessary to make it enforceable. Any law or regulation which provides
+that the language of a contract shall be construed against the drafter
+shall not be used to construe this License against a Contributor.
+
+10. Versions of the License
+---------------------------
+
+10.1. New Versions
+
+Mozilla Foundation is the license steward. Except as provided in Section
+10.3, no one other than the license steward has the right to modify or
+publish new versions of this License. Each version will be given a
+distinguishing version number.
+
+10.2. Effect of New Versions
+
+You may distribute the Covered Software under the terms of the version
+of the License under which You originally received the Covered Software,
+or under the terms of any subsequent version published by the license
+steward.
+
+10.3. Modified Versions
+
+If you create software not governed by this License, and you want to
+create a new license for such software, you may create and use a
+modified version of this License if you rename the license and remove
+any references to the name of the license steward (except to note that
+such modified license differs from this License).
+
+10.4. Distributing Source Code Form that is Incompatible With Secondary
+Licenses
+
+If You choose to distribute Source Code Form that is Incompatible With
+Secondary Licenses under the terms of this version of the License, the
+notice described in Exhibit B of this License must be attached.
+
+Exhibit A - Source Code Form License Notice
+-------------------------------------------
+
+  This Source Code Form is subject to the terms of the Mozilla Public
+  License, v. 2.0. If a copy of the MPL was not distributed with this
+  file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+If it is not possible or desirable to put the notice in a particular
+file, then You may include the notice in a location (such as a LICENSE
+file in a relevant directory) where a recipient would be likely to look
+for such a notice.
+
+You may add additional accurate notices of copyright ownership.
+
+Exhibit B - "Incompatible With Secondary Licenses" Notice
+---------------------------------------------------------
+
+  This Source Code Form is "Incompatible With Secondary Licenses", as
+  defined by the Mozilla Public License, v. 2.0.
diff --git a/client/rest/licenses/httpcore-NOTICE.txt b/client/rest/licenses/httpcore5-h2-NOTICE.txt
similarity index 55%
rename from client/rest/licenses/httpcore-NOTICE.txt
rename to client/rest/licenses/httpcore5-h2-NOTICE.txt
index 013448d3e9561..afee7c6e6880b 100644
--- a/client/rest/licenses/httpcore-NOTICE.txt
+++ b/client/rest/licenses/httpcore5-h2-NOTICE.txt
@@ -1,5 +1,6 @@
-Apache HttpComponents Core
-Copyright 2005-2016 The Apache Software Foundation
+Apache HttpComponents Client
+Copyright 1999-2022 The Apache Software Foundation
 
 This product includes software developed at
 The Apache Software Foundation (http://www.apache.org/).
+
diff --git a/client/rest/licenses/slf4j-api-1.7.36.jar.sha1 b/client/rest/licenses/slf4j-api-1.7.36.jar.sha1
new file mode 100644
index 0000000000000..77b9917528382
--- /dev/null
+++ b/client/rest/licenses/slf4j-api-1.7.36.jar.sha1
@@ -0,0 +1 @@
+6c62681a2f655b49963a5983b8b0950a6120ae14
\ No newline at end of file
diff --git a/client/rest/licenses/slf4j-api-LICENSE.txt b/client/rest/licenses/slf4j-api-LICENSE.txt
new file mode 100644
index 0000000000000..8fda22f4d72f6
--- /dev/null
+++ b/client/rest/licenses/slf4j-api-LICENSE.txt
@@ -0,0 +1,21 @@
+Copyright (c) 2004-2014 QOS.ch
+All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
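The client sources below are migrated from Apache HttpComponents 4.x to HttpClient/HttpCore 5.x, which changes the types callers import (HttpHost, HttpEntity, ContentType and friends now live under org.apache.hc.*). As a minimal sketch of what calling code looks like once these hunks land; the scheme, host, port, and path are illustrative assumptions, not part of this diff:

```java
import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.RestClient;

public class SyncRequestSketch {
    public static void main(String[] args) throws Exception {
        // HttpHost now comes from org.apache.hc.core5 rather than org.apache.http.
        try (RestClient client = RestClient.builder(new HttpHost("http", "localhost", 9200)).build()) {
            Response response = client.performRequest(new Request("GET", "/_cluster/health"));
            // getStatusLine() is rebuilt from the hc5 response, as shown in Response.java below.
            System.out.println(response.getStatusLine());
        }
    }
}
```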
diff --git a/client/rest/licenses/slf4j-api-NOTICE.txt b/client/rest/licenses/slf4j-api-NOTICE.txt
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/client/rest/src/main/java/org/opensearch/client/Cancellable.java b/client/rest/src/main/java/org/opensearch/client/Cancellable.java
index 4bfc0704227aa..56e31a3742f35 100644
--- a/client/rest/src/main/java/org/opensearch/client/Cancellable.java
+++ b/client/rest/src/main/java/org/opensearch/client/Cancellable.java
@@ -31,24 +31,26 @@
 
 package org.opensearch.client;
 
-import org.apache.http.client.methods.AbstractExecutionAwareRequest;
-import org.apache.http.client.methods.HttpRequestBase;
+import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase;
+import org.apache.hc.core5.concurrent.CancellableDependency;
 
 import java.util.concurrent.CancellationException;
 
 /**
  * Represents an operation that can be cancelled.
  * Returned when executing async requests through {@link RestClient#performRequestAsync(Request, ResponseListener)}, so that the request
- * can be cancelled if needed. Cancelling a request will result in calling {@link AbstractExecutionAwareRequest#abort()} on the underlying
+ * can be cancelled if needed. Cancelling a request will result in calling {@link CancellableDependency#cancel()} on the underlying
  * request object, which will in turn cancel its corresponding {@link java.util.concurrent.Future}.
  * Note that cancelling a request does not automatically translate to aborting its execution on the server side, which needs to be
  * specifically implemented in each API.
  */
-public class Cancellable {
+public class Cancellable implements org.apache.hc.core5.concurrent.Cancellable {
 
     static final Cancellable NO_OP = new Cancellable(null) {
         @Override
-        public void cancel() {}
+        public boolean cancel() {
+            throw new UnsupportedOperationException();
+        }
 
         @Override
         void runIfNotCancelled(Runnable runnable) {
@@ -56,13 +58,13 @@ void runIfNotCancelled(Runnable runnable) {
         }
     };
 
-    static Cancellable fromRequest(HttpRequestBase httpRequest) {
+    static Cancellable fromRequest(CancellableDependency httpRequest) {
         return new Cancellable(httpRequest);
     }
 
-    private final HttpRequestBase httpRequest;
+    private final CancellableDependency httpRequest;
 
-    private Cancellable(HttpRequestBase httpRequest) {
+    private Cancellable(CancellableDependency httpRequest) {
         this.httpRequest = httpRequest;
     }
 
@@ -70,15 +72,15 @@ private Cancellable(HttpRequestBase httpRequest) {
      * Cancels the on-going request that is associated with the current instance of {@link Cancellable}.
      *
      */
-    public synchronized void cancel() {
-        this.httpRequest.abort();
+    public synchronized boolean cancel() {
+        return this.httpRequest.cancel();
     }
 
     /**
      * Executes some arbitrary code iff the on-going request has not been cancelled, otherwise throws {@link CancellationException}.
      * This is needed to guarantee that cancelling a request works correctly even in case {@link #cancel()} is called between different
-     * attempts of the same request. The low-level client reuses the same instance of the {@link AbstractExecutionAwareRequest} by calling
-     * {@link AbstractExecutionAwareRequest#reset()} between subsequent retries. The {@link #cancel()} method can be called at anytime,
+     * attempts of the same request. The low-level client reuses the same instance of the {@link CancellableDependency} by calling
+     * {@link HttpUriRequestBase#reset()} between subsequent retries. The {@link #cancel()} method can be called at any time,
      * and we need to handle the case where it gets called while there is no request being executed as one attempt may have failed and
      * the subsequent attempt has not been started yet.
      * If the request has already been cancelled we don't go ahead with the next attempt, and artificially raise the
@@ -87,7 +89,7 @@ public synchronized void cancel() {
      * when there is no future to cancel, which would make cancelling the request a no-op.
      */
     synchronized void runIfNotCancelled(Runnable runnable) {
-        if (this.httpRequest.isAborted()) {
+        if (this.httpRequest.isCancelled()) {
             throw newCancellationException();
         }
         runnable.run();
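The Cancellable hunks above swap the 4.x abort()/isAborted() calls for the HttpCore 5 cancel()/isCancelled() protocol, and cancel() now reports success as a boolean. A hedged sketch of how a caller might cancel an in-flight async request under the new API (host and endpoint are assumptions):

```java
import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.Cancellable;
import org.opensearch.client.Request;
import org.opensearch.client.Response;
import org.opensearch.client.ResponseListener;
import org.opensearch.client.RestClient;

public class CancelSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("http", "localhost", 9200)).build()) {
            Cancellable cancellable = client.performRequestAsync(new Request("GET", "/_search"), new ResponseListener() {
                @Override
                public void onSuccess(Response response) {
                    // Normal completion path.
                }

                @Override
                public void onFailure(Exception exception) {
                    // A cancelled request surfaces here, typically as a CancellationException.
                }
            });
            // cancel() now delegates to CancellableDependency#cancel() and returns whether the
            // in-flight attempt was actually cancelled; per the javadoc above, this does not
            // abort the operation on the server side.
            boolean cancelled = cancellable.cancel();
            System.out.println("cancelled: " + cancelled);
        }
    }
}
```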
diff --git a/client/rest/src/main/java/org/opensearch/client/HasAttributeNodeSelector.java b/client/rest/src/main/java/org/opensearch/client/HasAttributeNodeSelector.java
index e6005c207ec93..0a54dbaf30364 100644
--- a/client/rest/src/main/java/org/opensearch/client/HasAttributeNodeSelector.java
+++ b/client/rest/src/main/java/org/opensearch/client/HasAttributeNodeSelector.java
@@ -57,6 +57,10 @@ public HasAttributeNodeSelector(String key, String value) {
         this.value = value;
     }
 
+    /**
+     * Select the {@link Node}s to which to send requests.
+     * @param nodes the {@link Node}s targeted for sending requests
+     */
     @Override
     public void select(Iterable<Node> nodes) {
         Iterator<Node> itr = nodes.iterator();
@@ -70,6 +74,10 @@ public void select(Iterable<Node> nodes) {
         }
     }
 
+    /**
+     * Compare two node selectors for equality
+     * @param o node selector instance to compare with
+     */
     @Override
     public boolean equals(Object o) {
         if (this == o) {
@@ -82,11 +90,17 @@ public boolean equals(Object o) {
         return Objects.equals(key, that.key) && Objects.equals(value, that.value);
     }
 
+    /**
+     * Calculate the hash code of the node selector
+     */
     @Override
    public int hashCode() {
         return Objects.hash(key, value);
     }
 
+    /**
+     * Convert this node selector to string representation
+     */
     @Override
     public String toString() {
         return key + "=" + value;
diff --git a/client/rest/src/main/java/org/opensearch/client/HeapBufferedAsyncResponseConsumer.java b/client/rest/src/main/java/org/opensearch/client/HeapBufferedAsyncResponseConsumer.java
deleted file mode 100644
index e2993e48a5a05..0000000000000
--- a/client/rest/src/main/java/org/opensearch/client/HeapBufferedAsyncResponseConsumer.java
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import org.apache.http.ContentTooLongException;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpException;
-import org.apache.http.HttpResponse;
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.ContentDecoder;
-import org.apache.http.nio.IOControl;
-import org.apache.http.nio.entity.ContentBufferEntity;
-import org.apache.http.nio.protocol.AbstractAsyncResponseConsumer;
-import org.apache.http.nio.util.ByteBufferAllocator;
-import org.apache.http.nio.util.HeapByteBufferAllocator;
-import org.apache.http.nio.util.SimpleInputBuffer;
-import org.apache.http.protocol.HttpContext;
-
-import java.io.IOException;
-
-/**
- * Default implementation of {@link org.apache.http.nio.protocol.HttpAsyncResponseConsumer}. Buffers the whole
- * response content in heap memory, meaning that the size of the buffer is equal to the content-length of the response.
- * Limits the size of responses that can be read based on a configurable argument. Throws an exception in case the entity is longer
- * than the configured buffer limit.
- */
-public class HeapBufferedAsyncResponseConsumer extends AbstractAsyncResponseConsumer<HttpResponse> {
-
-    private final int bufferLimitBytes;
-    private volatile HttpResponse response;
-    private volatile SimpleInputBuffer buf;
-
-    /**
-     * Creates a new instance of this consumer with the provided buffer limit.
-     *
-     * @param bufferLimit the buffer limit. Must be greater than 0.
-     * @throws IllegalArgumentException if {@code bufferLimit} is less than or equal to 0.
-     */
-    public HeapBufferedAsyncResponseConsumer(int bufferLimit) {
-        if (bufferLimit <= 0) {
-            throw new IllegalArgumentException("bufferLimit must be greater than 0");
-        }
-        this.bufferLimitBytes = bufferLimit;
-    }
-
-    /**
-     * Get the limit of the buffer.
-     */
-    public int getBufferLimit() {
-        return bufferLimitBytes;
-    }
-
-    @Override
-    protected void onResponseReceived(HttpResponse response) throws HttpException, IOException {
-        this.response = response;
-    }
-
-    @Override
-    protected void onEntityEnclosed(HttpEntity entity, ContentType contentType) throws IOException {
-        long len = entity.getContentLength();
-        if (len > bufferLimitBytes) {
-            throw new ContentTooLongException(
-                "entity content is too long [" + len + "] for the configured buffer limit [" + bufferLimitBytes + "]"
-            );
-        }
-        if (len < 0) {
-            len = 4096;
-        }
-        this.buf = new SimpleInputBuffer((int) len, getByteBufferAllocator());
-        this.response.setEntity(new ContentBufferEntity(entity, this.buf));
-    }
-
-    /**
-     * Returns the instance of {@link ByteBufferAllocator} to use for content buffering.
-     * Allows to plug in any {@link ByteBufferAllocator} implementation.
-     */
-    protected ByteBufferAllocator getByteBufferAllocator() {
-        return HeapByteBufferAllocator.INSTANCE;
-    }
-
-    @Override
-    protected void onContentReceived(ContentDecoder decoder, IOControl ioctrl) throws IOException {
-        this.buf.consumeContent(decoder);
-    }
-
-    @Override
-    protected HttpResponse buildResult(HttpContext context) throws Exception {
-        return response;
-    }
-
-    @Override
-    protected void releaseResources() {
-        response = null;
-    }
-}
diff --git a/client/rest/src/main/java/org/opensearch/client/HttpAsyncResponseConsumerFactory.java b/client/rest/src/main/java/org/opensearch/client/HttpAsyncResponseConsumerFactory.java
index 7a56e03a1162c..6420a615484d0 100644
--- a/client/rest/src/main/java/org/opensearch/client/HttpAsyncResponseConsumerFactory.java
+++ b/client/rest/src/main/java/org/opensearch/client/HttpAsyncResponseConsumerFactory.java
@@ -32,30 +32,31 @@
 
 package org.opensearch.client;
 
-import org.apache.http.HttpResponse;
-import org.apache.http.nio.protocol.HttpAsyncResponseConsumer;
+import org.apache.hc.core5.http.ClassicHttpResponse;
+import org.apache.hc.core5.http.nio.AsyncResponseConsumer;
+import org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer;
 
 import static org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory.DEFAULT_BUFFER_LIMIT;
 
 /**
- * Factory used to create instances of {@link HttpAsyncResponseConsumer}. Each request retry needs its own instance of the
+ * Factory used to create instances of {@link AsyncResponseConsumer}. Each request retry needs its own instance of the
  * consumer object. Users can implement this interface and pass their own instance to the specialized
  * performRequest methods that accept an {@link HttpAsyncResponseConsumerFactory} instance as argument.
  */
 public interface HttpAsyncResponseConsumerFactory {
 
     /**
-     * Creates the default type of {@link HttpAsyncResponseConsumer}, based on heap buffering with a buffer limit of 100MB.
+     * Creates the default type of {@link AsyncResponseConsumer}, based on heap buffering with a buffer limit of 100MB.
      */
     HttpAsyncResponseConsumerFactory DEFAULT = new HeapBufferedResponseConsumerFactory(DEFAULT_BUFFER_LIMIT);
 
     /**
-     * Creates the {@link HttpAsyncResponseConsumer}, called once per request attempt.
+     * Creates the {@link AsyncResponseConsumer}, called once per request attempt.
      */
-    HttpAsyncResponseConsumer<HttpResponse> createHttpAsyncResponseConsumer();
+    AsyncResponseConsumer<ClassicHttpResponse> createHttpAsyncResponseConsumer();
 
     /**
-     * Default factory used to create instances of {@link HttpAsyncResponseConsumer}.
+     * Default factory used to create instances of {@link AsyncResponseConsumer}.
      * Creates one instance of {@link HeapBufferedAsyncResponseConsumer} for each request attempt, with a configurable
      * buffer limit which defaults to 100MB.
      */
@@ -75,8 +76,11 @@ public HeapBufferedResponseConsumerFactory(int bufferLimitBytes) {
             this.bufferLimit = bufferLimitBytes;
         }
 
+        /**
+         * Creates the {@link AsyncResponseConsumer}, called once per request attempt.
+         */
         @Override
-        public HttpAsyncResponseConsumer<HttpResponse> createHttpAsyncResponseConsumer() {
+        public AsyncResponseConsumer<ClassicHttpResponse> createHttpAsyncResponseConsumer() {
             return new HeapBufferedAsyncResponseConsumer(bufferLimit);
         }
     }
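HttpAsyncResponseConsumerFactory keeps its 100MB heap-buffer default but now produces hc5 AsyncResponseConsumer instances (from the relocated org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer). A sketch of overriding the per-attempt buffer limit through RequestOptions; the 200MB figure and the use of RequestOptions.DEFAULT.toBuilder() are assumptions for illustration:

```java
import org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory;
import org.opensearch.client.Request;
import org.opensearch.client.RequestOptions;

public class BufferLimitSketch {
    public static Request searchWithLargerBuffer() {
        Request request = new Request("GET", "/_search");
        RequestOptions.Builder options = RequestOptions.DEFAULT.toBuilder();
        // One consumer is created per request attempt; this raises its heap buffer
        // cap from the 100MB default to 200MB (value assumed, not from the diff).
        options.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(200 * 1024 * 1024));
        request.setOptions(options.build());
        return request;
    }
}
```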
diff --git a/client/rest/src/main/java/org/opensearch/client/HttpDeleteWithEntity.java b/client/rest/src/main/java/org/opensearch/client/HttpDeleteWithEntity.java
deleted file mode 100644
index 52618cd7edc75..0000000000000
--- a/client/rest/src/main/java/org/opensearch/client/HttpDeleteWithEntity.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import org.apache.http.client.methods.HttpDelete;
-import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
-
-import java.net.URI;
-
-/**
- * Allows to send DELETE requests providing a body (not supported out of the box)
- */
-final class HttpDeleteWithEntity extends HttpEntityEnclosingRequestBase {
-
-    static final String METHOD_NAME = HttpDelete.METHOD_NAME;
-
-    HttpDeleteWithEntity(final URI uri) {
-        setURI(uri);
-    }
-
-    @Override
-    public String getMethod() {
-        return METHOD_NAME;
-    }
-}
diff --git a/client/rest/src/main/java/org/opensearch/client/HttpGetWithEntity.java b/client/rest/src/main/java/org/opensearch/client/HttpGetWithEntity.java
deleted file mode 100644
index 8ab639433f6be..0000000000000
--- a/client/rest/src/main/java/org/opensearch/client/HttpGetWithEntity.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import org.apache.http.client.methods.HttpEntityEnclosingRequestBase;
-import org.apache.http.client.methods.HttpGet;
-
-import java.net.URI;
-
-/**
- * Allows to send GET requests providing a body (not supported out of the box)
- */
-final class HttpGetWithEntity extends HttpEntityEnclosingRequestBase {
-
-    static final String METHOD_NAME = HttpGet.METHOD_NAME;
-
-    HttpGetWithEntity(final URI uri) {
-        setURI(uri);
-    }
-
-    @Override
-    public String getMethod() {
-        return METHOD_NAME;
-    }
-}
diff --git a/client/rest/src/main/java/org/opensearch/client/Node.java b/client/rest/src/main/java/org/opensearch/client/Node.java
index c02ac6c68718f..8fe5dcfa00db0 100644
--- a/client/rest/src/main/java/org/opensearch/client/Node.java
+++ b/client/rest/src/main/java/org/opensearch/client/Node.java
@@ -32,7 +32,7 @@
 
 package org.opensearch.client;
 
-import org.apache.http.HttpHost;
+import org.apache.hc.core5.http.HttpHost;
 
 import java.util.List;
 import java.util.Map;
@@ -152,6 +152,9 @@ public Map<String, List<String>> getAttributes() {
         return attributes;
     }
 
+    /**
+     * Convert node to string representation
+     */
     @Override
     public String toString() {
         StringBuilder b = new StringBuilder();
@@ -174,6 +177,10 @@ public String toString() {
         return b.append(']').toString();
     }
 
+    /**
+     * Compare two nodes for equality
+     * @param obj node instance to compare with
+     */
     @Override
     public boolean equals(Object obj) {
         if (obj == null || obj.getClass() != getClass()) {
@@ -188,6 +195,9 @@ public boolean equals(Object obj) {
             && Objects.equals(attributes, other.attributes);
     }
 
+    /**
+     * Calculate the hash code of the node
+     */
     @Override
     public int hashCode() {
         return Objects.hash(host, boundHosts, name, version, roles, attributes);
@@ -239,11 +249,25 @@ public boolean isIngest() {
         return roles.contains("ingest");
     }
 
+    /**
+     * Returns whether the node is dedicated to providing search capability.
+     */
+    public boolean isSearch() {
+        return roles.contains("search");
+    }
+
+    /**
+     * Convert roles to string representation
+     */
     @Override
     public String toString() {
         return String.join(",", roles);
     }
 
+    /**
+     * Compare two roles for equality
+     * @param obj roles instance to compare with
+     */
     @Override
     public boolean equals(Object obj) {
         if (obj == null || obj.getClass() != getClass()) {
@@ -253,6 +277,9 @@ public boolean equals(Object obj) {
         return roles.equals(other.roles);
     }
 
+    /**
+     * Calculate the hash code of the roles
+     */
     @Override
     public int hashCode() {
         return roles.hashCode();
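Node gains an isSearch() role check alongside the existing ones, and the earlier HasAttributeNodeSelector keeps its (key, value) constructor. A sketch of steering requests to nodes carrying a given attribute; the attribute key/value are invented, and this assumes the builder still exposes setNodeSelector as in the 4.x-based client:

```java
import org.apache.hc.core5.http.HttpHost;
import org.opensearch.client.HasAttributeNodeSelector;
import org.opensearch.client.RestClient;

public class NodeSelectorSketch {
    public static RestClient build() {
        return RestClient.builder(new HttpHost("http", "localhost", 9200))
            // Only nodes advertising node.attr.zone=us-east-1a (assumed attribute)
            // will be selected to receive requests.
            .setNodeSelector(new HasAttributeNodeSelector("zone", "us-east-1a"))
            .build();
    }
}
```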
diff --git a/client/rest/src/main/java/org/opensearch/client/PersistentCredentialsAuthenticationStrategy.java b/client/rest/src/main/java/org/opensearch/client/PersistentCredentialsAuthenticationStrategy.java
deleted file mode 100644
index 8a35d6eb607ca..0000000000000
--- a/client/rest/src/main/java/org/opensearch/client/PersistentCredentialsAuthenticationStrategy.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- *
- *
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.client;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.http.HttpHost;
-import org.apache.http.auth.AuthScheme;
-import org.apache.http.impl.client.TargetAuthenticationStrategy;
-import org.apache.http.protocol.HttpContext;
-
-/**
- * An {@link org.apache.http.client.AuthenticationStrategy} implementation that does not perform
- * any special handling if authentication fails.
- * The default handler in Apache HTTP client mimics standard browser behaviour of clearing authentication
- * credentials if it receives a 401 response from the server. While this can be useful for browser, it is
- * rarely the desired behaviour with the OpenSearch REST API.
- * If the code using the REST client has configured credentials for the REST API, then we can and should
- * assume that this is intentional, and those credentials represent the best possible authentication
- * mechanism to the OpenSearch node.
- * If we receive a 401 status, a probably cause is that the authentication mechanism in place was unable
- * to perform the requisite password checks (the node has not yet recovered its state, or an external
- * authentication provider was unavailable).
- * If this occurs, then the desired behaviour is for the Rest client to retry with the same credentials
- * (rather than trying with no credentials, or expecting the calling code to provide alternate credentials).
- */
-final class PersistentCredentialsAuthenticationStrategy extends TargetAuthenticationStrategy {
-
-    private final Log logger = LogFactory.getLog(PersistentCredentialsAuthenticationStrategy.class);
-
-    @Override
-    public void authFailed(HttpHost host, AuthScheme authScheme, HttpContext context) {
-        if (logger.isDebugEnabled()) {
-            logger.debug(
-                "Authentication to "
-                    + host
-                    + " failed (scheme: "
-                    + authScheme.getSchemeName()
-                    + "). Preserving credentials for next request"
-            );
-        }
-        // Do nothing.
-        // The superclass implementation of method will clear the credentials from the cache, but we don't
-    }
-}
diff --git a/client/rest/src/main/java/org/opensearch/client/PreferHasAttributeNodeSelector.java b/client/rest/src/main/java/org/opensearch/client/PreferHasAttributeNodeSelector.java
index ddec1da068bf0..7cf7490692650 100644
--- a/client/rest/src/main/java/org/opensearch/client/PreferHasAttributeNodeSelector.java
+++ b/client/rest/src/main/java/org/opensearch/client/PreferHasAttributeNodeSelector.java
@@ -58,6 +58,10 @@ public PreferHasAttributeNodeSelector(String key, String value) {
         this.value = value;
     }
 
+    /**
+     * Select the {@link Node}s to which to send requests.
+     * @param nodes the {@link Node}s targeted for sending requests
+     */
     @Override
     public void select(Iterable<Node> nodes) {
         boolean foundAtLeastOne = false;
@@ -99,6 +103,10 @@ public void select(Iterable<Node> nodes) {
         }
     }
 
+    /**
+     * Compare two node selectors for equality
+     * @param o node selector instance to compare with
+     */
     @Override
     public boolean equals(Object o) {
         if (this == o) {
@@ -111,11 +119,17 @@ public boolean equals(Object o) {
         return Objects.equals(key, that.key) && Objects.equals(value, that.value);
     }
 
+    /**
+     * Calculate the hash code of the node selector
+     */
     @Override
     public int hashCode() {
         return Objects.hash(key, value);
     }
 
+    /**
+     * Convert this node selector to string representation
+     */
     @Override
     public String toString() {
         return key + "=" + value;
diff --git a/client/rest/src/main/java/org/opensearch/client/Request.java b/client/rest/src/main/java/org/opensearch/client/Request.java
index df81ca7f717ae..441b01b0891ad 100644
--- a/client/rest/src/main/java/org/opensearch/client/Request.java
+++ b/client/rest/src/main/java/org/opensearch/client/Request.java
@@ -32,9 +32,9 @@
 
 package org.opensearch.client;
 
-import org.apache.http.HttpEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.nio.entity.NStringEntity;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.io.entity.StringEntity;
 
 import java.util.HashMap;
 import java.util.Map;
@@ -133,7 +133,7 @@ public void setEntity(HttpEntity entity) {
      * @param entity JSON string to be set as the entity body of the request.
      */
     public void setJsonEntity(String entity) {
-        setEntity(entity == null ? null : new NStringEntity(entity, ContentType.APPLICATION_JSON));
+        setEntity(entity == null ? null : new StringEntity(entity, ContentType.APPLICATION_JSON));
     }
 
     /**
@@ -176,6 +176,9 @@ public RequestOptions getOptions() {
         return options;
     }
 
+    /**
+     * Convert request to string representation
+     */
     @Override
     public String toString() {
         StringBuilder b = new StringBuilder();
@@ -192,6 +195,10 @@ public String toString() {
         return b.append('}').toString();
     }
 
+    /**
+     * Compare two requests for equality
+     * @param obj request instance to compare with
+     */
     @Override
     public boolean equals(Object obj) {
         if (obj == null || (obj.getClass() != getClass())) {
@@ -209,6 +216,9 @@ public boolean equals(Object obj) {
             && options.equals(other.options);
     }
 
+    /**
+     * Calculate the hash code of the request
+     */
     @Override
     public int hashCode() {
         return Objects.hash(method, endpoint, parameters, entity, options);
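Request.setJsonEntity now wraps the string in the blocking hc5 StringEntity instead of the 4.x NStringEntity. A sketch of building a request body both ways; the index name and document are assumptions:

```java
import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.opensearch.client.Request;

public class JsonEntitySketch {
    public static Request indexDoc() {
        Request request = new Request("POST", "/my-index/_doc");
        // Convenience path: wraps the string exactly as the hunk above shows.
        request.setJsonEntity("{\"title\":\"hello\"}");
        // Equivalent explicit path using the hc5 entity type.
        request.setEntity(new StringEntity("{\"title\":\"hello\"}", ContentType.APPLICATION_JSON));
        return request;
    }
}
```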
diff --git a/client/rest/src/main/java/org/opensearch/client/RequestLogger.java b/client/rest/src/main/java/org/opensearch/client/RequestLogger.java
index 297885fa3131b..0f2e0e6da834d 100644
--- a/client/rest/src/main/java/org/opensearch/client/RequestLogger.java
+++ b/client/rest/src/main/java/org/opensearch/client/RequestLogger.java
@@ -34,16 +34,16 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpEntityEnclosingRequest;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.RequestLine;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.entity.BufferedHttpEntity;
-import org.apache.http.entity.ContentType;
-import org.apache.http.util.EntityUtils;
+import org.apache.hc.client5.http.classic.methods.HttpUriRequest;
+import org.apache.hc.core5.http.ClassicHttpResponse;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.Header;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.hc.core5.http.ParseException;
+import org.apache.hc.core5.http.io.entity.BufferedHttpEntity;
+import org.apache.hc.core5.http.io.entity.EntityUtils;
+import org.apache.hc.core5.http.message.StatusLine;
 
 import java.io.BufferedReader;
 import java.io.IOException;
@@ -66,17 +66,10 @@ private RequestLogger() {}
     /**
      * Logs a request that yielded a response
      */
-    static void logResponse(Log logger, HttpUriRequest request, HttpHost host, HttpResponse httpResponse) {
+    static void logResponse(Log logger, HttpUriRequest request, HttpHost host, ClassicHttpResponse httpResponse) {
         if (logger.isDebugEnabled()) {
             logger.debug(
-                "request ["
-                    + request.getMethod()
-                    + " "
-                    + host
-                    + getUri(request.getRequestLine())
-                    + "] returned ["
-                    + httpResponse.getStatusLine()
-                    + "]"
+                "request [" + request.getMethod() + " " + host + getUri(request) + "] returned [" + new StatusLine(httpResponse) + "]"
             );
         }
         if (logger.isWarnEnabled()) {
@@ -109,7 +102,7 @@ static void logResponse(Log logger, HttpUriRequest request, HttpHost host, HttpResponse httpResponse) {
      */
     static void logFailedRequest(Log logger, HttpUriRequest request, Node node, Exception e) {
         if (logger.isDebugEnabled()) {
-            logger.debug("request [" + request.getMethod() + " " + node.getHost() + getUri(request.getRequestLine()) + "] failed", e);
+            logger.debug("request [" + request.getMethod() + " " + node.getHost() + getUri(request) + "] failed", e);
         }
         if (tracer.isTraceEnabled()) {
             String traceRequest;
@@ -127,7 +120,7 @@ static String buildWarningMessage(HttpUriRequest request, HttpHost host, Header[] warnings) {
         StringBuilder message = new StringBuilder("request [").append(request.getMethod())
             .append(" ")
             .append(host)
-            .append(getUri(request.getRequestLine()))
+            .append(getUri(request))
             .append("] returned ")
             .append(warnings.length)
             .append(" warnings: ");
@@ -144,17 +137,18 @@ static String buildWarningMessage(HttpUriRequest request, HttpHost host, Header[] warnings) {
      * Creates curl output for given request
      */
     static String buildTraceRequest(HttpUriRequest request, HttpHost host) throws IOException {
-        String requestLine = "curl -iX " + request.getMethod() + " '" + host + getUri(request.getRequestLine()) + "'";
-        if (request instanceof HttpEntityEnclosingRequest) {
-            HttpEntityEnclosingRequest enclosingRequest = (HttpEntityEnclosingRequest) request;
-            if (enclosingRequest.getEntity() != null) {
-                requestLine += " -d '";
-                HttpEntity entity = enclosingRequest.getEntity();
-                if (entity.isRepeatable() == false) {
-                    entity = new BufferedHttpEntity(enclosingRequest.getEntity());
-                    enclosingRequest.setEntity(entity);
-                }
+        String requestLine = "curl -iX " + request.getMethod() + " '" + host + getUri(request) + "'";
+        if (request.getEntity() != null) {
+            requestLine += " -d '";
+            HttpEntity entity = request.getEntity();
+            if (entity.isRepeatable() == false) {
+                entity = new BufferedHttpEntity(request.getEntity());
+                request.setEntity(entity);
+            }
+            try {
                 requestLine += EntityUtils.toString(entity, StandardCharsets.UTF_8) + "'";
+            } catch (final ParseException ex) {
+                throw new IOException(ex);
             }
         }
         return requestLine;
@@ -163,10 +157,10 @@ static String buildTraceRequest(HttpUriRequest request, HttpHost host) throws IOException {
     /**
      * Creates curl output for given response
      */
-    static String buildTraceResponse(HttpResponse httpResponse) throws IOException {
+    static String buildTraceResponse(ClassicHttpResponse httpResponse) throws IOException {
         StringBuilder responseLine = new StringBuilder();
-        responseLine.append("# ").append(httpResponse.getStatusLine());
-        for (Header header : httpResponse.getAllHeaders()) {
+        responseLine.append("# ").append(new StatusLine(httpResponse));
+        for (Header header : httpResponse.getHeaders()) {
             responseLine.append("\n# ").append(header.getName()).append(": ").append(header.getValue());
         }
         responseLine.append("\n#");
@@ -176,7 +170,7 @@ static String buildTraceResponse(ClassicHttpResponse httpResponse) throws IOException {
             entity = new BufferedHttpEntity(entity);
         }
         httpResponse.setEntity(entity);
-        ContentType contentType = ContentType.get(entity);
+        ContentType contentType = ContentType.parse(entity.getContentType());
         Charset charset = StandardCharsets.UTF_8;
         if (contentType != null && contentType.getCharset() != null) {
             charset = contentType.getCharset();
@@ -191,10 +185,14 @@ static String buildTraceResponse(ClassicHttpResponse httpResponse) throws IOException {
         return responseLine.toString();
     }
 
-    private static String getUri(RequestLine requestLine) {
-        if (requestLine.getUri().charAt(0) != '/') {
-            return "/" + requestLine.getUri();
+    private static String getUri(HttpUriRequest request) {
+        final String uri = request.getRequestUri();
+        if (uri == null) {
+            return "/";
+        } else if (!uri.startsWith("/")) {
+            return "/" + uri;
+        } else {
+            return uri;
         }
-        return requestLine.getUri();
     }
 }
diff --git a/client/rest/src/main/java/org/opensearch/client/RequestOptions.java b/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
index 5390e303ff499..189d785faaf45 100644
--- a/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
+++ b/client/rest/src/main/java/org/opensearch/client/RequestOptions.java
@@ -32,10 +32,10 @@
 
 package org.opensearch.client;
 
-import org.apache.http.Header;
-import org.apache.http.client.config.RequestConfig;
-import org.apache.http.message.BasicHeader;
-import org.apache.http.nio.protocol.HttpAsyncResponseConsumer;
+import org.apache.hc.client5.http.config.RequestConfig;
+import org.apache.hc.core5.http.Header;
+import org.apache.hc.core5.http.message.BasicHeader;
+import org.apache.hc.core5.http.nio.AsyncResponseConsumer;
 import org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory;
 
 import java.util.ArrayList;
@@ -86,7 +86,7 @@ public List<Header> getHeaders() {
 
     /**
      * The {@link HttpAsyncResponseConsumerFactory} used to create one
-     * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the
+     * {@link AsyncResponseConsumer} callback per retry. Controls how the
      * response body gets streamed from a non-blocking HTTP connection on the
      * client side.
      */
@@ -124,6 +124,9 @@ public RequestConfig getRequestConfig() {
         return requestConfig;
     }
 
+    /**
+     * Convert request options to string representation
+     */
     @Override
     public String toString() {
         StringBuilder b = new StringBuilder();
@@ -152,6 +155,10 @@ public String toString() {
         return b.append('}').toString();
     }
 
+    /**
+     * Compare two request options for equality
+     * @param obj request options instance to compare with
+     */
     @Override
     public boolean equals(Object obj) {
         if (obj == null || (obj.getClass() != getClass())) {
@@ -167,6 +174,9 @@ public boolean equals(Object obj) {
             && Objects.equals(warningsHandler, other.warningsHandler);
     }
 
+    /**
+     * Calculate the hash code of the request options
+     */
     @Override
     public int hashCode() {
         return Objects.hash(headers, httpAsyncResponseConsumerFactory, warningsHandler);
@@ -218,11 +228,11 @@ public Builder addHeader(String name, String value) {
 
         /**
          * Set the {@link HttpAsyncResponseConsumerFactory} used to create one
-         * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the
+         * {@link AsyncResponseConsumer} callback per retry. Controls how the
          * response body gets streamed from a non-blocking HTTP connection on the
         * client side.
         *
-         * @param httpAsyncResponseConsumerFactory factory for creating {@link HttpAsyncResponseConsumer}.
+         * @param httpAsyncResponseConsumerFactory factory for creating {@link AsyncResponseConsumer}.
         * @throws NullPointerException if {@code httpAsyncResponseConsumerFactory} is null.
         */
        public void setHttpAsyncResponseConsumerFactory(HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) {
diff --git a/client/rest/src/main/java/org/opensearch/client/Response.java b/client/rest/src/main/java/org/opensearch/client/Response.java
index d380607b7df9e..b062d937ed630 100644
--- a/client/rest/src/main/java/org/opensearch/client/Response.java
+++ b/client/rest/src/main/java/org/opensearch/client/Response.java
@@ -32,12 +32,13 @@
 
 package org.opensearch.client;
 
-import org.apache.http.Header;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpHost;
-import org.apache.http.HttpResponse;
-import org.apache.http.RequestLine;
-import org.apache.http.StatusLine;
+import org.apache.hc.core5.http.ClassicHttpResponse;
+import org.apache.hc.core5.http.Header;
+import org.apache.hc.core5.http.HttpEntity;
+import org.apache.hc.core5.http.HttpHost;
+import org.apache.hc.core5.http.HttpResponse;
+import org.apache.hc.core5.http.message.RequestLine;
+import org.apache.hc.core5.http.message.StatusLine;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -53,9 +54,9 @@ public class Response {
 
     private final RequestLine requestLine;
     private final HttpHost host;
-    private final HttpResponse response;
+    private final ClassicHttpResponse response;
 
-    Response(RequestLine requestLine, HttpHost host, HttpResponse response) {
+    Response(RequestLine requestLine, HttpHost host, ClassicHttpResponse response) {
         Objects.requireNonNull(requestLine, "requestLine cannot be null");
         Objects.requireNonNull(host, "host cannot be null");
         Objects.requireNonNull(response, "response cannot be null");
@@ -82,14 +83,14 @@ public HttpHost getHost() {
      * Returns the status line of the current response
      */
     public StatusLine getStatusLine() {
-        return response.getStatusLine();
+        return new StatusLine(response);
     }
 
     /**
      * Returns all the response headers
      */
     public Header[] getHeaders() {
-        return response.getAllHeaders();
+        return response.getHeaders();
     }
 
     /**
@@ -146,7 +147,7 @@ public HttpEntity getEntity() {
      * @return {@code true} if the input string matches the specification
      */
     private static boolean matchWarningHeaderPatternByPrefix(final String s) {
-        return s.startsWith("299 OpenSearch-") || s.startsWith("299 Elasticsearch-");
+        return s.startsWith("299 OpenSearch-");
     }
 
     /**
@@ -199,12 +200,15 @@ public boolean hasWarnings() {
         return warnings != null && warnings.length > 0;
     }
 
-    HttpResponse getHttpResponse() {
+    ClassicHttpResponse getHttpResponse() {
         return response;
     }
 
+    /**
+     * Convert response to string representation
+     */
     @Override
     public String toString() {
-        return "Response{" + "requestLine=" + requestLine + ", host=" + host + ", response=" + response.getStatusLine() + '}';
+        return "Response{" + "requestLine=" + requestLine + ", host=" + host + ", response=" + getStatusLine() + '}';
     }
 }
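The ResponseException hunk below wraps EntityUtils.toString in a try/catch because the hc5 version declares a checked ParseException; callers reading response bodies themselves need the same translation. A sketch under that assumption (the helper name is invented):

```java
import java.io.IOException;

import org.apache.hc.core5.http.ParseException;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.opensearch.client.Response;

public class ResponseBodySketch {
    static String readBody(Response response) throws IOException {
        if (response.hasWarnings()) {
            // Deprecation warnings now match only the "299 OpenSearch-" prefix.
            System.err.println("request returned warnings");
        }
        try {
            return EntityUtils.toString(response.getEntity());
        } catch (ParseException e) {
            // Mirror the translation the client itself performs.
            throw new IOException(e);
        }
    }
}
```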
org.apache.hc.core5.http.io.entity.EntityUtils; import java.io.IOException; import java.util.Locale; @@ -77,7 +78,11 @@ static String buildMessage(Response response) throws IOException { entity = new BufferedHttpEntity(entity); response.getHttpResponse().setEntity(entity); } - message += "\n" + EntityUtils.toString(entity); + try { + message += "\n" + EntityUtils.toString(entity); + } catch (final ParseException ex) { + throw new IOException(ex); + } } return message; } diff --git a/client/rest/src/main/java/org/opensearch/client/RestClient.java b/client/rest/src/main/java/org/opensearch/client/RestClient.java index 92aed2c8fb179..9d140a145b004 100644 --- a/client/rest/src/main/java/org/opensearch/client/RestClient.java +++ b/client/rest/src/main/java/org/opensearch/client/RestClient.java @@ -33,36 +33,43 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.http.ConnectionClosedException; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.entity.HttpEntityWrapper; -import org.apache.http.HttpHost; -import org.apache.http.HttpRequest; -import org.apache.http.HttpResponse; -import org.apache.http.client.AuthCache; -import org.apache.http.client.ClientProtocolException; -import org.apache.http.client.entity.GzipCompressingEntity; -import org.apache.http.client.entity.GzipDecompressingEntity; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.client.methods.HttpEntityEnclosingRequestBase; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpOptions; -import org.apache.http.client.methods.HttpPatch; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.http.client.methods.HttpTrace; -import org.apache.http.client.protocol.HttpClientContext; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.concurrent.FutureCallback; -import org.apache.http.conn.ConnectTimeoutException; -import org.apache.http.impl.auth.BasicScheme; -import org.apache.http.impl.client.BasicAuthCache; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.nio.client.methods.HttpAsyncMethods; -import org.apache.http.nio.protocol.HttpAsyncRequestProducer; -import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ConnectionClosedException; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.io.entity.HttpEntityWrapper; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.HttpRequest; +import org.apache.hc.client5.http.auth.AuthCache; +import org.apache.hc.client5.http.auth.AuthScheme; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.Credentials; +import org.apache.hc.client5.http.auth.CredentialsProvider; +import org.apache.hc.client5.http.ConnectTimeoutException; +import org.apache.hc.client5.http.ClientProtocolException; +import org.apache.hc.client5.http.entity.GzipDecompressingEntity; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import 
org.apache.hc.client5.http.classic.methods.HttpOptions; +import org.apache.hc.client5.http.classic.methods.HttpPatch; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase; +import org.apache.hc.client5.http.classic.methods.HttpTrace; +import org.apache.hc.client5.http.protocol.HttpClientContext; +import org.apache.hc.core5.http.message.RequestLine; +import org.apache.hc.core5.http.nio.AsyncRequestProducer; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.net.URIBuilder; +import org.apache.hc.core5.reactor.IOReactorStatus; +import org.apache.hc.core5.util.Args; +import org.opensearch.client.http.HttpUriRequestProducer; +import org.apache.hc.core5.concurrent.FutureCallback; +import org.apache.hc.client5.http.impl.auth.BasicScheme; +import org.apache.hc.client5.http.impl.auth.BasicAuthCache; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; import javax.net.ssl.SSLHandshakeException; import java.io.ByteArrayInputStream; @@ -70,6 +77,7 @@ import java.io.Closeable; import java.io.IOException; import java.io.InputStream; +import java.io.OutputStream; import java.net.ConnectException; import java.net.SocketTimeoutException; import java.net.URI; @@ -92,6 +100,7 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.ExecutionException; +import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.zip.GZIPOutputStream; @@ -218,7 +227,7 @@ public static RestClientBuilder builder(String cloudId) { } String url = decodedParts[1] + "." 
+ domain; - return builder(new HttpHost(url, port, "https")); + return builder(new HttpHost("https", url, port)); } /** @@ -287,7 +296,7 @@ public List getNodes() { * @return client running status */ public boolean isRunning() { - return client.isRunning(); + return client.getStatus() == IOReactorStatus.ACTIVE; } /** @@ -323,7 +332,7 @@ public Response performRequest(Request request) throws IOException { private Response performRequest(final NodeTuple> nodeTuple, final InternalRequest request, Exception previousException) throws IOException { RequestContext context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache); - HttpResponse httpResponse; + ClassicHttpResponse httpResponse; try { httpResponse = client.execute(context.requestProducer, context.asyncResponseConsumer, context.context, null).get(); } catch (Exception e) { @@ -353,18 +362,18 @@ private Response performRequest(final NodeTuple> nodeTuple, final throw responseOrResponseException.responseException; } - private ResponseOrResponseException convertResponse(InternalRequest request, Node node, HttpResponse httpResponse) throws IOException { + private ResponseOrResponseException convertResponse(InternalRequest request, Node node, ClassicHttpResponse httpResponse) + throws IOException { RequestLogger.logResponse(logger, request.httpRequest, node.getHost(), httpResponse); - int statusCode = httpResponse.getStatusLine().getStatusCode(); + int statusCode = httpResponse.getCode(); Optional.ofNullable(httpResponse.getEntity()) .map(HttpEntity::getContentEncoding) - .map(Header::getValue) .filter("gzip"::equalsIgnoreCase) .map(gzipHeaderValue -> new GzipDecompressingEntity(httpResponse.getEntity())) .ifPresent(httpResponse::setEntity); - Response response = new Response(request.httpRequest.getRequestLine(), node.getHost(), httpResponse); + Response response = new Response(new RequestLine(request.httpRequest), node.getHost(), httpResponse); if (isSuccessfulResponse(statusCode) || request.ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) { onResponse(node); if (request.warningsHandler.warningsShouldFailRequest(response.getWarnings())) { @@ -418,47 +427,56 @@ private void performRequestAsync( ) { request.cancellable.runIfNotCancelled(() -> { final RequestContext context = request.createContextForNextAttempt(nodeTuple.nodes.next(), nodeTuple.authCache); - client.execute(context.requestProducer, context.asyncResponseConsumer, context.context, new FutureCallback() { - @Override - public void completed(HttpResponse httpResponse) { - try { - ResponseOrResponseException responseOrResponseException = convertResponse(request, context.node, httpResponse); - if (responseOrResponseException.responseException == null) { - listener.onSuccess(responseOrResponseException.response); - } else { + Future future = client.execute( + context.requestProducer, + context.asyncResponseConsumer, + context.context, + new FutureCallback() { + @Override + public void completed(ClassicHttpResponse httpResponse) { + try { + ResponseOrResponseException responseOrResponseException = convertResponse(request, context.node, httpResponse); + if (responseOrResponseException.responseException == null) { + listener.onSuccess(responseOrResponseException.response); + } else { + if (nodeTuple.nodes.hasNext()) { + listener.trackFailure(responseOrResponseException.responseException); + performRequestAsync(nodeTuple, request, listener); + } else { + listener.onDefinitiveFailure(responseOrResponseException.responseException); + } + } + } 
catch (Exception e) { + listener.onDefinitiveFailure(e); + } + } + + @Override + public void failed(Exception failure) { + try { + RequestLogger.logFailedRequest(logger, request.httpRequest, context.node, failure); + onFailure(context.node); if (nodeTuple.nodes.hasNext()) { - listener.trackFailure(responseOrResponseException.responseException); + listener.trackFailure(failure); performRequestAsync(nodeTuple, request, listener); } else { - listener.onDefinitiveFailure(responseOrResponseException.responseException); + listener.onDefinitiveFailure(failure); } + } catch (Exception e) { + listener.onDefinitiveFailure(e); } - } catch (Exception e) { - listener.onDefinitiveFailure(e); } - } - @Override - public void failed(Exception failure) { - try { - RequestLogger.logFailedRequest(logger, request.httpRequest, context.node, failure); - onFailure(context.node); - if (nodeTuple.nodes.hasNext()) { - listener.trackFailure(failure); - performRequestAsync(nodeTuple, request, listener); - } else { - listener.onDefinitiveFailure(failure); - } - } catch (Exception e) { - listener.onDefinitiveFailure(e); + @Override + public void cancelled() { + listener.onDefinitiveFailure(Cancellable.newCancellationException()); } } + ); - @Override - public void cancelled() { - listener.onDefinitiveFailure(Cancellable.newCancellationException()); - } - }); + if (future instanceof org.apache.hc.core5.concurrent.Cancellable) { + request.httpRequest.setDependency((org.apache.hc.core5.concurrent.Cancellable) future); + } }); } @@ -583,6 +601,9 @@ private void onFailure(Node node) { failureListener.onFailure(node); } + /** + * Close the underlying {@link CloseableHttpAsyncClient} instance + */ @Override public void close() throws IOException { client.close(); @@ -608,12 +629,12 @@ private static void addSuppressedException(Exception suppressedException, Except } } - private HttpRequestBase createHttpRequest(String method, URI uri, HttpEntity entity) { + private HttpUriRequestBase createHttpRequest(String method, URI uri, HttpEntity entity) { switch (method.toUpperCase(Locale.ROOT)) { - case HttpDeleteWithEntity.METHOD_NAME: - return addRequestBody(new HttpDeleteWithEntity(uri), entity); - case HttpGetWithEntity.METHOD_NAME: - return addRequestBody(new HttpGetWithEntity(uri), entity); + case HttpDelete.METHOD_NAME: + return addRequestBody(new HttpDelete(uri), entity); + case HttpGet.METHOD_NAME: + return addRequestBody(new HttpGet(uri), entity); case HttpHead.METHOD_NAME: return addRequestBody(new HttpHead(uri), entity); case HttpOptions.METHOD_NAME: @@ -633,22 +654,18 @@ private HttpRequestBase createHttpRequest(String method, URI uri, HttpEntity ent } } - private HttpRequestBase addRequestBody(HttpRequestBase httpRequest, HttpEntity entity) { + private HttpUriRequestBase addRequestBody(HttpUriRequestBase httpRequest, HttpEntity entity) { if (entity != null) { - if (httpRequest instanceof HttpEntityEnclosingRequestBase) { - if (compressionEnabled) { - if (chunkedEnabled.isPresent()) { - entity = new ContentCompressingEntity(entity, chunkedEnabled.get()); - } else { - entity = new ContentCompressingEntity(entity); - } - } else if (chunkedEnabled.isPresent()) { - entity = new ContentHttpEntity(entity, chunkedEnabled.get()); + if (compressionEnabled) { + if (chunkedEnabled.isPresent()) { + entity = new ContentCompressingEntity(entity, chunkedEnabled.get()); + } else { + entity = new ContentCompressingEntity(entity); } - ((HttpEntityEnclosingRequestBase) httpRequest).setEntity(entity); - } else { - throw new 
UnsupportedOperationException(httpRequest.getMethod() + " with body is not supported"); + } else if (chunkedEnabled.isPresent()) { + entity = new ContentHttpEntity(entity, chunkedEnabled.get()); } + httpRequest.setEntity(entity); } return httpRequest; } @@ -673,7 +690,12 @@ static URI buildUri(String pathPrefix, String path, Map<String, String> params) for (Map.Entry<String, String> param : params.entrySet()) { uriBuilder.addParameter(param.getKey(), param.getValue()); } - return uriBuilder.build(); + + // Apache HttpClient 5.x, unlike 4.x, does not encode URIs. This breaks requests whose paths + // contain Unicode characters (for example, document IDs may contain them): the characters end + // up mangled on the wire. Building the URI from `toASCIIString()` yields one that is already + // properly encoded. + return new URI(uriBuilder.build().toASCIIString()); } catch (URISyntaxException e) { throw new IllegalArgumentException(e.getMessage(), e); } @@ -802,7 +824,7 @@ public void remove() { private class InternalRequest { private final Request request; private final Set<Integer> ignoreErrorCodes; - private final HttpRequestBase httpRequest; + private final HttpUriRequestBase httpRequest; private final Cancellable cancellable; private final WarningsHandler warningsHandler; @@ -839,7 +861,7 @@ private void setHeaders(HttpRequest httpRequest, Collection
requestHeade } } - private void setRequestConfig(HttpRequestBase httpRequest, RequestConfig requestConfig) { + private void setRequestConfig(HttpUriRequestBase httpRequest, RequestConfig requestConfig) { if (requestConfig != null) { httpRequest.setConfig(requestConfig); } @@ -851,21 +873,81 @@ RequestContext createContextForNextAttempt(Node node, AuthCache authCache) { } } + /** + * The Apache HttpClient 5 adds "Authorization" header even if the credentials for basic authentication are not provided + * (effectively, username and password are 'null'). To workaround that, wrapping the AuthCache around current HttpClientContext + * and ensuring that the credentials are indeed provided for particular HttpHost, otherwise returning no authentication scheme + * even if it is present in the cache. + */ + private static class WrappingAuthCache implements AuthCache { + private final HttpClientContext context; + private final AuthCache delegate; + private final boolean usePersistentCredentials = true; + + public WrappingAuthCache(HttpClientContext context, AuthCache delegate) { + this.context = context; + this.delegate = delegate; + } + + @Override + public void put(HttpHost host, AuthScheme authScheme) { + delegate.put(host, authScheme); + } + + @Override + public AuthScheme get(HttpHost host) { + AuthScheme authScheme = delegate.get(host); + + if (authScheme != null) { + final CredentialsProvider credsProvider = context.getCredentialsProvider(); + if (credsProvider != null) { + final String schemeName = authScheme.getName(); + final AuthScope authScope = new AuthScope(host, null, schemeName); + final Credentials creds = credsProvider.getCredentials(authScope, context); + + // See please https://issues.apache.org/jira/browse/HTTPCLIENT-2203 + if (authScheme instanceof BasicScheme) { + ((BasicScheme) authScheme).initPreemptive(creds); + } + + if (creds == null) { + return null; + } + } + } + + return authScheme; + } + + @Override + public void remove(HttpHost host) { + if (!usePersistentCredentials) { + delegate.remove(host); + } + } + + @Override + public void clear() { + delegate.clear(); + } + + } + private static class RequestContext { private final Node node; - private final HttpAsyncRequestProducer requestProducer; - private final HttpAsyncResponseConsumer asyncResponseConsumer; + private final AsyncRequestProducer requestProducer; + private final AsyncResponseConsumer asyncResponseConsumer; private final HttpClientContext context; RequestContext(InternalRequest request, Node node, AuthCache authCache) { this.node = node; // we stream the request body if the entity allows for it - this.requestProducer = HttpAsyncMethods.create(node.getHost(), request.httpRequest); + this.requestProducer = HttpUriRequestProducer.create(request.httpRequest, node.getHost()); this.asyncResponseConsumer = request.request.getOptions() .getHttpAsyncResponseConsumerFactory() .createHttpAsyncResponseConsumer(); this.context = HttpClientContext.create(); - context.setAuthCache(authCache); + context.setAuthCache(new WrappingAuthCache(context, authCache)); } } @@ -966,7 +1048,9 @@ private static Exception extractAndWrapCause(Exception exception) { /** * A gzip compressing entity that also implements {@code getContent()}. 
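Taken together, the `RestClient` hunks above replace several 4.x idioms at once: the status line is now derived on demand, `EntityUtils.toString` declares `ParseException`, `HttpHost` takes its scheme first, and URIs must be encoded explicitly. A consolidated sketch of the 5.x call-site patterns, assuming only core5 on the classpath; names and values are illustrative:

```java
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.ParseException;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.message.StatusLine;
import org.apache.hc.core5.net.URIBuilder;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

public class Client5CallSites {

    // 5.x responses no longer carry a StatusLine object; build one on demand
    static String describe(ClassicHttpResponse response) throws IOException {
        int status = response.getCode();                  // replaces getStatusLine().getStatusCode()
        StatusLine statusLine = new StatusLine(response); // replaces getStatusLine()
        try {
            // core5 EntityUtils.toString declares ParseException; wrap it, as buildMessage above does
            String body = response.getEntity() == null ? "" : EntityUtils.toString(response.getEntity());
            return status + " " + statusLine + "\n" + body;
        } catch (ParseException ex) {
            throw new IOException(ex);
        }
    }

    static void hostsAndUris() throws URISyntaxException {
        // 5.x puts the scheme first; the 4.x form was new HttpHost("localhost", 9200, "https")
        HttpHost host = new HttpHost("https", "localhost", 9200);
        // 5.x URIBuilder does not encode; round-trip through toASCIIString() as buildUri does
        URI uri = new URI(new URIBuilder("/index/_doc/héllo").addParameter("routing", "a b").build().toASCIIString());
        System.out.println(host + " " + uri);
    }
}
```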
*/ - public static class ContentCompressingEntity extends GzipCompressingEntity { + public static class ContentCompressingEntity extends HttpEntityWrapper { + private static final String GZIP_CODEC = "gzip"; + private Optional chunkedEnabled; /** @@ -979,6 +1063,14 @@ public ContentCompressingEntity(HttpEntity entity) { this.chunkedEnabled = Optional.empty(); } + /** + * Returns content encoding of the entity, if known. + */ + @Override + public String getContentEncoding() { + return GZIP_CODEC; + } + /** * Creates a {@link ContentCompressingEntity} instance with the provided HTTP entity. * @@ -990,11 +1082,14 @@ public ContentCompressingEntity(HttpEntity entity, boolean chunkedEnabled) { this.chunkedEnabled = Optional.of(chunkedEnabled); } + /** + * Returns a content stream of the entity. + */ @Override public InputStream getContent() throws IOException { ByteArrayInputOutputStream out = new ByteArrayInputOutputStream(1024); try (GZIPOutputStream gzipOut = new GZIPOutputStream(out)) { - wrappedEntity.writeTo(gzipOut); + super.writeTo(gzipOut); } return out.asInput(); } @@ -1030,9 +1125,24 @@ public long getContentLength() { return size; } } else { - return super.getContentLength(); + return -1; } } + + /** + * Writes the entity content out to the output stream. + * @param outStream the output stream to write entity content to + * @throws IOException if an I/O error occurs + */ + @Override + public void writeTo(final OutputStream outStream) throws IOException { + Args.notNull(outStream, "Output stream"); + final GZIPOutputStream gzip = new GZIPOutputStream(outStream); + super.writeTo(gzip); + // Only close output stream if the wrapped entity has been + // successfully written out + gzip.close(); + } } /** diff --git a/client/rest/src/main/java/org/opensearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/opensearch/client/RestClientBuilder.java index 8841d371754c3..a01cf2f403099 100644 --- a/client/rest/src/main/java/org/opensearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/opensearch/client/RestClientBuilder.java @@ -32,15 +32,26 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClientBuilder; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.nio.conn.SchemeIOSessionStrategy; +import org.apache.hc.core5.function.Factory; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.nio.ssl.TlsStrategy; +import org.apache.hc.core5.reactor.ssl.TlsDetails; +import org.apache.hc.core5.util.Timeout; +import org.apache.hc.client5.http.async.HttpAsyncClient; +import org.apache.hc.client5.http.auth.CredentialsProvider; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.impl.classic.CloseableHttpClient; +import org.apache.hc.client5.http.impl.classic.HttpClientBuilder; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder; +import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder; +import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.impl.async.HttpAsyncClientBuilder; import javax.net.ssl.SSLContext; 
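The rewritten `ContentCompressingEntity` above compresses lazily in both `getContent()` and `writeTo()`, and reports an unknown content length when chunking is requested. A round-trip sketch showing the body surviving compression; it assumes the nested class is reachable from caller code, as it is declared public on `RestClient`:

```java
import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.opensearch.client.RestClient;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.zip.GZIPInputStream;

public class GzipEntityRoundTrip {
    public static void main(String[] args) throws Exception {
        HttpEntity raw = new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON);
        HttpEntity gzipped = new RestClient.ContentCompressingEntity(raw);

        // writeTo() wraps the target stream in a GZIPOutputStream, as in the hunk above
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        gzipped.writeTo(out);

        // decompress to confirm the original body survives the round trip
        try (InputStream in = new GZIPInputStream(new ByteArrayInputStream(out.toByteArray()))) {
            System.out.println(new String(in.readAllBytes(), StandardCharsets.UTF_8));
        }
    }
}
```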
+import javax.net.ssl.SSLEngine; + import java.security.AccessController; import java.security.NoSuchAlgorithmException; import java.security.PrivilegedAction; @@ -50,19 +61,19 @@ /** * Helps creating a new {@link RestClient}. Allows to set the most common http client configuration options when internally - * creating the underlying {@link org.apache.http.nio.client.HttpAsyncClient}. Also allows to provide an externally created - * {@link org.apache.http.nio.client.HttpAsyncClient} in case additional customization is needed. + * creating the underlying {@link HttpAsyncClient}. Also allows to provide an externally created + * {@link HttpAsyncClient} in case additional customization is needed. */ public final class RestClientBuilder { /** - * The default connection timout in milliseconds. + * The default connection timeout in milliseconds. */ public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 1000; /** - * The default socket timeout in milliseconds. + * The default response timeout in milliseconds. */ - public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 30000; + public static final int DEFAULT_RESPONSE_TIMEOUT_MILLIS = 30000; /** * The default maximum of connections per route. @@ -296,20 +307,35 @@ public RestClient build() { private CloseableHttpAsyncClient createHttpClient() { // default timeouts are all infinite RequestConfig.Builder requestConfigBuilder = RequestConfig.custom() - .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLIS) - .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS); + .setConnectTimeout(Timeout.ofMilliseconds(DEFAULT_CONNECT_TIMEOUT_MILLIS)) + .setResponseTimeout(Timeout.ofMilliseconds(DEFAULT_RESPONSE_TIMEOUT_MILLIS)); if (requestConfigCallback != null) { requestConfigBuilder = requestConfigCallback.customizeRequestConfig(requestConfigBuilder); } try { - HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create() - .setDefaultRequestConfig(requestConfigBuilder.build()) - // default settings for connection pooling may be too constraining + final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() + .setSslContext(SSLContext.getDefault()) + // See https://issues.apache.org/jira/browse/HTTPCLIENT-2219 + .setTlsDetailsFactory(new Factory() { + @Override + public TlsDetails create(final SSLEngine sslEngine) { + return new TlsDetails(sslEngine.getSession(), sslEngine.getApplicationProtocol()); + } + }) + .build(); + + final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE) .setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL) - .setSSLContext(SSLContext.getDefault()) - .setTargetAuthenticationStrategy(new PersistentCredentialsAuthenticationStrategy()); + .setTlsStrategy(tlsStrategy) + .build(); + + HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create() + .setDefaultRequestConfig(requestConfigBuilder.build()) + .setConnectionManager(connectionManager) + .setTargetAuthenticationStrategy(DefaultAuthenticationStrategy.INSTANCE) + .disableAutomaticRetries(); if (httpClientConfigCallback != null) { httpClientBuilder = httpClientConfigCallback.customizeHttpClient(httpClientBuilder); } @@ -344,9 +370,9 @@ public interface RequestConfigCallback { public interface HttpClientConfigCallback { /** * Allows to customize the {@link CloseableHttpAsyncClient} being created and used by the {@link RestClient}. 
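With the connection manager now assembled explicitly in `createHttpClient()`, an `HttpClientConfigCallback` that used to call `setSSLContext` directly has to build a `TlsStrategy` and connection manager itself. A sketch of such a callback, assuming an `SSLContext` is supplied by the caller; host and port are illustrative:

```java
import javax.net.ssl.SSLContext;

import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager;
import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder;
import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.nio.ssl.TlsStrategy;
import org.apache.hc.core5.reactor.ssl.TlsDetails;
import org.opensearch.client.RestClient;
import org.opensearch.client.RestClientBuilder;

public class TlsCustomizationExample {
    static RestClientBuilder secureBuilder(SSLContext sslContext) {
        return RestClient.builder(new HttpHost("https", "localhost", 9200))
            .setHttpClientConfigCallback(httpClientBuilder -> {
                TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create()
                    .setSslContext(sslContext)
                    // same HTTPCLIENT-2219 workaround as createHttpClient() above
                    .setTlsDetailsFactory(sslEngine -> new TlsDetails(sslEngine.getSession(), sslEngine.getApplicationProtocol()))
                    .build();
                PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create()
                    .setTlsStrategy(tlsStrategy)
                    .build();
                // replaces the default connection manager assembled in createHttpClient()
                return httpClientBuilder.setConnectionManager(connectionManager);
            });
    }
}
```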
- * Commonly used to customize the default {@link org.apache.http.client.CredentialsProvider} for authentication - * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default - * value that the {@link RestClientBuilder} internally sets, like connection pooling. + * Commonly used to customize the default {@link CredentialsProvider} for authentication for communication + * through TLS/SSL without losing any other useful default value that the {@link RestClientBuilder} internally + * sets, like connection pooling. * * @param httpClientBuilder the {@link HttpClientBuilder} for customizing the client instance. */ diff --git a/client/rest/src/main/java/org/opensearch/client/http/HttpUriRequestProducer.java b/client/rest/src/main/java/org/opensearch/client/http/HttpUriRequestProducer.java new file mode 100644 index 0000000000000..a65427cd0b032 --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/http/HttpUriRequestProducer.java @@ -0,0 +1,63 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.client.http; + +import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.nio.AsyncEntityProducer; +import org.apache.hc.core5.http.nio.support.BasicRequestProducer; +import org.apache.hc.core5.net.URIAuthority; +import org.apache.hc.core5.util.Args; +import org.opensearch.client.nio.HttpEntityAsyncEntityProducer; + +/** + * The producer of the {@link HttpUriRequestBase} instances associated with a particular {@link HttpHost} + */ +public class HttpUriRequestProducer extends BasicRequestProducer { + private final HttpUriRequestBase request; + + HttpUriRequestProducer(final HttpUriRequestBase request, final AsyncEntityProducer entityProducer) { + super(request, entityProducer); + this.request = request; + } + + /** + * Get the produced {@link HttpUriRequestBase} instance + * @return produced {@link HttpUriRequestBase} instance + */ + public HttpUriRequestBase getRequest() { + return request; + } + + /** + * Create new request producer for {@link HttpUriRequestBase} instance and {@link HttpHost} + * @param request {@link HttpUriRequestBase} instance + * @param host {@link HttpHost} instance + * @return new request producer + */ + public static HttpUriRequestProducer create(final HttpUriRequestBase request, final HttpHost host) { + Args.notNull(request, "Request"); + Args.notNull(host, "HttpHost"); + + // TODO: Should we copy request here instead of modifying in place? 
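+ // (modifying in place is visible to callers that retain the request instance; copying the
+ // request would avoid that, at the cost of an extra allocation per attempt)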
+ request.setAuthority(new URIAuthority(host)); + request.setScheme(host.getSchemeName()); + + final HttpEntity entity = request.getEntity(); + AsyncEntityProducer entityProducer = null; + + if (entity != null) { + entityProducer = new HttpEntityAsyncEntityProducer(entity); + } + + return new HttpUriRequestProducer(request, entityProducer); + } + +} diff --git a/client/rest/src/main/java/org/opensearch/client/http/package-info.java b/client/rest/src/main/java/org/opensearch/client/http/package-info.java new file mode 100644 index 0000000000000..32e0aa2016d53 --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/http/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * HTTP support classes for REST client. + */ +package org.opensearch.client.http; diff --git a/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java new file mode 100644 index 0000000000000..9bd17d1c24c7e --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncEntityConsumer.java @@ -0,0 +1,139 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. + */ + +package org.opensearch.client.nio; + +import org.apache.hc.core5.http.ContentTooLongException; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpException; +import org.apache.hc.core5.http.nio.AsyncEntityConsumer; +import org.apache.hc.core5.http.nio.entity.AbstractBinAsyncEntityConsumer; +import org.apache.hc.core5.util.ByteArrayBuffer; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.concurrent.atomic.AtomicReference; + +/** + * Default implementation of {@link AsyncEntityConsumer}. Buffers the whole + * response content in heap memory, meaning that the size of the buffer is equal to the content-length of the response. + * Limits the size of responses that can be read based on a configurable argument. Throws an exception in case the entity is longer + * than the configured buffer limit. + */ +public class HeapBufferedAsyncEntityConsumer extends AbstractBinAsyncEntityConsumer { + + private final int bufferLimitBytes; + private AtomicReference bufferRef = new AtomicReference<>(); + + /** + * Creates a new instance of this consumer with the provided buffer limit. + * + * @param bufferLimit the buffer limit. Must be greater than 0. + * @throws IllegalArgumentException if {@code bufferLimit} is less than or equal to 0. + */ + public HeapBufferedAsyncEntityConsumer(int bufferLimit) { + if (bufferLimit <= 0) { + throw new IllegalArgumentException("bufferLimit must be greater than 0"); + } + this.bufferLimitBytes = bufferLimit; + } + + /** + * Get the limit of the buffer. + */ + public int getBufferLimit() { + return bufferLimitBytes; + } + + /** + * Triggered to signal beginning of entity content stream. 
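A short usage sketch for the `HttpUriRequestProducer` factory defined above; the host, endpoint, and body are illustrative. Note that `create()` mutates the request's authority and scheme in place, which is what the TODO above is about:

```java
import org.apache.hc.client5.http.classic.methods.HttpPost;
import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase;
import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.io.entity.StringEntity;
import org.opensearch.client.http.HttpUriRequestProducer;

public class RequestProducerExample {
    public static void main(String[] args) {
        HttpHost host = new HttpHost("http", "localhost", 9200);
        HttpUriRequestBase request = new HttpPost("/index/_doc");
        request.setEntity(new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON));
        // create() copies authority and scheme from the host onto the request (in place),
        // and attaches an async entity producer because the request carries a body
        HttpUriRequestProducer producer = HttpUriRequestProducer.create(request, host);
        System.out.println(producer.getRequest().getRequestUri());
    }
}
```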
+ * + * @param contentType the entity content type + */ + @Override + protected void streamStart(final ContentType contentType) throws HttpException, IOException {} + + /** + * Triggered to obtain the capacity increment. + * + * @return the number of bytes this consumer is prepared to process. + */ + @Override + protected int capacityIncrement() { + return Integer.MAX_VALUE; + } + + /** + * Triggered to pass incoming data packet to the data consumer. + * + * @param src the data packet. + * @param endOfStream flag indicating whether this data packet is the last in the data stream. + * + */ + @Override + protected void data(final ByteBuffer src, final boolean endOfStream) throws IOException { + if (src == null) { + return; + } + + ByteArrayBuffer buffer = bufferRef.get(); + if (buffer == null) { + buffer = new ByteArrayBuffer(bufferLimitBytes); + if (bufferRef.compareAndSet(null, buffer) == false) { + buffer = bufferRef.get(); + } + } + + int len = src.limit(); + if (buffer.length() + len > bufferLimitBytes) { + throw new ContentTooLongException( + "entity content is too long [" + len + "] for the configured buffer limit [" + bufferLimitBytes + "]" + ); + } + + if (len < 0) { + len = 4096; + } + + if (src.hasArray()) { + buffer.append(src.array(), src.arrayOffset() + src.position(), src.remaining()); + } else { + while (src.hasRemaining()) { + buffer.append(src.get()); + } + } + } + + /** + * Triggered to generate entity representation. + * + * @return the entity content + */ + @Override + protected byte[] generateContent() throws IOException { + final ByteArrayBuffer buffer = bufferRef.get(); + return buffer == null ? new byte[0] : buffer.toByteArray(); + } + + /** + * Release resources being held + */ + @Override + public void releaseResources() { + ByteArrayBuffer buffer = bufferRef.getAndSet(null); + if (buffer != null) { + buffer.clear(); + buffer = null; + } + } +} diff --git a/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncResponseConsumer.java b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncResponseConsumer.java new file mode 100644 index 0000000000000..3d93478f49f99 --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/nio/HeapBufferedAsyncResponseConsumer.java @@ -0,0 +1,123 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
+ */ + +package org.opensearch.client.nio; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpException; +import org.apache.hc.core5.http.HttpHeaders; +import org.apache.hc.core5.http.HttpResponse; +import org.apache.hc.core5.http.io.entity.ByteArrayEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.http.nio.support.AbstractAsyncResponseConsumer; +import org.apache.hc.core5.http.protocol.HttpContext; + +import java.io.IOException; + +/** + * Default implementation of {@link AsyncResponseConsumer}. Buffers the whole + * response content in heap memory, meaning that the size of the buffer is equal to the content-length of the response. + * Limits the size of responses that can be read based on a configurable argument. Throws an exception in case the entity is longer + * than the configured buffer limit. + */ +public class HeapBufferedAsyncResponseConsumer extends AbstractAsyncResponseConsumer { + private static final Log LOGGER = LogFactory.getLog(HeapBufferedAsyncResponseConsumer.class); + private final int bufferLimit; + + /** + * Creates a new instance of this consumer with the provided buffer limit. + * + * @param bufferLimit the buffer limit. Must be greater than 0. + * @throws IllegalArgumentException if {@code bufferLimit} is less than or equal to 0. + */ + public HeapBufferedAsyncResponseConsumer(int bufferLimit) { + super(new HeapBufferedAsyncEntityConsumer(bufferLimit)); + this.bufferLimit = bufferLimit; + } + + /** + * Get the limit of the buffer. + */ + public int getBufferLimit() { + return bufferLimit; + } + + /** + * Triggered to signal receipt of an intermediate (1xx) HTTP response. + * + * @param response the intermediate (1xx) HTTP response. + * @param context the actual execution context. + */ + @Override + public void informationResponse(final HttpResponse response, final HttpContext context) throws HttpException, IOException {} + + /** + * Triggered to generate object that represents a result of response message processing. + * @param response the response message. + * @param entity the response entity. + * @param contentType the response content type. + * @return the result of response processing. 
+ */ + @Override + protected ClassicHttpResponse buildResult(final HttpResponse response, final byte[] entity, final ContentType contentType) { + final ClassicHttpResponse classicResponse = new BasicClassicHttpResponse(response.getCode()); + classicResponse.setVersion(response.getVersion()); + classicResponse.setHeaders(response.getHeaders()); + classicResponse.setReasonPhrase(response.getReasonPhrase()); + if (response.getLocale() != null) { + classicResponse.setLocale(response.getLocale()); + } + + if (entity != null) { + String encoding = null; + + try { + final Header contentEncoding = response.getHeader(HttpHeaders.CONTENT_ENCODING); + if (contentEncoding != null) { + encoding = contentEncoding.getValue(); + } + } catch (final HttpException ex) { + LOGGER.debug("Unable to detect content encoding", ex); + } + + final ByteArrayEntity httpEntity = new ByteArrayEntity(entity, contentType, encoding); + classicResponse.setEntity(httpEntity); + } + + return classicResponse; + } +} diff --git a/client/rest/src/main/java/org/opensearch/client/nio/HttpEntityAsyncEntityProducer.java b/client/rest/src/main/java/org/opensearch/client/nio/HttpEntityAsyncEntityProducer.java new file mode 100644 index 0000000000000..81fe77ddcfbed --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/nio/HttpEntityAsyncEntityProducer.java @@ -0,0 +1,182 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.client.nio; + +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.nio.AsyncEntityProducer; +import org.apache.hc.core5.http.nio.DataStreamChannel; +import org.apache.hc.core5.http.nio.ResourceHolder; +import org.apache.hc.core5.util.Args; +import org.apache.hc.core5.util.Asserts; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.channels.Channels; +import java.nio.channels.ReadableByteChannel; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + +/** + * The {@link AsyncEntityProducer} implementation for {@link HttpEntity} + */ +public class HttpEntityAsyncEntityProducer implements AsyncEntityProducer { + + private final HttpEntity entity; + private final ByteBuffer byteBuffer; + private final boolean chunked; + private final AtomicReference exception; + private final AtomicReference channelRef; + private boolean eof; + + /** + * Create new async HTTP entity producer + * @param entity HTTP entity + * @param bufferSize buffer size + */ + public HttpEntityAsyncEntityProducer(final HttpEntity entity, final int bufferSize) { + this.entity = Args.notNull(entity, "Http Entity"); + this.byteBuffer = ByteBuffer.allocate(bufferSize); + this.chunked = entity.isChunked(); + this.exception = new AtomicReference<>(); + this.channelRef = new AtomicReference<>(); + } + + /** + * Create new async HTTP entity producer with default buffer size (8192 bytes) + * @param entity HTTP entity + */ + public HttpEntityAsyncEntityProducer(final HttpEntity entity) { + this(entity, 8192); + } + + /** + * Determines whether the producer can consistently produce the same content + * after invocation of {@link ResourceHolder#releaseResources()}. + */ + @Override + public boolean isRepeatable() { + return entity.isRepeatable(); + } + + /** + * Returns content type of the entity, if known. 
+ */ + @Override + public String getContentType() { + return entity.getContentType(); + } + + /** + * Returns length of the entity, if known. + */ + @Override + public long getContentLength() { + return entity.getContentLength(); + } + + /** + * Returns the number of bytes immediately available for output. + * This method can be used as a hint to control output events + * of the underlying I/O session. + * + * @return the number of bytes immediately available for output + */ + @Override + public int available() { + return Integer.MAX_VALUE; + } + + /** + * Returns content encoding of the entity, if known. + */ + @Override + public String getContentEncoding() { + return entity.getContentEncoding(); + } + + /** + * Returns chunked transfer hint for this entity. + *
<p>
+ * The behavior of wrapping entities is implementation dependent, + * but should respect the primary purpose. + *
</p>
+ */ + @Override + public boolean isChunked() { + return chunked; + } + + /** + * Preliminary declaration of trailing headers. + */ + @Override + public Set getTrailerNames() { + return entity.getTrailerNames(); + } + + /** + * Triggered to signal the ability of the underlying data channel + * to accept more data. The data producer can choose to write data + * immediately inside the call or asynchronously at some later point. + * + * @param channel the data channel capable to accepting more data. + */ + @Override + public void produce(final DataStreamChannel channel) throws IOException { + ReadableByteChannel stream = channelRef.get(); + if (stream == null) { + stream = Channels.newChannel(entity.getContent()); + Asserts.check(channelRef.getAndSet(stream) == null, "Illegal producer state"); + } + if (!eof) { + final int bytesRead = stream.read(byteBuffer); + if (bytesRead < 0) { + eof = true; + } + } + if (byteBuffer.position() > 0) { + byteBuffer.flip(); + channel.write(byteBuffer); + byteBuffer.compact(); + } + if (eof && byteBuffer.position() == 0) { + channel.endStream(); + releaseResources(); + } + } + + /** + * Triggered to signal a failure in data generation. + * + * @param cause the cause of the failure. + */ + @Override + public void failed(final Exception cause) { + if (exception.compareAndSet(null, cause)) { + releaseResources(); + } + } + + /** + * Release resources being held + */ + @Override + public void releaseResources() { + eof = false; + final ReadableByteChannel stream = channelRef.getAndSet(null); + if (stream != null) { + try { + stream.close(); + } catch (final IOException ex) { + /* Close quietly */ + } + } + } + +} diff --git a/client/rest/src/main/java/org/opensearch/client/nio/package-info.java b/client/rest/src/main/java/org/opensearch/client/nio/package-info.java new file mode 100644 index 0000000000000..ce4961ed21f7c --- /dev/null +++ b/client/rest/src/main/java/org/opensearch/client/nio/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * NIO support classes for REST client. 
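The `produce()` implementation above is the classic NIO fill/flip/drain/compact cycle. A self-contained sketch of the same cycle against an in-memory channel, to make the buffer state transitions concrete; this is only the pattern, not the producer itself:

```java
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.ReadableByteChannel;

public class ProduceLoopSketch {
    public static void main(String[] args) throws IOException {
        ReadableByteChannel source = Channels.newChannel(new ByteArrayInputStream("hello entity".getBytes()));
        ByteBuffer buffer = ByteBuffer.allocate(5); // tiny buffer to force several iterations
        boolean eof = false;
        while (!eof || buffer.position() > 0) {
            if (!eof && source.read(buffer) < 0) {
                eof = true; // no more input; drain whatever the buffer still holds
            }
            buffer.flip(); // switch to draining mode, like produce() before channel.write()
            while (buffer.hasRemaining()) {
                System.out.print((char) buffer.get()); // stands in for channel.write(buffer)
            }
            buffer.compact(); // back to filling mode, preserving any unwritten bytes
        }
        source.close(); // produce() does this via releaseResources() at end of stream
    }
}
```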
+ */ +package org.opensearch.client.nio; diff --git a/client/rest/src/test/java/org/opensearch/client/FailureTrackingResponseListenerTests.java b/client/rest/src/test/java/org/opensearch/client/FailureTrackingResponseListenerTests.java index 0a997a586acc9..9722ec867a376 100644 --- a/client/rest/src/test/java/org/opensearch/client/FailureTrackingResponseListenerTests.java +++ b/client/rest/src/test/java/org/opensearch/client/FailureTrackingResponseListenerTests.java @@ -32,14 +32,11 @@ package org.opensearch.client; -import org.apache.http.HttpHost; -import org.apache.http.HttpResponse; -import org.apache.http.ProtocolVersion; -import org.apache.http.RequestLine; -import org.apache.http.StatusLine; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicRequestLine; -import org.apache.http.message.BasicStatusLine; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.ProtocolVersion; +import org.apache.hc.core5.http.message.RequestLine; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; import java.util.concurrent.atomic.AtomicReference; @@ -116,9 +113,8 @@ public void onFailure(Exception exception) { private static Response mockResponse() { ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); - RequestLine requestLine = new BasicRequestLine("GET", "/", protocolVersion); - StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); - HttpResponse httpResponse = new BasicHttpResponse(statusLine); + RequestLine requestLine = new RequestLine("GET", "/", protocolVersion); + ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(200, "OK"); return new Response(requestLine, new HttpHost("localhost", 9200), httpResponse); } } diff --git a/client/rest/src/test/java/org/opensearch/client/HasAttributeNodeSelectorTests.java b/client/rest/src/test/java/org/opensearch/client/HasAttributeNodeSelectorTests.java index fd18bba6ee548..b5aca86e95d6c 100644 --- a/client/rest/src/test/java/org/opensearch/client/HasAttributeNodeSelectorTests.java +++ b/client/rest/src/test/java/org/opensearch/client/HasAttributeNodeSelectorTests.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node.Roles; import java.util.ArrayList; diff --git a/client/rest/src/test/java/org/opensearch/client/HeapBufferedAsyncResponseConsumerTests.java b/client/rest/src/test/java/org/opensearch/client/HeapBufferedAsyncResponseConsumerTests.java index 22852fe4cb793..ed329d973eb78 100644 --- a/client/rest/src/test/java/org/opensearch/client/HeapBufferedAsyncResponseConsumerTests.java +++ b/client/rest/src/test/java/org/opensearch/client/HeapBufferedAsyncResponseConsumerTests.java @@ -32,34 +32,31 @@ package org.opensearch.client; -import org.apache.http.ContentTooLongException; -import org.apache.http.HttpEntity; -import org.apache.http.HttpResponse; -import org.apache.http.ProtocolVersion; -import org.apache.http.StatusLine; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.nio.ContentDecoder; -import org.apache.http.nio.IOControl; -import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; -import org.apache.http.protocol.HttpContext; - +import org.apache.hc.core5.http.ClassicHttpResponse; +import 
org.apache.hc.core5.http.ContentTooLongException; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.EntityDetails; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.impl.BasicEntityDetails; +import org.apache.hc.core5.http.io.entity.AbstractHttpEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.http.protocol.HttpContext; +import org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Modifier; +import java.nio.charset.StandardCharsets; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertSame; import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { @@ -67,33 +64,6 @@ public class HeapBufferedAsyncResponseConsumerTests extends RestClientTestCase { private static final int MAX_TEST_BUFFER_SIZE = 50 * 1024 * 1024; private static final int TEST_BUFFER_LIMIT = 10 * 1024 * 1024; - public void testResponseProcessing() throws Exception { - ContentDecoder contentDecoder = mock(ContentDecoder.class); - IOControl ioControl = mock(IOControl.class); - HttpContext httpContext = mock(HttpContext.class); - - HeapBufferedAsyncResponseConsumer consumer = spy(new HeapBufferedAsyncResponseConsumer(TEST_BUFFER_LIMIT)); - - ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); - StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); - HttpResponse httpResponse = new BasicHttpResponse(statusLine); - httpResponse.setEntity(new StringEntity("test", ContentType.TEXT_PLAIN)); - - // everything goes well - consumer.responseReceived(httpResponse); - consumer.consumeContent(contentDecoder, ioControl); - consumer.responseCompleted(httpContext); - - verify(consumer).releaseResources(); - verify(consumer).buildResult(httpContext); - assertTrue(consumer.isDone()); - assertSame(httpResponse, consumer.getResult()); - - consumer.responseCompleted(httpContext); - verify(consumer, times(1)).releaseResources(); - verify(consumer, times(1)).buildResult(httpContext); - } - public void testDefaultBufferLimit() throws Exception { HeapBufferedAsyncResponseConsumer consumer = new HeapBufferedAsyncResponseConsumer(TEST_BUFFER_LIMIT); bufferLimitTest(consumer, TEST_BUFFER_LIMIT); @@ -127,7 +97,7 @@ public void testCanConfigureHeapBufferLimitFromOutsidePackage() throws ClassNotF assertThat(object, instanceOf(HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory.class)); HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory consumerFactory = (HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory) object; - HttpAsyncResponseConsumer consumer = consumerFactory.createHttpAsyncResponseConsumer(); + AsyncResponseConsumer consumer = consumerFactory.createHttpAsyncResponseConsumer(); assertThat(consumer, instanceOf(HeapBufferedAsyncResponseConsumer.class)); 
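Outside of reflection-based tests like the one above, the same wiring can be exercised directly. A sketch, assuming the factory's return type is `AsyncResponseConsumer<ClassicHttpResponse>` as the surrounding code indicates; the buffer size is illustrative:

```java
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.nio.AsyncResponseConsumer;
import org.opensearch.client.HttpAsyncResponseConsumerFactory;
import org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory;
import org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer;

public class FactoryWiringSketch {
    public static void main(String[] args) {
        HttpAsyncResponseConsumerFactory factory = new HeapBufferedResponseConsumerFactory(8 * 1024);
        AsyncResponseConsumer<ClassicHttpResponse> consumer = factory.createHttpAsyncResponseConsumer();
        // the factory hands back the heap-buffered consumer introduced earlier in this patch
        System.out.println(((HeapBufferedAsyncResponseConsumer) consumer).getBufferLimit()); // prints 8192
    }
}
```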
HeapBufferedAsyncResponseConsumer bufferedAsyncResponseConsumer = (HeapBufferedAsyncResponseConsumer) consumer; assertEquals(bufferLimit, bufferedAsyncResponseConsumer.getBufferLimit()); @@ -138,23 +108,40 @@ public void testHttpAsyncResponseConsumerFactoryVisibility() throws ClassNotFoun } private static void bufferLimitTest(HeapBufferedAsyncResponseConsumer consumer, int bufferLimit) throws Exception { - ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); - StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, "OK"); - consumer.onResponseReceived(new BasicHttpResponse(statusLine)); + HttpContext httpContext = mock(HttpContext.class); + + BasicClassicHttpResponse response = new BasicClassicHttpResponse(200, "OK"); + consumer.consumeResponse(response, null, httpContext, null); final AtomicReference contentLength = new AtomicReference<>(); - HttpEntity entity = new StringEntity("", ContentType.APPLICATION_JSON) { + HttpEntity entity = new AbstractHttpEntity(ContentType.APPLICATION_JSON, null, false) { @Override public long getContentLength() { return contentLength.get(); } + + @Override + public InputStream getContent() throws IOException, UnsupportedOperationException { + return new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)); + } + + @Override + public boolean isStreaming() { + return false; + } + + @Override + public void close() throws IOException {} }; contentLength.set(randomLongBetween(0L, bufferLimit)); - consumer.onEntityEnclosed(entity, ContentType.APPLICATION_JSON); + response.setEntity(entity); + + final EntityDetails details = new BasicEntityDetails(4096, ContentType.APPLICATION_JSON); + consumer.consumeResponse(response, details, httpContext, null); contentLength.set(randomLongBetween(bufferLimit + 1, MAX_TEST_BUFFER_SIZE)); try { - consumer.onEntityEnclosed(entity, ContentType.APPLICATION_JSON); + consumer.consumeResponse(response, details, httpContext, null); } catch (ContentTooLongException e) { assertEquals( "entity content is too long [" + entity.getContentLength() + "] for the configured buffer limit [" + bufferLimit + "]", diff --git a/client/rest/src/test/java/org/opensearch/client/HostsTrackingFailureListener.java b/client/rest/src/test/java/org/opensearch/client/HostsTrackingFailureListener.java index 2b256e7205397..0e454c6f919f5 100644 --- a/client/rest/src/test/java/org/opensearch/client/HostsTrackingFailureListener.java +++ b/client/rest/src/test/java/org/opensearch/client/HostsTrackingFailureListener.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import java.util.HashSet; import java.util.List; diff --git a/client/rest/src/test/java/org/opensearch/client/NodeSelectorTests.java b/client/rest/src/test/java/org/opensearch/client/NodeSelectorTests.java index 65a831e59bfb0..cfc95f0281bcc 100644 --- a/client/rest/src/test/java/org/opensearch/client/NodeSelectorTests.java +++ b/client/rest/src/test/java/org/opensearch/client/NodeSelectorTests.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node.Roles; import java.util.ArrayList; diff --git a/client/rest/src/test/java/org/opensearch/client/NodeTests.java b/client/rest/src/test/java/org/opensearch/client/NodeTests.java index 352296fa3024a..748bec5fb7de5 100644 --- a/client/rest/src/test/java/org/opensearch/client/NodeTests.java +++ 
b/client/rest/src/test/java/org/opensearch/client/NodeTests.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node.Roles; import java.util.Arrays; @@ -48,7 +48,9 @@ import static java.util.Collections.singletonMap; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; +import static org.hamcrest.CoreMatchers.equalTo; public class NodeTests extends RestClientTestCase { public void testToString() { @@ -161,4 +163,9 @@ public void testEqualsAndHashCode() { ) ); } + + public void testIsSearchNode() { + Roles searchRole = new Roles(Collections.singleton("search")); + assertThat(searchRole.isSearch(), equalTo(true)); + } } diff --git a/client/rest/src/test/java/org/opensearch/client/PreferHasAttributeNodeSelectorTests.java b/client/rest/src/test/java/org/opensearch/client/PreferHasAttributeNodeSelectorTests.java index 0135cde573743..7dde1b96b3b45 100644 --- a/client/rest/src/test/java/org/opensearch/client/PreferHasAttributeNodeSelectorTests.java +++ b/client/rest/src/test/java/org/opensearch/client/PreferHasAttributeNodeSelectorTests.java @@ -32,7 +32,7 @@ package org.opensearch.client; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node.Roles; import java.util.ArrayList; diff --git a/client/rest/src/test/java/org/opensearch/client/RequestLoggerTests.java b/client/rest/src/test/java/org/opensearch/client/RequestLoggerTests.java index 3c317db1b72d9..8dea2ad922bd6 100644 --- a/client/rest/src/test/java/org/opensearch/client/RequestLoggerTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RequestLoggerTests.java @@ -32,27 +32,29 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.HttpEntityEnclosingRequest; -import org.apache.http.HttpHost; -import org.apache.http.ProtocolVersion; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpOptions; -import org.apache.http.client.methods.HttpPatch; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.client.methods.HttpTrace; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.InputStreamEntity; -import org.apache.http.entity.StringEntity; -import org.apache.http.message.BasicHeader; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.nio.entity.NByteArrayEntity; -import org.apache.http.nio.entity.NStringEntity; -import org.apache.http.util.EntityUtils; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import org.apache.hc.client5.http.classic.methods.HttpOptions; +import org.apache.hc.client5.http.classic.methods.HttpPatch; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpTrace; +import org.apache.hc.client5.http.classic.methods.HttpUriRequest; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ContentType; +import 
org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.ProtocolVersion; +import org.apache.hc.core5.http.io.entity.ByteArrayEntity; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.InputStreamEntity; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.message.BasicHeader; +import org.apache.hc.core5.http.message.StatusLine; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -66,8 +68,8 @@ import static org.junit.Assert.assertThat; public class RequestLoggerTests extends RestClientTestCase { - public void testTraceRequest() throws IOException, URISyntaxException { - HttpHost host = new HttpHost("localhost", 9200, randomBoolean() ? "http" : "https"); + public void testTraceRequest() throws IOException, URISyntaxException, ParseException { + HttpHost host = new HttpHost(randomBoolean() ? "http" : "https", "localhost", 9200); String expectedEndpoint = "/index/type/_api"; URI uri; if (randomBoolean()) { @@ -77,11 +79,10 @@ public void testTraceRequest() throws IOException, URISyntaxException { } HttpUriRequest request = randomHttpRequest(uri); String expected = "curl -iX " + request.getMethod() + " '" + host + expectedEndpoint + "'"; - boolean hasBody = request instanceof HttpEntityEnclosingRequest && randomBoolean(); + boolean hasBody = !(request instanceof HttpTrace) && randomBoolean(); String requestBody = "{ \"field\": \"value\" }"; if (hasBody) { expected += " -d '" + requestBody + "'"; - HttpEntityEnclosingRequest enclosingRequest = (HttpEntityEnclosingRequest) request; HttpEntity entity; switch (randomIntBetween(0, 4)) { case 0: @@ -94,10 +95,10 @@ public void testTraceRequest() throws IOException, URISyntaxException { ); break; case 2: - entity = new NStringEntity(requestBody, ContentType.APPLICATION_JSON); + entity = new StringEntity(requestBody, ContentType.APPLICATION_JSON); break; case 3: - entity = new NByteArrayEntity(requestBody.getBytes(StandardCharsets.UTF_8), ContentType.APPLICATION_JSON); + entity = new ByteArrayEntity(requestBody.getBytes(StandardCharsets.UTF_8), ContentType.APPLICATION_JSON); break; case 4: // Evil entity without a charset @@ -106,24 +107,24 @@ public void testTraceRequest() throws IOException, URISyntaxException { default: throw new UnsupportedOperationException(); } - enclosingRequest.setEntity(entity); + request.setEntity(entity); } String traceRequest = RequestLogger.buildTraceRequest(request, host); assertThat(traceRequest, equalTo(expected)); if (hasBody) { // check that the body is still readable as most entities are not repeatable - String body = EntityUtils.toString(((HttpEntityEnclosingRequest) request).getEntity(), StandardCharsets.UTF_8); + String body = EntityUtils.toString(request.getEntity(), StandardCharsets.UTF_8); assertThat(body, equalTo(requestBody)); } } - public void testTraceResponse() throws IOException { + public void testTraceResponse() throws IOException, ParseException { ProtocolVersion protocolVersion = new ProtocolVersion("HTTP", 1, 1); int statusCode = randomIntBetween(200, 599); String reasonPhrase = "REASON"; - BasicStatusLine statusLine = new BasicStatusLine(protocolVersion, statusCode, reasonPhrase); + StatusLine statusLine = new StatusLine(protocolVersion, statusCode, reasonPhrase); String expected = "# " + 
statusLine.toString(); - BasicHttpResponse httpResponse = new BasicHttpResponse(statusLine); + ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(statusCode, reasonPhrase); int numHeaders = randomIntBetween(0, 3); for (int i = 0; i < numHeaders; i++) { httpResponse.setHeader("header" + i, "value"); @@ -192,13 +193,13 @@ private static HttpUriRequest randomHttpRequest(URI uri) { int requestType = randomIntBetween(0, 7); switch (requestType) { case 0: - return new HttpGetWithEntity(uri); + return new HttpGet(uri); case 1: return new HttpPost(uri); case 2: return new HttpPut(uri); case 3: - return new HttpDeleteWithEntity(uri); + return new HttpDelete(uri); case 4: return new HttpHead(uri); case 5: diff --git a/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java b/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java index aaa40db1442ee..a7f9a48c73393 100644 --- a/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RequestOptionsTests.java @@ -32,8 +32,9 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.client.config.RequestConfig; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.util.Timeout; import org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; import java.util.ArrayList; @@ -108,15 +109,15 @@ public void testSetRequestBuilder() { RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); RequestConfig.Builder requestConfigBuilder = RequestConfig.custom(); - int socketTimeout = 10000; - int connectTimeout = 100; - requestConfigBuilder.setSocketTimeout(socketTimeout).setConnectTimeout(connectTimeout); + Timeout responseTimeout = Timeout.ofMilliseconds(10000); + Timeout connectTimeout = Timeout.ofMilliseconds(100); + requestConfigBuilder.setResponseTimeout(responseTimeout).setConnectTimeout(connectTimeout); RequestConfig requestConfig = requestConfigBuilder.build(); builder.setRequestConfig(requestConfig); RequestOptions options = builder.build(); assertSame(options.getRequestConfig(), requestConfig); - assertEquals(options.getRequestConfig().getSocketTimeout(), socketTimeout); + assertEquals(options.getRequestConfig().getResponseTimeout(), responseTimeout); assertEquals(options.getRequestConfig().getConnectTimeout(), connectTimeout); } diff --git a/client/rest/src/test/java/org/opensearch/client/RequestTests.java b/client/rest/src/test/java/org/opensearch/client/RequestTests.java index ba15c0d0b733c..d11982e9f9642 100644 --- a/client/rest/src/test/java/org/opensearch/client/RequestTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RequestTests.java @@ -32,15 +32,17 @@ package org.opensearch.client; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ByteArrayEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.nio.entity.NStringEntity; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.io.entity.ByteArrayEntity; +import org.apache.hc.core5.http.io.entity.InputStreamEntity; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.opensearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; +import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; 
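The RequestOptionsTests hunk above is the template for every timeout change in this migration: HttpClient 5 drops the int-millisecond setters, replaces setSocketTimeout with setResponseTimeout, and wraps values in the typed Timeout class. A minimal sketch of the 5.x builder, with illustrative values:

    RequestConfig requestConfig = RequestConfig.custom()
        .setConnectTimeout(Timeout.ofMilliseconds(100))     // 4.x: setConnectTimeout(100)
        .setResponseTimeout(Timeout.ofMilliseconds(10000))  // 4.x: setSocketTimeout(10000)
        .build();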
+import java.nio.charset.StandardCharsets; import java.util.HashMap; import java.util.Map; @@ -133,7 +135,7 @@ public void testSetJsonEntity() throws IOException { final String json = randomAsciiLettersOfLengthBetween(1, 100); request.setJsonEntity(json); - assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType().getValue()); + assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType()); ByteArrayOutputStream os = new ByteArrayOutputStream(); request.getEntity().writeTo(os); assertEquals(json, new String(os.toByteArray(), ContentType.APPLICATION_JSON.getCharset())); @@ -201,7 +203,10 @@ private static Request randomRequest() { randomFrom( new HttpEntity[] { new StringEntity(randomAsciiAlphanumOfLength(10), ContentType.APPLICATION_JSON), - new NStringEntity(randomAsciiAlphanumOfLength(10), ContentType.APPLICATION_JSON), + new InputStreamEntity( + new ByteArrayInputStream(randomAsciiAlphanumOfLength(10).getBytes(StandardCharsets.UTF_8)), + ContentType.APPLICATION_JSON + ), new ByteArrayEntity(randomBytesOfLength(40), ContentType.APPLICATION_JSON) } ) ); diff --git a/client/rest/src/test/java/org/opensearch/client/ResponseExceptionTests.java b/client/rest/src/test/java/org/opensearch/client/ResponseExceptionTests.java index 8ecd3e1a29c99..dfbf105637962 100644 --- a/client/rest/src/test/java/org/opensearch/client/ResponseExceptionTests.java +++ b/client/rest/src/test/java/org/opensearch/client/ResponseExceptionTests.java @@ -32,19 +32,17 @@ package org.opensearch.client; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; -import org.apache.http.HttpResponse; -import org.apache.http.ProtocolVersion; -import org.apache.http.RequestLine; -import org.apache.http.StatusLine; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.InputStreamEntity; -import org.apache.http.entity.StringEntity; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicRequestLine; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.util.EntityUtils; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.ProtocolVersion; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.InputStreamEntity; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.message.RequestLine; import java.io.ByteArrayInputStream; import java.io.IOException; @@ -57,10 +55,9 @@ public class ResponseExceptionTests extends RestClientTestCase { - public void testResponseException() throws IOException { + public void testResponseException() throws IOException, ParseException { ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1); - StatusLine statusLine = new BasicStatusLine(protocolVersion, 500, "Internal Server Error"); - HttpResponse httpResponse = new BasicHttpResponse(statusLine); + ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(500, "Internal Server Error"); String responseBody = "{\"error\":{\"root_cause\": {}}}"; boolean hasBody = getRandom().nextBoolean(); @@ -78,7 +75,7 @@ public void testResponseException() throws IOException { httpResponse.setEntity(entity); } - RequestLine requestLine = new 
BasicRequestLine("GET", "/", protocolVersion); + RequestLine requestLine = new RequestLine("GET", "/", protocolVersion); HttpHost httpHost = new HttpHost("localhost", 9200); Response response = new Response(requestLine, httpHost, httpResponse); ResponseException responseException = new ResponseException(response); diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java index 10bf9568c8798..f5e1735042e66 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderIntegTests.java @@ -36,7 +36,8 @@ import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpsConfigurator; import com.sun.net.httpserver.HttpsServer; -import org.apache.http.HttpHost; + +import org.apache.hc.core5.http.HttpHost; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -117,7 +118,7 @@ public void testBuilderUsesDefaultSSLContext() throws Exception { private RestClient buildRestClient() { InetSocketAddress address = httpsServer.getAddress(); - return RestClient.builder(new HttpHost(address.getHostString(), address.getPort(), "https")).build(); + return RestClient.builder(new HttpHost("https", address.getHostString(), address.getPort())).build(); } private static SSLContext getSslContext() throws Exception { diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderTests.java index ac81cd1132a2f..7165174e688e1 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientBuilderTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientBuilderTests.java @@ -32,11 +32,12 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.message.BasicHeader; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.impl.async.HttpAsyncClientBuilder; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.message.BasicHeader; +import org.apache.hc.core5.util.Timeout; import java.io.IOException; import java.util.Base64; @@ -271,7 +272,7 @@ public RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder reques RequestConfig requestConfig = requestConfigBuilder.build(); assertEquals(RequestConfig.DEFAULT.getConnectionRequestTimeout(), requestConfig.getConnectionRequestTimeout()); // this way we get notified if the default ever changes - assertEquals(-1, requestConfig.getConnectionRequestTimeout()); + assertEquals(Timeout.ofMinutes(3), requestConfig.getConnectionRequestTimeout()); return requestConfigBuilder; } }); diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientCompressionTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientCompressionTests.java index e8b7742044f67..bf2c19b8127a1 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientCompressionTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientCompressionTests.java @@ -11,10 +11,11 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import org.apache.http.HttpEntity; 
-import org.apache.http.HttpHost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; + +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -108,7 +109,7 @@ private static byte[] readAll(InputStream in) throws IOException { private RestClient createClient(boolean enableCompression, boolean chunkedEnabled) { InetSocketAddress address = httpServer.getAddress(); - return RestClient.builder(new HttpHost(address.getHostString(), address.getPort(), "http")) + return RestClient.builder(new HttpHost("http", address.getHostString(), address.getPort())) .setCompressionEnabled(enableCompression) .setChunkedEnabled(chunkedEnabled) .build(); diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java index 8c4d993517fee..fdcb65ff101c9 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientGzipCompressionTests.java @@ -35,10 +35,11 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; + +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -126,7 +127,7 @@ private static byte[] readAll(InputStream in) throws IOException { private RestClient createClient(boolean enableCompression) { InetSocketAddress address = httpServer.getAddress(); - return RestClient.builder(new HttpHost(address.getHostString(), address.getPort(), "http")) + return RestClient.builder(new HttpHost("http", address.getHostString(), address.getPort())) .setCompressionEnabled(enableCompression) .build(); } @@ -184,7 +185,7 @@ public void testCompressingClientSync() throws Exception { public void testCompressingClientAsync() throws Exception { InetSocketAddress address = httpServer.getAddress(); - RestClient restClient = RestClient.builder(new HttpHost(address.getHostString(), address.getPort(), "http")) + RestClient restClient = RestClient.builder(new HttpHost("http", address.getHostString(), address.getPort())) .setCompressionEnabled(true) .build(); diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java index 277446191a36e..8c62533072c70 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsIntegTests.java @@ -35,7 +35,8 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import org.apache.http.HttpHost; + +import org.apache.hc.core5.http.HttpHost; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -56,6 +57,7 @@ import static 
org.opensearch.client.RestClientTestUtil.getAllStatusCodes; import static org.opensearch.client.RestClientTestUtil.randomErrorNoRetryStatusCode; import static org.opensearch.client.RestClientTestUtil.randomOkStatusCode; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; @@ -63,7 +65,7 @@ import static org.junit.Assert.fail; /** - * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. + * Integration test to check interaction between {@link RestClient} and {@link org.apache.hc.client5.http.classic.HttpClient}. * Works against real http servers, multiple hosts. Also tests failover by randomly shutting down hosts. */ public class RestClientMultipleHostsIntegTests extends RestClientTestCase { @@ -299,7 +301,7 @@ public void testNodeSelector() throws Exception { } catch (ConnectException e) { // Windows isn't consistent here. Sometimes the message is even null! if (false == System.getProperty("os.name").startsWith("Windows")) { - assertEquals("Connection refused", e.getMessage()); + assertThat(e.getMessage(), containsString("Connection refused")); } } } else { diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java index d88d4f4afd9b1..62574e5ed6d5a 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientMultipleHostsTests.java @@ -33,9 +33,10 @@ package org.opensearch.client; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; + +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; import org.junit.After; import java.io.IOException; diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java index 0500d282a506d..beee1c5ca21a0 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostIntegTests.java @@ -36,30 +36,34 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import org.apache.http.Consts; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.HttpResponse; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpRequestBase; -import org.apache.http.entity.ContentType; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.client.TargetAuthenticationStrategy; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; -import org.apache.http.util.EntityUtils; + +import org.apache.hc.client5.http.auth.AuthScope; +import 
org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.classic.methods.HttpUriRequestBase; +import org.apache.hc.client5.http.impl.DefaultAuthenticationStrategy; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.impl.async.HttpAsyncClientBuilder; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicHeader; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.net.URIBuilder; import org.junit.After; import org.junit.Before; +import org.opensearch.client.http.HttpUriRequestProducer; +import org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.HashSet; import java.util.List; @@ -86,7 +90,7 @@ import static org.junit.Assert.fail; /** - * Integration test to check interaction between {@link RestClient} and {@link org.apache.http.client.HttpClient}. + * Integration test to check interaction between {@link RestClient} and {@link org.apache.hc.client5.http.classic.HttpClient}. * Works against a real http server, one single host. */ public class RestClientSingleHostIntegTests extends RestClientTestCase { @@ -147,7 +151,7 @@ private static class ResponseHandler implements HttpHandler { public void handle(HttpExchange httpExchange) throws IOException { // copy request body to response body so we can verify it was sent StringBuilder body = new StringBuilder(); - try (InputStreamReader reader = new InputStreamReader(httpExchange.getRequestBody(), Consts.UTF_8)) { + try (InputStreamReader reader = new InputStreamReader(httpExchange.getRequestBody(), StandardCharsets.UTF_8)) { char[] buffer = new char[256]; int read; while ((read = reader.read(buffer)) != -1) { @@ -164,7 +168,7 @@ public void handle(HttpExchange httpExchange) throws IOException { httpExchange.sendResponseHeaders(statusCode, body.length() == 0 ? 
-1 : body.length()); if (body.length() > 0) { try (OutputStream out = httpExchange.getResponseBody()) { - out.write(body.toString().getBytes(Consts.UTF_8)); + out.write(body.toString().getBytes(StandardCharsets.UTF_8)); } } httpExchange.close(); @@ -172,18 +176,20 @@ public void handle(HttpExchange httpExchange) throws IOException { } private RestClient createRestClient(final boolean useAuth, final boolean usePreemptiveAuth) { - // provide the username/password for every request - final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials("user", "pass")); - - final RestClientBuilder restClientBuilder = RestClient.builder( - new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()) - ).setDefaultHeaders(defaultHeaders); + final HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); + final RestClientBuilder restClientBuilder = RestClient.builder(httpHost).setDefaultHeaders(defaultHeaders); if (pathPrefix.length() > 0) { restClientBuilder.setPathPrefix(pathPrefix); } if (useAuth) { + // provide the username/password for every request + final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials( + new AuthScope(httpHost, null, "Basic"), + new UsernamePasswordCredentials("user", "pass".toCharArray()) + ); + restClientBuilder.setHttpClientConfigCallback(new RestClientBuilder.HttpClientConfigCallback() { @Override public HttpAsyncClientBuilder customizeHttpClient(final HttpAsyncClientBuilder httpClientBuilder) { @@ -191,7 +197,7 @@ public HttpAsyncClientBuilder customizeHttpClient(final HttpAsyncClientBuilder h // disable preemptive auth by ignoring any authcache httpClientBuilder.disableAuthCaching(); // don't use the "persistent credentials strategy" - httpClientBuilder.setTargetAuthenticationStrategy(new TargetAuthenticationStrategy()); + httpClientBuilder.setTargetAuthenticationStrategy(DefaultAuthenticationStrategy.INSTANCE); } return httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider); @@ -220,7 +226,7 @@ public void testManyAsyncRequests() throws Exception { final List exceptions = new CopyOnWriteArrayList<>(); for (int i = 0; i < iters; i++) { Request request = new Request("PUT", "/200"); - request.setEntity(new NStringEntity("{}", ContentType.APPLICATION_JSON)); + request.setEntity(new StringEntity("{}", ContentType.APPLICATION_JSON)); restClient.performRequestAsync(request, new ResponseListener() { @Override public void onSuccess(Response response) { @@ -271,7 +277,7 @@ public void onFailure(Exception exception) { /** * This test verifies some assumptions that we rely upon around the way the async http client works when reusing the same request - * throughout multiple retries, and the use of the {@link HttpRequestBase#abort()} method. + * throughout multiple retries, and the use of the {@link HttpUriRequestBase#abort()} method. * In fact the low-level REST client reuses the same request instance throughout multiple retries, and relies on the http client * to set the future ref to the request properly so that when abort is called, the proper future gets cancelled. 
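 * Under HttpClient 5 that link is explicit rather than managed by the client: as the hunks below
 * show, the caller hands the returned future back to the request. A minimal sketch of the pattern
 * (the producer and consumer helpers are assumed to be the ones defined at the end of this class):
 *   Future<ClassicHttpResponse> future = client.execute(producer, consumer, null);
 *   httpGet.setDependency((Cancellable) future); // abort() can now cancel the in-flight future
 *   httpGet.abort();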
*/ @@ -279,7 +285,10 @@ public void testRequestResetAndAbort() throws Exception { try (CloseableHttpAsyncClient client = HttpAsyncClientBuilder.create().build()) { client.start(); HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); - HttpGet httpGet = new HttpGet(pathPrefix + "/200"); + HttpUriRequestBase httpGet = new HttpUriRequestBase( + "GET", + new URIBuilder().setHttpHost(httpHost).setPath(pathPrefix + "/200").build() + ); // calling abort before the request is sent is a no-op httpGet.abort(); @@ -288,8 +297,11 @@ public void testRequestResetAndAbort() throws Exception { { httpGet.reset(); assertFalse(httpGet.isAborted()); + + Future future = client.execute(getRequestProducer(httpGet, httpHost), getResponseConsumer(), null); + httpGet.setDependency((org.apache.hc.core5.concurrent.Cancellable) future); httpGet.abort(); - Future future = client.execute(httpHost, httpGet, null); + try { future.get(); fail("expected cancellation exception"); @@ -300,8 +312,9 @@ public void testRequestResetAndAbort() throws Exception { } { httpGet.reset(); - Future future = client.execute(httpHost, httpGet, null); + Future future = client.execute(getRequestProducer(httpGet, httpHost), getResponseConsumer(), null); assertFalse(httpGet.isAborted()); + httpGet.setDependency((org.apache.hc.core5.concurrent.Cancellable) future); httpGet.abort(); assertTrue(httpGet.isAborted()); try { @@ -315,9 +328,9 @@ public void testRequestResetAndAbort() throws Exception { { httpGet.reset(); assertFalse(httpGet.isAborted()); - Future future = client.execute(httpHost, httpGet, null); + Future future = client.execute(getRequestProducer(httpGet, httpHost), getResponseConsumer(), null); assertFalse(httpGet.isAborted()); - assertEquals(200, future.get().getStatusLine().getStatusCode()); + assertEquals(200, future.get().getCode()); assertFalse(future.isCancelled()); } } @@ -325,7 +338,7 @@ public void testRequestResetAndAbort() throws Exception { /** * End to end test for headers. We test it explicitly against a real http client as there are different ways - * to set/add headers to the {@link org.apache.http.client.HttpClient}. + * to set/add headers to the {@link org.apache.hc.client5.http.classic.HttpClient}. * Exercises the test http server ability to send back whatever headers it received. */ public void testHeaders() throws Exception { @@ -365,7 +378,7 @@ public void testHeaders() throws Exception { /** * End to end test for delete with body. We test it explicitly as it is not supported - * out of the box by {@link org.apache.http.client.HttpClient}. + * out of the box by {@link org.apache.hc.client5.http.classic.HttpClient}. * Exercises the test http server ability to send back whatever body it received. */ public void testDeleteWithBody() throws Exception { @@ -374,7 +387,7 @@ public void testDeleteWithBody() throws Exception { /** * End to end test for get with body. We test it explicitly as it is not supported - * out of the box by {@link org.apache.http.client.HttpClient}. + * out of the box by {@link org.apache.hc.client5.http.classic.HttpClient}. * Exercises the test http server ability to send back whatever body it received. 
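 * Under HttpClient 5 no custom request classes are needed for this: every classic request extends
 * HttpUriRequestBase, which carries an entity directly, so the 4.x-era HttpGetWithEntity and
 * HttpDeleteWithEntity wrappers drop out of these tests. Sketch, with illustrative endpoint and body:
 *   HttpGet get = new HttpGet("/index/_search");
 *   get.setEntity(new StringEntity("{\"query\":{}}", ContentType.APPLICATION_JSON));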
*/ public void testGetWithBody() throws Exception { @@ -410,7 +423,7 @@ public void testEncodeParams() throws Exception { Request request = new Request("PUT", "/200"); request.addParameter("routing", "foo bar"); Response response = RestClientSingleHostTests.performRequestSyncOrAsync(restClient, request); - assertEquals(pathPrefix + "/200?routing=foo+bar", response.getRequestLine().getUri()); + assertEquals(pathPrefix + "/200?routing=foo%20bar", response.getRequestLine().getUri()); } { Request request = new Request("PUT", "/200"); @@ -540,4 +553,13 @@ private Response bodyTest(RestClient restClient, String method, int statusCode, return esResponse; } + + private AsyncResponseConsumer getResponseConsumer() { + return new HeapBufferedAsyncResponseConsumer(1024); + } + + private HttpUriRequestProducer getRequestProducer(HttpUriRequestBase request, HttpHost host) { + return HttpUriRequestProducer.create(request, host); + + } } diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostTests.java index e5ce5eb91ad5a..f46a91aa910f8 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientSingleHostTests.java @@ -34,38 +34,42 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.http.ConnectionClosedException; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.HttpEntityEnclosingRequest; -import org.apache.http.HttpHost; -import org.apache.http.HttpRequest; -import org.apache.http.HttpResponse; -import org.apache.http.ProtocolVersion; -import org.apache.http.StatusLine; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpOptions; -import org.apache.http.client.methods.HttpPatch; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.client.methods.HttpTrace; -import org.apache.http.client.methods.HttpUriRequest; -import org.apache.http.client.protocol.HttpClientContext; -import org.apache.http.client.utils.URIBuilder; -import org.apache.http.concurrent.FutureCallback; -import org.apache.http.conn.ConnectTimeoutException; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.nio.protocol.HttpAsyncRequestProducer; -import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; -import org.apache.http.util.EntityUtils; +import org.apache.hc.client5.http.ConnectTimeoutException; +import org.apache.hc.client5.http.classic.methods.HttpDelete; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.client5.http.classic.methods.HttpHead; +import org.apache.hc.client5.http.classic.methods.HttpOptions; +import org.apache.hc.client5.http.classic.methods.HttpPatch; +import org.apache.hc.client5.http.classic.methods.HttpPost; +import org.apache.hc.client5.http.classic.methods.HttpPut; +import org.apache.hc.client5.http.classic.methods.HttpTrace; +import org.apache.hc.client5.http.classic.methods.HttpUriRequest; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.core5.concurrent.FutureCallback; +import 
org.apache.hc.core5.function.Supplier; +import org.apache.hc.core5.http.ClassicHttpRequest; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ConnectionClosedException; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.HttpRequest; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.nio.AsyncPushConsumer; +import org.apache.hc.core5.http.nio.AsyncRequestProducer; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.http.nio.HandlerFactory; +import org.apache.hc.core5.http.protocol.HttpContext; +import org.apache.hc.core5.io.CloseMode; +import org.apache.hc.core5.net.URIBuilder; +import org.apache.hc.core5.reactor.IOReactorStatus; +import org.apache.hc.core5.util.TimeValue; import org.junit.After; import org.junit.Before; -import org.mockito.ArgumentCaptor; -import org.mockito.stubbing.Answer; +import org.opensearch.client.http.HttpUriRequestProducer; import javax.net.ssl.SSLHandshakeException; import java.io.IOException; @@ -85,6 +89,7 @@ import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicReference; +import java.util.concurrent.atomic.LongAdder; import static java.util.Collections.singletonList; import static org.opensearch.client.RestClientTestUtil.getAllErrorStatusCodes; @@ -100,12 +105,6 @@ import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.mockito.Mockito.any; -import static org.mockito.Mockito.nullable; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.times; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; /** * Tests for basic functionality of {@link RestClient} against one single host: tests http requests being sent, headers, @@ -122,10 +121,17 @@ public class RestClientSingleHostTests extends RestClientTestCase { private CloseableHttpAsyncClient httpClient; private HostsTrackingFailureListener failureListener; private boolean strictDeprecationMode; + private LongAdder requests; + private AtomicReference requestProducerCapture; @Before public void createRestClient() { - httpClient = mockHttpClient(exec); + requests = new LongAdder(); + requestProducerCapture = new AtomicReference<>(); + httpClient = mockHttpClient(exec, (target, requestProducer, responseConsumer, pushHandlerFactory, context, callback) -> { + requests.increment(); + requestProducerCapture.set(requestProducer); + }); defaultHeaders = RestClientTestUtil.randomHeaders(getRandom(), "Header-default"); node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); @@ -143,41 +149,78 @@ public void createRestClient() { ); } + interface CloseableHttpAsyncClientListener { + void onExecute( + HttpHost target, + AsyncRequestProducer requestProducer, + AsyncResponseConsumer responseConsumer, + HandlerFactory pushHandlerFactory, + HttpContext context, + FutureCallback callback + ); + } + @SuppressWarnings("unchecked") - static CloseableHttpAsyncClient mockHttpClient(final ExecutorService exec) { - CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); - when( - 
httpClient.execute( - any(HttpAsyncRequestProducer.class), - any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), - nullable(FutureCallback.class) - ) - ).thenAnswer((Answer>) invocationOnMock -> { - final HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; - final FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; - // Call the callback asynchronous to better simulate how async http client works - return exec.submit(() -> { - if (futureCallback != null) { - try { - HttpResponse httpResponse = responseOrException(requestProducer); - futureCallback.completed(httpResponse); - } catch (Exception e) { - futureCallback.failed(e); + static CloseableHttpAsyncClient mockHttpClient(final ExecutorService exec, final CloseableHttpAsyncClientListener... listeners) { + CloseableHttpAsyncClient httpClient = new CloseableHttpAsyncClient() { + @Override + public void close() throws IOException {} + + @Override + public void close(CloseMode closeMode) {} + + @Override + public void start() {} + + @Override + public void register(String hostname, String uriPattern, Supplier supplier) {} + + @Override + public void initiateShutdown() {} + + @Override + public IOReactorStatus getStatus() { + return null; + } + + @Override + protected Future doExecute( + HttpHost target, + AsyncRequestProducer requestProducer, + AsyncResponseConsumer responseConsumer, + HandlerFactory pushHandlerFactory, + HttpContext context, + FutureCallback callback + ) { + Arrays.stream(listeners) + .forEach(l -> l.onExecute(target, requestProducer, responseConsumer, pushHandlerFactory, context, callback)); + // Call the callback asynchronous to better simulate how async http client works + return exec.submit(() -> { + if (callback != null) { + try { + ClassicHttpResponse httpResponse = responseOrException(requestProducer); + callback.completed((T) httpResponse); + } catch (Exception e) { + callback.failed(e); + } + return null; } - return null; - } - return responseOrException(requestProducer); - }); - }); + return (T) responseOrException(requestProducer); + }); + } + + @Override + public void awaitShutdown(TimeValue waitTime) throws InterruptedException {} + }; + return httpClient; } - private static HttpResponse responseOrException(HttpAsyncRequestProducer requestProducer) throws Exception { - final HttpUriRequest request = (HttpUriRequest) requestProducer.generateRequest(); - final HttpHost httpHost = requestProducer.getTarget(); + private static ClassicHttpResponse responseOrException(AsyncRequestProducer requestProducer) throws Exception { + final ClassicHttpRequest request = getRequest(requestProducer); + final HttpHost httpHost = new HttpHost(request.getAuthority()); // return the desired status code or exception depending on the path - switch (request.getURI().getPath()) { + switch (request.getRequestUri()) { case "/soe": throw new SocketTimeoutException(httpHost.toString()); case "/coe": @@ -193,20 +236,17 @@ private static HttpResponse responseOrException(HttpAsyncRequestProducer request case "/runtime": throw new RuntimeException(); default: - int statusCode = Integer.parseInt(request.getURI().getPath().substring(1)); - StatusLine statusLine = new BasicStatusLine(new ProtocolVersion("http", 1, 1), statusCode, ""); + int statusCode = Integer.parseInt(request.getRequestUri().substring(1)); - final HttpResponse httpResponse = new BasicHttpResponse(statusLine); + final ClassicHttpResponse httpResponse = new 
BasicClassicHttpResponse(statusCode, ""); // return the same body that was sent - if (request instanceof HttpEntityEnclosingRequest) { - HttpEntity entity = ((HttpEntityEnclosingRequest) request).getEntity(); - if (entity != null) { - assertTrue("the entity is not repeatable, cannot set it to the response directly", entity.isRepeatable()); - httpResponse.setEntity(entity); - } + HttpEntity entity = request.getEntity(); + if (entity != null) { + assertTrue("the entity is not repeatable, cannot set it to the response directly", entity.isRepeatable()); + httpResponse.setEntity(entity); } // return the same headers that were sent - httpResponse.setHeaders(request.getAllHeaders()); + httpResponse.setHeaders(request.getHeaders()); return httpResponse; } } @@ -224,26 +264,20 @@ public void shutdownExec() { */ @SuppressWarnings("unchecked") public void testInternalHttpRequest() throws Exception { - ArgumentCaptor requestArgumentCaptor = ArgumentCaptor.forClass(HttpAsyncRequestProducer.class); int times = 0; for (String httpMethod : getHttpMethods()) { - HttpUriRequest expectedRequest = performRandomRequest(httpMethod); - verify(httpClient, times(++times)).execute( - requestArgumentCaptor.capture(), - any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), - nullable(FutureCallback.class) - ); - HttpUriRequest actualRequest = (HttpUriRequest) requestArgumentCaptor.getValue().generateRequest(); - assertEquals(expectedRequest.getURI(), actualRequest.getURI()); - assertEquals(expectedRequest.getClass(), actualRequest.getClass()); - assertArrayEquals(expectedRequest.getAllHeaders(), actualRequest.getAllHeaders()); - if (expectedRequest instanceof HttpEntityEnclosingRequest) { - HttpEntity expectedEntity = ((HttpEntityEnclosingRequest) expectedRequest).getEntity(); - if (expectedEntity != null) { - HttpEntity actualEntity = ((HttpEntityEnclosingRequest) actualRequest).getEntity(); - assertEquals(EntityUtils.toString(expectedEntity), EntityUtils.toString(actualEntity)); - } + ClassicHttpRequest expectedRequest = performRandomRequest(httpMethod); + assertThat(requests.intValue(), equalTo(++times)); + + ClassicHttpRequest actualRequest = getRequest(requestProducerCapture.get()); + assertEquals(expectedRequest.getRequestUri(), actualRequest.getRequestUri()); + assertEquals(expectedRequest.getMethod(), actualRequest.getMethod()); + assertArrayEquals(expectedRequest.getHeaders(), actualRequest.getHeaders()); + + HttpEntity expectedEntity = expectedRequest.getEntity(); + if (expectedEntity != null) { + HttpEntity actualEntity = actualRequest.getEntity(); + assertEquals(EntityUtils.toString(expectedEntity), EntityUtils.toString(actualEntity)); } } } @@ -414,14 +448,14 @@ public void testBody() throws Exception { } } } - for (String method : Arrays.asList("HEAD", "OPTIONS", "TRACE")) { + for (String method : Arrays.asList("TRACE")) { Request request = new Request(method, "/" + randomStatusCode(getRandom())); request.setEntity(entity); try { performRequestSyncOrAsync(restClient, request); fail("request should have failed"); - } catch (UnsupportedOperationException e) { - assertThat(e.getMessage(), equalTo(method + " with body is not supported")); + } catch (IllegalStateException e) { + assertThat(e.getMessage(), equalTo(method + " requests may not include an entity.")); } } } @@ -587,10 +621,10 @@ private HttpUriRequest performRandomRequest(String method) throws Exception { HttpUriRequest expectedRequest; switch (method) { case "DELETE": - expectedRequest = new HttpDeleteWithEntity(uri); + 
expectedRequest = new HttpDelete(uri); break; case "GET": - expectedRequest = new HttpGetWithEntity(uri); + expectedRequest = new HttpGet(uri); break; case "HEAD": expectedRequest = new HttpHead(uri); @@ -614,14 +648,14 @@ private HttpUriRequest performRandomRequest(String method) throws Exception { throw new UnsupportedOperationException("method not supported: " + method); } - if (expectedRequest instanceof HttpEntityEnclosingRequest && getRandom().nextBoolean()) { + if (getRandom().nextBoolean() && !(expectedRequest instanceof HttpTrace /* no entity */)) { HttpEntity entity = new StringEntity(randomAsciiAlphanumOfLengthBetween(10, 100), ContentType.APPLICATION_JSON); - ((HttpEntityEnclosingRequest) expectedRequest).setEntity(entity); + expectedRequest.setEntity(entity); request.setEntity(entity); } final Set uniqueNames = new HashSet<>(); - if (randomBoolean()) { + if (randomBoolean() && !(expectedRequest instanceof HttpTrace /* no entity */)) { Header[] headers = RestClientTestUtil.randomHeaders(getRandom(), "Header"); RequestOptions.Builder options = request.getOptions().toBuilder(); for (Header header : headers) { @@ -698,4 +732,9 @@ private static void assertExceptionStackContainsCallingMethod(Throwable t) { t.printStackTrace(new PrintWriter(stack)); fail("didn't find the calling method (looks like " + myMethod + ") in:\n" + stack); } + + private static ClassicHttpRequest getRequest(AsyncRequestProducer requestProducer) throws NoSuchFieldException, IllegalAccessException { + assertThat(requestProducer, instanceOf(HttpUriRequestProducer.class)); + return ((HttpUriRequestProducer) requestProducer).getRequest(); + } } diff --git a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java index ca761dcb6b9b6..dd51da3a30d8c 100644 --- a/client/rest/src/test/java/org/opensearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/opensearch/client/RestClientTests.java @@ -32,12 +32,13 @@ package org.opensearch.client; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.client.AuthCache; -import org.apache.http.impl.auth.BasicScheme; -import org.apache.http.impl.client.BasicAuthCache; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.auth.AuthCache; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.impl.auth.BasicAuthCache; +import org.apache.hc.client5.http.impl.auth.BasicScheme; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.reactor.IOReactorStatus; import org.opensearch.client.RestClient.NodeTuple; import java.io.IOException; @@ -410,10 +411,10 @@ public void testIsRunning() { CloseableHttpAsyncClient client = mock(CloseableHttpAsyncClient.class); RestClient restClient = new RestClient(client, new Header[] {}, nodes, null, null, null, false, false); - when(client.isRunning()).thenReturn(true); + when(client.getStatus()).thenReturn(IOReactorStatus.ACTIVE); assertTrue(restClient.isRunning()); - when(client.isRunning()).thenReturn(false); + when(client.getStatus()).thenReturn(IOReactorStatus.INACTIVE); assertFalse(restClient.isRunning()); } diff --git a/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java index 066419844f048..b2807d35d230e 100644 --- 
a/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/opensearch/client/documentation/RestClientDocumentation.java @@ -32,23 +32,30 @@ package org.opensearch.client.documentation; -import org.apache.http.Header; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; -import org.apache.http.RequestLine; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.client.config.RequestConfig; -import org.apache.http.entity.ContentType; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.impl.nio.reactor.IOReactorConfig; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; -import org.apache.http.ssl.SSLContextBuilder; -import org.apache.http.ssl.SSLContexts; -import org.apache.http.util.EntityUtils; +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.config.RequestConfig; +import org.apache.hc.client5.http.impl.async.HttpAsyncClientBuilder; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder; +import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder; +import org.apache.hc.core5.function.Factory; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicHeader; +import org.apache.hc.core5.http.message.RequestLine; +import org.apache.hc.core5.http.nio.ssl.TlsStrategy; +import org.apache.hc.core5.reactor.IOReactorConfig; +import org.apache.hc.core5.reactor.ssl.TlsDetails; +import org.apache.hc.core5.ssl.SSLContextBuilder; +import org.apache.hc.core5.ssl.SSLContexts; +import org.apache.hc.core5.util.Timeout; import org.opensearch.client.Cancellable; import org.opensearch.client.HttpAsyncResponseConsumerFactory; import org.opensearch.client.Node; @@ -62,6 +69,8 @@ import org.opensearch.client.RestClientBuilder.HttpClientConfigCallback; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLEngine; + import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -109,12 +118,12 @@ public class RestClientDocumentation { // end::rest-client-options-singleton @SuppressWarnings("unused") - public void usage() throws IOException, InterruptedException { + public void usage() throws IOException, InterruptedException, ParseException { //tag::rest-client-init RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http"), - new HttpHost("localhost", 9201, "http")).build(); + new HttpHost("http", "localhost", 9200), + new HttpHost("http", "localhost", 9201)).build(); //end::rest-client-init //tag::rest-client-close @@ -124,7 +133,7 @@ public void usage() throws IOException, InterruptedException { { //tag::rest-client-init-default-headers RestClientBuilder builder = RestClient.builder( - new 
HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); Header[] defaultHeaders = new Header[]{new BasicHeader("header", "value")}; builder.setDefaultHeaders(defaultHeaders); // <1> //end::rest-client-init-default-headers @@ -132,14 +141,14 @@ public void usage() throws IOException, InterruptedException { { //tag::rest-client-init-node-selector RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_CLUSTER_MANAGERS); // <1> //end::rest-client-init-node-selector } { //tag::rest-client-init-allocation-aware-selector RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); builder.setNodeSelector(new NodeSelector() { // <1> @Override public void select(Iterable nodes) { @@ -173,7 +182,7 @@ public void select(Iterable nodes) { { //tag::rest-client-init-failure-listener RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); builder.setFailureListener(new RestClient.FailureListener() { @Override public void onFailure(Node node) { @@ -185,13 +194,13 @@ public void onFailure(Node node) { { //tag::rest-client-init-request-config-callback RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); builder.setRequestConfigCallback( new RestClientBuilder.RequestConfigCallback() { @Override public RequestConfig.Builder customizeRequestConfig( RequestConfig.Builder requestConfigBuilder) { - return requestConfigBuilder.setSocketTimeout(10000); // <1> + return requestConfigBuilder.setResponseTimeout(Timeout.ofMilliseconds(10000)); // <1> } }); //end::rest-client-init-request-config-callback @@ -199,13 +208,13 @@ public RequestConfig.Builder customizeRequestConfig( { //tag::rest-client-init-client-config-callback RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); builder.setHttpClientConfigCallback(new HttpClientConfigCallback() { @Override public HttpAsyncClientBuilder customizeHttpClient( HttpAsyncClientBuilder httpClientBuilder) { return httpClientBuilder.setProxy( - new HttpHost("proxy", 9000, "http")); // <1> + new HttpHost("http", "proxy", 9000)); // <1> } }); //end::rest-client-init-client-config-callback @@ -244,7 +253,7 @@ public void onFailure(Exception exception) { request.addParameter("pretty", "true"); //end::rest-client-parameters //tag::rest-client-body - request.setEntity(new NStringEntity( + request.setEntity(new StringEntity( "{\"json\":\"text\"}", ContentType.APPLICATION_JSON)); //end::rest-client-body @@ -334,8 +343,8 @@ public void commonConfiguration() throws Exception { public RequestConfig.Builder customizeRequestConfig( RequestConfig.Builder requestConfigBuilder) { return requestConfigBuilder - .setConnectTimeout(5000) - .setSocketTimeout(60000); + .setConnectTimeout(Timeout.ofMilliseconds(5000)) + .setResponseTimeout(Timeout.ofMilliseconds(60000)); } }); //end::rest-client-config-timeouts @@ -343,8 +352,8 @@ public RequestConfig.Builder customizeRequestConfig( { //tag::rest-client-config-request-options-timeouts RequestConfig requestConfig = RequestConfig.custom() - .setConnectTimeout(5000) - .setSocketTimeout(60000) + .setConnectTimeout(Timeout.ofMilliseconds(5000)) + 
.setResponseTimeout(Timeout.ofMilliseconds(60000)) .build(); RequestOptions options = RequestOptions.DEFAULT.toBuilder() .setRequestConfig(requestConfig) @@ -359,7 +368,7 @@ public RequestConfig.Builder customizeRequestConfig( @Override public HttpAsyncClientBuilder customizeHttpClient( HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setDefaultIOReactorConfig( + return httpClientBuilder.setIOReactorConfig( IOReactorConfig.custom() .setIoThreadCount(1) .build()); @@ -369,10 +378,9 @@ public HttpAsyncClientBuilder customizeHttpClient( } { //tag::rest-client-config-basic-auth - final CredentialsProvider credentialsProvider = - new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials("user", "password")); + final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(new AuthScope(new HttpHost("localhost", 9200)), + new UsernamePasswordCredentials("user", "password".toCharArray())); RestClientBuilder builder = RestClient.builder( new HttpHost("localhost", 9200)) @@ -388,10 +396,10 @@ public HttpAsyncClientBuilder customizeHttpClient( } { //tag::rest-client-config-disable-preemptive-auth - final CredentialsProvider credentialsProvider = + final BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, - new UsernamePasswordCredentials("user", "password")); + credentialsProvider.setCredentials(new AuthScope(new HttpHost("localhost", 9200)), + new UsernamePasswordCredentials("user", "password".toCharArray())); RestClientBuilder builder = RestClient.builder( new HttpHost("localhost", 9200)) @@ -418,12 +426,27 @@ public HttpAsyncClientBuilder customizeHttpClient( .loadTrustMaterial(truststore, null); final SSLContext sslContext = sslBuilder.build(); RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "https")) + new HttpHost("https", "localhost", 9200)) .setHttpClientConfigCallback(new HttpClientConfigCallback() { @Override public HttpAsyncClientBuilder customizeHttpClient( HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setSSLContext(sslContext); + final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() + .setSslContext(sslContext) + // See https://issues.apache.org/jira/browse/HTTPCLIENT-2219 + .setTlsDetailsFactory(new Factory() { + @Override + public TlsDetails create(final SSLEngine sslEngine) { + return new TlsDetails(sslEngine.getSession(), sslEngine.getApplicationProtocol()); + } + }) + .build(); + + final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(tlsStrategy) + .build(); + + return httpClientBuilder.setConnectionManager(connectionManager); } }); //end::rest-client-config-encrypted-communication @@ -444,12 +467,27 @@ public HttpAsyncClientBuilder customizeHttpClient( .loadTrustMaterial(trustStore, null); final SSLContext sslContext = sslContextBuilder.build(); RestClient.builder( - new HttpHost("localhost", 9200, "https")) + new HttpHost("https", "localhost", 9200)) .setHttpClientConfigCallback(new HttpClientConfigCallback() { @Override public HttpAsyncClientBuilder customizeHttpClient( HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setSSLContext(sslContext); + final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() + .setSslContext(sslContext) + // See 
https://issues.apache.org/jira/browse/HTTPCLIENT-2219 + .setTlsDetailsFactory(new Factory() { + @Override + public TlsDetails create(final SSLEngine sslEngine) { + return new TlsDetails(sslEngine.getSession(), sslEngine.getApplicationProtocol()); + } + }) + .build(); + + final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(tlsStrategy) + .build(); + + return httpClientBuilder.setConnectionManager(connectionManager); } }); //end::rest-client-config-trust-ca-pem @@ -473,12 +511,20 @@ public HttpAsyncClientBuilder customizeHttpClient( .loadKeyMaterial(keyStore, keyStorePass.toCharArray()); final SSLContext sslContext = sslBuilder.build(); RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "https")) + new HttpHost("https", "localhost", 9200)) .setHttpClientConfigCallback(new HttpClientConfigCallback() { @Override public HttpAsyncClientBuilder customizeHttpClient( HttpAsyncClientBuilder httpClientBuilder) { - return httpClientBuilder.setSSLContext(sslContext); + final TlsStrategy tlsStrategy = ClientTlsStrategyBuilder.create() + .setSslContext(sslContext) + .build(); + + final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(tlsStrategy) + .build(); + + return httpClientBuilder.setConnectionManager(connectionManager); } }); //end::rest-client-config-mutual-tls-authentication @@ -486,7 +532,7 @@ public HttpAsyncClientBuilder customizeHttpClient( { //tag::rest-client-auth-bearer-token RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); Header[] defaultHeaders = new Header[]{new BasicHeader("Authorization", "Bearer u6iuAxZ0RG1Kcm5jVFI4eU4tZU9aVFEwT2F3")}; @@ -502,7 +548,7 @@ public HttpAsyncClientBuilder customizeHttpClient( (apiKeyId + ":" + apiKeySecret) .getBytes(StandardCharsets.UTF_8)); RestClientBuilder builder = RestClient.builder( - new HttpHost("localhost", 9200, "http")); + new HttpHost("http", "localhost", 9200)); Header[] defaultHeaders = new Header[]{new BasicHeader("Authorization", "ApiKey " + apiKeyAuth)}; diff --git a/client/sniffer/build.gradle b/client/sniffer/build.gradle index b7cb0d87c02d9..eb3306cf2cea2 100644 --- a/client/sniffer/build.gradle +++ b/client/sniffer/build.gradle @@ -38,8 +38,8 @@ archivesBaseName = 'opensearch-rest-client-sniffer' dependencies { api project(":client:rest") - api "org.apache.httpcomponents:httpclient:${versions.httpclient}" - api "org.apache.httpcomponents:httpcore:${versions.httpcore}" + api "org.apache.httpcomponents.client5:httpclient5:${versions.httpclient5}" + api "org.apache.httpcomponents.core5:httpcore5:${versions.httpcore5}" api "commons-codec:commons-codec:${versions.commonscodec}" api "commons-logging:commons-logging:${versions.commonslogging}" api "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" @@ -84,6 +84,7 @@ testingConventions { } thirdPartyAudit.ignoreMissingClasses( + 'org.conscrypt.Conscrypt', //commons-logging optional dependencies 'org.apache.avalon.framework.logger.Logger', 'org.apache.log.Hierarchy', diff --git a/client/sniffer/licenses/httpclient-4.5.13.jar.sha1 b/client/sniffer/licenses/httpclient-4.5.13.jar.sha1 deleted file mode 100644 index 3281e21595b39..0000000000000 --- a/client/sniffer/licenses/httpclient-4.5.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e5f6cae5ca7ecaac1ec2827a9e2d65ae2869cada \ No newline at end of file diff --git 
a/client/sniffer/licenses/httpclient5-5.1.4.jar.sha1 b/client/sniffer/licenses/httpclient5-5.1.4.jar.sha1 new file mode 100644 index 0000000000000..3c0cb1335fb88 --- /dev/null +++ b/client/sniffer/licenses/httpclient5-5.1.4.jar.sha1 @@ -0,0 +1 @@ +208f9eed6d6ab709e2ae7a75b457ef60c0baefa5 \ No newline at end of file diff --git a/client/sniffer/licenses/httpcore-4.4.15.jar.sha1 b/client/sniffer/licenses/httpcore-4.4.15.jar.sha1 deleted file mode 100644 index 42a03b5d7a376..0000000000000 --- a/client/sniffer/licenses/httpcore-4.4.15.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7f2e0c573eaa7a74bac2e89b359e1f73d92a0a1d \ No newline at end of file diff --git a/client/sniffer/licenses/httpcore5-5.1.5.jar.sha1 b/client/sniffer/licenses/httpcore5-5.1.5.jar.sha1 new file mode 100644 index 0000000000000..8da253152e970 --- /dev/null +++ b/client/sniffer/licenses/httpcore5-5.1.5.jar.sha1 @@ -0,0 +1 @@ +df9da3a1fa2351c4790245400ed28d78a8ddd3fc \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 b/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/client/sniffer/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/client/sniffer/licenses/jackson-core-2.14.1.jar.sha1 b/client/sniffer/licenses/jackson-core-2.14.1.jar.sha1 new file mode 100644 index 0000000000000..054873b60eb21 --- /dev/null +++ b/client/sniffer/licenses/jackson-core-2.14.1.jar.sha1 @@ -0,0 +1 @@ +7a07bc535ccf0b7f6929c4d0f2ab9b294ef7c4a3 \ No newline at end of file diff --git a/client/sniffer/src/main/java/org/opensearch/client/sniff/OpenSearchNodesSniffer.java b/client/sniffer/src/main/java/org/opensearch/client/sniff/OpenSearchNodesSniffer.java index c1a0fcf9a8acf..e6696c1fc4039 100644 --- a/client/sniffer/src/main/java/org/opensearch/client/sniff/OpenSearchNodesSniffer.java +++ b/client/sniffer/src/main/java/org/opensearch/client/sniff/OpenSearchNodesSniffer.java @@ -37,8 +37,8 @@ import com.fasterxml.jackson.core.JsonToken; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node; import org.opensearch.client.Node.Roles; import org.opensearch.client.Request; @@ -192,12 +192,12 @@ private static Node readNode(String nodeId, JsonParser parser, Scheme scheme) th publishAddressAsURI = URI.create(scheme + "://" + address); host = publishAddressAsURI.getHost(); } - publishedHost = new HttpHost(host, publishAddressAsURI.getPort(), publishAddressAsURI.getScheme()); + publishedHost = new HttpHost(publishAddressAsURI.getScheme(), host, publishAddressAsURI.getPort()); } else if (parser.currentToken() == JsonToken.START_ARRAY && "bound_address".equals(parser.getCurrentName())) { while (parser.nextToken() != JsonToken.END_ARRAY) { URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString()); boundHosts.add( - new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(), boundAddressAsURI.getScheme()) + new HttpHost(boundAddressAsURI.getScheme(), boundAddressAsURI.getHost(), boundAddressAsURI.getPort()) ); } } else if (parser.getCurrentToken() == JsonToken.START_OBJECT) { diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/MockNodesSniffer.java 
b/client/sniffer/src/test/java/org/opensearch/client/sniff/MockNodesSniffer.java index cbf349e534deb..9b5e89fbeb038 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/MockNodesSniffer.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/MockNodesSniffer.java @@ -32,7 +32,7 @@ package org.opensearch.client.sniff; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node; import java.util.Collections; diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferParseTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferParseTests.java index 58b60ac13dee8..fd38eceee6224 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferParseTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferParseTests.java @@ -33,10 +33,11 @@ package org.opensearch.client.sniff; import com.fasterxml.jackson.core.JsonFactory; -import org.apache.http.HttpEntity; -import org.apache.http.HttpHost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.InputStreamEntity; + +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.InputStreamEntity; import org.opensearch.client.Node; import org.opensearch.client.Node.Roles; import org.opensearch.client.RestClientTestCase; diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java index 1d06e9353726d..b678fb050e8f8 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/OpenSearchNodesSnifferTests.java @@ -40,14 +40,13 @@ import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import org.apache.http.Consts; -import org.apache.http.HttpHost; -import org.apache.http.client.methods.HttpGet; import org.opensearch.client.Node; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; import org.opensearch.client.RestClient; import org.opensearch.client.RestClientTestCase; +import org.apache.hc.client5.http.classic.methods.HttpGet; +import org.apache.hc.core5.http.HttpHost; import org.junit.After; import org.junit.Before; @@ -56,6 +55,7 @@ import java.io.StringWriter; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -181,7 +181,7 @@ public void handle(HttpExchange httpExchange) throws IOException { String nodesInfoBody = sniffResponse.nodesInfoBody; httpExchange.sendResponseHeaders(sniffResponse.nodesInfoResponseCode, nodesInfoBody.length()); try (OutputStream out = httpExchange.getResponseBody()) { - out.write(nodesInfoBody.getBytes(Consts.UTF_8)); + out.write(nodesInfoBody.getBytes(StandardCharsets.UTF_8)); return; } } @@ -210,14 +210,14 @@ private static SniffResponse buildSniffResponse(OpenSearchNodesSniffer.Scheme sc String nodeId = RandomStrings.randomAsciiOfLengthBetween(getRandom(), 5, 10); String host = "host" + i; int port = RandomNumbers.randomIntBetween(getRandom(), 9200, 9299); - HttpHost publishHost = new 
HttpHost(host, port, scheme.toString()); + HttpHost publishHost = new HttpHost(scheme.toString(), host, port); Set boundHosts = new HashSet<>(); boundHosts.add(publishHost); if (randomBoolean()) { int bound = between(1, 5); for (int b = 0; b < bound; b++) { - boundHosts.add(new HttpHost(host + b, port, scheme.toString())); + boundHosts.add(new HttpHost(scheme.toString(), host + b, port)); } } diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/SniffOnFailureListenerTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/SniffOnFailureListenerTests.java index e4d1058282f5c..faab6babcaca6 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/SniffOnFailureListenerTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/SniffOnFailureListenerTests.java @@ -32,7 +32,7 @@ package org.opensearch.client.sniff; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node; import org.opensearch.client.RestClient; import org.opensearch.client.RestClientTestCase; diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferBuilderTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferBuilderTests.java index 25a3162e238ed..24ee540aa6364 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferBuilderTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferBuilderTests.java @@ -33,7 +33,8 @@ package org.opensearch.client.sniff; import com.carrotsearch.randomizedtesting.generators.RandomNumbers; -import org.apache.http.HttpHost; + +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.RestClient; import org.opensearch.client.RestClientTestCase; diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferTests.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferTests.java index 304243e73c078..36923281dde6b 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferTests.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/SnifferTests.java @@ -32,12 +32,12 @@ package org.opensearch.client.sniff; -import org.apache.http.HttpHost; import org.opensearch.client.Node; import org.opensearch.client.RestClient; import org.opensearch.client.RestClientTestCase; import org.opensearch.client.sniff.Sniffer.DefaultScheduler; import org.opensearch.client.sniff.Sniffer.Scheduler; +import org.apache.hc.core5.http.HttpHost; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; diff --git a/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java b/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java index 3b612aab80851..8f3e446d8aefb 100644 --- a/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java +++ b/client/sniffer/src/test/java/org/opensearch/client/sniff/documentation/SnifferDocumentation.java @@ -32,7 +32,7 @@ package org.opensearch.client.sniff.documentation; -import org.apache.http.HttpHost; +import org.apache.hc.core5.http.HttpHost; import org.opensearch.client.Node; import org.opensearch.client.RestClient; import org.opensearch.client.sniff.OpenSearchNodesSniffer; @@ -69,7 +69,7 @@ public void usage() throws IOException { { //tag::sniffer-init RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) + new HttpHost("http", "localhost", 9200)) 
.build(); Sniffer sniffer = Sniffer.builder(restClient).build(); //end::sniffer-init @@ -82,7 +82,7 @@ public void usage() throws IOException { { //tag::sniffer-interval RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) + new HttpHost("http", "localhost", 9200)) .build(); Sniffer sniffer = Sniffer.builder(restClient) .setSniffIntervalMillis(60000).build(); @@ -105,7 +105,7 @@ public void usage() throws IOException { { //tag::sniffer-https RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) + new HttpHost("http", "localhost", 9200)) .build(); NodesSniffer nodesSniffer = new OpenSearchNodesSniffer( restClient, @@ -118,7 +118,7 @@ public void usage() throws IOException { { //tag::sniff-request-timeout RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) + new HttpHost("http", "localhost", 9200)) .build(); NodesSniffer nodesSniffer = new OpenSearchNodesSniffer( restClient, @@ -131,7 +131,7 @@ public void usage() throws IOException { { //tag::custom-nodes-sniffer RestClient restClient = RestClient.builder( - new HttpHost("localhost", 9200, "http")) + new HttpHost("http", "localhost", 9200)) .build(); NodesSniffer nodesSniffer = new NodesSniffer() { @Override diff --git a/client/test/build.gradle b/client/test/build.gradle index 07d874cf01ea7..13e9bd6b9e34a 100644 --- a/client/test/build.gradle +++ b/client/test/build.gradle @@ -35,7 +35,7 @@ sourceCompatibility = JavaVersion.VERSION_11 group = "${group}.client.test" dependencies { - api "org.apache.httpcomponents:httpcore:${versions.httpcore}" + api "org.apache.httpcomponents.core5:httpcore5:${versions.httpcore5}" api "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" api "junit:junit:${versions.junit}" api "org.hamcrest:hamcrest:${versions.hamcrest}" diff --git a/client/test/src/main/java/org/opensearch/client/RestClientTestCase.java b/client/test/src/main/java/org/opensearch/client/RestClientTestCase.java index 2b3e867929e27..b4eacdbf88827 100644 --- a/client/test/src/main/java/org/opensearch/client/RestClientTestCase.java +++ b/client/test/src/main/java/org/opensearch/client/RestClientTestCase.java @@ -43,7 +43,8 @@ import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope; import com.carrotsearch.randomizedtesting.annotations.ThreadLeakZombies; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; -import org.apache.http.Header; + +import org.apache.hc.core5.http.Header; import java.util.ArrayList; import java.util.HashMap; diff --git a/client/test/src/main/java/org/opensearch/client/RestClientTestUtil.java b/client/test/src/main/java/org/opensearch/client/RestClientTestUtil.java index aeba9bde4bff4..6a01ed30e0c63 100644 --- a/client/test/src/main/java/org/opensearch/client/RestClientTestUtil.java +++ b/client/test/src/main/java/org/opensearch/client/RestClientTestUtil.java @@ -35,8 +35,9 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.apache.http.Header; -import org.apache.http.message.BasicHeader; + +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.message.BasicHeader; import java.util.ArrayList; import java.util.Arrays; diff --git a/distribution/README.md b/distribution/README.md new file mode 100644 index 0000000000000..b9e948b625659 --- /dev/null +++ b/distribution/README.md @@ 
-0,0 +1,12 @@ +# Distributions +This subproject contains the necessary tooling to build the various distributions. +Note that some of this tooling can only be run on the target architecture; cross-compilation is not supported. + +The following distributions are being built: +* Archives (`*.zip`, `*.tar`): these form the basis for all other OpenSearch distributions +* Packages (`*.deb`, `*.rpm`): specific package formats for some Linux distributions +* Docker images +* Backwards compatibility tests: used internally for version compatibility testing, not for public consumption + +## With or Without JDK? +For each supported platform there should be both a target bundled with a JDK and a target without a bundled JDK. diff --git a/distribution/archives/build.gradle b/distribution/archives/build.gradle index 1376b8d419f6e..587175eef4008 100644 --- a/distribution/archives/build.gradle +++ b/distribution/archives/build.gradle @@ -137,6 +137,13 @@ distribution_archives { } } + noJdkLinuxArm64Tar { + archiveClassifier = 'no-jdk-linux-arm64' + content { + archiveFiles(modulesFiles('linux-arm64'), 'tar', 'linux', 'arm64', false) + } + } + linuxTar { archiveClassifier = 'linux-x64' content { @@ -151,6 +158,8 @@ } } + // This should really be `no-jdk-linux-s390x`, as it ships without a JDK; however, the build cannot currently handle + // the absence of the `linux-s390x` target. linuxS390xTar { archiveClassifier = 'linux-s390x' content { diff --git a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/WaitForRefreshAndCloseIT.java b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/WaitForRefreshAndCloseIT.java index 37ffe32d19509..07576dacffb03 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/WaitForRefreshAndCloseIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/opensearch/test/rest/WaitForRefreshAndCloseIT.java @@ -32,13 +32,14 @@ package org.opensearch.test.rest; -import org.apache.http.util.EntityUtils; import org.opensearch.action.ActionFuture; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; import org.opensearch.client.ResponseListener; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.entity.EntityUtils; import org.junit.After; import org.junit.Before; @@ -145,6 +146,8 @@ public void onSuccess(Response response) { future.onResponse(EntityUtils.toString(response.getEntity())); } catch (IOException e) { future.onFailure(e); + } catch (ParseException e) { + future.onFailure(e); } } diff --git a/distribution/packages/build.gradle b/distribution/packages/build.gradle index df3049d7684c4..d9db3448104c8 100644 --- a/distribution/packages/build.gradle +++ b/distribution/packages/build.gradle @@ -350,6 +350,10 @@ tasks.register('buildArm64Deb', Deb) { configure(commonDebConfig(true, 'arm64')) } +tasks.register('buildNoJdkArm64Deb', Deb) { + configure(commonDebConfig(false, 'arm64')) +} + tasks.register('buildDeb', Deb) { configure(commonDebConfig(true, 'x64')) } @@ -387,6 +391,10 @@ tasks.register('buildArm64Rpm', Rpm) { configure(commonRpmConfig(true, 'arm64')) } +tasks.register('buildNoJdkArm64Rpm', Rpm) { + configure(commonRpmConfig(false, 'arm64')) +} + tasks.register('buildRpm', Rpm) { configure(commonRpmConfig(true, 'x64')) }
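One side effect of the httpcore5 move, visible in the WaitForRefreshAndCloseIT hunk above: EntityUtils.toString(...) now also declares ParseException, hence the extra catch block. A hedged sketch of how a caller might fold both failure paths into one handler; the helper name and the null-on-failure policy are illustrative only, not part of the patch.

import java.io.IOException;

import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.ParseException;
import org.apache.hc.core5.http.io.entity.EntityUtils;

final class EntityBodies {
    // httpcore5's EntityUtils.toString(...) throws ParseException in addition to
    // IOException; a multi-catch keeps call sites as compact as the old httpcore4 code.
    static String bodyOrNull(final HttpEntity entity) {
        try {
            return EntityUtils.toString(entity);
        } catch (IOException | ParseException e) {
            return null; // treat an unreadable body as absent
        }
    }
}

diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options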
index ef1035489c9fc..6cd5feadbef87 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -78,3 +78,6 @@ ${error.file} # Explicitly allow security manager (https://bugs.openjdk.java.net/browse/JDK-8270380) 18-:-Djava.security.manager=allow + +# Allow mmap to use new JDK-19 preview APIs in Apache Lucene 9.4 (https://github.com/opensearch-project/OpenSearch/issues/4637) +19-:--enable-preview diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java index 725718d85b179..d02e4e98b1287 100644 --- a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java +++ b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/SuppressForbidden.java @@ -43,5 +43,10 @@ @Retention(RetentionPolicy.CLASS) @Target({ ElementType.CONSTRUCTOR, ElementType.FIELD, ElementType.METHOD, ElementType.TYPE }) public @interface SuppressForbidden { + /** + * The argument to this annotation, specifying the reason a forbidden API is being used. + * + * @return The reason the error is being suppressed. + */ String reason(); } diff --git a/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java new file mode 100644 index 0000000000000..a626a125bb4c9 --- /dev/null +++ b/distribution/tools/java-version-checker/src/main/java/org/opensearch/tools/java_version_checker/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Tools to validate minimum version of the runtime Java. + */ +package org.opensearch.tools.java_version_checker; diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java index 4789c5df416e6..7a772526cd66b 100644 --- a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java +++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/KeyStoreCli.java @@ -36,7 +36,7 @@ import org.opensearch.cli.Terminal; /** - * A cli tool for managing secrets in the opensearch keystore. + * A CLI tool for managing secrets in the OpenSearch keystore. */ public class KeyStoreCli extends LoggingAwareMultiCommand { @@ -52,6 +52,12 @@ private KeyStoreCli() { subcommands.put("has-passwd", new HasPasswordKeyStoreCommand()); } + /** + * Main entry point for the OpenSearch Keystore CLI tool. + * + * @param args CLI commands for managing secrets. + * @throws Exception if an exception was encountered executing the command. 
+ */ public static void main(String[] args) throws Exception { exit(new KeyStoreCli().main(args, Terminal.DEFAULT)); } diff --git a/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java new file mode 100644 index 0000000000000..3969fb4f91e49 --- /dev/null +++ b/distribution/tools/keystore-cli/src/main/java/org/opensearch/common/settings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Classes implementing a CLI tool for managing secrets in the OpenSearch keystore. + */ +package org.opensearch.common.settings; diff --git a/distribution/tools/launchers/build.gradle b/distribution/tools/launchers/build.gradle index 52100296ac7e6..7ebe5c7e64416 100644 --- a/distribution/tools/launchers/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -54,6 +54,5 @@ testingConventions { } javadoc.enabled = false -missingJavadoc.enabled = false loggerUsageCheck.enabled = false jarHell.enabled = false diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java index fc613ccdaae68..aa3dfbe39ee96 100644 --- a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/SystemJvmOptions.java @@ -77,12 +77,21 @@ static List systemJvmOptions() { // log4j 2 "-Dlog4j.shutdownHookEnabled=false", "-Dlog4j2.disable.jmx=true", - + // security manager + allowSecurityManagerOption(), javaLocaleProviders() ) ).stream().filter(e -> e.isEmpty() == false).collect(Collectors.toList()); } + private static String allowSecurityManagerOption() { + if (Runtime.version().feature() > 17) { + return "-Djava.security.manager=allow"; + } else { + return ""; + } + } + private static String maybeShowCodeDetailsInExceptionMessages() { if (Runtime.version().feature() >= 14) { return "-XX:+ShowCodeDetailsInExceptionMessages"; diff --git a/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java new file mode 100644 index 0000000000000..c77d9cab1f468 --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/opensearch/tools/launchers/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Classes implementing utility methods for launching JVMs. 
+ */ +package org.opensearch.tools.launchers; diff --git a/distribution/tools/plugin-cli/build.gradle b/distribution/tools/plugin-cli/build.gradle index b2e81491da6bd..29592d965c2c3 100644 --- a/distribution/tools/plugin-cli/build.gradle +++ b/distribution/tools/plugin-cli/build.gradle @@ -35,7 +35,7 @@ archivesBaseName = 'opensearch-plugin-cli' dependencies { compileOnly project(":server") compileOnly project(":libs:opensearch-cli") - api "org.bouncycastle:bcpg-fips:1.0.5.1" + api "org.bouncycastle:bcpg-fips:1.0.7.1" api "org.bouncycastle:bc-fips:1.0.2.3" testImplementation project(":test:framework") testImplementation 'com.google.jimfs:jimfs:1.2' diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.5.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.5.1.jar.sha1 deleted file mode 100644 index 30c30bb4af8e0..0000000000000 --- a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.5.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -63a454936d930fadb1c7a3206b8e758378dd0a26 \ No newline at end of file diff --git a/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1 b/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1 new file mode 100644 index 0000000000000..44cebc7c92d87 --- /dev/null +++ b/distribution/tools/plugin-cli/licenses/bcpg-fips-1.0.7.1.jar.sha1 @@ -0,0 +1 @@ +5e1952428655ea822066f86df2e3ecda8fa0ba2b \ No newline at end of file diff --git a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java index c2db39ecea072..5bf0bc7763ddd 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java +++ b/distribution/tools/plugin-cli/src/main/java/org/opensearch/plugins/InstallPluginCommand.java @@ -1028,7 +1028,7 @@ private static void setFileAttributes(final Path path, final Set input) { // check if the elasticsearch version is supported if (taskInput.getVersion().isPresent()) { final Version version = taskInput.getVersion().get(); - if (version.equals(LegacyESVersion.V_7_10_2) == false) { + if (version.equals(LegacyESVersion.fromId(7100299)) == false) { throw new RuntimeException( String.format(Locale.getDefault(), "The installed version %s of elasticsearch is not supported.", version) ); diff --git a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java index 07cb19b132f31..195c57e5b457f 100644 --- a/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java +++ b/distribution/tools/upgrade-cli/src/test/java/org/opensearch/upgrade/ValidateInputTaskTests.java @@ -39,7 +39,7 @@ public void setTask() { public void testUnsupportedEsVersion() { TaskInput taskInput = new TaskInput(env); - taskInput.setVersion(LegacyESVersion.V_7_10_1); + taskInput.setVersion(LegacyESVersion.fromId(7100199)); final RuntimeException e = expectThrows(RuntimeException.class, () -> task.accept(new Tuple<>(taskInput, terminal))); @@ -51,7 +51,7 @@ public void testGetSummaryFields() { taskInput.setEsConfig(PathUtils.get("es_home")); taskInput.setCluster("some-cluster"); taskInput.setNode("some-node"); - taskInput.setVersion(LegacyESVersion.V_7_10_2); + taskInput.setVersion(LegacyESVersion.fromId(7100299)); taskInput.setBaseUrl("some-url"); taskInput.setPlugins(Arrays.asList("plugin-1", "plugin-2")); diff --git 
a/gradle/missing-javadoc.gradle b/gradle/missing-javadoc.gradle index 6b3dacd3e905a..a1fde7637796c 100644 --- a/gradle/missing-javadoc.gradle +++ b/gradle/missing-javadoc.gradle @@ -95,18 +95,6 @@ configure([ project(":client:client-benchmark-noop-api-plugin"), project(":client:rest-high-level"), project(":client:test"), - project(":distribution:tools:java-version-checker"), - project(":distribution:tools:keystore-cli"), - project(":distribution:tools:launchers"), - project(":distribution:tools:plugin-cli"), - project(":doc-tools"), - project(":example-plugins:custom-settings"), - project(":example-plugins:custom-significance-heuristic"), - project(":example-plugins:custom-suggester"), - project(":example-plugins:painless-allowlist"), - project(":example-plugins:rescore"), - project(":example-plugins:rest-handler"), - project(":example-plugins:script-expert-scoring"), project(":libs:opensearch-cli"), project(":libs:opensearch-core"), project(":libs:opensearch-dissect"), @@ -159,9 +147,7 @@ configure([ project(":plugins:store-smb"), project(":plugins:transport-nio"), project(":qa:die-with-dignity"), - project(":qa:os"), project(":qa:wildfly"), - project(":rest-api-spec"), project(":test:external-modules:test-delayed-aggs"), project(":test:fixtures:azure-fixture"), project(":test:fixtures:gcs-fixture"), diff --git a/gradle/runtime-jdk-provision.gradle b/gradle/runtime-jdk-provision.gradle index 2f0c2f74d6803..cb4bb834f89c9 100644 --- a/gradle/runtime-jdk-provision.gradle +++ b/gradle/runtime-jdk-provision.gradle @@ -20,7 +20,11 @@ if (BuildParams.getIsRuntimeJavaHomeSet()) { configure(allprojects - project(':build-tools')) { project.tasks.withType(Test).configureEach { Test test -> if (BuildParams.getIsRuntimeJavaHomeSet()) { - test.executable = "${BuildParams.runtimeJavaHome}/bin/java" + if (OS.current() == OS.WINDOWS) { + test.executable = "${BuildParams.runtimeJavaHome}/bin/java.exe" + } else { + test.executable = "${BuildParams.runtimeJavaHome}/bin/java" + } } } } diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 58e9a16f424db..7e42e8ab05a2e 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -11,7 +11,7 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-7.6-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=db9c8211ed63f61f60292c69e80d89196f9eb36665e369e7f00ac4cc841c2219 +distributionSha256Sum=312eb12875e1747e05c2f81a4789902d7e4ec5defbd1eefeaccc08acf096505d diff --git a/gradlew b/gradlew index 4f906e0c811fc..1b6c787337ffb 100755 --- a/gradlew +++ b/gradlew @@ -1,7 +1,7 @@ -#!/usr/bin/env sh +#!/bin/sh # -# Copyright 2015 the original author or authors. +# Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,67 +17,101 @@ # ############################################################################## -## -## Gradle start up script for UN*X -## +# +# Gradle start up script for POSIX generated by Gradle. +# +# Important for running: +# +# (1) You need a POSIX-compliant shell to run this script. 
If your /bin/sh is +# noncompliant, but you have some other compliant shell such as ksh or +# bash, then to run this script, type that shell name before the whole +# command line, like: +# +# ksh Gradle +# +# Busybox and similar reduced shells will NOT work, because this script +# requires all of these POSIX shell features: +# * functions; +# * expansions «$var», «${var}», «${var:-default}», «${var+SET}», +# «${var#prefix}», «${var%suffix}», and «$( cmd )»; +# * compound commands having a testable exit status, especially «case»; +# * various built-in commands including «command», «set», and «ulimit». +# +# Important for patching: +# +# (2) This script targets any POSIX shell, so it avoids extensions provided +# by Bash, Ksh, etc; in particular arrays are avoided. +# +# The "traditional" practice of packing multiple parameters into a +# space-separated string is a well documented source of bugs and security +# problems, so this is (mostly) avoided, by progressively accumulating +# options in "$@", and eventually passing that to Java. +# +# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, +# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; +# see the in-line comments for details. +# +# There are tweaks for specific operating systems such as AIX, CygWin, +# Darwin, MinGW, and NonStop. +# +# (3) This script is generated from the Groovy template +# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt +# within the Gradle project. +# +# You can find Gradle at https://github.com/gradle/gradle/. +# ############################################################################## # Attempt to set APP_HOME + # Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi +app_path=$0 + +# Need this for daisy-chained symlinks. +while + APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path + [ -h "$app_path" ] +do + ls=$( ls -ld "$app_path" ) + link=${ls#*' -> '} + case $link in #( + /*) app_path=$link ;; #( + *) app_path=$APP_HOME$link ;; + esac done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null + +APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` +APP_BASE_NAME=${0##*/} # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" +MAX_FD=maximum warn () { echo "$*" -} +} >&2 die () { echo echo "$*" echo exit 1 -} +} >&2 # OS specific support (must be 'true' or 'false'). 
cygwin=false msys=false darwin=false nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; +case "$( uname )" in #( + CYGWIN* ) cygwin=true ;; #( + Darwin* ) darwin=true ;; #( + MSYS* | MINGW* ) msys=true ;; #( + NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar @@ -87,9 +121,9 @@ CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" + JAVACMD=$JAVA_HOME/jre/sh/java else - JAVACMD="$JAVA_HOME/bin/java" + JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME @@ -98,7 +132,7 @@ Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else - JAVACMD="java" + JAVACMD=java which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the @@ -106,80 +140,95 @@ location of your Java installation." fi # Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi +if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then + case $MAX_FD in #( + max*) + MAX_FD=$( ulimit -H -n ) || + warn "Could not query maximum file descriptor limit" + esac + case $MAX_FD in #( + '' | soft) :;; #( + *) + ulimit -n "$MAX_FD" || + warn "Could not set maximum file descriptor limit to $MAX_FD" + esac fi -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi +# Collect all arguments for the java command, stacking in reverse order: +# * args from the command line +# * the main class name +# * -classpath +# * -D...appname settings +# * --module-path (only if needed) +# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. 
# For Cygwin or MSYS, switch paths to Windows format before running java -if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi +if "$cygwin" || "$msys" ; then + APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) + CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) + + JAVACMD=$( cygpath --unix "$JAVACMD" ) + # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" + for arg do + if + case $arg in #( + -*) false ;; # don't mess with options #( + /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath + [ -e "$t" ] ;; #( + *) false ;; + esac + then + arg=$( cygpath --path --ignore --mixed "$arg" ) fi - i=`expr $i + 1` + # Roll the args list around exactly as many times as the number of + # args, so each arg winds up back in the position where it started, but + # possibly modified. + # + # NB: a `for` loop captures its iteration list before it begins, so + # changing the positional parameters here affects neither the number of + # iterations, nor the values presented in `arg`. + shift # remove old arg + set -- "$@" "$arg" # push replacement arg done - case $i in - 0) set -- ;; - 1) set -- "$args0" ;; - 2) set -- "$args0" "$args1" ;; - 3) set -- "$args0" "$args1" "$args2" ;; - 4) set -- "$args0" "$args1" "$args2" "$args3" ;; - 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac fi -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=`save "$@"` +# Collect all arguments for the java command; +# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of +# shell script including quotes and variable substitutions, so put them in +# double quotes to make sure that they get re-expanded; and +# * put everything else in single quotes, so that it's not re-expanded. + +set -- \ + "-Dorg.gradle.appname=$APP_BASE_NAME" \ + -classpath "$CLASSPATH" \ + org.gradle.wrapper.GradleWrapperMain \ + "$@" + +# Use "xargs" to parse quoted args. +# +# With -n1 it outputs one arg per line, with the quotes and backslashes removed. 
+# +# In Bash we could simply go: +# +# readarray ARGS < <( xargs -n1 <<<"$var" ) && +# set -- "${ARGS[@]}" "$@" +# +# but POSIX shell has neither arrays nor command substitution, so instead we +# post-process each arg (as a line of input to sed) to backslash-escape any +# character that might be a shell metacharacter, then use eval to reverse +# that process (while maintaining the separation between arguments), and wrap +# the whole thing up as a single "set" statement. +# +# This will of course break if any of these variables contains a newline or +# an unmatched quote. +# -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" +eval "set -- $( + printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | + xargs -n1 | + sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | + tr '\n' ' ' + )" '"$@"' exec "$JAVACMD" "$@" diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle index 7f1e9cb8d04b3..bbb7bf68e2ced 100644 --- a/libs/cli/build.gradle +++ b/libs/cli/build.gradle @@ -28,7 +28,7 @@ * under the License. */ apply plugin: 'opensearch.build' -apply plugin: 'nebula.optional-base' +apply plugin: 'com.netflix.nebula.optional-base' apply plugin: 'opensearch.publish' dependencies { diff --git a/libs/core/build.gradle b/libs/core/build.gradle index 374f2fe572a12..fb8bed207dbc6 100644 --- a/libs/core/build.gradle +++ b/libs/core/build.gradle @@ -30,7 +30,7 @@ import org.opensearch.gradle.info.BuildParams -apply plugin: 'nebula.optional-base' +apply plugin: 'com.netflix.nebula.optional-base' apply plugin: 'opensearch.publish' archivesBaseName = 'opensearch-core' diff --git a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java index dfadf9269a097..8aca043017e32 100644 --- a/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java +++ b/libs/geo/src/main/java/org/opensearch/geometry/GeometryCollection.java @@ -88,6 +88,15 @@ public G get(int i) { return shapes.get(i); } + /** + * Returns a {@link List} of All {@link Geometry} present in this collection. 
+ * + * @return a {@link List} of all {@link Geometry} + */ + public List<G> getAll() { + return shapes; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/libs/grok/build.gradle b/libs/grok/build.gradle index 86414d18108a1..43a55f84b9d55 100644 --- a/libs/grok/build.gradle +++ b/libs/grok/build.gradle @@ -29,9 +29,9 @@ */ dependencies { - api 'org.jruby.joni:joni:2.1.43' + api 'org.jruby.joni:joni:2.1.44' // joni dependencies: - api 'org.jruby.jcodings:jcodings:1.0.57' + api 'org.jruby.jcodings:jcodings:1.0.58' testImplementation(project(":test:framework")) { exclude group: 'org.opensearch', module: 'opensearch-grok' diff --git a/libs/grok/licenses/jcodings-1.0.57.jar.sha1 b/libs/grok/licenses/jcodings-1.0.57.jar.sha1 deleted file mode 100644 index 1a703c2644787..0000000000000 --- a/libs/grok/licenses/jcodings-1.0.57.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -603a9ceac39cbf7f6f27fe18b2fded4714319b0a \ No newline at end of file diff --git a/libs/grok/licenses/jcodings-1.0.58.jar.sha1 b/libs/grok/licenses/jcodings-1.0.58.jar.sha1 new file mode 100644 index 0000000000000..0202d24704a50 --- /dev/null +++ b/libs/grok/licenses/jcodings-1.0.58.jar.sha1 @@ -0,0 +1 @@ +dce27159dc0382e5f7518d4f3e499fc8396357ed \ No newline at end of file diff --git a/libs/grok/licenses/joni-2.1.43.jar.sha1 b/libs/grok/licenses/joni-2.1.43.jar.sha1 deleted file mode 100644 index ef5dfabb2b391..0000000000000 --- a/libs/grok/licenses/joni-2.1.43.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9a3bf154469d5ff1d1107755904279081a5fb618 \ No newline at end of file diff --git a/libs/grok/licenses/joni-2.1.44.jar.sha1 b/libs/grok/licenses/joni-2.1.44.jar.sha1 new file mode 100644 index 0000000000000..bff9ca56f7e8c --- /dev/null +++ b/libs/grok/licenses/joni-2.1.44.jar.sha1 @@ -0,0 +1 @@ +35746c2aee04ce459a2aa8dc2d626946c5dfb051 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 deleted file mode 100644 index 6e0e2cf9bf2d4..0000000000000 --- a/libs/x-content/licenses/jackson-core-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a27014716e4421684416e5fa83d896ddb87002da \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-core-2.14.1.jar.sha1 b/libs/x-content/licenses/jackson-core-2.14.1.jar.sha1 new file mode 100644 index 0000000000000..054873b60eb21 --- /dev/null +++ b/libs/x-content/licenses/jackson-core-2.14.1.jar.sha1 @@ -0,0 +1 @@ +7a07bc535ccf0b7f6929c4d0f2ab9b294ef7c4a3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 deleted file mode 100644 index a1dd86f11312d..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-cbor-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -bf43eed9de0031521107dfea41d1e5d6bf1b9639 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-cbor-2.14.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-cbor-2.14.1.jar.sha1 new file mode 100644 index 0000000000000..e1dcda6b33782 --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-cbor-2.14.1.jar.sha1 @@ -0,0 +1 @@ +04e6fbcdcd2a01e4a5cb5901338cab6199c9b26b \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 deleted file mode 100644 index 864f2da02463f..0000000000000 --- 
a/libs/x-content/licenses/jackson-dataformat-smile-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b4e03e361e2388e3a8a0b68e3b9988d3a07ee3f3 \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-smile-2.14.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-smile-2.14.1.jar.sha1 new file mode 100644 index 0000000000000..7138ebda0e78c --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-smile-2.14.1.jar.sha1 @@ -0,0 +1 @@ +656ccecc1fc85b95d13e5b8080289fc1a5e5e21e \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 deleted file mode 100644 index ba45b6520a1d7..0000000000000 --- a/libs/x-content/licenses/jackson-dataformat-yaml-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9363ded5441b1fee62d5be0604035690ca759a2a \ No newline at end of file diff --git a/libs/x-content/licenses/jackson-dataformat-yaml-2.14.1.jar.sha1 b/libs/x-content/licenses/jackson-dataformat-yaml-2.14.1.jar.sha1 new file mode 100644 index 0000000000000..300b6920dfc8d --- /dev/null +++ b/libs/x-content/licenses/jackson-dataformat-yaml-2.14.1.jar.sha1 @@ -0,0 +1 @@ +cf6d18651659a2e64301452c841e6daa62e77bf6 \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 deleted file mode 100644 index 1ac9b78b88687..0000000000000 --- a/libs/x-content/licenses/snakeyaml-1.31.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cf26b7b05fef01e7bec00cb88ab4feeeba743e12 \ No newline at end of file diff --git a/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 new file mode 100644 index 0000000000000..3216ba485951a --- /dev/null +++ b/libs/x-content/licenses/snakeyaml-1.32.jar.sha1 @@ -0,0 +1 @@ +e80612549feb5c9191c498de628c1aa80693cf0b \ No newline at end of file diff --git a/libs/x-content/src/main/java/org/opensearch/common/ParseField.java b/libs/x-content/src/main/java/org/opensearch/common/ParseField.java index 8673e25bf567b..8f97fd923b560 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/ParseField.java +++ b/libs/x-content/src/main/java/org/opensearch/common/ParseField.java @@ -68,12 +68,12 @@ public ParseField(String name, String... 
deprecatedNames) { } else { final HashSet set = new HashSet<>(); Collections.addAll(set, deprecatedNames); - this.deprecatedNames = set.toArray(new String[set.size()]); + this.deprecatedNames = set.toArray(new String[0]); } Set allNames = new HashSet<>(); allNames.add(name); Collections.addAll(allNames, this.deprecatedNames); - this.allNames = allNames.toArray(new String[allNames.size()]); + this.allNames = allNames.toArray(new String[0]); } /** diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPath.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPath.java index be7778097b45b..a11b13ec65946 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPath.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPath.java @@ -101,7 +101,7 @@ public static FilterPath[] compile(Set filters) { } } } - return paths.toArray(new FilterPath[paths.size()]); + return paths.toArray(new FilterPath[0]); } private static FilterPath parse(final String filter, final String segment) { diff --git a/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPathBasedFilter.java b/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPathBasedFilter.java index 0463caaa93118..5e402cbd495ba 100644 --- a/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPathBasedFilter.java +++ b/libs/x-content/src/main/java/org/opensearch/common/xcontent/support/filtering/FilterPathBasedFilter.java @@ -95,7 +95,7 @@ private TokenFilter evaluate(String name, FilterPath[] filters) { } if ((nextFilters != null) && (nextFilters.isEmpty() == false)) { - return new FilterPathBasedFilter(nextFilters.toArray(new FilterPath[nextFilters.size()]), inclusive); + return new FilterPathBasedFilter(nextFilters.toArray(new FilterPath[0]), inclusive); } } return NO_MATCHING; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java index 57865e15d523a..a26912733a9c6 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/CommonAnalysisModulePlugin.java @@ -124,7 +124,7 @@ import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; import org.apache.lucene.util.SetOnce; -import org.opensearch.LegacyESVersion; +import org.opensearch.Version; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -268,11 +268,18 @@ public Map> getTokenFilters() { filters.put("dutch_stem", DutchStemTokenFilterFactory::new); filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new); filters.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - deprecationLogger.deprecate( - "edgeNGram_deprecation", - "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [edge_ngram] instead." - ); + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] token filter name was deprecated pre 1.0. 
" + "Please change the filter name to [edge_ngram] for indices created in versions 3.0 or higher instead." + ); + } else { + deprecationLogger.deprecate( "edgeNGram_deprecation", "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " + "Please change the filter name to [edge_ngram] instead." ); + } return new EdgeNGramTokenFilterFactory(indexSettings, environment, name, settings); }); filters.put("elision", requiresAnalysisSettings(ElisionTokenFilterFactory::new)); @@ -347,7 +354,12 @@ public Map> getTokenizers() { tokenizers.put("simple_pattern_split", SimplePatternSplitTokenizerFactory::new); tokenizers.put("thai", ThaiTokenizerFactory::new); tokenizers.put("nGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) { + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated pre 1.0. " + + "Please change the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead." + ); + } else { deprecationLogger.deprecate( "nGram_tokenizer_deprecation", "The [nGram] tokenizer name is deprecated and will be removed in a future version. " @@ -358,7 +370,12 @@ public Map> getTokenizers() { }); tokenizers.put("ngram", NGramTokenizerFactory::new); tokenizers.put("edgeNGram", (IndexSettings indexSettings, Environment environment, String name, Settings settings) -> { - if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { + if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_3_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated pre 1.0. " + + "Please change the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead." + ); + } else { deprecationLogger.deprecate( "edgeNGram_tokenizer_deprecation", "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " @@ -485,19 +502,10 @@ public List getPreConfiguredTokenFilters() { filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer()))); filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, false, input -> new EdgeNGramTokenFilter(input, 1))); filters.add(PreConfiguredTokenFilter.openSearchVersion("edgeNGram", false, false, (reader, version) -> { - if (version.onOrAfter(LegacyESVersion.V_7_0_0)) { - throw new IllegalArgumentException( - "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " - + "Please change the filter name to [edge_ngram] instead." - ); - } else { - deprecationLogger.deprecate( - "edgeNGram_deprecation", - "The [edgeNGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [edge_ngram] instead." - ); - } - return new EdgeNGramTokenFilter(reader, 1); + throw new IllegalArgumentException( + "The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + + "Please change the filter name to [edge_ngram] instead." 
+ ); })); filters.add( PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES)) @@ -524,19 +532,10 @@ public List getPreConfiguredTokenFilters() { ); filters.add(PreConfiguredTokenFilter.singleton("ngram", false, false, reader -> new NGramTokenFilter(reader, 1, 2, false))); filters.add(PreConfiguredTokenFilter.openSearchVersion("nGram", false, false, (reader, version) -> { - if (version.onOrAfter(LegacyESVersion.V_7_0_0)) { - throw new IllegalArgumentException( - "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " - + "Please change the filter name to [ngram] instead." - ); - } else { - deprecationLogger.deprecate( - "nGram_deprecation", - "The [nGram] token filter name is deprecated and will be removed in a future version. " - + "Please change the filter name to [ngram] instead." - ); - } - return new NGramTokenFilter(reader, 1, 2, false); + throw new IllegalArgumentException( + "The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. " + + "Please change the filter name to [ngram] instead." + ); })); filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new)); filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new)); @@ -581,18 +580,22 @@ public List getPreConfiguredTokenFilters() { ) ) ); - filters.add(PreConfiguredTokenFilter.openSearchVersion("word_delimiter_graph", false, false, (input, version) -> { - boolean adjustOffsets = version.onOrAfter(LegacyESVersion.V_7_3_0); - return new WordDelimiterGraphFilter( - input, - adjustOffsets, - WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, - WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS - | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS - | WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE, - null - ); - })); + filters.add( + PreConfiguredTokenFilter.openSearchVersion( + "word_delimiter_graph", + false, + false, + (input, version) -> new WordDelimiterGraphFilter( + input, + true, + WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE, + WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS + | WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS + | WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE, + null + ) + ) + ); return filters; } @@ -606,12 +609,12 @@ public List getPreConfiguredTokenizers() { tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new)); tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new)); - tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edge_ngram", (version) -> { - if (version.onOrAfter(LegacyESVersion.V_7_3_0)) { - return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - } - return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); - })); + tokenizers.add( + PreConfiguredTokenizer.openSearchVersion( + "edge_ngram", + (version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE) + ) + ); tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1))); 
tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new)); // TODO deprecate and remove in API @@ -620,7 +623,12 @@ public List getPreConfiguredTokenizers() { // Temporary shim for aliases. TODO deprecate after they are moved tokenizers.add(PreConfiguredTokenizer.openSearchVersion("nGram", (version) -> { - if (version.onOrAfter(LegacyESVersion.V_7_6_0)) { + if (version.onOrAfter(Version.V_3_0_0)) { + throw new IllegalArgumentException( + "The [nGram] tokenizer name was deprecated pre 1.0. " + + "Please change the tokenizer name to [ngram] for indices created in versions 3.0 or higher instead." + ); + } else { deprecationLogger.deprecate( "nGram_tokenizer_deprecation", "The [nGram] tokenizer name is deprecated and will be removed in a future version. " @@ -630,17 +638,19 @@ public List getPreConfiguredTokenizers() { return new NGramTokenizer(); })); tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edgeNGram", (version) -> { - if (version.onOrAfter(LegacyESVersion.V_7_6_0)) { + if (version.onOrAfter(Version.V_3_0_0)) { + throw new IllegalArgumentException( + "The [edgeNGram] tokenizer name was deprecated pre 1.0. " + + "Please change the tokenizer name to [edge_ngram] for indices created in versions 3.0 or higher instead." + ); + } else { deprecationLogger.deprecate( "edgeNGram_tokenizer_deprecation", "The [edgeNGram] tokenizer name is deprecated and will be removed in a future version. " + "Please change the tokenizer name to [edge_ngram] instead." ); } - if (version.onOrAfter(LegacyESVersion.V_7_3_0)) { - return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); - } - return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE); + return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); })); tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new)); diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java index 0d1a2b185d1d3..7c1c15ef74e30 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/ConcatenateGraphTokenFilterFactory.java @@ -11,7 +11,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.ConcatenateGraphFilter; import org.apache.lucene.util.automaton.TooComplexToDeterminizeException; -import org.opensearch.LegacyESVersion; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; @@ -24,11 +23,6 @@ * max_graph_expansions is 100 as the default value of 10_000 seems to be unnecessarily large and preserve_separator is false. * *
<ul>
- *     <li>preserve_separator:
- *     For LegacyESVersion lesser than {@link LegacyESVersion#V_7_6_0} i.e. lucene versions lesser
- *     than {@link org.apache.lucene.util.Version#LUCENE_8_4_0}
- *     Whether {@link ConcatenateGraphFilter#SEP_LABEL} should separate the input tokens in the concatenated token.
- *     </li>
*     <li>token_separator:
*     Separator to use for concatenation. Must be a String with a single character or empty.
*     If not present, {@link ConcatenateGraphTokenFilterFactory#DEFAULT_TOKEN_SEPARATOR} will be used.
@@ -59,17 +53,11 @@ public class ConcatenateGraphTokenFilterFactory extends AbstractTokenFilterFacto
ConcatenateGraphTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
- if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_6_0)) { // i.e. Lucene 8.4.0
- String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
- if (separator.length() > 1) {
- throw new IllegalArgumentException("token_separator must be either empty or a single character");
- }
- tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
- } else {
- boolean preserveSep = settings.getAsBoolean("preserve_separator", ConcatenateGraphFilter.DEFAULT_PRESERVE_SEP);
- tokenSeparator = preserveSep ? ConcatenateGraphFilter.DEFAULT_TOKEN_SEPARATOR : null;
+ String separator = settings.get("token_separator", DEFAULT_TOKEN_SEPARATOR);
+ if (separator.length() > 1) {
+ throw new IllegalArgumentException("token_separator must be either empty or a single character");
}
-
+ tokenSeparator = separator.length() == 0 ? null : separator.charAt(0); // null means no separator while concatenating
maxGraphExpansions = settings.getAsInt("max_graph_expansions", DEFAULT_MAX_GRAPH_EXPANSIONS);
preservePositionIncrements = settings.getAsBoolean("preserve_position_increments", DEFAULT_PRESERVE_POSITION_INCREMENTS);
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/EnglishPluralStemFilter.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/EnglishPluralStemFilter.java
new file mode 100644
index 0000000000000..c30318a31527b
--- /dev/null
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/EnglishPluralStemFilter.java
@@ -0,0 +1,182 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/*
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */ + +package org.opensearch.analysis.common; + +import org.apache.lucene.analysis.TokenFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.en.EnglishMinimalStemFilter; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.KeywordAttribute; + +import java.io.IOException; + +public final class EnglishPluralStemFilter extends TokenFilter { + private final EnglishPluralStemmer stemmer = new EnglishPluralStemmer(); + private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + private final KeywordAttribute keywordAttr = addAttribute(KeywordAttribute.class); + + public EnglishPluralStemFilter(TokenStream input) { + super(input); + } + + @Override + public boolean incrementToken() throws IOException { + if (input.incrementToken()) { + if (!keywordAttr.isKeyword()) { + final int newlen = stemmer.stem(termAtt.buffer(), termAtt.length()); + termAtt.setLength(newlen); + } + return true; + } else { + return false; + } + } + + /** + * Plural stemmer for English based on the {@link EnglishMinimalStemFilter} + *
<p>
+ * This stemmer removes plurals but beyond EnglishMinimalStemFilter adds
+ * four new suffix rules to remove dangling e characters:
+ * <ul>
+ * <li>xes - "boxes" becomes "box"</li>
+ * <li>sses - "dresses" becomes "dress"</li>
+ * <li>shes - "dishes" becomes "dish"</li>
+ * <li>tches - "watches" becomes "watch"</li>
+ * </ul>
+ * See https://github.com/elastic/elasticsearch/issues/42892
+ * <p>
+ * In addition the s stemmer logic is amended so that
+ * <ul>
+ * <li>ees->ee so that bees matches bee</li>
+ * <li>ies->y only on longer words so that ties matches tie</li>
+ * <li>oes->o rule so that tomatoes matches tomato but retains e for some words, eg shoes to shoe</li>
+ * </ul>
    + */ + public static class EnglishPluralStemmer { + + // Words ending in oes that retain the e when stemmed + public static final char[][] oesExceptions = { "shoes".toCharArray(), "canoes".toCharArray(), "oboes".toCharArray() }; + // Words ending in ches that retain the e when stemmed + public static final char[][] chesExceptions = { + "cliches".toCharArray(), + "avalanches".toCharArray(), + "mustaches".toCharArray(), + "moustaches".toCharArray(), + "quiches".toCharArray(), + "headaches".toCharArray(), + "heartaches".toCharArray(), + "porsches".toCharArray(), + "tranches".toCharArray(), + "caches".toCharArray() }; + + @SuppressWarnings("fallthrough") + public int stem(char s[], int len) { + if (len < 3 || s[len - 1] != 's') return len; + + switch (s[len - 2]) { + case 'u': + case 's': + return len; + case 'e': + // Modified ies->y logic from original s-stemmer - only work on strings > 4 + // so spies -> spy still but pies->pie. + // The original code also special-cased aies and eies for no good reason as far as I can tell. + // ( no words of consequence - eg http://www.thefreedictionary.com/words-that-end-in-aies ) + if (len > 4 && s[len - 3] == 'i') { + s[len - 3] = 'y'; + return len - 2; + } + + // Suffix rules to remove any dangling "e" + if (len > 3) { + // xes (but >1 prefix so we can stem "boxes->box" but keep "axes->axe") + if (len > 4 && s[len - 3] == 'x') { + return len - 2; + } + // oes + if (len > 3 && s[len - 3] == 'o') { + if (isException(s, len, oesExceptions)) { + // Only remove the S + return len - 1; + } + // Remove the es + return len - 2; + } + if (len > 4) { + // shes/sses + if (s[len - 4] == 's' && (s[len - 3] == 'h' || s[len - 3] == 's')) { + return len - 2; + } + + // ches + if (len > 4) { + if (s[len - 4] == 'c' && s[len - 3] == 'h') { + if (isException(s, len, chesExceptions)) { + // Only remove the S + return len - 1; + } + // Remove the es + return len - 2; + + } + } + } + } + + default: + return len - 1; + } + } + + private boolean isException(char[] s, int len, char[][] exceptionsList) { + for (char[] oesRule : exceptionsList) { + int rulePos = oesRule.length - 1; + int sPos = len - 1; + boolean matched = true; + while (rulePos >= 0 && sPos >= 0) { + if (oesRule[rulePos] != s[sPos]) { + matched = false; + break; + } + rulePos--; + sPos--; + } + if (matched) { + return true; + } + } + return false; + } + } + +} diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java index b46bef3e6c563..25bf58409928e 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/HyphenationCompoundWordTokenFilterFactory.java @@ -32,12 +32,14 @@ package org.opensearch.analysis.common; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.compound.HyphenationCompoundWordTokenFilter; import org.apache.lucene.analysis.compound.hyphenation.HyphenationTree; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; +import org.opensearch.index.analysis.Analysis; import org.xml.sax.InputSource; import java.io.InputStream; @@ -61,13 +63,15 @@ public class HyphenationCompoundWordTokenFilterFactory extends 
AbstractCompoundW
throw new IllegalArgumentException("hyphenation_patterns_path is a required setting.");
}
- Path hyphenationPatternsFile = env.configDir().resolve(hyphenationPatternsPath);
+ Path hyphenationPatternsFile = Analysis.resolveAnalyzerPath(env, hyphenationPatternsPath);
try {
InputStream in = Files.newInputStream(hyphenationPatternsFile);
hyphenationTree = HyphenationCompoundWordTokenFilter.getHyphenationTree(new InputSource(in));
} catch (Exception e) {
- throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.", e);
+ LogManager.getLogger(HyphenationCompoundWordTokenFilterFactory.class)
+ .error("Exception while reading hyphenation_patterns_path ", e);
+ throw new IllegalArgumentException("Exception while reading hyphenation_patterns_path.");
}
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
index 7200b69135a30..d6d9f8975f2fc 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/MappingCharFilterFactory.java
@@ -39,6 +39,7 @@
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.AbstractCharFilterFactory;
import org.opensearch.index.analysis.Analysis;
+import org.opensearch.index.analysis.MappingRule;
import org.opensearch.index.analysis.NormalizingCharFilterFactory;
import java.io.Reader;
@@ -53,13 +54,13 @@ public class MappingCharFilterFactory extends AbstractCharFilterFactory implemen
MappingCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name);
- List<String> rules = Analysis.getWordList(env, settings, "mappings");
+ List<MappingRule<String, String>> rules = Analysis.parseWordList(env, settings, "mappings", this::parse);
if (rules == null) {
throw new IllegalArgumentException("mapping requires either `mappings` or `mappings_path` to be configured");
}
NormalizeCharMap.Builder normMapBuilder = new NormalizeCharMap.Builder();
- parseRules(rules, normMapBuilder);
+ rules.forEach(rule -> normMapBuilder.add(rule.getLeft(), rule.getRight()));
normMap = normMapBuilder.build();
}
@@ -71,18 +72,13 @@ public Reader create(Reader tokenStream) {
// source => target
private static Pattern rulePattern = Pattern.compile("(.*)\\s*=>\\s*(.*)\\s*$");
- /**
- * parses a list of MappingCharFilter style rules into a normalize char map
- */
- private void parseRules(List<String> rules, NormalizeCharMap.Builder map) {
- for (String rule : rules) {
- Matcher m = rulePattern.matcher(rule);
- if (!m.find()) throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]");
- String lhs = parseString(m.group(1).trim());
- String rhs = parseString(m.group(2).trim());
- if (lhs == null || rhs == null) throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]. Illegal mapping.");
- map.add(lhs, rhs);
- }
+ private MappingRule<String, String> parse(String rule) {
+ Matcher m = rulePattern.matcher(rule);
+ if (!m.find()) throw new RuntimeException("Invalid mapping rule : [" + rule + "]");
+ String lhs = parseString(m.group(1).trim());
+ String rhs = parseString(m.group(2).trim());
+ if (lhs == null || rhs == null) throw new RuntimeException("Invalid mapping rule: [" + rule + "].
Illegal mapping."); + return new MappingRule<>(lhs, rhs); } char[] out = new char[256]; diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/NGramTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/NGramTokenFilterFactory.java index 218bb74b84667..a6adf680a454c 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/NGramTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/NGramTokenFilterFactory.java @@ -34,7 +34,6 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.ngram.NGramTokenFilter; -import org.opensearch.LegacyESVersion; import org.opensearch.common.settings.Settings; import org.opensearch.env.Environment; import org.opensearch.index.IndexSettings; @@ -54,25 +53,15 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory { this.maxGram = settings.getAsInt("max_gram", 2); int ngramDiff = maxGram - minGram; if (ngramDiff > maxAllowedNgramDiff) { - if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) { - throw new IllegalArgumentException( - "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" - + maxAllowedNgramDiff - + "] but was [" - + ngramDiff - + "]. This limit can be set by changing the [" - + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() - + "] index level setting." - ); - } else { - deprecationLogger.deprecate( - "ngram_big_difference", - "Deprecated big difference between max_gram and min_gram in NGram Tokenizer," - + "expected difference must be less than or equal to: [" - + maxAllowedNgramDiff - + "]" - ); - } + throw new IllegalArgumentException( + "The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: [" + + maxAllowedNgramDiff + + "] but was [" + + ngramDiff + + "]. This limit can be set by changing the [" + + IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey() + + "] index level setting." 
+ );
}
preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, false);
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactory.java
index 89f0766542296..bdd6e01261443 100644
--- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactory.java
@@ -40,24 +40,31 @@
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.AbstractTokenFilterFactory;
import org.opensearch.index.analysis.Analysis;
+import org.opensearch.index.analysis.MappingRule;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
public class StemmerOverrideTokenFilterFactory extends AbstractTokenFilterFactory {
+ private static final String MAPPING_SEPARATOR = "=>";
private final StemmerOverrideMap overrideMap;
StemmerOverrideTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) throws IOException {
super(indexSettings, name, settings);
- List<String> rules = Analysis.getWordList(env, settings, "rules");
+ List<MappingRule<List<String>, String>> rules = Analysis.parseWordList(env, settings, "rules", this::parse);
if (rules == null) {
throw new IllegalArgumentException("stemmer override filter requires either `rules` or `rules_path` to be configured");
}
StemmerOverrideFilter.Builder builder = new StemmerOverrideFilter.Builder(false);
- parseRules(rules, builder, "=>");
+ for (MappingRule<List<String>, String> rule : rules) {
+ for (String key : rule.getLeft()) {
+ builder.add(key, rule.getRight());
+ }
+ }
overrideMap = builder.build();
}
@@ -67,27 +74,26 @@ public TokenStream create(TokenStream tokenStream) {
return new StemmerOverrideFilter(tokenStream, overrideMap);
}
- static void parseRules(List<String> rules, StemmerOverrideFilter.Builder builder, String mappingSep) {
- for (String rule : rules) {
- String[] sides = rule.split(mappingSep, -1);
- if (sides.length != 2) {
- throw new RuntimeException("Invalid Keyword override Rule:" + rule);
- }
+ private MappingRule<List<String>, String> parse(String rule) {
+ String[] sides = rule.split(MAPPING_SEPARATOR, -1);
+ if (sides.length != 2) {
+ throw new RuntimeException("Invalid keyword override rule: " + rule);
+ }
- String[] keys = sides[0].split(",", -1);
- String override = sides[1].trim();
- if (override.isEmpty() || override.indexOf(',') != -1) {
- throw new RuntimeException("Invalid Keyword override Rule:" + rule);
- }
+ String[] keys = sides[0].split(",", -1);
+ String override = sides[1].trim();
+ if (override.isEmpty() || override.indexOf(',') != -1) {
+ throw new RuntimeException("Invalid keyword override rule: " + rule);
+ }
- for (String key : keys) {
- String trimmedKey = key.trim();
- if (trimmedKey.isEmpty()) {
- throw new RuntimeException("Invalid Keyword override Rule:" + rule);
- }
- builder.add(trimmedKey, override);
+ List<String> trimmedKeys = new ArrayList<>();
+ for (String key : keys) {
+ String trimmedKey = key.trim();
+ if (trimmedKey.isEmpty()) {
+ throw new RuntimeException("Invalid keyword override rule: " + rule);
}
+ trimmedKeys.add(trimmedKey);
}
+ return new MappingRule<>(trimmedKeys, override);
}
-
}
diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerTokenFilterFactory.java
b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerTokenFilterFactory.java index 5d96f01265cf6..fc045447e159e 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/StemmerTokenFilterFactory.java @@ -154,6 +154,8 @@ public TokenStream create(TokenStream tokenStream) { return new SnowballFilter(tokenStream, new EnglishStemmer()); } else if ("minimal_english".equalsIgnoreCase(language) || "minimalEnglish".equalsIgnoreCase(language)) { return new EnglishMinimalStemFilter(tokenStream); + } else if ("plural_english".equalsIgnoreCase(language) || "pluralEnglish".equalsIgnoreCase(language)) { + return new EnglishPluralStemFilter(tokenStream); } else if ("possessive_english".equalsIgnoreCase(language) || "possessiveEnglish".equalsIgnoreCase(language)) { return new EnglishPossessiveFilter(tokenStream); diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SynonymTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SynonymTokenFilterFactory.java index dc6b5b2dd8b7b..01a65e87d7466 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SynonymTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/SynonymTokenFilterFactory.java @@ -32,6 +32,7 @@ package org.opensearch.analysis.common; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.synonym.SynonymFilter; @@ -155,14 +156,15 @@ SynonymMap buildSynonyms(Analyzer analyzer, Reader rules) { } return parser.build(); } catch (Exception e) { - throw new IllegalArgumentException("failed to build synonyms", e); + LogManager.getLogger(SynonymTokenFilterFactory.class).error("Failed to build synonyms: ", e); + throw new IllegalArgumentException("Failed to build synonyms"); } } Reader getRulesFromSettings(Environment env) { Reader rulesReader; if (settings.getAsList("synonyms", null) != null) { - List rulesList = Analysis.getWordList(env, settings, "synonyms"); + List rulesList = Analysis.parseWordList(env, settings, "synonyms", s -> s); StringBuilder sb = new StringBuilder(); for (String line : rulesList) { sb.append(line).append(System.lineSeparator()); diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java index 31d52d030cb71..51ac3141fd465 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterGraphTokenFilterFactory.java @@ -41,6 +41,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.AbstractTokenFilterFactory; import org.opensearch.index.analysis.Analysis; +import org.opensearch.index.analysis.MappingRule; import org.opensearch.index.analysis.TokenFilterFactory; import java.util.List; @@ -73,7 +74,12 @@ public WordDelimiterGraphTokenFilterFactory(IndexSettings indexSettings, Environ // . 
=> DIGIT // \u002C => DIGIT // \u200D => ALPHANUM - List charTypeTableValues = Analysis.getWordList(env, settings, "type_table"); + List> charTypeTableValues = Analysis.parseWordList( + env, + settings, + "type_table", + WordDelimiterTokenFilterFactory::parse + ); if (charTypeTableValues == null) { this.charTypeTable = WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE; } else { diff --git a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactory.java index d40acfa05dd21..96e50206fb53d 100644 --- a/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/opensearch/analysis/common/WordDelimiterTokenFilterFactory.java @@ -41,6 +41,7 @@ import org.opensearch.index.IndexSettings; import org.opensearch.index.analysis.AbstractTokenFilterFactory; import org.opensearch.index.analysis.Analysis; +import org.opensearch.index.analysis.MappingRule; import org.opensearch.index.analysis.TokenFilterFactory; import java.util.Collection; @@ -76,7 +77,12 @@ public WordDelimiterTokenFilterFactory(IndexSettings indexSettings, Environment // . => DIGIT // \u002C => DIGIT // \u200D => ALPHANUM - List charTypeTableValues = Analysis.getWordList(env, settings, "type_table"); + List> charTypeTableValues = Analysis.parseWordList( + env, + settings, + "type_table", + WordDelimiterTokenFilterFactory::parse + ); if (charTypeTableValues == null) { this.charTypeTable = WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE; } else { @@ -127,19 +133,23 @@ public int getFlag(int flag, Settings settings, String key, boolean defaultValue // source => type private static Pattern typePattern = Pattern.compile("(.*)\\s*=>\\s*(.*)\\s*$"); + static MappingRule parse(String rule) { + Matcher m = typePattern.matcher(rule); + if (!m.find()) throw new RuntimeException("Invalid mapping rule: [" + rule + "]"); + String lhs = parseString(m.group(1).trim()); + Byte rhs = parseType(m.group(2).trim()); + if (lhs.length() != 1) throw new RuntimeException("Invalid mapping rule: [" + rule + "]. Only a single character is allowed."); + if (rhs == null) throw new RuntimeException("Invalid mapping rule: [" + rule + "]. Illegal type."); + return new MappingRule<>(lhs.charAt(0), rhs); + } + /** * parses a list of MappingCharFilter style rules into a custom byte[] type table */ - static byte[] parseTypes(Collection rules) { + static byte[] parseTypes(Collection> rules) { SortedMap typeMap = new TreeMap<>(); - for (String rule : rules) { - Matcher m = typePattern.matcher(rule); - if (!m.find()) throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]"); - String lhs = parseString(m.group(1).trim()); - Byte rhs = parseType(m.group(2).trim()); - if (lhs.length() != 1) throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]. Only a single character is allowed."); - if (rhs == null) throw new RuntimeException("Invalid Mapping Rule : [" + rule + "]. 
Illegal type."); - typeMap.put(lhs.charAt(0), rhs); + for (MappingRule rule : rules) { + typeMap.put(rule.getLeft(), rule.getRight()); } // ensure the table is always at least as big as DEFAULT_WORD_DELIM_TABLE for performance diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java index 829ace512b5c8..94c7d63f2bee7 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/BaseWordDelimiterTokenFilterFactoryTestCase.java @@ -195,4 +195,24 @@ public void testStemEnglishPossessive() throws IOException { tokenizer.setReader(new StringReader(source)); assertTokenStreamContents(tokenFilter.create(tokenizer), expected); } + + private void createTokenFilterFactoryWithTypeTable(String[] rules) throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.filter.my_word_delimiter.type", type) + .putList("index.analysis.filter.my_word_delimiter.type_table", rules) + .put("index.analysis.filter.my_word_delimiter.catenate_words", "true") + .put("index.analysis.filter.my_word_delimiter.generate_word_parts", "true") + .build(), + new CommonAnalysisModulePlugin() + ); + analysis.tokenFilter.get("my_word_delimiter"); + } + + public void testTypeTableParsingError() { + String[] rules = { "# This is a comment", "$ => DIGIT", "\\u200D => ALPHANUM", "abc => ALPHA" }; + RuntimeException ex = expectThrows(RuntimeException.class, () -> createTokenFilterFactoryWithTypeTable(rules)); + assertEquals("Line [4]: Invalid mapping rule: [abc => ALPHA]. 
Only a single character is allowed.", ex.getMessage()); + } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java index 34fdec4135bfe..ae3ebde99bd98 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/EdgeNGramTokenizerTests.java @@ -51,6 +51,9 @@ import java.io.StringReader; import java.util.Collections; +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; + public class EdgeNGramTokenizerTests extends OpenSearchTokenStreamTestCase { private IndexAnalyzers buildAnalyzers(Version version, String tokenizer) throws IOException { @@ -76,21 +79,28 @@ public void testPreConfiguredTokenizer() throws IOException { } } - // Check deprecated name as well, needs version before 8.0 because throws IAE after that + // Check deprecated name as well, needs version before 3.0 because throws IAE after that { try ( IndexAnalyzers indexAnalyzers = buildAnalyzers( - VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT), + VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, VersionUtils.getPreviousVersion(Version.V_3_0_0)), "edgeNGram" ) ) { NamedAnalyzer analyzer = indexAnalyzers.get("my_analyzer"); assertNotNull(analyzer); assertAnalyzesTo(analyzer, "test", new String[] { "t", "te" }); - } } + // Check IAE from 3.0 onward + { + final IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> buildAnalyzers(VersionUtils.randomVersionBetween(random(), Version.V_3_0_0, Version.CURRENT), "edgeNGram") + ); + assertThat(e, hasToString(containsString("The [edgeNGram] tokenizer name was deprecated pre 1.0."))); + } } public void testCustomTokenChars() throws IOException { diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java new file mode 100644 index 0000000000000..7d059ff9ce1da --- /dev/null +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/MappingCharFilterFactoryTests.java @@ -0,0 +1,70 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.analysis.common; + +import org.apache.lucene.analysis.CharFilter; +import org.opensearch.common.settings.Settings; +import org.opensearch.env.Environment; +import org.opensearch.index.analysis.AnalysisTestsHelper; +import org.opensearch.index.analysis.CharFilterFactory; +import org.opensearch.test.OpenSearchTestCase; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Arrays; + +public class MappingCharFilterFactoryTests extends OpenSearchTestCase { + public static CharFilterFactory create(String... 
rules) throws IOException { + OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings( + Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard") + .put("index.analysis.analyzer.my_analyzer.char_filter", "my_mappings_char_filter") + .put("index.analysis.char_filter.my_mappings_char_filter.type", "mapping") + .putList("index.analysis.char_filter.my_mappings_char_filter.mappings", rules) + .build(), + new CommonAnalysisModulePlugin() + ); + + return analysis.charFilter.get("my_mappings_char_filter"); + } + + public void testRulesOk() throws IOException { + MappingCharFilterFactory mappingCharFilterFactory = (MappingCharFilterFactory) create( + "# This is a comment", + ":) => _happy_", + ":( => _sad_" + ); + CharFilter inputReader = (CharFilter) mappingCharFilterFactory.create(new StringReader("I'm so :)")); + char[] tempBuff = new char[14]; + StringBuilder output = new StringBuilder(); + while (true) { + int length = inputReader.read(tempBuff); + if (length == -1) break; + output.append(tempBuff, 0, length); + } + assertEquals("I'm so _happy_", output.toString()); + } + + public void testRuleError() { + for (String rule : Arrays.asList( + "", // empty + "a", // no arrow + "a:>b" // invalid delimiter + )) { + RuntimeException ex = expectThrows(RuntimeException.class, () -> create(rule)); + assertEquals("Line [1]: Invalid mapping rule : [" + rule + "]", ex.getMessage()); + } + } + + public void testRulePartError() { + RuntimeException ex = expectThrows(RuntimeException.class, () -> create("# This is a comment", ":) => _happy_", "a:b")); + assertEquals("Line [3]: Invalid mapping rule : [a:b]", ex.getMessage()); + } +} diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java index e923e11ffc349..0fcb2fb8d51ca 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerOverrideTokenFilterFactoryTests.java @@ -46,7 +46,6 @@ import java.io.IOException; import java.io.StringReader; import java.util.Arrays; -import java.util.Locale; public class StemmerOverrideTokenFilterFactoryTests extends OpenSearchTokenStreamTestCase { @Rule @@ -76,11 +75,8 @@ public void testRuleError() { "=>a", // no keys "a,=>b" // empty key )) { - expectThrows( - RuntimeException.class, - String.format(Locale.ROOT, "Should fail for invalid rule: '%s'", rule), - () -> create(rule) - ); + RuntimeException ex = expectThrows(RuntimeException.class, () -> create(rule)); + assertEquals("Line [1]: Invalid keyword override rule: " + rule, ex.getMessage()); } } @@ -90,4 +86,9 @@ public void testRulesOk() throws IOException { tokenizer.setReader(new StringReader("a b c")); assertTokenStreamContents(tokenFilterFactory.create(tokenizer), new String[] { "1", "2", "2" }); } + + public void testRulePartError() { + RuntimeException ex = expectThrows(RuntimeException.class, () -> create("a => 1", "b,c => 2", "# This is a comment", "=>a=>b")); + assertEquals("Line [4]: Invalid keyword override rule: =>a=>b", ex.getMessage()); + } } diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java 
b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java
index 2cd7b74cd8c35..18d3727475065 100644
--- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/StemmerTokenFilterFactoryTests.java
@@ -111,6 +111,83 @@ public void testPorter2FilterFactory() throws IOException {
}
}
+ public void testEnglishPluralFilter() throws IOException {
+ int iters = scaledRandomIntBetween(20, 100);
+ for (int i = 0; i < iters; i++) {
+
+ Version v = VersionUtils.randomVersion(random());
+ Settings settings = Settings.builder()
+ .put("index.analysis.filter.my_plurals.type", "stemmer")
+ .put("index.analysis.filter.my_plurals.language", "plural_english")
+ .put("index.analysis.analyzer.my_plurals.tokenizer", "whitespace")
+ .put("index.analysis.analyzer.my_plurals.filter", "my_plurals")
+ .put(SETTING_VERSION_CREATED, v)
+ .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
+ .build();
+
+ OpenSearchTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, PLUGIN);
+ TokenFilterFactory tokenFilter = analysis.tokenFilter.get("my_plurals");
+ assertThat(tokenFilter, instanceOf(StemmerTokenFilterFactory.class));
+ Tokenizer tokenizer = new WhitespaceTokenizer();
+ tokenizer.setReader(new StringReader("dresses"));
+ TokenStream create = tokenFilter.create(tokenizer);
+ IndexAnalyzers indexAnalyzers = analysis.indexAnalyzers;
+ NamedAnalyzer analyzer = indexAnalyzers.get("my_plurals");
+ assertThat(create, instanceOf(EnglishPluralStemFilter.class));
+
+ // Check old EnglishMinimalStemmer ("S" stemmer) logic
+ assertAnalyzesTo(analyzer, "phones", new String[] { "phone" });
+ assertAnalyzesTo(analyzer, "horses", new String[] { "horse" });
+ assertAnalyzesTo(analyzer, "cameras", new String[] { "camera" });
+
+ // The original s stemmer gives up on stemming oes words because English has no fixed rule for the stem
+ // (see https://howtospell.co.uk/making-O-words-plural )
+ // This stemmer removes the es but retains e for a small number of exceptions
+ assertAnalyzesTo(analyzer, "mosquitoes", new String[] { "mosquito" });
+ assertAnalyzesTo(analyzer, "heroes", new String[] { "hero" });
+ // oes exceptions that retain the e.
+ assertAnalyzesTo(analyzer, "shoes", new String[] { "shoe" }); + assertAnalyzesTo(analyzer, "horseshoes", new String[] { "horseshoe" }); + assertAnalyzesTo(analyzer, "canoes", new String[] { "canoe" }); + assertAnalyzesTo(analyzer, "oboes", new String[] { "oboe" }); + + // Check improved EnglishPluralStemFilter logic + // sses + assertAnalyzesTo(analyzer, "dresses", new String[] { "dress" }); + assertAnalyzesTo(analyzer, "possess", new String[] { "possess" }); + assertAnalyzesTo(analyzer, "possesses", new String[] { "possess" }); + // xes + assertAnalyzesTo(analyzer, "boxes", new String[] { "box" }); + assertAnalyzesTo(analyzer, "axes", new String[] { "axe" }); + // shes + assertAnalyzesTo(analyzer, "dishes", new String[] { "dish" }); + assertAnalyzesTo(analyzer, "washes", new String[] { "wash" }); + // ees + assertAnalyzesTo(analyzer, "employees", new String[] { "employee" }); + assertAnalyzesTo(analyzer, "bees", new String[] { "bee" }); + // tch + assertAnalyzesTo(analyzer, "watches", new String[] { "watch" }); + assertAnalyzesTo(analyzer, "itches", new String[] { "itch" }); + // ies->y but only for length >4 + assertAnalyzesTo(analyzer, "spies", new String[] { "spy" }); + assertAnalyzesTo(analyzer, "ties", new String[] { "tie" }); + assertAnalyzesTo(analyzer, "lies", new String[] { "lie" }); + assertAnalyzesTo(analyzer, "pies", new String[] { "pie" }); + assertAnalyzesTo(analyzer, "dies", new String[] { "die" }); + + assertAnalyzesTo(analyzer, "lunches", new String[] { "lunch" }); + assertAnalyzesTo(analyzer, "avalanches", new String[] { "avalanche" }); + assertAnalyzesTo(analyzer, "headaches", new String[] { "headache" }); + assertAnalyzesTo(analyzer, "caches", new String[] { "cache" }); + assertAnalyzesTo(analyzer, "beaches", new String[] { "beach" }); + assertAnalyzesTo(analyzer, "britches", new String[] { "britch" }); + assertAnalyzesTo(analyzer, "cockroaches", new String[] { "cockroach" }); + assertAnalyzesTo(analyzer, "cliches", new String[] { "cliche" }); + assertAnalyzesTo(analyzer, "quiches", new String[] { "quiche" }); + + } + } + public void testMultipleLanguagesThrowsException() throws IOException { Version v = VersionUtils.randomVersion(random()); Settings settings = Settings.builder() diff --git a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java index c078c7dabc63a..2618c106d9275 100644 --- a/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java +++ b/modules/analysis-common/src/test/java/org/opensearch/analysis/common/SynonymsAnalysisTests.java @@ -116,7 +116,7 @@ public void testSynonymWordDeleteByAnalyzer() throws IOException { fail("fail! due to synonym word deleted by analyzer"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("failed to build synonyms")); + assertThat(e.getMessage(), startsWith("Failed to build synonyms")); } } @@ -137,7 +137,7 @@ public void testExpandSynonymWordDeleteByAnalyzer() throws IOException { fail("fail! 
due to synonym word deleted by analyzer"); } catch (Exception e) { assertThat(e, instanceOf(IllegalArgumentException.class)); - assertThat(e.getMessage(), startsWith("failed to build synonyms")); + assertThat(e.getMessage(), startsWith("Failed to build synonyms")); } } @@ -230,7 +230,7 @@ public void testChainedSynonymFilters() throws IOException { public void testShingleFilters() { Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT)) + .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT)) .put("path.home", createTempDir().toString()) .put("index.analysis.filter.synonyms.type", "synonym") .putList("index.analysis.filter.synonyms.synonyms", "programmer, developer") @@ -289,7 +289,7 @@ public void testPreconfiguredTokenFilters() throws IOException { ); Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT)) + .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT)) .put("path.home", createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); @@ -313,7 +313,7 @@ public void testPreconfiguredTokenFilters() throws IOException { public void testDisallowedTokenFilters() throws IOException { Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT)) + .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT)) .put("path.home", createTempDir().toString()) .putList("common_words", "a", "b") .put("output_unigrams", "true") diff --git a/modules/geo/build.gradle b/modules/geo/build.gradle index 7f687a414e566..6b00709f08bf9 100644 --- a/modules/geo/build.gradle +++ b/modules/geo/build.gradle @@ -40,6 +40,7 @@ restResources { includeCore '_common', 'indices', 'index', 'search', 'bulk' } } + artifacts { restTests(project.file('src/yamlRestTest/resources/rest-api-spec/test')) } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java index 7dc6f2c1b89b7..31ff2ef4689bd 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/GeoModulePluginIntegTestCase.java @@ -21,6 +21,9 @@ * for the test cluster on which integration tests are running. */ public abstract class GeoModulePluginIntegTestCase extends OpenSearchIntegTestCase { + + protected static final double GEOHASH_TOLERANCE = 1E-5D; + /** * Returns a collection of plugins that should be loaded on each node for doing the integration tests. As this * geo plugin is not getting packaged in a zip, we need to load it before the tests run. 
diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java index 2ac73728b2dab..9bd082a6e1ffe 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/MissingValueIT.java @@ -8,52 +8,149 @@ package org.opensearch.geo.search; +import org.hamcrest.MatcherAssert; +import org.junit.Before; import org.opensearch.action.search.SearchResponse; +import org.opensearch.common.geo.GeoPoint; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.tests.common.AggregationBuilders; +import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.test.OpenSearchIntegTestCase; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; import static org.hamcrest.Matchers.closeTo; +/** + * Tests to validate if user specified a missingValue in the input while doing the aggregation + */ @OpenSearchIntegTestCase.SuiteScopeTestCase public class MissingValueIT extends GeoModulePluginIntegTestCase { + private static final String INDEX_NAME = "idx"; + private static final String GEO_SHAPE_FIELD_NAME = "myshape"; + private static final String GEO_SHAPE_FIELD_TYPE = "type=geo_shape"; + private static final String AGGREGATION_NAME = "bounds"; + private static final String NON_EXISTENT_FIELD = "non_existing_field"; + private static final WellKnownText WKT = WellKnownText.INSTANCE; + private static Geometry indexedGeometry; + private static GeoPoint indexedGeoPoint; + private GeoPoint bottomRight; + private GeoPoint topLeft; + @Override protected void setupSuiteScopeCluster() throws Exception { - assertAcked(prepareCreate("idx").setMapping("date", "type=date", "location", "type=geo_point", "str", "type=keyword").get()); + assertAcked( + prepareCreate(INDEX_NAME).setMapping( + "date", + "type=date", + "location", + "type=geo_point", + "str", + "type=keyword", + GEO_SHAPE_FIELD_NAME, + GEO_SHAPE_FIELD_TYPE + ).get() + ); + indexedGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + indexedGeoPoint = RandomGeoGenerator.randomPoint(random()); + assert indexedGeometry != null; indexRandom( true, - client().prepareIndex("idx").setId("1").setSource(), - client().prepareIndex("idx") + client().prepareIndex(INDEX_NAME).setId("1").setSource(), + client().prepareIndex(INDEX_NAME) .setId("2") - .setSource("str", "foo", "long", 3L, "double", 5.5, "date", "2015-05-07", "location", "1,2") + .setSource( + "str", + "foo", + "long", + 3L, + "double", + 5.5, + "date", + "2015-05-07", + "location", + indexedGeoPoint.toString(), + GEO_SHAPE_FIELD_NAME, + WKT.toWKT(indexedGeometry) + ) ); } + @Before + public void runBeforeEachTest() { + bottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + topLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + } + public void testUnmappedGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - 
.addAggregation(AggregationBuilders.geoBounds("bounds").field("non_existing_field").missing("2,1")) + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsBottomRight(missingGeoPoint, bottomRight); + GeoBoundsHelper.updateBoundsTopLeft(missingGeoPoint, topLeft); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .field(NON_EXISTENT_FIELD) + .wrapLongitude(false) + .missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(1.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); } public void testGeoBounds() { - SearchResponse response = client().prepareSearch("idx") - .addAggregation(AggregationBuilders.geoBounds("bounds").field("location").missing("2,1")) + GeoBoundsHelper.updateBoundsForGeoPoint(indexedGeoPoint, topLeft, bottomRight); + final GeoPoint missingGeoPoint = RandomGeoGenerator.randomPoint(random()); + GeoBoundsHelper.updateBoundsForGeoPoint(missingGeoPoint, topLeft, bottomRight); + SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME).field("location").wrapLongitude(false).missing(missingGeoPoint.toString()) + ) .get(); assertSearchResponse(response); - GeoBounds bounds = response.getAggregations().get("bounds"); - assertThat(bounds.bottomRight().lat(), closeTo(1.0, 1E-5)); - assertThat(bounds.bottomRight().lon(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lat(), closeTo(2.0, 1E-5)); - assertThat(bounds.topLeft().lon(), closeTo(1.0, 1E-5)); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testGeoBoundsWithMissingShape() { + // create GeoBounds for the indexed Field + GeoBoundsHelper.updateBoundsForGeometry(indexedGeometry, topLeft, bottomRight); + final Geometry missingGeometry = RandomGeoGeometryGenerator.randomGeometry(random()); + assert missingGeometry != null; + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation( + AggregationBuilders.geoBounds(AGGREGATION_NAME) + .wrapLongitude(false) + .field(GEO_SHAPE_FIELD_NAME) + .missing(WKT.toWKT(missingGeometry)) + ) + .get(); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + public void testUnmappedGeoBoundsOnGeoShape() { + // We cannot useGeometry other than Point as for GeoBoundsAggregation as the Default Value for the + // CoreValueSourceType is GeoPoint hence we need to use Point here. 
+ final Geometry missingGeometry = RandomGeoGeometryGenerator.randomPoint(random()); + final SearchResponse response = client().prepareSearch(INDEX_NAME) + .addAggregation(AggregationBuilders.geoBounds(AGGREGATION_NAME).field(NON_EXISTENT_FIELD).missing(WKT.toWKT(missingGeometry))) + .get(); + GeoBoundsHelper.updateBoundsForGeometry(missingGeometry, topLeft, bottomRight); + assertSearchResponse(response); + validateResult(response.getAggregations().get(AGGREGATION_NAME)); + } + + private void validateResult(final GeoBounds bounds) { + MatcherAssert.assertThat(bounds.bottomRight().lat(), closeTo(bottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.bottomRight().lon(), closeTo(bottomRight.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lat(), closeTo(topLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bounds.topLeft().lon(), closeTo(topLeft.lon(), GEOHASH_TOLERANCE)); } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java new file mode 100644 index 0000000000000..257cc98db69fc --- /dev/null +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/common/GeoBoundsHelper.java @@ -0,0 +1,187 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.geo.search.aggregations.common; + +import org.junit.Assert; +import org.opensearch.common.geo.GeoPoint; +import org.opensearch.geometry.Geometry; +import org.opensearch.geometry.GeometryCollection; +import org.opensearch.geometry.Line; +import org.opensearch.geometry.MultiLine; +import org.opensearch.geometry.MultiPoint; +import org.opensearch.geometry.MultiPolygon; +import org.opensearch.geometry.Point; +import org.opensearch.geometry.Polygon; +import org.opensearch.geometry.Rectangle; +import org.opensearch.geometry.ShapeType; + +import java.util.Locale; + +/** + * A helper class for finding the geo bounds for a shape or a point. + */ +public final class GeoBoundsHelper { + + /** + * Updates the GeoBounds for the input GeoPoint in topLeft and bottomRight GeoPoints. + * + * @param geoPoint {@link GeoPoint} + * @param topLeft {@link GeoPoint} + * @param bottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeoPoint(final GeoPoint geoPoint, final GeoPoint topLeft, final GeoPoint bottomRight) { + updateBoundsBottomRight(geoPoint, bottomRight); + updateBoundsTopLeft(geoPoint, topLeft); + } + + /** + * Find the bottom right for a point and put it in the currentBounds param. + * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsBottomRight(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() < currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() > currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the top left for a point and put it in the currentBounds param. 
+ * + * @param geoPoint {@link GeoPoint} + * @param currentBound {@link GeoPoint} + */ + public static void updateBoundsTopLeft(final GeoPoint geoPoint, final GeoPoint currentBound) { + if (geoPoint.lat() > currentBound.lat()) { + currentBound.resetLat(geoPoint.lat()); + } + if (geoPoint.lon() < currentBound.lon()) { + currentBound.resetLon(geoPoint.lon()); + } + } + + /** + * Find the bounds for an input shape. + * + * @param geometry {@link Geometry} + * @param geoShapeTopLeft {@link GeoPoint} + * @param geoShapeBottomRight {@link GeoPoint} + */ + public static void updateBoundsForGeometry( + final Geometry geometry, + final GeoPoint geoShapeTopLeft, + final GeoPoint geoShapeBottomRight + ) { + final ShapeType shapeType = geometry.type(); + switch (shapeType) { + case POINT: + updateBoundsTopLeft((Point) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Point) geometry, geoShapeBottomRight); + return; + case MULTIPOINT: + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPoint) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case POLYGON: + updateBoundsTopLeft((Polygon) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Polygon) geometry, geoShapeBottomRight); + return; + case LINESTRING: + updateBoundsTopLeft((Line) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Line) geometry, geoShapeBottomRight); + return; + case MULTIPOLYGON: + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsTopLeft(p, geoShapeTopLeft)); + ((MultiPolygon) geometry).getAll().forEach(p -> updateBoundsBottomRight(p, geoShapeBottomRight)); + return; + case GEOMETRYCOLLECTION: + ((GeometryCollection) geometry).getAll() + .forEach(geo -> updateBoundsForGeometry(geo, geoShapeTopLeft, geoShapeBottomRight)); + return; + case MULTILINESTRING: + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsTopLeft(line, geoShapeTopLeft)); + ((MultiLine) geometry).getAll().forEach(line -> updateBoundsBottomRight(line, geoShapeBottomRight)); + return; + case ENVELOPE: + updateBoundsTopLeft((Rectangle) geometry, geoShapeTopLeft); + updateBoundsBottomRight((Rectangle) geometry, geoShapeBottomRight); + return; + default: + Assert.fail(String.format(Locale.ROOT, "The shape type %s is not supported", shapeType)); + } + } + + private static void updateBoundsTopLeft(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsTopLeft(geoPoint, currentBound); + } + + private static void updateBoundsTopLeft(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsTopLeft(geoPoint, currentBound); + } + } + + private static void updateBoundsTopLeft(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMaxLat() > currentBound.lat()) { + currentBound.resetLat(rectangle.getMaxLat()); + } + if (rectangle.getMinLon() < currentBound.lon()) { + currentBound.resetLon(rectangle.getMinLon()); + } + } + + private static 
void updateBoundsBottomRight(final Point p, final GeoPoint currentBound) { + final GeoPoint geoPoint = new GeoPoint(p.getLat(), p.getLon()); + updateBoundsBottomRight(geoPoint, currentBound); + } + + private static void updateBoundsBottomRight(final Polygon polygon, final GeoPoint currentBound) { + for (int i = 0; i < polygon.getPolygon().length(); i++) { + double lat = polygon.getPolygon().getLats()[i]; + double lon = polygon.getPolygon().getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Line line, final GeoPoint currentBound) { + for (int i = 0; i < line.length(); i++) { + double lat = line.getLats()[i]; + double lon = line.getLons()[i]; + final GeoPoint geoPoint = new GeoPoint(lat, lon); + updateBoundsBottomRight(geoPoint, currentBound); + } + } + + private static void updateBoundsBottomRight(final Rectangle rectangle, final GeoPoint currentBound) { + if (rectangle.getMinLat() < currentBound.lat()) { + currentBound.resetLat(rectangle.getMinLat()); + } + if (rectangle.getMaxLon() > currentBound.lon()) { + currentBound.resetLon(rectangle.getMaxLon()); + } + } +} diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java index 92987d407f51d..b6f33ec2e0cae 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/AbstractGeoAggregatorModulePluginTestCase.java @@ -22,14 +22,20 @@ import org.opensearch.common.xcontent.XContentBuilder; import org.opensearch.common.xcontent.XContentFactory; import org.opensearch.geo.GeoModulePluginIntegTestCase; +import org.opensearch.geo.search.aggregations.common.GeoBoundsHelper; import org.opensearch.geo.tests.common.RandomGeoGenerator; +import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator; +import org.opensearch.geometry.Geometry; import org.opensearch.geometry.utils.Geohash; +import org.opensearch.geometry.utils.StandardValidator; +import org.opensearch.geometry.utils.WellKnownText; import org.opensearch.search.SearchHit; import org.opensearch.search.sort.SortBuilders; import org.opensearch.search.sort.SortOrder; import java.util.ArrayList; import java.util.List; +import java.util.stream.IntStream; import static org.hamcrest.Matchers.equalTo; import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder; @@ -46,6 +52,7 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static final String SINGLE_VALUED_FIELD_NAME = "geo_value"; protected static final String MULTI_VALUED_FIELD_NAME = "geo_values"; + protected static final String GEO_SHAPE_FIELD_NAME = "shape"; protected static final String NUMBER_FIELD_NAME = "l_values"; protected static final String UNMAPPED_IDX_NAME = "idx_unmapped"; protected static final String IDX_NAME = "idx"; @@ -57,11 +64,13 @@ public abstract class AbstractGeoAggregatorModulePluginTestCase extends GeoModul protected static int numDocs; protected static int numUniqueGeoPoints; protected static GeoPoint[] singleValues, multiValues; + protected static Geometry[] geoShapesValues; protected static GeoPoint singleTopLeft, singleBottomRight, 
multiTopLeft, multiBottomRight, singleCentroid, multiCentroid, - unmappedCentroid; + unmappedCentroid, geoShapeTopLeft, geoShapeBottomRight; protected static ObjectIntMap expectedDocCountsForGeoHash = null; protected static ObjectObjectMap expectedCentroidsForGeoHash = null; - protected static final double GEOHASH_TOLERANCE = 1E-5D; + + protected static final WellKnownText WKT = new WellKnownText(true, new StandardValidator(true)); @Override public void setupSuiteScopeCluster() throws Exception { @@ -75,7 +84,9 @@ public void setupSuiteScopeCluster() throws Exception { NUMBER_FIELD_NAME, "type=long", "tag", - "type=keyword" + "type=keyword", + GEO_SHAPE_FIELD_NAME, + "type=geo_shape" ) ); @@ -83,6 +94,8 @@ public void setupSuiteScopeCluster() throws Exception { singleBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); multiTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); multiBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); + geoShapeTopLeft = new GeoPoint(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY); + geoShapeBottomRight = new GeoPoint(Double.POSITIVE_INFINITY, Double.NEGATIVE_INFINITY); singleCentroid = new GeoPoint(0, 0); multiCentroid = new GeoPoint(0, 0); unmappedCentroid = new GeoPoint(0, 0); @@ -95,17 +108,21 @@ public void setupSuiteScopeCluster() throws Exception { singleValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < singleValues.length; i++) { singleValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(singleValues[i], singleTopLeft); - updateBoundsBottomRight(singleValues[i], singleBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(singleValues[i], singleTopLeft, singleBottomRight); } multiValues = new GeoPoint[numUniqueGeoPoints]; for (int i = 0; i < multiValues.length; i++) { multiValues[i] = RandomGeoGenerator.randomPoint(random()); - updateBoundsTopLeft(multiValues[i], multiTopLeft); - updateBoundsBottomRight(multiValues[i], multiBottomRight); + GeoBoundsHelper.updateBoundsForGeoPoint(multiValues[i], multiTopLeft, multiBottomRight); } + geoShapesValues = new Geometry[numDocs]; + IntStream.range(0, numDocs).forEach(iterator -> { + geoShapesValues[iterator] = RandomGeoGeometryGenerator.randomGeometry(random()); + GeoBoundsHelper.updateBoundsForGeometry(geoShapesValues[iterator], geoShapeTopLeft, geoShapeBottomRight); + }); + List builders = new ArrayList<>(); GeoPoint singleVal; @@ -132,6 +149,7 @@ public void setupSuiteScopeCluster() throws Exception { .endArray() .field(NUMBER_FIELD_NAME, i) .field("tag", "tag" + i) + .field(GEO_SHAPE_FIELD_NAME, WKT.toWKT(geoShapesValues[i])) .endObject() ) ); @@ -147,7 +165,9 @@ public void setupSuiteScopeCluster() throws Exception { ); } - assertAcked(prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point")); + assertAcked( + prepareCreate(EMPTY_IDX_NAME).setMapping(SINGLE_VALUED_FIELD_NAME, "type=geo_point", GEO_SHAPE_FIELD_NAME, "type=geo_shape") + ); assertAcked( prepareCreate(DATELINE_IDX_NAME).setMapping( @@ -274,22 +294,4 @@ private GeoPoint updateHashCentroid(String hash, final GeoPoint location) { final double newLat = centroid.lat() + (location.lat() - centroid.lat()) / docCount; return centroid.reset(newLat, newLon); } - - private void updateBoundsBottomRight(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() < currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() > currentBound.lon()) { - 
currentBound.resetLon(geoPoint.lon()); - } - } - - private void updateBoundsTopLeft(GeoPoint geoPoint, GeoPoint currentBound) { - if (geoPoint.lat() > currentBound.lat()) { - currentBound.resetLat(geoPoint.lat()); - } - if (geoPoint.lon() < currentBound.lon()) { - currentBound.resetLon(geoPoint.lon()); - } - } } diff --git a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java index 8cc82da12d69a..ed3196319faca 100644 --- a/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java +++ b/modules/geo/src/internalClusterTest/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsITTestCase.java @@ -32,6 +32,7 @@ package org.opensearch.geo.search.aggregations.metrics; +import org.hamcrest.MatcherAssert; import org.opensearch.action.search.SearchResponse; import org.opensearch.common.geo.GeoPoint; import org.opensearch.common.util.BigArray; @@ -43,18 +44,18 @@ import java.util.List; -import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.closeTo; +import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; import static org.opensearch.index.query.QueryBuilders.matchAllQuery; import static org.opensearch.search.aggregations.AggregationBuilders.global; import static org.opensearch.search.aggregations.AggregationBuilders.terms; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertSearchResponse; -import static org.opensearch.geo.tests.common.AggregationBuilders.geoBounds; @OpenSearchIntegTestCase.SuiteScopeTestCase public class GeoBoundsITTestCase extends AbstractGeoAggregatorModulePluginTestCase { @@ -275,4 +276,36 @@ public void testSingleValuedFieldWithZeroLon() throws Exception { assertThat(bottomRight.lat(), closeTo(1.0, GEOHASH_TOLERANCE)); assertThat(bottomRight.lon(), closeTo(0.0, GEOHASH_TOLERANCE)); } + + public void testGeoShapeValuedField() { + final SearchResponse response = client().prepareSearch(IDX_NAME) + .addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + assertSearchResponse(response); + final GeoBounds geoBounds = response.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft.lat(), closeTo(geoShapeTopLeft.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(topLeft.lon(), closeTo(geoShapeTopLeft.lon(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lat(), closeTo(geoShapeBottomRight.lat(), GEOHASH_TOLERANCE)); + MatcherAssert.assertThat(bottomRight.lon(), closeTo(geoShapeBottomRight.lon(), GEOHASH_TOLERANCE)); + } + + public void testEmptyAggregationOnGeoShapes() { + final SearchResponse searchResponse = client().prepareSearch(EMPTY_IDX_NAME) + .setQuery(matchAllQuery()) + 
.addAggregation(geoBounds(aggName).field(GEO_SHAPE_FIELD_NAME).wrapLongitude(false)) + .get(); + + MatcherAssert.assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); + final GeoBounds geoBounds = searchResponse.getAggregations().get(aggName); + MatcherAssert.assertThat(geoBounds, notNullValue()); + MatcherAssert.assertThat(geoBounds.getName(), equalTo(aggName)); + final GeoPoint topLeft = geoBounds.topLeft(); + final GeoPoint bottomRight = geoBounds.bottomRight(); + MatcherAssert.assertThat(topLeft, equalTo(null)); + MatcherAssert.assertThat(bottomRight, equalTo(null)); + } } diff --git a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java index 25dcf8db2c407..8ca1d2a0c214f 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java +++ b/modules/geo/src/main/java/org/opensearch/geo/GeoModulePlugin.java @@ -35,11 +35,11 @@ import org.opensearch.geo.search.aggregations.bucket.composite.GeoTileGridValuesSourceBuilder; import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder; import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder; -import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregator; -import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoHashGrid; -import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGrid; +import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGrid; import org.opensearch.geo.search.aggregations.metrics.GeoBounds; import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder; +import org.opensearch.geo.search.aggregations.metrics.GeoBoundsGeoShapeAggregator; import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds; import org.opensearch.index.mapper.GeoShapeFieldMapper; import org.opensearch.index.mapper.Mapper; @@ -47,10 +47,13 @@ import org.opensearch.plugins.Plugin; import org.opensearch.plugins.SearchPlugin; import org.opensearch.search.aggregations.bucket.composite.CompositeAggregation; +import org.opensearch.search.aggregations.support.CoreValuesSourceType; +import org.opensearch.search.aggregations.support.ValuesSourceRegistry; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.function.Consumer; public class GeoModulePlugin extends Plugin implements MapperPlugin, SearchPlugin { @@ -74,18 +77,18 @@ public List getAggregations() { GeoHashGridAggregationBuilder.NAME, GeoHashGridAggregationBuilder::new, GeoHashGridAggregationBuilder.PARSER - ).addResultReader(InternalGeoHashGrid::new).setAggregatorRegistrar(GeoHashGridAggregationBuilder::registerAggregators); + ).addResultReader(GeoHashGrid::new).setAggregatorRegistrar(GeoHashGridAggregationBuilder::registerAggregators); final AggregationSpec geoTileGrid = new AggregationSpec( GeoTileGridAggregationBuilder.NAME, GeoTileGridAggregationBuilder::new, GeoTileGridAggregationBuilder.PARSER - ).addResultReader(InternalGeoTileGrid::new).setAggregatorRegistrar(GeoTileGridAggregationBuilder::registerAggregators); + ).addResultReader(GeoTileGrid::new).setAggregatorRegistrar(GeoTileGridAggregationBuilder::registerAggregators); return List.of(geoBounds, geoHashGrid, geoTileGrid); } /** - * Registering the {@link GeoTileGridAggregator} in the {@link CompositeAggregation}. 
+     * Registers the geotile grid values source with the {@link CompositeAggregation}.
      *
      * @return a {@link List} of {@link CompositeAggregationSpec}
      */
@@ -102,4 +105,23 @@ public List getCompositeAggregations() {
             )
         );
     }
+
+    /**
+     * Registers the GeoBounds aggregation over the geo_shape field. This hook allows a plugin to reuse an
+     * aggregation name that is already defined in Core, as long as the new registration targets a different
+     * ValuesSourceType.
+     *
+     * @return a list of the new registrar functions
+     */
+    @Override
+    public List<Consumer<ValuesSourceRegistry.Builder>> getAggregationExtentions() {
+        final Consumer<ValuesSourceRegistry.Builder> geoShapeConsumer = builder -> builder.register(
+            GeoBoundsAggregationBuilder.REGISTRY_KEY,
+            CoreValuesSourceType.GEO_SHAPE,
+            GeoBoundsGeoShapeAggregator::new,
+            true
+        );
+        return Collections.singletonList(geoShapeConsumer);
+    }
+
 }
diff --git a/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java
new file mode 100644
index 0000000000000..246ece4342cff
--- /dev/null
+++ b/modules/geo/src/main/java/org/opensearch/geo/algorithm/PolygonGenerator.java
@@ -0,0 +1,190 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.algorithm;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.opensearch.common.util.CollectionUtils;
+
+import java.awt.geom.Point2D;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Random;
+import java.util.stream.IntStream;
+
+/**
+ * Helper class to generate a polygon. This lives in the main source set, rather than in test code, so that the
+ * GeoSpatial plugin can reuse it to create polygons instead of hard-coding the values.
+ */
+public class PolygonGenerator {
+
+    private static final Logger LOG = LogManager.getLogger(PolygonGenerator.class);
+
+    /**
+     * A helper function to create polygons for testing. The returned list holds two double arrays: the first
+     * contains all the X coordinates and the second all the Y coordinates.
+     *
+     * @param xPool a {@link java.util.List} of {@link Double}
+     * @param yPool a {@link java.util.List} of {@link Double}
+     * @param random a {@link Random} used as the source of randomness
+     * @return a {@link List} of double arrays.
+     */
+    public static List<double[]> generatePolygon(final List<Double> xPool, final List<Double> yPool, final Random random) {
+        if (CollectionUtils.isEmpty(xPool) || CollectionUtils.isEmpty(yPool)) {
+            LOG.debug("One of the X or Y list is empty or null. X : {} Y : {}", xPool, yPool);
+            return Collections.emptyList();
+        }
+        final List<Point2D.Double> generatedPolygonPointsList = ValtrAlgorithm.generateRandomConvexPolygon(xPool, yPool, random);
+        final double[] x = new double[generatedPolygonPointsList.size()];
+        final double[] y = new double[generatedPolygonPointsList.size()];
+        IntStream.range(0, generatedPolygonPointsList.size()).forEach(iterator -> {
+            x[iterator] = generatedPolygonPointsList.get(iterator).getX();
+            y[iterator] = generatedPolygonPointsList.get(iterator).getY();
+        });
+        final List<double[]> pointsList = new ArrayList<>();
+        pointsList.add(x);
+        pointsList.add(y);
+        return pointsList;
+    }
+
+    /*
+     * MIT License
+     *
+     * Copyright (c) 2017 Sander Verdonschot
+     *
+     * Permission is hereby granted, free of charge, to any person obtaining a copy
+     * of this software and associated documentation files (the "Software"), to deal
+     * in the Software without restriction, including without limitation the rights
+     * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+     * copies of the Software, and to permit persons to whom the Software is
+     * furnished to do so, subject to the following conditions:
+     *
+     * The above copyright notice and this permission notice shall be included in all
+     * copies or substantial portions of the Software.
+     *
+     * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+     * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+     * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+     * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+     * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+     * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+     * SOFTWARE.
+     */
+    /**
+     * Provides a helper function to create a Polygon with a list of points. This source code is used to create the
+     * polygons in the test cases.
+     * Reference Link
+     * Visual Link
+     */
+    private static class ValtrAlgorithm {
+        /**
+         * Generates a convex polygon using the points provided as a {@link List} of {@link Double} for both the X and Y axes.
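+         * In brief: sort both coordinate pools, split the interior points of each pool into two monotone
+         * chains, turn the chains into edge vectors, randomly pair the X components with the Y components,
+         * sort the vectors by angle, and lay them end to end; shifting the result back onto the original
+         * min/max coordinates yields the final polygon. Sorting the edges by angle is what makes the result
+         * convex.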
+ * + * @param xPool a {@link List} of {@link Double} + * @param yPool a {@link List} of {@link Double} + * @return a {@link List} of {@link Point2D.Double} + */ + private static List generateRandomConvexPolygon( + final List xPool, + final List yPool, + final Random random + ) { + final int n = xPool.size(); + // Sort them + Collections.sort(xPool); + Collections.sort(yPool); + + // Isolate the extreme points + final Double minX = xPool.get(0); + final Double maxX = xPool.get(n - 1); + final Double minY = yPool.get(0); + final Double maxY = yPool.get(n - 1); + + // Divide the interior points into two chains & Extract the vector components + java.util.List xVec = new ArrayList<>(n); + java.util.List yVec = new ArrayList<>(n); + + double lastTop = minX, lastBot = minX; + + for (int i = 1; i < n - 1; i++) { + double x = xPool.get(i); + + if (random.nextBoolean()) { + xVec.add(x - lastTop); + lastTop = x; + } else { + xVec.add(lastBot - x); + lastBot = x; + } + } + + xVec.add(maxX - lastTop); + xVec.add(lastBot - maxX); + + double lastLeft = minY, lastRight = minY; + + for (int i = 1; i < n - 1; i++) { + double y = yPool.get(i); + + if (random.nextBoolean()) { + yVec.add(y - lastLeft); + lastLeft = y; + } else { + yVec.add(lastRight - y); + lastRight = y; + } + } + + yVec.add(maxY - lastLeft); + yVec.add(lastRight - maxY); + + // Randomly pair up the X- and Y-components + Collections.shuffle(yVec, random); + + // Combine the paired up components into vectors + List vec = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + vec.add(new Point2D.Double(xVec.get(i), yVec.get(i))); + } + + // Sort the vectors by angle + Collections.sort(vec, Comparator.comparingDouble(v -> Math.atan2(v.getY(), v.getX()))); + + // Lay them end-to-end + double x = 0, y = 0; + double minPolygonX = 0; + double minPolygonY = 0; + List points = new ArrayList<>(n); + + for (int i = 0; i < n; i++) { + points.add(new Point2D.Double(x, y)); + + x += vec.get(i).getX(); + y += vec.get(i).getY(); + + minPolygonX = Math.min(minPolygonX, x); + minPolygonY = Math.min(minPolygonY, y); + } + + // Move the polygon to the original min and max coordinates + double xShift = minX - minPolygonX; + double yShift = minY - minPolygonY; + + for (int i = 0; i < n; i++) { + Point2D.Double p = points.get(i); + points.set(i, new Point2D.Double(p.x + xShift, p.y + yShift)); + } + + return points; + } + } + +} diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java index 84d5943da287f..9e671118637b9 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridValuesSourceBuilder.java @@ -33,7 +33,6 @@ package org.opensearch.geo.search.aggregations.bucket.composite; import org.apache.lucene.index.IndexReader; -import org.opensearch.LegacyESVersion; import org.opensearch.common.ParseField; import org.opensearch.common.geo.GeoBoundingBox; import org.opensearch.common.geo.GeoPoint; @@ -175,9 +174,7 @@ public static void register(ValuesSourceRegistry.Builder builder) { public GeoTileGridValuesSourceBuilder(StreamInput in) throws IOException { super(in); this.precision = in.readInt(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - this.geoBoundingBox = new GeoBoundingBox(in); - 
} + this.geoBoundingBox = new GeoBoundingBox(in); } public GeoTileGridValuesSourceBuilder precision(int precision) { @@ -198,9 +195,7 @@ public GeoTileGridValuesSourceBuilder format(String format) { @Override protected void innerWriteTo(StreamOutput out) throws IOException { out.writeInt(precision); - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - geoBoundingBox.writeTo(out); - } + geoBoundingBox.writeTo(out); } @Override diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGrid.java similarity index 72% rename from modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGrid.java index 9dbed7b27307a..b58c19a7186e6 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGrid.java @@ -54,30 +54,30 @@ * All geo-grid hash-encoding in a grid are of the same precision and held internally as a single long * for efficiency's sake. * - * @opensearch.internal + * @opensearch.api */ -public abstract class InternalGeoGrid extends InternalMultiBucketAggregation< - InternalGeoGrid, - InternalGeoGridBucket> implements GeoGrid { +public abstract class BaseGeoGrid extends InternalMultiBucketAggregation + implements + GeoGrid { protected final int requiredSize; - protected final List buckets; + protected final List buckets; - InternalGeoGrid(String name, int requiredSize, List buckets, Map metadata) { + protected BaseGeoGrid(String name, int requiredSize, List buckets, Map metadata) { super(name, metadata); this.requiredSize = requiredSize; this.buckets = buckets; } - abstract Writeable.Reader getBucketReader(); + protected abstract Writeable.Reader getBucketReader(); /** * Read from a stream. 
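     * Deserialization mirrors doWriteTo: the required size is read first, then the bucket list via the
     * reader supplied by getBucketReader().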
*/ - public InternalGeoGrid(StreamInput in) throws IOException { + public BaseGeoGrid(StreamInput in) throws IOException { super(in); requiredSize = readSize(in); - buckets = (List) in.readList(getBucketReader()); + buckets = (List) in.readList(getBucketReader()); } @Override @@ -86,24 +86,24 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeList(buckets); } - abstract InternalGeoGrid create(String name, int requiredSize, List buckets, Map metadata); + protected abstract BaseGeoGrid create(String name, int requiredSize, List buckets, Map metadata); @Override - public List getBuckets() { + public List getBuckets() { return unmodifiableList(buckets); } @Override - public InternalGeoGrid reduce(List aggregations, ReduceContext reduceContext) { - LongObjectPagedHashMap> buckets = null; + public BaseGeoGrid reduce(List aggregations, ReduceContext reduceContext) { + LongObjectPagedHashMap> buckets = null; for (InternalAggregation aggregation : aggregations) { - InternalGeoGrid grid = (InternalGeoGrid) aggregation; + BaseGeoGrid grid = (BaseGeoGrid) aggregation; if (buckets == null) { buckets = new LongObjectPagedHashMap<>(grid.buckets.size(), reduceContext.bigArrays()); } for (Object obj : grid.buckets) { - InternalGeoGridBucket bucket = (InternalGeoGridBucket) obj; - List existingBuckets = buckets.get(bucket.hashAsLong()); + BaseGeoGridBucket bucket = (BaseGeoGridBucket) obj; + List existingBuckets = buckets.get(bucket.hashAsLong()); if (existingBuckets == null) { existingBuckets = new ArrayList<>(aggregations.size()); buckets.put(bucket.hashAsLong(), existingBuckets); @@ -113,13 +113,13 @@ public InternalGeoGrid reduce(List aggregations, ReduceCont } final int size = Math.toIntExact(reduceContext.isFinalReduce() == false ? buckets.size() : Math.min(requiredSize, buckets.size())); - BucketPriorityQueue ordered = new BucketPriorityQueue<>(size); - for (LongObjectPagedHashMap.Cursor> cursor : buckets) { - List sameCellBuckets = cursor.value; + BucketPriorityQueue ordered = new BucketPriorityQueue<>(size); + for (LongObjectPagedHashMap.Cursor> cursor : buckets) { + List sameCellBuckets = cursor.value; ordered.insertWithOverflow(reduceBucket(sameCellBuckets, reduceContext)); } buckets.close(); - InternalGeoGridBucket[] list = new InternalGeoGridBucket[ordered.size()]; + BaseGeoGridBucket[] list = new BaseGeoGridBucket[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; i--) { list[i] = ordered.pop(); } @@ -128,11 +128,11 @@ public InternalGeoGrid reduce(List aggregations, ReduceCont } @Override - protected InternalGeoGridBucket reduceBucket(List buckets, ReduceContext context) { + protected BaseGeoGridBucket reduceBucket(List buckets, ReduceContext context) { assert buckets.size() > 0; List aggregationsList = new ArrayList<>(buckets.size()); long docCount = 0; - for (InternalGeoGridBucket bucket : buckets) { + for (BaseGeoGridBucket bucket : buckets) { docCount += bucket.docCount; aggregationsList.add(bucket.aggregations); } @@ -140,12 +140,12 @@ protected InternalGeoGridBucket reduceBucket(List buckets return createBucket(buckets.get(0).hashAsLong, docCount, aggs); } - abstract B createBucket(long hashAsLong, long docCount, InternalAggregations aggregations); + protected abstract B createBucket(long hashAsLong, long docCount, InternalAggregations aggregations); @Override public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { builder.startArray(CommonFields.BUCKETS.getPreferredName()); - for (InternalGeoGridBucket bucket : 
buckets) { + for (BaseGeoGridBucket bucket : buckets) { bucket.toXContent(builder, params); } builder.endArray(); @@ -168,7 +168,7 @@ public boolean equals(Object obj) { if (obj == null || getClass() != obj.getClass()) return false; if (super.equals(obj) == false) return false; - InternalGeoGrid other = (InternalGeoGrid) obj; + BaseGeoGrid other = (BaseGeoGrid) obj; return Objects.equals(requiredSize, other.requiredSize) && Objects.equals(buckets, other.buckets); } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGridBucket.java similarity index 87% rename from modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGridBucket.java index 93fcdbd098400..f362d2b3d33d6 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoGridBucket.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BaseGeoGridBucket.java @@ -45,12 +45,12 @@ /** * Base implementation of geogrid aggs * - * @opensearch.internal + * @opensearch.api */ -public abstract class InternalGeoGridBucket extends InternalMultiBucketAggregation.InternalBucket +public abstract class BaseGeoGridBucket extends InternalMultiBucketAggregation.InternalBucket implements GeoGrid.Bucket, - Comparable { + Comparable { protected long hashAsLong; protected long docCount; @@ -58,7 +58,7 @@ public abstract class InternalGeoGridBucket ext long bucketOrd; - public InternalGeoGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { + public BaseGeoGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { this.docCount = docCount; this.aggregations = aggregations; this.hashAsLong = hashAsLong; @@ -67,7 +67,7 @@ public InternalGeoGridBucket(long hashAsLong, long docCount, InternalAggregation /** * Read from a stream. 
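     * Reads the cell hash, the doc count, and the sub-aggregations, in the same order writeTo emits them.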
*/ - public InternalGeoGridBucket(StreamInput in) throws IOException { + public BaseGeoGridBucket(StreamInput in) throws IOException { hashAsLong = in.readLong(); docCount = in.readVLong(); aggregations = InternalAggregations.readFrom(in); @@ -80,7 +80,7 @@ public void writeTo(StreamOutput out) throws IOException { aggregations.writeTo(out); } - long hashAsLong() { + public long hashAsLong() { return hashAsLong; } @@ -95,7 +95,7 @@ public Aggregations getAggregations() { } @Override - public int compareTo(InternalGeoGridBucket other) { + public int compareTo(BaseGeoGridBucket other) { if (this.hashAsLong > other.hashAsLong) { return 1; } @@ -119,7 +119,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - InternalGeoGridBucket bucket = (InternalGeoGridBucket) o; + BaseGeoGridBucket bucket = (BaseGeoGridBucket) o; return hashAsLong == bucket.hashAsLong && docCount == bucket.docCount && Objects.equals(aggregations, bucket.aggregations); } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java index 70d0552b3e80b..83fcdf4f66424 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/BucketPriorityQueue.java @@ -38,14 +38,14 @@ * * @opensearch.internal */ -class BucketPriorityQueue extends PriorityQueue { +class BucketPriorityQueue extends PriorityQueue { BucketPriorityQueue(int size) { super(size); } @Override - protected boolean lessThan(InternalGeoGridBucket o1, InternalGeoGridBucket o2) { + protected boolean lessThan(BaseGeoGridBucket o1, BaseGeoGridBucket o2) { int cmp = Long.compare(o2.getDocCount(), o1.getDocCount()); if (cmp == 0) { cmp = o2.compareTo(o1); diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java index d40029e9a762d..89ce288770185 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/CellIdSource.java @@ -43,7 +43,7 @@ * Wrapper class to help convert {@link MultiGeoPointValues} * to numeric long values for bucketing. * - * @opensearch.internal + * @opensearch.api */ public class CellIdSource extends ValuesSource.Numeric { private final ValuesSource.GeoPoint valuesSource; diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java index 4ae888640efc8..b2fe6e33ef95c 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGrid.java @@ -39,13 +39,13 @@ * A geo-grid aggregation. Defines multiple buckets, each representing a cell in a geo-grid of a specific * precision. * - * @opensearch.internal + * @opensearch.api */ public interface GeoGrid extends MultiBucketsAggregation { /** * A bucket that is associated with a geo-grid cell. 
The key of the bucket is - * the {@link InternalGeoGridBucket#getKeyAsString()} of the cell + * the {@link BaseGeoGridBucket#getKeyAsString()} of the cell */ interface Bucket extends MultiBucketsAggregation.Bucket {} diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 4a904b3aa2b16..abc892396fbf7 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -32,7 +32,6 @@ package org.opensearch.geo.search.aggregations.bucket.geogrid; -import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; import org.opensearch.common.ParseField; import org.opensearch.common.geo.GeoBoundingBox; @@ -58,9 +57,9 @@ import java.util.function.Function; /** - * Base Aggregation Builder for geohash_grid and geotile_grid aggs + * Base Aggregation Builder for geogrid aggs * - * @opensearch.internal + * @opensearch.api */ public abstract class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder { /* recognized field names in JSON */ @@ -125,9 +124,7 @@ public GeoGridAggregationBuilder(StreamInput in) throws IOException { precision = in.readVInt(); requiredSize = in.readVInt(); shardSize = in.readVInt(); - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - geoBoundingBox = new GeoBoundingBox(in); - } + geoBoundingBox = new GeoBoundingBox(in); } @Override @@ -140,9 +137,7 @@ protected void innerWriteTo(StreamOutput out) throws IOException { out.writeVInt(precision); out.writeVInt(requiredSize); out.writeVInt(shardSize); - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - geoBoundingBox.writeTo(out); - } + geoBoundingBox.writeTo(out); } /** diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java index 909772c61a960..db07ac8f947e5 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregator.java @@ -55,16 +55,16 @@ /** * Aggregates data expressed as longs (for efficiency's sake) but formats results as aggregation-specific strings. * - * @opensearch.internal + * @opensearch.api */ -public abstract class GeoGridAggregator extends BucketsAggregator { +public abstract class GeoGridAggregator extends BucketsAggregator { protected final int requiredSize; protected final int shardSize; protected final ValuesSource.Numeric valuesSource; protected final LongKeyedBucketOrds bucketOrds; - GeoGridAggregator( + protected GeoGridAggregator( String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @@ -118,23 +118,23 @@ public void collect(int doc, long owningBucketOrd) throws IOException { }; } - abstract T buildAggregation(String name, int requiredSize, List buckets, Map metadata); + protected abstract T buildAggregation(String name, int requiredSize, List buckets, Map metadata); /** * This method is used to return a re-usable instance of the bucket when building * the aggregation. 
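     * (buildAggregations keeps a single spare instance and recycles whatever the bucket priority queue
     * evicts, so one bucket object is reused instead of allocating a new one per visited ordinal.)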
- * @return a new {@link InternalGeoGridBucket} implementation with empty parameters + * @return a new {@link BaseGeoGridBucket} implementation with empty parameters */ - abstract InternalGeoGridBucket newEmptyBucket(); + protected abstract BaseGeoGridBucket newEmptyBucket(); @Override public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws IOException { - InternalGeoGridBucket[][] topBucketsPerOrd = new InternalGeoGridBucket[owningBucketOrds.length][]; + BaseGeoGridBucket[][] topBucketsPerOrd = new BaseGeoGridBucket[owningBucketOrds.length][]; for (int ordIdx = 0; ordIdx < owningBucketOrds.length; ordIdx++) { int size = (int) Math.min(bucketOrds.bucketsInOrd(owningBucketOrds[ordIdx]), shardSize); - BucketPriorityQueue ordered = new BucketPriorityQueue<>(size); - InternalGeoGridBucket spare = null; + BucketPriorityQueue ordered = new BucketPriorityQueue<>(size); + BaseGeoGridBucket spare = null; LongKeyedBucketOrds.BucketOrdsEnum ordsEnum = bucketOrds.ordsEnum(owningBucketOrds[ordIdx]); while (ordsEnum.next()) { if (spare == null) { @@ -149,7 +149,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I spare = ordered.insertWithOverflow(spare); } - topBucketsPerOrd[ordIdx] = new InternalGeoGridBucket[ordered.size()]; + topBucketsPerOrd[ordIdx] = new BaseGeoGridBucket[ordered.size()]; for (int i = ordered.size() - 1; i >= 0; --i) { topBucketsPerOrd[ordIdx][i] = ordered.pop(); } @@ -163,7 +163,7 @@ public InternalAggregation[] buildAggregations(long[] owningBucketOrds) throws I } @Override - public InternalGeoGrid buildEmptyAggregation() { + public BaseGeoGrid buildEmptyAggregation() { return buildAggregation(name, requiredSize, Collections.emptyList(), metadata()); } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGrid.java similarity index 70% rename from modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGrid.java index ff1247300939a..aa1d5504ad24f 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGrid.java @@ -43,40 +43,40 @@ * All geohashes in a grid are of the same precision and held internally as a single long * for efficiency's sake. 
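 * A request-level sketch (illustrative only; the index and field names here are assumptions, not part of
 * this change):
 * <pre>{@code
 * client().prepareSearch("idx")
 *     .addAggregation(new GeoHashGridAggregationBuilder("grid").field("location").precision(5))
 *     .get();
 * }</pre>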
* - * @opensearch.internal + * @opensearch.api */ -public class InternalGeoHashGrid extends InternalGeoGrid { +public class GeoHashGrid extends BaseGeoGrid { - InternalGeoHashGrid(String name, int requiredSize, List buckets, Map metadata) { + GeoHashGrid(String name, int requiredSize, List buckets, Map metadata) { super(name, requiredSize, buckets, metadata); } - public InternalGeoHashGrid(StreamInput in) throws IOException { + public GeoHashGrid(StreamInput in) throws IOException { super(in); } @Override - public InternalGeoGrid create(List buckets) { - return new InternalGeoHashGrid(name, requiredSize, buckets, metadata); + public BaseGeoGrid create(List buckets) { + return new GeoHashGrid(name, requiredSize, buckets, metadata); } @Override - public InternalGeoGridBucket createBucket(InternalAggregations aggregations, InternalGeoGridBucket prototype) { + public BaseGeoGridBucket createBucket(InternalAggregations aggregations, BaseGeoGridBucket prototype) { return new InternalGeoHashGridBucket(prototype.hashAsLong, prototype.docCount, aggregations); } @Override - InternalGeoGrid create(String name, int requiredSize, List buckets, Map metadata) { - return new InternalGeoHashGrid(name, requiredSize, buckets, metadata); + protected BaseGeoGrid create(String name, int requiredSize, List buckets, Map metadata) { + return new GeoHashGrid(name, requiredSize, buckets, metadata); } @Override - InternalGeoHashGridBucket createBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { + protected InternalGeoHashGridBucket createBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { return new InternalGeoHashGridBucket(hashAsLong, docCount, aggregations); } @Override - Reader getBucketReader() { + protected Reader getBucketReader() { return InternalGeoHashGridBucket::new; } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java index bbaf9613fb216..760d7d643c0a5 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregationBuilder.java @@ -51,7 +51,7 @@ /** * Aggregation Builder for geohash_grid * - * @opensearch.internal + * @opensearch.api */ public class GeoHashGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geohash_grid"; diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index 6ca7a4d8a9cb8..9ff9fe7d8f9ba 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -47,9 +47,9 @@ * * @opensearch.internal */ -public class GeoHashGridAggregator extends GeoGridAggregator { +class GeoHashGridAggregator extends GeoGridAggregator { - public GeoHashGridAggregator( + GeoHashGridAggregator( String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @@ -64,16 +64,17 @@ public GeoHashGridAggregator( } @Override - InternalGeoHashGrid buildAggregation(String name, int requiredSize, List buckets, Map 
metadata) { - return new InternalGeoHashGrid(name, requiredSize, buckets, metadata); + protected GeoHashGrid buildAggregation(String name, int requiredSize, List buckets, Map metadata) { + return new GeoHashGrid(name, requiredSize, buckets, metadata); } @Override - public InternalGeoHashGrid buildEmptyAggregation() { - return new InternalGeoHashGrid(name, requiredSize, Collections.emptyList(), metadata()); + public GeoHashGrid buildEmptyAggregation() { + return new GeoHashGrid(name, requiredSize, Collections.emptyList(), metadata()); } - InternalGeoGridBucket newEmptyBucket() { + @Override + protected BaseGeoGridBucket newEmptyBucket() { return new InternalGeoHashGridBucket(0, 0, null); } } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java index 1914c07e831f7..898a7d82a4dec 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridAggregatorFactory.java @@ -58,7 +58,7 @@ * * @opensearch.internal */ -public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory { +class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory { private final int precision; private final int requiredSize; @@ -86,7 +86,7 @@ public class GeoHashGridAggregatorFactory extends ValuesSourceAggregatorFactory @Override protected Aggregator createUnmapped(SearchContext searchContext, Aggregator parent, Map metadata) throws IOException { - final InternalAggregation aggregation = new InternalGeoHashGrid(name, requiredSize, emptyList(), metadata); + final InternalAggregation aggregation = new GeoHashGrid(name, requiredSize, emptyList(), metadata); return new NonCollectingAggregator(name, searchContext, parent, factories, metadata) { @Override public InternalAggregation buildEmptyAggregation() { diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGrid.java similarity index 70% rename from modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java rename to modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGrid.java index fa544b5893f0c..91c523c80855e 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoTileGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGrid.java @@ -43,40 +43,40 @@ * All geohashes in a grid are of the same precision and held internally as a single long * for efficiency's sake. 
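 * (In the geotile variant the long encodes a map tile, i.e. zoom plus x/y position, rather than a geohash.)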
* - * @opensearch.internal + * @opensearch.api */ -public class InternalGeoTileGrid extends InternalGeoGrid { +public class GeoTileGrid extends BaseGeoGrid { - InternalGeoTileGrid(String name, int requiredSize, List buckets, Map metadata) { + GeoTileGrid(String name, int requiredSize, List buckets, Map metadata) { super(name, requiredSize, buckets, metadata); } - public InternalGeoTileGrid(StreamInput in) throws IOException { + public GeoTileGrid(StreamInput in) throws IOException { super(in); } @Override - public InternalGeoGrid create(List buckets) { - return new InternalGeoTileGrid(name, requiredSize, buckets, metadata); + public BaseGeoGrid create(List buckets) { + return new GeoTileGrid(name, requiredSize, buckets, metadata); } @Override - public InternalGeoGridBucket createBucket(InternalAggregations aggregations, InternalGeoGridBucket prototype) { + public BaseGeoGridBucket createBucket(InternalAggregations aggregations, BaseGeoGridBucket prototype) { return new InternalGeoTileGridBucket(prototype.hashAsLong, prototype.docCount, aggregations); } @Override - InternalGeoGrid create(String name, int requiredSize, List buckets, Map metadata) { - return new InternalGeoTileGrid(name, requiredSize, buckets, metadata); + protected BaseGeoGrid create(String name, int requiredSize, List buckets, Map metadata) { + return new GeoTileGrid(name, requiredSize, buckets, metadata); } @Override - InternalGeoTileGridBucket createBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { + protected InternalGeoTileGridBucket createBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { return new InternalGeoTileGridBucket(hashAsLong, docCount, aggregations); } @Override - Reader getBucketReader() { + protected Reader getBucketReader() { return InternalGeoTileGridBucket::new; } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java index 76ad515f34fe5..0f1f87bdc57fa 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregationBuilder.java @@ -51,7 +51,7 @@ /** * Aggregation Builder for geotile_grid agg * - * @opensearch.internal + * @opensearch.api */ public class GeoTileGridAggregationBuilder extends GeoGridAggregationBuilder { public static final String NAME = "geotile_grid"; diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java index a205a9afde41e..8faed4e9cd2d4 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregator.java @@ -48,9 +48,9 @@ * * @opensearch.internal */ -public class GeoTileGridAggregator extends GeoGridAggregator { +class GeoTileGridAggregator extends GeoGridAggregator { - public GeoTileGridAggregator( + GeoTileGridAggregator( String name, AggregatorFactories factories, ValuesSource.Numeric valuesSource, @@ -65,16 +65,17 @@ public GeoTileGridAggregator( } @Override - InternalGeoTileGrid buildAggregation(String name, int requiredSize, List buckets, Map 
metadata) { - return new InternalGeoTileGrid(name, requiredSize, buckets, metadata); + protected GeoTileGrid buildAggregation(String name, int requiredSize, List buckets, Map metadata) { + return new GeoTileGrid(name, requiredSize, buckets, metadata); } @Override - public InternalGeoTileGrid buildEmptyAggregation() { - return new InternalGeoTileGrid(name, requiredSize, Collections.emptyList(), metadata()); + public GeoTileGrid buildEmptyAggregation() { + return new GeoTileGrid(name, requiredSize, Collections.emptyList(), metadata()); } - InternalGeoGridBucket newEmptyBucket() { + @Override + protected BaseGeoGridBucket newEmptyBucket() { return new InternalGeoTileGridBucket(0, 0, null); } } diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java index b830988a3d410..6eb73727ad6c8 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridAggregatorFactory.java @@ -57,7 +57,7 @@ * * @opensearch.internal */ -public class GeoTileGridAggregatorFactory extends ValuesSourceAggregatorFactory { +class GeoTileGridAggregatorFactory extends ValuesSourceAggregatorFactory { private final int precision; private final int requiredSize; @@ -85,7 +85,7 @@ public class GeoTileGridAggregatorFactory extends ValuesSourceAggregatorFactory @Override protected Aggregator createUnmapped(SearchContext searchContext, Aggregator parent, Map metadata) throws IOException { - final InternalAggregation aggregation = new InternalGeoTileGrid(name, requiredSize, Collections.emptyList(), metadata); + final InternalAggregation aggregation = new GeoTileGrid(name, requiredSize, Collections.emptyList(), metadata); return new NonCollectingAggregator(name, searchContext, parent, factories, metadata) { @Override public InternalAggregation buildEmptyAggregation() { diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java index 659909e868651..6e7ed8a679681 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/InternalGeoHashGridBucket.java @@ -43,7 +43,7 @@ * * @opensearch.internal */ -public class InternalGeoHashGridBucket extends InternalGeoGridBucket { +class InternalGeoHashGridBucket extends BaseGeoGridBucket { InternalGeoHashGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { super(hashAsLong, docCount, aggregations); } @@ -51,7 +51,7 @@ public class InternalGeoHashGridBucket extends InternalGeoGridBucket { +class InternalGeoTileGridBucket extends BaseGeoGridBucket { InternalGeoTileGridBucket(long hashAsLong, long docCount, InternalAggregations aggregations) { super(hashAsLong, docCount, aggregations); } @@ -52,7 +52,7 @@ public class InternalGeoTileGridBucket extends InternalGeoGridBucket implements GeoGrid { @@ -63,7 +63,7 @@ public static ObjectParser createParser( return parser; } - protected void setName(String name) { + public void setName(String name) { super.setName(name); } } diff --git 
a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java index 80124cda50b19..cbe3a2ee89dd7 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoGridBucket.java @@ -40,7 +40,7 @@ /** * A single geo grid bucket result parsed between nodes * - * @opensearch.internal + * @opensearch.api */ public abstract class ParsedGeoGridBucket extends ParsedMultiBucketAggregation.ParsedBucket implements GeoGrid.Bucket { diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java index 109524e755c4d..343149f8e19ab 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGrid.java @@ -42,7 +42,7 @@ * * @opensearch.internal */ -public class ParsedGeoHashGrid extends ParsedGeoGrid { +class ParsedGeoHashGrid extends ParsedGeoGrid { private static final ObjectParser PARSER = createParser( ParsedGeoHashGrid::new, diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java index 4e6e454b08324..6704273f45580 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoHashGridBucket.java @@ -41,7 +41,7 @@ * * @opensearch.internal */ -public class ParsedGeoHashGridBucket extends ParsedGeoGridBucket { +class ParsedGeoHashGridBucket extends ParsedGeoGridBucket { @Override public GeoPoint getKey() { diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java index 8734c96a15578..cb64a0e153e87 100644 --- a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/bucket/geogrid/ParsedGeoTileGrid.java @@ -42,7 +42,7 @@ * * @opensearch.internal */ -public class ParsedGeoTileGrid extends ParsedGeoGrid { +class ParsedGeoTileGrid extends ParsedGeoGrid { private static final ObjectParser PARSER = createParser( ParsedGeoTileGrid::new, diff --git a/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java new file mode 100644 index 0000000000000..918b9a6701490 --- /dev/null +++ b/modules/geo/src/main/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregator.java @@ -0,0 +1,116 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.index.LeafReaderContext;
+import org.opensearch.common.geo.GeoShapeDocValue;
+import org.opensearch.common.util.BigArrays;
+import org.opensearch.index.fielddata.GeoShapeValue;
+import org.opensearch.search.aggregations.Aggregator;
+import org.opensearch.search.aggregations.LeafBucketCollector;
+import org.opensearch.search.aggregations.LeafBucketCollectorBase;
+import org.opensearch.search.aggregations.support.ValuesSource;
+import org.opensearch.search.aggregations.support.ValuesSourceConfig;
+import org.opensearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.Map;
+
+/**
+ * Aggregates all documents into geographic bounds for a geo_shape field.
+ *
+ * @opensearch.internal
+ */
+public final class GeoBoundsGeoShapeAggregator extends AbstractGeoBoundsAggregator<ValuesSource.GeoShape> {
+    private static final Logger LOGGER = LogManager.getLogger(GeoBoundsGeoShapeAggregator.class);
+
+    public GeoBoundsGeoShapeAggregator(
+        String name,
+        SearchContext searchContext,
+        Aggregator aggregator,
+        ValuesSourceConfig valuesSourceConfig,
+        boolean wrapLongitude,
+        Map<String, Object> metaData
+    ) throws IOException {
+        super(name, searchContext, aggregator, valuesSourceConfig, wrapLongitude, metaData);
+    }
+
+    @Override
+    protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, LeafBucketCollector leafBucketCollector) {
+        if (valuesSource == null) {
+            return LeafBucketCollector.NO_OP_COLLECTOR;
+        }
+        final BigArrays bigArrays = context.bigArrays();
+        final GeoShapeValue values = valuesSource.getGeoShapeValues(ctx);
+        return new LeafBucketCollectorBase(leafBucketCollector, values) {
+            @Override
+            public void collect(int doc, long bucket) throws IOException {
+                setBucketSize(bucket, bigArrays);
+                if (values.advanceExact(doc)) {
+                    final GeoShapeDocValue value = values.nextValue();
+                    final GeoShapeDocValue.BoundingRectangle boundingBox = value.getBoundingRectangle();
+                    if (boundingBox != null) {
+                        double top = tops.get(bucket);
+                        if (boundingBox.getMaxLatitude() > top) {
+                            top = boundingBox.getMaxLatitude();
+                        }
+
+                        double bottom = bottoms.get(bucket);
+                        if (boundingBox.getMinLatitude() < bottom) {
+                            bottom = boundingBox.getMinLatitude();
+                        }
+
+                        double posLeft = posLefts.get(bucket);
+                        if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() < posLeft) {
+                            posLeft = boundingBox.getMinLongitude();
+                        }
+                        if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() < posLeft) {
+                            posLeft = boundingBox.getMaxLongitude();
+                        }
+
+                        double posRight = posRights.get(bucket);
+                        if (boundingBox.getMaxLongitude() >= 0 && boundingBox.getMaxLongitude() > posRight) {
+                            posRight = boundingBox.getMaxLongitude();
+                        }
+                        if (boundingBox.getMinLongitude() >= 0 && boundingBox.getMinLongitude() > posRight) {
+                            posRight = boundingBox.getMinLongitude();
+                        }
+
+                        double negLeft = negLefts.get(bucket);
+                        if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() < negLeft) {
+                            negLeft = boundingBox.getMinLongitude();
+                        }
+                        if (boundingBox.getMaxLongitude() < 0 && boundingBox.getMaxLongitude() < negLeft) {
+                            negLeft = boundingBox.getMaxLongitude();
+                        }
+
+                        double negRight = negRights.get(bucket);
+                        if (boundingBox.getMaxLongitude() < 0 && boundingBox.getMaxLongitude() > negRight) {
+                            negRight = boundingBox.getMaxLongitude();
+                        }
+                        if (boundingBox.getMinLongitude() < 0 && boundingBox.getMinLongitude() > negRight) {
+                            negRight = boundingBox.getMinLongitude();
+                        }
+
+                        tops.set(bucket, top);
+                        bottoms.set(bucket, bottom);
+                        posLefts.set(bucket, posLeft);
+                        posRights.set(bucket, posRight);
+                        negLefts.set(bucket, negLeft);
+                        negRights.set(bucket, negRight);
+                    } else {
+                        LOGGER.error("The bounding box was null for the doc id {}", doc);
+                    }
+                }
+            }
+        };
+    }
+}
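The collector above folds each shape's bounding rectangle into six running extents per bucket, tracking positive and negative longitudes separately so that the final bounds can later account for longitude wrapping. A minimal standalone sketch of that merge step, using plain fields instead of BigArrays and hypothetical minLon/maxLon/minLat/maxLat arguments in place of the BoundingRectangle getters:

    // Sketch only: mirrors the merge logic of the collector above with plain doubles.
    final class RunningBounds {
        double top = Double.NEGATIVE_INFINITY;
        double bottom = Double.POSITIVE_INFINITY;
        double posLeft = Double.POSITIVE_INFINITY;   // smallest non-negative longitude seen
        double posRight = Double.NEGATIVE_INFINITY;  // largest non-negative longitude seen
        double negLeft = Double.POSITIVE_INFINITY;   // smallest negative longitude seen
        double negRight = Double.NEGATIVE_INFINITY;  // largest negative longitude seen

        void merge(double minLon, double maxLon, double minLat, double maxLat) {
            top = Math.max(top, maxLat);
            bottom = Math.min(bottom, minLat);
            // Both corners of the box feed the extents, split by longitude sign.
            for (double lon : new double[] { minLon, maxLon }) {
                if (lon >= 0) {
                    posLeft = Math.min(posLeft, lon);
                    posRight = Math.max(posRight, lon);
                } else {
                    negLeft = Math.min(negLeft, lon);
                    negRight = Math.max(negRight, lon);
                }
            }
        }
    }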
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java
index 3c7c292f9d193..bc7fde8d66d0a 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/composite/GeoTileGridAggregationCompositeAggregatorTests.java
@@ -17,7 +17,6 @@
 import org.opensearch.common.geo.GeoPoint;
 import org.opensearch.geo.GeoModulePlugin;
 import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregator;
 import org.opensearch.index.mapper.GeoPointFieldMapper;
 import org.opensearch.plugins.SearchPlugin;
 import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
@@ -31,7 +30,7 @@
 import java.util.Map;

 /**
- * Testing the {@link GeoTileGridAggregator} as part of CompositeAggregation.
+ * Testing the geo tile grid as part of CompositeAggregation.
  */
 public class GeoTileGridAggregationCompositeAggregatorTests extends BaseCompositeAggregatorTestCase {
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java
index d6153637f656d..5ec10a7f4f7cf 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridAggregatorTestCase.java
@@ -73,7 +73,7 @@
 import static org.hamcrest.Matchers.equalTo;

-public abstract class GeoGridAggregatorTestCase<T extends InternalGeoGridBucket> extends AggregatorTestCase {
+public abstract class GeoGridAggregatorTestCase<T extends BaseGeoGridBucket> extends AggregatorTestCase {

     private static final String FIELD_NAME = "location";
     protected static final double GEOHASH_TOLERANCE = 1E-5D;
@@ -201,9 +201,9 @@ public void testAsSubAgg() throws IOException {
         Consumer<StringTerms> verify = (terms) -> {
             Map<String, Map<String, Long>> actual = new TreeMap<>();
             for (StringTerms.Bucket tb : terms.getBuckets()) {
-                InternalGeoGrid gg = tb.getAggregations().get("gg");
+                BaseGeoGrid gg = tb.getAggregations().get("gg");
                 Map<String, Long> sub = new TreeMap<>();
-                for (InternalGeoGridBucket ggb : gg.getBuckets()) {
+                for (BaseGeoGridBucket ggb : gg.getBuckets()) {
                     sub.put(ggb.getKeyAsString(), ggb.getDocCount());
                 }
                 actual.put(tb.getKeyAsString(), sub);
@@ -299,7 +299,7 @@ private void testCase(
         String field,
         int precision,
         GeoBoundingBox geoBoundingBox,
-        Consumer<InternalGeoGrid<T>> verify,
+        Consumer<BaseGeoGrid<T>> verify,
         CheckedConsumer<RandomIndexWriter, IOException> buildIndex
     ) throws IOException {
         testCase(query, precision, geoBoundingBox, verify, buildIndex, createBuilder("_name").field(field));
@@ -309,7 +309,7 @@ private void testCase(
         Query query,
         int precision,
         GeoBoundingBox geoBoundingBox,
-        Consumer<InternalGeoGrid<T>> verify,
+        Consumer<BaseGeoGrid<T>> verify,
         CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
         GeoGridAggregationBuilder aggregationBuilder
     ) throws IOException {
@@ -333,7 +333,7 @@ private void testCase(
         aggregator.preCollection();
         indexSearcher.search(query, aggregator);
         aggregator.postCollection();
-        verify.accept((InternalGeoGrid) aggregator.buildTopLevel());
+        verify.accept((BaseGeoGrid) aggregator.buildTopLevel());

         indexReader.close();
         directory.close();
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridTestCase.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridTestCase.java
index 432736a2b43fe..2a655239997b6 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridTestCase.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoGridTestCase.java
@@ -50,16 +50,16 @@
 import static org.hamcrest.Matchers.equalTo;

-public abstract class GeoGridTestCase<T extends InternalGeoGrid<B>, B extends InternalGeoGridBucket> extends
-    InternalMultiBucketAggregationTestCase<T> {
+public abstract class GeoGridTestCase<T extends BaseGeoGrid<B>, B extends BaseGeoGridBucket> extends InternalMultiBucketAggregationTestCase<
+    T> {

     /**
-     * Instantiate a {@link InternalGeoGrid}-derived class using the same parameters as constructor.
+     * Instantiate a {@link BaseGeoGrid}-derived class using the same parameters as constructor.
      */
-    protected abstract T createInternalGeoGrid(String name, int size, List<InternalGeoGridBucket> buckets, Map<String, Object> metadata);
+    protected abstract T createInternalGeoGrid(String name, int size, List<BaseGeoGridBucket> buckets, Map<String, Object> metadata);

     /**
-     * Instantiate a {@link InternalGeoGridBucket}-derived class using the same parameters as constructor.
+     * Instantiate a {@link BaseGeoGridBucket}-derived class using the same parameters as constructor.
      */
     protected abstract B createInternalGeoGridBucket(Long key, long docCount, InternalAggregations aggregations);

@@ -117,7 +117,7 @@ protected List getNamedXContents() {
     protected T createTestInstance(String name, Map<String, Object> metadata, InternalAggregations aggregations) {
         final int precision = randomPrecision();
         int size = randomNumberOfBuckets();
-        List<InternalGeoGridBucket> buckets = new ArrayList<>(size);
+        List<BaseGeoGridBucket> buckets = new ArrayList<>(size);
         for (int i = 0; i < size; i++) {
             double latitude = randomDoubleBetween(-90.0, 90.0, false);
             double longitude = randomDoubleBetween(-180.0, 180.0, false);
@@ -176,7 +176,7 @@ protected Class implementationClass() {
     protected T mutateInstance(T instance) {
         String name = instance.getName();
         int size = instance.getRequiredSize();
-        List<InternalGeoGridBucket> buckets = instance.getBuckets();
+        List<BaseGeoGridBucket> buckets = instance.getBuckets();
         Map<String, Object> metadata = instance.getMetadata();
         switch (between(0, 3)) {
             case 0:
@@ -206,7 +206,7 @@ protected T mutateInstance(T instance) {
     }

     public void testCreateFromBuckets() {
-        InternalGeoGrid original = createTestInstance();
+        BaseGeoGrid original = createTestInstance();
         assertThat(original, equalTo(original.create(original.buckets)));
     }
 }
diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridTests.java
index c84c6ef5ec076..ada943b6dd369 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridTests.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoHashGridTests.java
@@ -37,16 +37,11 @@
 import java.util.List;
 import java.util.Map;

-public class GeoHashGridTests extends GeoGridTestCase {
+public class GeoHashGridTests extends GeoGridTestCase {

     @Override
-    protected InternalGeoHashGrid
createInternalGeoGrid( - String name, - int size, - List buckets, - Map metadata - ) { - return new InternalGeoHashGrid(name, size, buckets, metadata); + protected GeoHashGrid createInternalGeoGrid(String name, int size, List buckets, Map metadata) { + return new GeoHashGrid(name, size, buckets, metadata); } @Override diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridTests.java index ead67e0455d94..b59e9ec2cff53 100644 --- a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridTests.java +++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/bucket/geogrid/GeoTileGridTests.java @@ -37,16 +37,11 @@ import java.util.List; import java.util.Map; -public class GeoTileGridTests extends GeoGridTestCase { +public class GeoTileGridTests extends GeoGridTestCase { @Override - protected InternalGeoTileGrid createInternalGeoGrid( - String name, - int size, - List buckets, - Map metadata - ) { - return new InternalGeoTileGrid(name, size, buckets, metadata); + protected GeoTileGrid createInternalGeoGrid(String name, int size, List buckets, Map metadata) { + return new GeoTileGrid(name, size, buckets, metadata); } @Override diff --git a/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java new file mode 100644 index 0000000000000..d449d72f0b148 --- /dev/null +++ b/modules/geo/src/test/java/org/opensearch/geo/search/aggregations/metrics/GeoBoundsGeoShapeAggregatorTests.java @@ -0,0 +1,237 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. 
+ */
+
+package org.opensearch.geo.search.aggregations.metrics;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.LatLonShape;
+import org.apache.lucene.document.ShapeDocValuesField;
+import org.apache.lucene.geo.LatLonGeometry;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.tests.index.RandomIndexWriter;
+import org.hamcrest.MatcherAssert;
+import org.junit.Assert;
+import org.opensearch.common.geo.GeoBoundingBox;
+import org.opensearch.common.geo.GeoPoint;
+import org.opensearch.common.geo.GeoShapeUtils;
+import org.opensearch.geo.GeoModulePlugin;
+import org.opensearch.geo.tests.common.AggregationInspectionHelper;
+import org.opensearch.geo.tests.common.RandomGeoGeometryGenerator;
+import org.opensearch.geometry.Circle;
+import org.opensearch.geometry.Geometry;
+import org.opensearch.geometry.Line;
+import org.opensearch.geometry.Point;
+import org.opensearch.geometry.Polygon;
+import org.opensearch.geometry.ShapeType;
+import org.opensearch.index.mapper.GeoShapeFieldMapper;
+import org.opensearch.index.mapper.GeoShapeIndexer;
+import org.opensearch.index.mapper.MappedFieldType;
+import org.opensearch.plugins.SearchPlugin;
+import org.opensearch.search.aggregations.AggregatorTestCase;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Locale;
+import java.util.Random;
+
+import static org.hamcrest.Matchers.closeTo;
+
+public class GeoBoundsGeoShapeAggregatorTests extends AggregatorTestCase {
+    private static final Logger LOG = LogManager.getLogger(GeoBoundsGeoShapeAggregatorTests.class);
+    private static final double GEOHASH_TOLERANCE = 1E-5D;
+    private static final String AGGREGATION_NAME = "my_agg";
+    private static final String FIELD_NAME = "field";
+
+    /**
+     * Overrides the search plugins list with {@link GeoModulePlugin} so that the test case knows this plugin
+     * should be loaded during the tests.
+     *
+     * @return List of {@link SearchPlugin}
+     */
+    @Override
+    protected List<SearchPlugin> getSearchPlugins() {
+        return Collections.singletonList(new GeoModulePlugin());
+    }
+
+    /**
+     * Tests empty aggregator results.
+     *
+     * @throws Exception if an error occurs accessing the index
+     */
+    public void testEmpty() throws Exception {
+        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+            final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME)
+                .wrapLongitude(false);
+
+            final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME);
+            try (IndexReader reader = w.getReader()) {
+                IndexSearcher searcher = new IndexSearcher(reader);
+                InternalGeoBounds bounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                assertTrue(Double.isInfinite(bounds.top));
+                assertTrue(Double.isInfinite(bounds.bottom));
+                assertTrue(Double.isInfinite(bounds.posLeft));
+                assertTrue(Double.isInfinite(bounds.posRight));
+                assertTrue(Double.isInfinite(bounds.negLeft));
+                assertTrue(Double.isInfinite(bounds.negRight));
+                assertFalse(AggregationInspectionHelper.hasValue(bounds));
+            }
+        }
+    }
+
+    /**
+     * Tests the GeoBounds aggregator for random shapes which are indexed.
+     *
+     * @throws Exception if an error occurs accessing the index
+     */
+    public void testRandom() throws Exception {
+        final int numDocs = randomIntBetween(50, 100);
+        final List<Double> Y = new ArrayList<>();
+        final List<Double> X = new ArrayList<>();
+        final Random random = random();
+        try (Directory dir = newDirectory(); RandomIndexWriter w = new RandomIndexWriter(random, dir)) {
+            for (int i = 0; i < numDocs; i++) {
+                final Document document = new Document();
+                final Geometry geometry = randomLuceneGeometry(random);
+                LOG.debug("Random Geometry created for Indexing : {}", geometry);
+                document.add(createShapeDocValue(geometry));
+                w.addDocument(document);
+                getAllXAndYPoints(geometry, X, Y);
+            }
+            final GeoBoundsAggregationBuilder aggBuilder = new GeoBoundsAggregationBuilder(AGGREGATION_NAME).field(FIELD_NAME)
+                .wrapLongitude(false);
+            final MappedFieldType fieldType = new GeoShapeFieldMapper.GeoShapeFieldType(FIELD_NAME);
+            try (IndexReader reader = w.getReader()) {
+                final IndexSearcher searcher = new IndexSearcher(reader);
+                final InternalGeoBounds actualBounds = searchAndReduce(searcher, new MatchAllDocsQuery(), aggBuilder, fieldType);
+                final GeoBoundingBox expectedGeoBounds = getExpectedGeoBounds(X, Y);
+                MatcherAssert.assertThat(
+                    actualBounds.bottomRight().getLat(),
+                    closeTo(expectedGeoBounds.bottomRight().getLat(), GEOHASH_TOLERANCE)
+                );
+                MatcherAssert.assertThat(
+                    actualBounds.bottomRight().getLon(),
+                    closeTo(expectedGeoBounds.bottomRight().getLon(), GEOHASH_TOLERANCE)
+                );
+                MatcherAssert.assertThat(actualBounds.topLeft().getLat(), closeTo(expectedGeoBounds.topLeft().getLat(), GEOHASH_TOLERANCE));
+                MatcherAssert.assertThat(actualBounds.topLeft().getLon(), closeTo(expectedGeoBounds.topLeft().getLon(), GEOHASH_TOLERANCE));
+                assertTrue(AggregationInspectionHelper.hasValue(actualBounds));
+            }
+        }
+    }
+
+    private GeoBoundingBox getExpectedGeoBounds(final List<Double> X, final List<Double> Y) {
+        double top = Double.NEGATIVE_INFINITY;
+        double bottom = Double.POSITIVE_INFINITY;
+        double posLeft = Double.POSITIVE_INFINITY;
+        double posRight = Double.NEGATIVE_INFINITY;
+        double negLeft = Double.POSITIVE_INFINITY;
+        double negRight = Double.NEGATIVE_INFINITY;
+        // Finding the bounding box for the shapes.
+        for (final Double lon : X) {
+            if (lon >= 0 && lon < posLeft) {
+                posLeft = lon;
+            }
+            if (lon >= 0 && lon > posRight) {
+                posRight = lon;
+            }
+            if (lon < 0 && lon < negLeft) {
+                negLeft = lon;
+            }
+            if (lon < 0 && lon > negRight) {
+                negRight = lon;
+            }
+        }
+        for (final Double lat : Y) {
+            if (lat > top) {
+                top = lat;
+            }
+            if (lat < bottom) {
+                bottom = lat;
+            }
+        }
+        if (Double.isInfinite(posLeft)) {
+            return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, negRight));
+        } else if (Double.isInfinite(negLeft)) {
+            return new GeoBoundingBox(new GeoPoint(top, posLeft), new GeoPoint(bottom, posRight));
+        } else {
+            return new GeoBoundingBox(new GeoPoint(top, negLeft), new GeoPoint(bottom, posRight));
+        }
+    }
+
+    private void getAllXAndYPoints(final Geometry geometry, final List<Double> X, final List<Double> Y) {
+        if (geometry instanceof Point) {
+            final Point point = (Point) geometry;
+            X.add(point.getX());
+            Y.add(point.getY());
+            return;
+        } else if (geometry instanceof Polygon) {
+            final Polygon polygon = (Polygon) geometry;
+            for (int i = 0; i < polygon.getPolygon().getX().length; i++) {
+                X.add(polygon.getPolygon().getX(i));
+                Y.add(polygon.getPolygon().getY(i));
+            }
+            return;
+        } else if (geometry instanceof Line) {
+            final Line line = (Line) geometry;
+            for (int i = 0; i < line.getX().length; i++) {
+                X.add(line.getX(i));
+                Y.add(line.getY(i));
+            }
+            return;
+        }
+        Assert.fail(
+            String.format(Locale.ROOT, "Error cannot convert the %s to a valid indexable format[POINT, POLYGON, LINE]", geometry.getClass())
+        );
+    }
+
+    private ShapeDocValuesField createShapeDocValue(final Geometry geometry) {
+        if (geometry instanceof Point) {
+            final Point point = (Point) geometry;
+            return LatLonShape.createDocValueField(FIELD_NAME, point.getLat(), point.getLon());
+        } else if (geometry instanceof Polygon) {
+            return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLucenePolygon((Polygon) geometry));
+        } else if (geometry instanceof Line) {
+            return LatLonShape.createDocValueField(FIELD_NAME, GeoShapeUtils.toLuceneLine((Line) geometry));
+        }
+        Assert.fail(
+            String.format(Locale.ROOT, "Error cannot convert the %s to a valid indexable format[POINT, POLYGON, LINE]", geometry.getClass())
+        );
+        return null;
+    }
+
+    /**
+     * Generates a random {@link Geometry} that maps onto the Lucene {@link LatLonGeometry} primitives. When a
+     * geo_shape field is indexed, all of the {@link ShapeType}s that OpenSearch supports are broken down into
+     * only three shapes, so we generate only those three: {@link org.apache.lucene.geo.Point},
+     * {@link org.apache.lucene.geo.Line} and {@link org.apache.lucene.geo.Polygon}. {@link Circle} is not supported.
+     * Check {@link GeoShapeIndexer#prepareForIndexing(org.opensearch.geometry.Geometry)}
+     *
+     * @return {@link Geometry}
+     */
+    private static Geometry randomLuceneGeometry(final Random r) {
+        int shapeNumber = OpenSearchTestCase.randomIntBetween(0, 2);
+        if (shapeNumber == 0) {
+            // Point
+            return RandomGeoGeometryGenerator.randomPoint(r);
+        } else if (shapeNumber == 1) {
+            // LineString
+            return RandomGeoGeometryGenerator.randomLine(r);
+        } else {
+            // Polygon
+            return RandomGeoGeometryGenerator.randomPolygon(r);
+        }
+    }
+
+}
diff --git a/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationBuilders.java b/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationBuilders.java
index c0d7e51047c6b..706c73e7416f5 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationBuilders.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationBuilders.java
@@ -10,8 +10,8 @@

 import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
 import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
-import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
-import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGrid;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGrid;
 import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
 import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder;

@@ -24,14 +24,14 @@ public static GeoBoundsAggregationBuilder geoBounds(String name) {
     }

     /**
-     * Create a new {@link InternalGeoHashGrid} aggregation with the given name.
+     * Create a new {@link GeoHashGrid} aggregation with the given name.
      */
     public static GeoHashGridAggregationBuilder geohashGrid(String name) {
         return new GeoHashGridAggregationBuilder(name);
     }

     /**
-     * Create a new {@link InternalGeoTileGrid} aggregation with the given name.
+     * Create a new {@link GeoTileGrid} aggregation with the given name.
      */
     public static GeoTileGridAggregationBuilder geotileGrid(String name) {
         return new GeoTileGridAggregationBuilder(name);
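The renames above stay source-compatible for callers that go through these builder helpers and the public GeoGrid interface rather than the now package-private result classes. A hedged usage sketch; the field name "location", the precision values, and the searchResponse variable are illustrative, not taken from this patch:

    // Build the grid aggregations via the helpers above; read results through the GeoGrid interface.
    GeoGridAggregationBuilder hashGrid = AggregationBuilders.geohashGrid("hash_grid").field("location").precision(4);
    GeoGridAggregationBuilder tileGrid = AggregationBuilders.geotileGrid("tile_grid").field("location").precision(7);

    // searchResponse is assumed to be an already-executed SearchResponse containing "hash_grid".
    GeoGrid grid = searchResponse.getAggregations().get("hash_grid");
    for (GeoGrid.Bucket bucket : grid.getBuckets()) {
        System.out.println(bucket.getKeyAsString() + " -> " + bucket.getDocCount());
    }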
diff --git a/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationInspectionHelper.java b/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationInspectionHelper.java
index 3473cf2d94b76..89debdf5abd95 100644
--- a/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationInspectionHelper.java
+++ b/modules/geo/src/test/java/org/opensearch/geo/tests/common/AggregationInspectionHelper.java
@@ -8,7 +8,7 @@

 package org.opensearch.geo.tests.common;

-import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoGrid;
+import org.opensearch.geo.search.aggregations.bucket.geogrid.BaseGeoGrid;
 import org.opensearch.geo.search.aggregations.metrics.InternalGeoBounds;

 public class AggregationInspectionHelper {
@@ -17,7 +17,7 @@ public static boolean hasValue(InternalGeoBounds agg) {
         return (agg.topLeft() == null && agg.bottomRight() == null) == false;
     }

-    public static boolean hasValue(InternalGeoGrid agg) {
+    public static boolean hasValue(BaseGeoGrid agg) {
         return agg.getBuckets().stream().anyMatch(bucket -> bucket.getDocCount() > 0);
     }
 }
diff --git a/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java
new file mode 100644
index 0000000000000..caf15507e08c5
--- /dev/null
+++ b/modules/geo/src/test/java/org/opensearch/geo/tests/common/RandomGeoGeometryGenerator.java
@@ -0,0 +1,240 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ */
+
+package org.opensearch.geo.tests.common;
+
+import org.junit.Assert;
+import org.opensearch.geo.algorithm.PolygonGenerator;
+import org.opensearch.geometry.Geometry;
+import org.opensearch.geometry.GeometryCollection;
+import org.opensearch.geometry.Line;
+import org.opensearch.geometry.LinearRing;
+import org.opensearch.geometry.MultiLine;
+import org.opensearch.geometry.MultiPoint;
+import org.opensearch.geometry.MultiPolygon;
+import org.opensearch.geometry.Point;
+import org.opensearch.geometry.Polygon;
+import org.opensearch.geometry.Rectangle;
+import org.opensearch.geometry.ShapeType;
+import org.opensearch.index.mapper.GeoShapeIndexer;
+import org.opensearch.test.OpenSearchTestCase;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Locale;
+import java.util.Random;
+import java.util.function.Predicate;
+import java.util.stream.Collectors;
+import java.util.stream.IntStream;
+
+/**
+ * Random geo generation utilities for randomized geo_shape type testing.
+ */
+public class RandomGeoGeometryGenerator {
+    // Picking 10 as the maximum number of polygon edges; we don't want it to be too large, which can make
+    // debugging harder.
+    private static final int MAX_VERTEXES = 10;
+    private static final int MAX_MULTIPLE_GEOMETRIES = 10;
+
+    private static final Predicate<ShapeType> NOT_SUPPORTED_SHAPES = shapeType -> shapeType != ShapeType.CIRCLE
+        && shapeType != ShapeType.LINEARRING;
+
+    /**
+     * List of only the supported geometries, as defined here: {@link GeoShapeIndexer#prepareForIndexing(Geometry)}
+     */
+    private static final List<ShapeType> SUPPORTED_SHAPE_TYPES = Arrays.stream(ShapeType.values())
+        .filter(NOT_SUPPORTED_SHAPES)
+        .collect(Collectors.toList());
+
+    /**
+     * Returns a random Geometry, making sure that only geometries supported by OpenSearch during indexing
+     * are returned. Check {@link GeoShapeIndexer#prepareForIndexing(Geometry)}
+     *
+     * @return {@link Geometry}
+     */
+    public static Geometry randomGeometry(final Random r) {
+        final ShapeType randomShapeType = SUPPORTED_SHAPE_TYPES.get(
+            OpenSearchTestCase.randomIntBetween(0, SUPPORTED_SHAPE_TYPES.size() - 1)
+        );
+        switch (randomShapeType) {
+            case POINT:
+                return randomPoint(r);
+            case MULTIPOINT:
+                return randomMultiPoint(r);
+            case POLYGON:
+                return randomPolygon(r);
+            case LINESTRING:
+                return randomLine(r);
+            case MULTIPOLYGON:
+                return randomMultiPolygon(r);
+            case GEOMETRYCOLLECTION:
+                return randomGeometryCollection(r);
+            case MULTILINESTRING:
+                return randomMultiLine(r);
+            case ENVELOPE:
+                return randomRectangle(r);
+            default:
+                Assert.fail(String.format(Locale.ROOT, "Cannot create a geometry of type %s ", randomShapeType));
+        }
+        return null;
+    }
+
+    /**
+     * Generates a random point on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link Point}
+     */
+    public static Point randomPoint(final Random r) {
+        double[] pt = getLonAndLatitude(r);
+        return new Point(pt[0], pt[1]);
+    }
+
+    /**
+     * Generates a random polygon on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link Polygon}
+     */
+    public static Polygon randomPolygon(final Random r) {
+        final int vertexCount = OpenSearchTestCase.randomIntBetween(3, MAX_VERTEXES);
+        return randomPolygonWithFixedVertexCount(r, vertexCount);
+    }
+
+    /**
+     * Generates a random line on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link Line}
+     */
+    public static Line randomLine(final Random r) {
+        final double[] pt1 = getLonAndLatitude(r);
+        final double[] pt2 = getLonAndLatitude(r);
+        final double[] x = { pt1[0], pt2[0] };
+        final double[] y = { pt1[1], pt2[1] };
+        return new Line(x, y);
+    }
+
+    /**
+     * Returns a {@link MultiPoint} object denoting a list of points on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link MultiPoint}
+     */
+    public static MultiPoint randomMultiPoint(final Random r) {
+        int multiplePoints = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES);
+        final List<Point> pointsList = new ArrayList<>();
+        IntStream.range(0, multiplePoints).forEach(i -> pointsList.add(randomPoint(r)));
+        return new MultiPoint(pointsList);
+    }
+
+    /**
+     * Returns a {@link MultiPolygon} object denoting various polygons on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link MultiPolygon}
+     */
+    public static MultiPolygon randomMultiPolygon(final Random r) {
+        int multiplePolygons = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES);
+        final List<Polygon> polygonList = new ArrayList<>();
+        IntStream.range(0, multiplePolygons).forEach(i -> polygonList.add(randomPolygon(r)));
+        return new MultiPolygon(polygonList);
+    }
+
+    /**
+     * Returns a {@link GeometryCollection} object holding various shapes on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link GeometryCollection}
+     */
+    public static GeometryCollection<Geometry> randomGeometryCollection(final Random r) {
+        final List<Geometry> geometries = new ArrayList<>();
+        geometries.addAll(randomMultiPoint(r).getAll());
+        geometries.addAll(randomMultiPolygon(r).getAll());
+        geometries.addAll(randomMultiLine(r).getAll());
+        geometries.add(randomPoint(r));
+        geometries.add(randomLine(r));
+        geometries.add(randomPolygon(r));
+        geometries.add(randomRectangle(r));
+        return new GeometryCollection<>(geometries);
+    }
+
+    /**
+     * Returns a {@link MultiLine} object containing multiple lines on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link MultiLine}
+     */
+    public static MultiLine randomMultiLine(Random r) {
+        int multiLines = OpenSearchTestCase.randomIntBetween(1, MAX_MULTIPLE_GEOMETRIES);
+        final List<Line> linesList = new ArrayList<>();
+        IntStream.range(0, multiLines).forEach(i -> linesList.add(randomLine(r)));
+        return new MultiLine(linesList);
+    }
+
+    /**
+     * Returns a random {@link Rectangle} created on the earth surface.
+     *
+     * @param r {@link Random}
+     * @return {@link Rectangle}
+     */
+    public static Rectangle randomRectangle(final Random r) {
+        final Polygon polygon = randomPolygonWithFixedVertexCount(r, 4);
+        double minX = Double.POSITIVE_INFINITY, maxX = Double.NEGATIVE_INFINITY, maxY = Double.NEGATIVE_INFINITY, minY = Double.POSITIVE_INFINITY;
+        for (int i = 0; i < polygon.getPolygon().length(); i++) {
+            double x = polygon.getPolygon().getX()[i];
+            double y = polygon.getPolygon().getY()[i];
+
+            minX = Math.min(minX, x);
+            minY = Math.min(minY, y);
+            maxX = Math.max(maxX, x);
+            maxY = Math.max(maxY, y);
+        }
+        return new Rectangle(minX, maxX, maxY, minY);
+    }
+
+    /**
+     * Returns a double array where pt[0] is the longitude and pt[1] is the latitude.
+     *
+     * @param r {@link Random}
+     * @return double[]
+     */
+    private static double[] getLonAndLatitude(final Random r) {
+        double[] pt = new double[2];
+        RandomGeoGenerator.randomPoint(r, pt);
+        return pt;
+    }
+
+    private static Polygon randomPolygonWithFixedVertexCount(final Random r, final int vertexCount) {
+        final List<Double> xPool = new ArrayList<>(vertexCount);
+        final List<Double> yPool = new ArrayList<>(vertexCount);
+        IntStream.range(0, vertexCount).forEach(iterator -> {
+            double[] pt = getLonAndLatitude(r);
+            xPool.add(pt[0]);
+            yPool.add(pt[1]);
+        });
+        final List<double[]> pointsList = PolygonGenerator.generatePolygon(xPool, yPool, r);
+        // Validate the generated vertex lists.
+        assert vertexCount == pointsList.get(0).length;
+        assert vertexCount == pointsList.get(1).length;
+        // Create the LinearRing; since we need to close the polygon, the vertex count is increased by 1.
+        final double[] x = new double[vertexCount + 1];
+        final double[] y = new double[vertexCount + 1];
+        IntStream.range(0, vertexCount).forEach(iterator -> {
+            x[iterator] = pointsList.get(0)[iterator];
+            y[iterator] = pointsList.get(1)[iterator];
+        });
+        // Make sure to close the polygon.
+        x[vertexCount] = x[0];
+        y[vertexCount] = y[0];
+        final LinearRing linearRing = new LinearRing(x, y);
+        return new Polygon(linearRing);
+    }
+
+}
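randomPolygonWithFixedVertexCount above closes the ring by copying the first vertex into an extra trailing slot, since a LinearRing is only valid when its first and last points coincide. The same closing step in isolation; the closeRing helper name is mine, not part of the patch:

    // A LinearRing requires first point == last point, so append the first vertex again.
    static double[] closeRing(double[] open) {
        double[] closed = java.util.Arrays.copyOf(open, open.length + 1);
        closed[open.length] = open[0];
        return closed;
    }
    // usage: new LinearRing(closeRing(xs), closeRing(ys))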
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessorGetAction.java
index bb587350f4256..518323e0901cf 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessorGetAction.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/GrokProcessorGetAction.java
@@ -31,7 +31,6 @@
 package org.opensearch.ingest.common;

-import org.opensearch.LegacyESVersion;
 import org.opensearch.action.ActionListener;
 import org.opensearch.action.ActionRequest;
 import org.opensearch.action.ActionRequestValidationException;
@@ -79,7 +78,7 @@ public Request(boolean sorted) {

         Request(StreamInput in) throws IOException {
             super(in);
-            this.sorted = in.getVersion().onOrAfter(LegacyESVersion.V_7_10_0) ? in.readBoolean() : false;
+            this.sorted = in.readBoolean();
         }

         @Override
@@ -90,9 +89,7 @@ public ActionRequestValidationException validate() {
         @Override
         public void writeTo(StreamOutput out) throws IOException {
             super.writeTo(out);
-            if (out.getVersion().onOrAfter(LegacyESVersion.V_7_10_0)) {
-                out.writeBoolean(sorted);
-            }
+            out.writeBoolean(sorted);
         }

         public boolean sorted() {
diff --git a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java
index bf80c5b064703..bb236f957a587 100644
--- a/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java
+++ b/modules/ingest-common/src/main/java/org/opensearch/ingest/common/URLDecodeProcessor.java
@@ -32,8 +32,8 @@

 package org.opensearch.ingest.common;

-import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
+import java.nio.charset.StandardCharsets;
 import java.util.Map;

 /**
@@ -48,11 +48,7 @@ public final class URLDecodeProcessor extends AbstractStringProcessor {
     }

     public static String apply(String value) {
-        try {
-            return URLDecoder.decode(value, "UTF-8");
-        } catch (UnsupportedEncodingException e) {
-            throw new IllegalArgumentException("Could not URL-decode value.", e);
-        }
+        return URLDecoder.decode(value, StandardCharsets.UTF_8);
     }

     @Override
diff --git a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/URLDecodeProcessorTests.java b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/URLDecodeProcessorTests.java
index 81ed3c89768b7..3d68648825594 100644
--- a/modules/ingest-common/src/test/java/org/opensearch/ingest/common/URLDecodeProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/opensearch/ingest/common/URLDecodeProcessorTests.java
@@ -32,13 +32,14 @@

 package org.opensearch.ingest.common;

-import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;

 public class URLDecodeProcessorTests extends AbstractStringProcessorTestCase {
     @Override
     protected String modifyInput(String input) {
-        return "Hello%20G%C3%BCnter" + input;
+        return "Hello%20G%C3%BCnter" + urlEncode(input);
     }

     @Override
@@ -48,10 +49,10 @@ protected AbstractStringProcessor newProcessor(String field, boolean ign

     @Override
     protected String expectedResult(String input) {
-        try {
-            return "Hello Günter" + URLDecoder.decode(input, "UTF-8");
-        } catch (UnsupportedEncodingException e) {
-            throw new IllegalArgumentException("invalid");
-        }
+        return "Hello Günter" + URLDecoder.decode(urlEncode(input), StandardCharsets.UTF_8);
+    }
+
+    private static String urlEncode(String s) {
+        return URLEncoder.encode(s, StandardCharsets.UTF_8);
     }
 }
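The URLDecodeProcessor change swaps URLDecoder's String-charset overload, which forces a catch of UnsupportedEncodingException, for the Charset overload added in Java 10, which cannot throw it. A minimal sketch of the before and after; the UrlDecodeExample class is hypothetical:

    import java.net.URLDecoder;
    import java.nio.charset.StandardCharsets;

    class UrlDecodeExample {
        static String decode(String value) {
            // Before: URLDecoder.decode(value, "UTF-8") declares UnsupportedEncodingException,
            // even though UTF-8 is guaranteed to be present on every JVM.
            // After: the Charset overload performs the same decoding with no checked exception.
            return URLDecoder.decode(value, StandardCharsets.UTF_8);
        }
    }

For example, decode("Hello%20G%C3%BCnter") returns "Hello Günter", matching the test expectation above.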
diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle
index 7dce788f3a4a4..4e4186c4888f6 100644
--- a/modules/ingest-geoip/build.gradle
+++ b/modules/ingest-geoip/build.gradle
@@ -39,11 +39,9 @@ opensearchplugin {
 }

 dependencies {
-  api('com.maxmind.geoip2:geoip2:3.0.1')
+  api('com.maxmind.geoip2:geoip2:3.0.2')
   // geoip2 dependencies:
-  api("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}")
-  api("com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}")
-  api('com.maxmind.db:maxmind-db:2.0.0')
+  api('com.maxmind.db:maxmind-db:2.1.0')

   testImplementation 'org.elasticsearch:geolite2-databases:20191119'
 }
diff --git a/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1
deleted file mode 100644
index f1d5ac5aea546..0000000000000
--- a/modules/ingest-geoip/licenses/geoip2-3.0.1.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-8a814ae92a1d8c35f82d0ff76d86927c191b7916
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/geoip2-3.0.2.jar.sha1 b/modules/ingest-geoip/licenses/geoip2-3.0.2.jar.sha1
new file mode 100644
index 0000000000000..2ff70cf499713
--- /dev/null
+++ b/modules/ingest-geoip/licenses/geoip2-3.0.2.jar.sha1
@@ -0,0 +1 @@
+f0ab0a451309c93f0fb6bf3cb203ba19d452c800
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1
deleted file mode 100644
index 7e68b8b99757d..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-annotations-2.13.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7198b3aac15285a49e218e08441c5f70af00fc51
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1
deleted file mode 100644
index fd75028bd141f..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-2.13.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-56deb9ea2c93a7a556b3afbedd616d342963464e
\ No newline at end of file
diff --git a/modules/ingest-geoip/licenses/jackson-databind-LICENSE b/modules/ingest-geoip/licenses/jackson-databind-LICENSE
deleted file mode 100644
index f5f45d26a49d6..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-LICENSE
+++ /dev/null
@@ -1,8 +0,0 @@
-This copy of Jackson JSON processor streaming parser/generator is licensed under the
-Apache (Software) License, version 2.0 ("the License").
-See the License for details about distribution rights, and the
-specific rights regarding derivate works.
-
-You may obtain a copy of the License at:
-
-http://www.apache.org/licenses/LICENSE-2.0
diff --git a/modules/ingest-geoip/licenses/jackson-databind-NOTICE b/modules/ingest-geoip/licenses/jackson-databind-NOTICE
deleted file mode 100644
index 4c976b7b4cc58..0000000000000
--- a/modules/ingest-geoip/licenses/jackson-databind-NOTICE
+++ /dev/null
@@ -1,20 +0,0 @@
-# Jackson JSON processor
-
-Jackson is a high-performance, Free/Open Source JSON processing library.
-It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has
-been in development since 2007.
-It is currently developed by a community of developers, as well as supported
-commercially by FasterXML.com.
-
-## Licensing
-
-Jackson core and extension components may licensed under different licenses.
-To find the details that apply to this artifact see the accompanying LICENSE file.
-For more information, including possible other licensing options, contact
-FasterXML.com (http://fasterxml.com).
-
-## Credits
-
-A list of contributors may be found from CREDITS file, which is included
-in some artifacts (usually source distributions); but is always available
-from the source code management (SCM) system project uses.
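A recurring cleanup in this patch (GrokProcessorGetAction above, MultiSearchTemplateResponse and RankEvalRequest below) removes LegacyESVersion gates from the wire format: once every supported peer is guaranteed to send a field, the conditional read and write collapse to unconditional ones. A schematic sketch of the pattern; the Example class is hypothetical:

    import org.opensearch.common.io.stream.StreamInput;
    import java.io.IOException;

    final class Example {
        final boolean sorted;

        Example(StreamInput in) throws IOException {
            // Before: this.sorted = in.getVersion().onOrAfter(LegacyESVersion.V_7_10_0) ? in.readBoolean() : false;
            // After: all wire-compatible peers always write the flag, so read it unconditionally.
            this.sorted = in.readBoolean();
        }
    }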
diff --git a/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 deleted file mode 100644 index 32c18f89c6a29..0000000000000 --- a/modules/ingest-geoip/licenses/maxmind-db-2.0.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e7e0fd82da0a160b7928ba214e699a7e6a74fff4 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/maxmind-db-2.1.0.jar.sha1 b/modules/ingest-geoip/licenses/maxmind-db-2.1.0.jar.sha1 new file mode 100644 index 0000000000000..3d9f6c443ec9f --- /dev/null +++ b/modules/ingest-geoip/licenses/maxmind-db-2.1.0.jar.sha1 @@ -0,0 +1 @@ +5fb0a7c4677ba725149ed557df9d0809d1836b80 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-9.3.jar.sha1 deleted file mode 100644 index 71d3966a6f6f9..0000000000000 --- a/modules/lang-expression/licenses/asm-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e6300ef51c1d801a7ed62d07cd221aca3a90640 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-9.4.jar.sha1 b/modules/lang-expression/licenses/asm-9.4.jar.sha1 new file mode 100644 index 0000000000000..75f2b0fe9a112 --- /dev/null +++ b/modules/lang-expression/licenses/asm-9.4.jar.sha1 @@ -0,0 +1 @@ +b4e0e2d2e023aa317b7cfcfc916377ea348e07d1 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 deleted file mode 100644 index fd7cd4943a57c..0000000000000 --- a/modules/lang-expression/licenses/asm-commons-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1f2a432d1212f5c352ae607d7b61dcae20c20af5 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-commons-9.4.jar.sha1 b/modules/lang-expression/licenses/asm-commons-9.4.jar.sha1 new file mode 100644 index 0000000000000..e0e2a2f4e63e9 --- /dev/null +++ b/modules/lang-expression/licenses/asm-commons-9.4.jar.sha1 @@ -0,0 +1 @@ +8fc2810ddbcbbec0a8bbccb3f8eda58321839912 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 deleted file mode 100644 index 238f0006424d3..0000000000000 --- a/modules/lang-expression/licenses/asm-tree-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -78d2ecd61318b5a58cd04fb237636c0e86b77d97 \ No newline at end of file diff --git a/modules/lang-expression/licenses/asm-tree-9.4.jar.sha1 b/modules/lang-expression/licenses/asm-tree-9.4.jar.sha1 new file mode 100644 index 0000000000000..50ce6d740aab7 --- /dev/null +++ b/modules/lang-expression/licenses/asm-tree-9.4.jar.sha1 @@ -0,0 +1 @@ +a99175a17d7fdc18cbcbd0e8ea6a5d276844190a \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.4.0-snapshot-ddf0d0a.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index ec6906d730ac1..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9f23e695b0c864fa9722e4f67d950266ca64d37b \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-9.5.0-snapshot-a4ef70f.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..0e1f3e37f508a --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ 
+c92a0928724b04224157ce2d3e105953f57f94db \ No newline at end of file diff --git a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java index 1802d03e20942..7c2c403fdd487 100644 --- a/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/opensearch/script/mustache/MultiSearchTemplateResponse.java @@ -32,7 +32,6 @@ package org.opensearch.script.mustache; -import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchException; import org.opensearch.action.ActionResponse; import org.opensearch.action.search.MultiSearchResponse; @@ -125,11 +124,7 @@ public String toString() { MultiSearchTemplateResponse(StreamInput in) throws IOException { super(in); items = in.readArray(Item::new, Item[]::new); - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - tookInMillis = in.readVLong(); - } else { - tookInMillis = -1L; - } + tookInMillis = in.readVLong(); } MultiSearchTemplateResponse(Item[] items, long tookInMillis) { @@ -159,9 +154,7 @@ public TimeValue getTook() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeArray(items); - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_0_0)) { - out.writeVLong(tookInMillis); - } + out.writeVLong(tookInMillis); } @Override diff --git a/modules/lang-painless/licenses/asm-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-9.3.jar.sha1 deleted file mode 100644 index 71d3966a6f6f9..0000000000000 --- a/modules/lang-painless/licenses/asm-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e6300ef51c1d801a7ed62d07cd221aca3a90640 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-9.4.jar.sha1 b/modules/lang-painless/licenses/asm-9.4.jar.sha1 new file mode 100644 index 0000000000000..75f2b0fe9a112 --- /dev/null +++ b/modules/lang-painless/licenses/asm-9.4.jar.sha1 @@ -0,0 +1 @@ +b4e0e2d2e023aa317b7cfcfc916377ea348e07d1 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 deleted file mode 100644 index f5a04d0196823..0000000000000 --- a/modules/lang-painless/licenses/asm-analysis-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4b071f211b37c38e0e9f5998550197c8593f6ad8 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-analysis-9.4.jar.sha1 b/modules/lang-painless/licenses/asm-analysis-9.4.jar.sha1 new file mode 100644 index 0000000000000..850a070775e4d --- /dev/null +++ b/modules/lang-painless/licenses/asm-analysis-9.4.jar.sha1 @@ -0,0 +1 @@ +0a5fec9dfc039448d4fd098fbaffcaf55373b223 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 deleted file mode 100644 index fd7cd4943a57c..0000000000000 --- a/modules/lang-painless/licenses/asm-commons-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1f2a432d1212f5c352ae607d7b61dcae20c20af5 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-commons-9.4.jar.sha1 b/modules/lang-painless/licenses/asm-commons-9.4.jar.sha1 new file mode 100644 index 0000000000000..e0e2a2f4e63e9 --- /dev/null +++ b/modules/lang-painless/licenses/asm-commons-9.4.jar.sha1 @@ -0,0 +1 @@ +8fc2810ddbcbbec0a8bbccb3f8eda58321839912 \ No newline at end of file diff --git 
a/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 deleted file mode 100644 index 238f0006424d3..0000000000000 --- a/modules/lang-painless/licenses/asm-tree-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -78d2ecd61318b5a58cd04fb237636c0e86b77d97 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-tree-9.4.jar.sha1 b/modules/lang-painless/licenses/asm-tree-9.4.jar.sha1 new file mode 100644 index 0000000000000..50ce6d740aab7 --- /dev/null +++ b/modules/lang-painless/licenses/asm-tree-9.4.jar.sha1 @@ -0,0 +1 @@ +a99175a17d7fdc18cbcbd0e8ea6a5d276844190a \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 deleted file mode 100644 index 8859c317794ba..0000000000000 --- a/modules/lang-painless/licenses/asm-util-9.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9595bc05510d0bd4b610188b77333fe4851a1975 \ No newline at end of file diff --git a/modules/lang-painless/licenses/asm-util-9.4.jar.sha1 b/modules/lang-painless/licenses/asm-util-9.4.jar.sha1 new file mode 100644 index 0000000000000..8c5854f41bcda --- /dev/null +++ b/modules/lang-painless/licenses/asm-util-9.4.jar.sha1 @@ -0,0 +1 @@ +ab1e0a84b72561dbaf1ee260321e72148ebf4b19 \ No newline at end of file diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java index 7b9efa4deb207..35c676653fdc3 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/Compiler.java @@ -212,8 +212,9 @@ private static void addFactoryMethod(Map> additionalClasses, Cl } additionalClasses.put(factoryClass.getName(), factoryClass); - for (int i = 0; i < factoryMethod.getParameterTypes().length; ++i) { - Class parameterClazz = factoryMethod.getParameterTypes()[i]; + final Class[] parameterTypes = factoryMethod.getParameterTypes(); + for (int i = 0; i < parameterTypes.length; ++i) { + Class parameterClazz = parameterTypes[i]; additionalClasses.put(parameterClazz.getName(), parameterClazz); } } diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java index ca6e68706709a..e9edfb73c740c 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/PainlessScriptEngine.java @@ -195,11 +195,12 @@ private Type generateStatefulFactory(Loader loader, ScriptContext context } } - for (int count = 0; count < newFactory.getParameterTypes().length; ++count) { + final Class[] parameterTypes = newFactory.getParameterTypes(); + for (int count = 0; count < parameterTypes.length; ++count) { writer.visitField( Opcodes.ACC_PRIVATE | Opcodes.ACC_FINAL, "$arg" + count, - Type.getType(newFactory.getParameterTypes()[count]).getDescriptor(), + Type.getType(parameterTypes[count]).getDescriptor(), null, null ).visitEnd(); @@ -211,7 +212,7 @@ private Type generateStatefulFactory(Loader loader, ScriptContext context ); org.objectweb.asm.commons.Method init = new org.objectweb.asm.commons.Method( "", - MethodType.methodType(void.class, newFactory.getParameterTypes()).toMethodDescriptorString() + MethodType.methodType(void.class, parameterTypes).toMethodDescriptorString() ); GeneratorAdapter constructor = new 
GeneratorAdapter( @@ -223,10 +224,10 @@ private Type generateStatefulFactory(Loader loader, ScriptContext context constructor.loadThis(); constructor.invokeConstructor(OBJECT_TYPE, base); - for (int count = 0; count < newFactory.getParameterTypes().length; ++count) { + for (int count = 0; count < parameterTypes.length; ++count) { constructor.loadThis(); constructor.loadArg(count); - constructor.putField(Type.getType("L" + className + ";"), "$arg" + count, Type.getType(newFactory.getParameterTypes()[count])); + constructor.putField(Type.getType("L" + className + ";"), "$arg" + count, Type.getType(parameterTypes[count])); } constructor.returnValue(); @@ -247,7 +248,7 @@ private Type generateStatefulFactory(Loader loader, ScriptContext context MethodType.methodType(newInstance.getReturnType(), newInstance.getParameterTypes()).toMethodDescriptorString() ); - List> parameters = new ArrayList<>(Arrays.asList(newFactory.getParameterTypes())); + List> parameters = new ArrayList<>(Arrays.asList(parameterTypes)); parameters.addAll(Arrays.asList(newInstance.getParameterTypes())); org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method( @@ -264,9 +265,9 @@ private Type generateStatefulFactory(Loader loader, ScriptContext context adapter.newInstance(WriterConstants.CLASS_TYPE); adapter.dup(); - for (int count = 0; count < newFactory.getParameterTypes().length; ++count) { + for (int count = 0; count < parameterTypes.length; ++count) { adapter.loadThis(); - adapter.getField(Type.getType("L" + className + ";"), "$arg" + count, Type.getType(newFactory.getParameterTypes()[count])); + adapter.getField(Type.getType("L" + className + ";"), "$arg" + count, Type.getType(parameterTypes[count])); } adapter.loadArgs(); @@ -334,13 +335,14 @@ private T generateFactory(Loader loader, ScriptContext context, Type clas } } + final Class[] parameterTypes = reflect.getParameterTypes(); org.objectweb.asm.commons.Method instance = new org.objectweb.asm.commons.Method( reflect.getName(), - MethodType.methodType(reflect.getReturnType(), reflect.getParameterTypes()).toMethodDescriptorString() + MethodType.methodType(reflect.getReturnType(), parameterTypes).toMethodDescriptorString() ); org.objectweb.asm.commons.Method constru = new org.objectweb.asm.commons.Method( "", - MethodType.methodType(void.class, reflect.getParameterTypes()).toMethodDescriptorString() + MethodType.methodType(void.class, parameterTypes).toMethodDescriptorString() ); GeneratorAdapter adapter = new GeneratorAdapter( @@ -421,9 +423,7 @@ private T generateFactory(Loader loader, ScriptContext context, Type clas private void writeNeedsMethods(Class clazz, ClassWriter writer, Set extractedVariables) { for (Method method : clazz.getMethods()) { - if (method.getName().startsWith("needs") - && method.getReturnType().equals(boolean.class) - && method.getParameterTypes().length == 0) { + if (method.getName().startsWith("needs") && method.getReturnType().equals(boolean.class) && method.getParameterCount() == 0) { String name = method.getName(); name = name.substring(5); name = Character.toLowerCase(name.charAt(0)) + name.substring(1); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java index e80f92442680a..26dcb4adabea3 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/ScriptClassInfo.java @@ -88,7 +88,7 @@ 
public ScriptClassInfo(PainlessLookup painlessLookup, Class baseClass) { + "] has more than one." ); } - } else if (m.getName().startsWith("needs") && m.getReturnType() == boolean.class && m.getParameterTypes().length == 0) { + } else if (m.getName().startsWith("needs") && m.getReturnType() == boolean.class && m.getParameterCount() == 0) { needsMethods.add(new org.objectweb.asm.commons.Method(m.getName(), NEEDS_PARAMETER_METHOD_TYPE.toMethodDescriptorString())); } else if (m.getName().startsWith("get") && m.getName().equals("getClass") == false @@ -124,7 +124,7 @@ public ScriptClassInfo(PainlessLookup painlessLookup, Class baseClass) { FunctionTable.LocalFunction defConverter = null; for (java.lang.reflect.Method m : baseClass.getMethods()) { if (m.getName().startsWith("convertFrom") - && m.getParameterTypes().length == 1 + && m.getParameterCount() == 1 && m.getReturnType() == returnType && Modifier.isStatic(m.getModifiers())) { diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java index 719a69a9977e7..c03b4199ce8d9 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/antlr/Walker.java @@ -248,10 +248,6 @@ private Location location(ParserRuleContext ctx) { return new Location(sourceName, ctx.getStart().getStartIndex()); } - private Location location(TerminalNode tn) { - return new Location(sourceName, tn.getSymbol().getStartIndex()); - } - @Override public ANode visitSource(SourceContext ctx) { List functions = new ArrayList<>(); diff --git a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java index e43d1beb9b25b..e79eda975f417 100644 --- a/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/opensearch/painless/lookup/PainlessLookupBuilder.java @@ -2168,9 +2168,10 @@ private void generateBridgeMethod(PainlessClassBuilder painlessClassBuilder, Pai bridgeMethodWriter.loadArg(0); } - for (int typeParameterCount = 0; typeParameterCount < javaMethod.getParameterTypes().length; ++typeParameterCount) { + final Class[] typeParameters = javaMethod.getParameterTypes(); + for (int typeParameterCount = 0; typeParameterCount < typeParameters.length; ++typeParameterCount) { bridgeMethodWriter.loadArg(typeParameterCount + bridgeTypeParameterOffset); - Class typeParameter = javaMethod.getParameterTypes()[typeParameterCount]; + Class typeParameter = typeParameters[typeParameterCount]; if (typeParameter == Byte.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_BYTE_IMPLICIT); else if (typeParameter == Short.class) bridgeMethodWriter.invokeStatic(DEF_UTIL_TYPE, DEF_TO_B_SHORT_IMPLICIT); diff --git a/modules/opensearch-dashboards/src/javaRestTest/java/org/opensearch/dashboards/OpenSearchDashboardsSystemIndexIT.java b/modules/opensearch-dashboards/src/javaRestTest/java/org/opensearch/dashboards/OpenSearchDashboardsSystemIndexIT.java index 2584a9b41f14d..10ee9393b343f 100644 --- a/modules/opensearch-dashboards/src/javaRestTest/java/org/opensearch/dashboards/OpenSearchDashboardsSystemIndexIT.java +++ b/modules/opensearch-dashboards/src/javaRestTest/java/org/opensearch/dashboards/OpenSearchDashboardsSystemIndexIT.java @@ -34,7 +34,10 @@ import 
com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.http.util.EntityUtils; + +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.entity.EntityUtils; +import org.opensearch.OpenSearchParseException; import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.xcontent.XContentHelper; @@ -73,7 +76,7 @@ public void testCreateIndex() throws IOException { assertThat(response.getStatusLine().getStatusCode(), is(200)); } - public void testAliases() throws IOException { + public void testAliases() throws IOException, ParseException { assumeFalse("In this test, .opensearch_dashboards is the alias name", ".opensearch_dashboards".equals(indexName)); Request request = new Request("PUT", "/_opensearch_dashboards/" + indexName); Response response = client().performRequest(request); @@ -96,7 +99,7 @@ public void testBulkToOpenSearchDashboardsIndex() throws IOException { assertThat(response.getStatusLine().getStatusCode(), is(200)); } - public void testRefresh() throws IOException { + public void testRefresh() throws IOException, ParseException { Request request = new Request("POST", "/_opensearch_dashboards/_bulk"); request.setJsonEntity("{ \"index\" : { \"_index\" : \"" + indexName + "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"); Response response = client().performRequest(request); @@ -114,7 +117,7 @@ public void testRefresh() throws IOException { assertThat(responseBody, containsString("bar")); } - public void testGetFromOpenSearchDashboardsIndex() throws IOException { + public void testGetFromOpenSearchDashboardsIndex() throws IOException, ParseException { Request request = new Request("POST", "/_opensearch_dashboards/_bulk"); request.setJsonEntity("{ \"index\" : { \"_index\" : \"" + indexName + "\", \"_id\" : \"1\" } }\n{ \"foo\" : \"bar\" }\n"); request.addParameter("refresh", "true"); @@ -130,7 +133,7 @@ public void testGetFromOpenSearchDashboardsIndex() throws IOException { assertThat(responseBody, containsString("bar")); } - public void testMultiGetFromOpenSearchDashboardsIndex() throws IOException { + public void testMultiGetFromOpenSearchDashboardsIndex() throws IOException, ParseException { Request request = new Request("POST", "/_opensearch_dashboards/_bulk"); request.setJsonEntity( "{ \"index\" : { \"_index\" : \"" @@ -163,7 +166,7 @@ public void testMultiGetFromOpenSearchDashboardsIndex() throws IOException { assertThat(responseBody, containsString("tag")); } - public void testSearchFromOpenSearchDashboardsIndex() throws IOException { + public void testSearchFromOpenSearchDashboardsIndex() throws IOException, ParseException { Request request = new Request("POST", "/_opensearch_dashboards/_bulk"); request.setJsonEntity( "{ \"index\" : { \"_index\" : \"" @@ -241,7 +244,7 @@ public void testUpdateIndexSettings() throws IOException { assertThat(response.getStatusLine().getStatusCode(), is(200)); } - public void testGetIndex() throws IOException { + public void testGetIndex() throws IOException, ParseException { Request request = new Request("PUT", "/_opensearch_dashboards/" + indexName); Response response = client().performRequest(request); assertThat(response.getStatusLine().getStatusCode(), is(200)); @@ -278,7 +281,7 @@ public void testIndexingAndUpdatingDocs() throws IOException { assertThat(response.getStatusLine().getStatusCode(), is(200)); } - public void testScrollingDocs() throws IOException { + public void 
testScrollingDocs() throws IOException, OpenSearchParseException, ParseException { Request request = new Request("POST", "/_opensearch_dashboards/_bulk"); request.setJsonEntity( "{ \"index\" : { \"_index\" : \"" diff --git a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java index d684f0bfebcfb..cacd7c3a23824 100644 --- a/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java +++ b/modules/parent-join/src/internalClusterTest/java/org/opensearch/join/query/ChildQuerySearchIT.java @@ -310,7 +310,7 @@ public void testHasParentFilter() throws Exception { } assertThat(parentToChildren.get(previousParentId).add(childId), is(true)); } - indexRandom(true, builders.toArray(new IndexRequestBuilder[builders.size()])); + indexRandom(true, builders.toArray(new IndexRequestBuilder[0])); assertThat(parentToChildren.isEmpty(), equalTo(false)); for (Map.Entry> parentToChildrenEntry : parentToChildren.entrySet()) { diff --git a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java index b2130eca3bb02..4f4665c434c67 100644 --- a/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/opensearch/percolator/PercolateQueryBuilder.java @@ -546,7 +546,6 @@ protected Analyzer getWrappedAnalyzer(String fieldName) { String name = this.name != null ? this.name : pft.name(); QueryShardContext percolateShardContext = wrap(context); PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText); - ; PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField, percolateShardContext); return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated()); diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java index 66db397865a0b..d38307fc2194a 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/RankEvalRequest.java @@ -32,7 +32,6 @@ package org.opensearch.index.rankeval; -import org.opensearch.LegacyESVersion; import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.IndicesRequest; @@ -69,9 +68,7 @@ public RankEvalRequest(RankEvalSpec rankingEvaluationSpec, String[] indices) { rankingEvaluationSpec = new RankEvalSpec(in); indices = in.readStringArray(); indicesOptions = IndicesOptions.readIndicesOptions(in); - if (in.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - searchType = SearchType.fromId(in.readByte()); - } + searchType = SearchType.fromId(in.readByte()); } RankEvalRequest() {} @@ -150,9 +147,7 @@ public void writeTo(StreamOutput out) throws IOException { rankingEvaluationSpec.writeTo(out); out.writeStringArray(indices); indicesOptions.writeIndicesOptions(out); - if (out.getVersion().onOrAfter(LegacyESVersion.V_7_6_0)) { - out.writeByte(searchType.id()); - } + out.writeByte(searchType.id()); } @Override diff --git a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java 
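Context for the RankEvalRequest hunk above: once every peer a node can still talk to is at or above the version that introduced a wire field, the read and write gates become dead branches and can be removed on both sides in lockstep. A hedged sketch of the pattern, using plain DataOutput streams and a made-up version id rather than the real StreamInput/StreamOutput API:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

public final class VersionGateDemo {
    // Hypothetical version id in the LegacyESVersion id style (7.6.0).
    static final int GATE_VERSION = 7060099;

    // Before: the byte was written only for peers that understand it.
    static void writeGated(DataOutput out, int peerVersion, byte searchType) throws IOException {
        if (peerVersion >= GATE_VERSION) {
            out.writeByte(searchType);
        }
    }

    // After: every supported peer is at or above the gate, so read and write
    // become unconditional; both sides must drop the gate together.
    static void write(DataOutput out, byte searchType) throws IOException {
        out.writeByte(searchType);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), (byte) 1);
        System.out.println(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())).readByte());
    }
}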
b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java index 8cfde2d2b412e..4fce04e23119c 100644 --- a/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/opensearch/index/rankeval/TransportRankEvalAction.java @@ -148,7 +148,7 @@ protected void doExecute(Task task, RankEvalRequest request, ActionListener indexingFailures, List search return; } RefreshRequest refresh = new RefreshRequest(); - refresh.indices(destinationIndices.toArray(new String[destinationIndices.size()])); + refresh.indices(destinationIndices.toArray(new String[0])); logger.debug("[{}]: refreshing", task.getId()); client.admin().indices().refresh(refresh, new ActionListener() { @Override diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java index 34fcd245289be..0e0e387b78e38 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/ReindexSslConfig.java @@ -32,10 +32,12 @@ package org.opensearch.index.reindex; -import org.apache.http.conn.ssl.DefaultHostnameVerifier; -import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; -import org.opensearch.common.Strings; +import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder; +import org.apache.hc.client5.http.ssl.DefaultHostnameVerifier; +import org.apache.hc.client5.http.ssl.NoopHostnameVerifier; +import org.apache.hc.core5.function.Factory; +import org.apache.hc.core5.http.nio.ssl.TlsStrategy; +import org.apache.hc.core5.reactor.ssl.TlsDetails; import org.opensearch.common.settings.SecureSetting; import org.opensearch.common.settings.SecureString; import org.opensearch.common.settings.Setting; @@ -50,6 +52,8 @@ import javax.net.ssl.HostnameVerifier; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLEngine; + import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Path; @@ -161,16 +165,31 @@ private void reload() { } /** - * Encapsulate the loaded SSL configuration as a HTTP-client {@link SSLIOSessionStrategy}. + * Encapsulate the loaded SSL configuration as a HTTP-client {@link TlsStrategy}. * The returned strategy is immutable, but successive calls will return different objects that may have different * configurations if the underlying key/certificate files are modified. */ - SSLIOSessionStrategy getStrategy() { + TlsStrategy getStrategy() { final HostnameVerifier hostnameVerifier = configuration.getVerificationMode().isHostnameVerificationEnabled() ? 
new DefaultHostnameVerifier() : new NoopHostnameVerifier(); - final String[] protocols = configuration.getSupportedProtocols().toArray(Strings.EMPTY_ARRAY); - final String[] cipherSuites = configuration.getCipherSuites().toArray(Strings.EMPTY_ARRAY); - return new SSLIOSessionStrategy(context, protocols, cipherSuites, hostnameVerifier); + + final String[] protocols = configuration.getSupportedProtocols().toArray(new String[0]); + final String[] cipherSuites = configuration.getCipherSuites().toArray(new String[0]); + + return ClientTlsStrategyBuilder.create() + .setSslContext(context) + .setHostnameVerifier(hostnameVerifier) + .setCiphers(cipherSuites) + .setTlsVersions(protocols) + // See https://issues.apache.org/jira/browse/HTTPCLIENT-2219 + .setTlsDetailsFactory(new Factory() { + @Override + public TlsDetails create(final SSLEngine sslEngine) { + return new TlsDetails(sslEngine.getSession(), sslEngine.getApplicationProtocol()); + } + }) + .build(); + } } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java index 8ade055d10f60..aa9accbd90e21 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/Reindexer.java @@ -33,15 +33,18 @@ package org.opensearch.index.reindex; import java.util.Optional; -import org.apache.http.Header; -import org.apache.http.HttpHost; -import org.apache.http.HttpRequestInterceptor; -import org.apache.http.auth.AuthScope; -import org.apache.http.auth.UsernamePasswordCredentials; -import org.apache.http.client.CredentialsProvider; -import org.apache.http.impl.client.BasicCredentialsProvider; -import org.apache.http.impl.nio.reactor.IOReactorConfig; -import org.apache.http.message.BasicHeader; + +import org.apache.hc.client5.http.auth.AuthScope; +import org.apache.hc.client5.http.auth.UsernamePasswordCredentials; +import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager; +import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder; +import org.apache.hc.core5.http.Header; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.HttpRequestInterceptor; +import org.apache.hc.core5.http.message.BasicHeader; +import org.apache.hc.core5.reactor.IOReactorConfig; +import org.apache.hc.core5.util.Timeout; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; @@ -202,21 +205,23 @@ static RestClient buildRestClient( for (Map.Entry header : remoteInfo.getHeaders().entrySet()) { clientHeaders[i++] = new BasicHeader(header.getKey(), header.getValue()); } - final RestClientBuilder builder = RestClient.builder( - new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme()) - ).setDefaultHeaders(clientHeaders).setRequestConfigCallback(c -> { - c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis())); - c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis())); + final HttpHost httpHost = new HttpHost(remoteInfo.getScheme(), remoteInfo.getHost(), remoteInfo.getPort()); + final RestClientBuilder builder = RestClient.builder(httpHost).setDefaultHeaders(clientHeaders).setRequestConfigCallback(c -> { + c.setConnectTimeout(Timeout.ofMilliseconds(Math.toIntExact(remoteInfo.getConnectTimeout().millis()))); + 
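For readers migrating similar code, the getStrategy() rewrite above is the standard HttpClient 5 TLS wiring; the TlsDetails factory mirrors the upstream workaround for HTTPCLIENT-2219, where ALPN details are otherwise left unset on some JVMs. A minimal sketch, assuming httpclient5/httpcore5 on the classpath and the JVM default SSLContext in place of the reloadable reindex one:

import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;

import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder;
import org.apache.hc.core5.http.nio.ssl.TlsStrategy;
import org.apache.hc.core5.reactor.ssl.TlsDetails;

public final class TlsStrategyDemo {
    static TlsStrategy strategy() throws Exception {
        final SSLContext context = SSLContext.getDefault(); // stand-in for the reloadable context
        return ClientTlsStrategyBuilder.create()
            .setSslContext(context)
            .setTlsVersions("TLSv1.3", "TLSv1.2")
            // HTTPCLIENT-2219 workaround: surface the negotiated ALPN protocol explicitly.
            .setTlsDetailsFactory((SSLEngine engine) -> new TlsDetails(engine.getSession(), engine.getApplicationProtocol()))
            .build();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(strategy() != null);
    }
}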
c.setResponseTimeout(Timeout.ofMilliseconds(Math.toIntExact(remoteInfo.getSocketTimeout().millis()))); return c; }).setHttpClientConfigCallback(c -> { // Enable basic auth if it is configured if (remoteInfo.getUsername() != null) { - UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(), remoteInfo.getPassword()); - CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); - credentialsProvider.setCredentials(AuthScope.ANY, creds); + UsernamePasswordCredentials creds = new UsernamePasswordCredentials( + remoteInfo.getUsername(), + remoteInfo.getPassword().toCharArray() + ); + BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider(); + credentialsProvider.setCredentials(new AuthScope(httpHost, null, "Basic"), creds); c.setDefaultCredentialsProvider(credentialsProvider); } else { - restInterceptor.ifPresent(interceptor -> c.addInterceptorLast(interceptor)); + restInterceptor.ifPresent(interceptor -> c.addRequestInterceptorLast(interceptor)); } // Stick the task id in the thread name so we can track down tasks from stack traces AtomicInteger threads = new AtomicInteger(); @@ -227,8 +232,13 @@ static RestClient buildRestClient( return t; }); // Limit ourselves to one reactor thread because for now the search process is single threaded. - c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); - c.setSSLStrategy(sslConfig.getStrategy()); + c.setIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build()); + + final PoolingAsyncClientConnectionManager connectionManager = PoolingAsyncClientConnectionManagerBuilder.create() + .setTlsStrategy(sslConfig.getStrategy()) + .build(); + + c.setConnectionManager(connectionManager); return c; }); if (Strings.hasLength(remoteInfo.getPathPrefix()) && "/".equals(remoteInfo.getPathPrefix()) == false) { diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java index 8467fbdeacd0e..873bd7c3b48cb 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteRequestBuilders.java @@ -32,8 +32,8 @@ package org.opensearch.index.reindex.remote; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NStringEntity; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.opensearch.OpenSearchException; import org.opensearch.Version; import org.opensearch.action.search.SearchRequest; @@ -240,7 +240,7 @@ static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion) if (remoteVersion.before(Version.fromId(2000099))) { // Versions before 2.0.0 extract the plain scroll_id from the body - request.setEntity(new NStringEntity(scroll, ContentType.TEXT_PLAIN)); + request.setEntity(new StringEntity(scroll, ContentType.TEXT_PLAIN)); return request; } @@ -258,7 +258,7 @@ static Request clearScroll(String scroll, Version remoteVersion) { if (remoteVersion.before(Version.fromId(2000099))) { // Versions before 2.0.0 extract the plain scroll_id from the body - request.setEntity(new NStringEntity(scroll, ContentType.TEXT_PLAIN)); + request.setEntity(new StringEntity(scroll, ContentType.TEXT_PLAIN)); return request; } try (XContentBuilder entity = JsonXContent.contentBuilder()) { diff --git 
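Two HttpClient 5 API shifts drive the credentials hunk above: passwords are now char[] (so they can be zeroed after use), and the AuthScope.ANY catch-all is gone, so the scope is pinned to the target host. A hedged sketch with a hypothetical host:

import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
import org.apache.hc.core5.http.HttpHost;

public final class CredentialsDemo {
    public static void main(String[] args) {
        // Hypothetical remote host; note the 5.x argument order (scheme first).
        HttpHost host = new HttpHost("https", "remote.example", 9200);
        BasicCredentialsProvider provider = new BasicCredentialsProvider();
        provider.setCredentials(
            // realm null = any realm; scheme pinned to "Basic", as in the diff.
            new AuthScope(host, null, "Basic"),
            new UsernamePasswordCredentials("user", "secret".toCharArray())
        );
        System.out.println(provider.getCredentials(new AuthScope(host), null) != null);
    }
}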
a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java index be691243ecf84..3a943450a1a89 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSource.java @@ -32,10 +32,11 @@ package org.opensearch.index.reindex.remote; -import org.apache.http.ContentTooLongException; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.util.EntityUtils; +import org.apache.hc.core5.http.ContentTooLongException; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.ParseException; +import org.apache.hc.core5.http.io.entity.EntityUtils; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; @@ -199,7 +200,7 @@ public void onSuccess(org.opensearch.client.Response response) { InputStream content = responseEntity.getContent(); XContentType xContentType = null; if (responseEntity.getContentType() != null) { - final String mimeType = ContentType.parse(responseEntity.getContentType().getValue()).getMimeType(); + final String mimeType = ContentType.parse(responseEntity.getContentType()).getMimeType(); xContentType = XContentType.fromMediaType(mimeType); } if (xContentType == null) { @@ -284,7 +285,11 @@ private static String bodyMessage(@Nullable HttpEntity entity) throws IOExceptio if (entity == null) { return "No error body."; } else { - return "body=" + EntityUtils.toString(entity); + try { + return "body=" + EntityUtils.toString(entity); + } catch (final ParseException ex) { + throw new IOException(ex); + } } } } diff --git a/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/ReindexRestInterceptorProvider.java b/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/ReindexRestInterceptorProvider.java index 034981c969b4b..0646c9b5d8705 100644 --- a/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/ReindexRestInterceptorProvider.java +++ b/modules/reindex/src/main/java/org/opensearch/index/reindex/spi/ReindexRestInterceptorProvider.java @@ -6,7 +6,8 @@ package org.opensearch.index.reindex.spi; import java.util.Optional; -import org.apache.http.HttpRequestInterceptor; + +import org.apache.hc.core5.http.HttpRequestInterceptor; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.index.reindex.ReindexRequest; diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java index baf3c83bd0050..6874f96628761 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/DeleteByQueryBasicTests.java @@ -362,7 +362,7 @@ public void testMultipleSources() throws Exception { int slices = randomSlices(1, 10); int expectedSlices = expectedSliceStatuses(slices, docs.keySet()); - String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); + String[] sourceIndexNames = docs.keySet().toArray(new String[0]); assertThat( 
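The RemoteScrollableHitSource changes above reflect two more core5 differences: HttpEntity#getContentType() now returns the raw String (4.x returned a Header, hence the dropped getValue()), and EntityUtils.toString declares a checked ParseException, which the diff folds into IOException to keep a single failure surface. A self-contained sketch, assuming httpcore5 on the classpath:

import java.io.IOException;

import org.apache.hc.core5.http.ContentType;
import org.apache.hc.core5.http.HttpEntity;
import org.apache.hc.core5.http.ParseException;
import org.apache.hc.core5.http.io.entity.EntityUtils;
import org.apache.hc.core5.http.io.entity.StringEntity;

public final class EntityUtilsDemo {
    static String bodyMessage(final HttpEntity entity) throws IOException {
        if (entity == null) {
            return "No error body.";
        }
        try {
            // 5.x: toString declares ParseException; fold it into IOException.
            return "body=" + EntityUtils.toString(entity);
        } catch (final ParseException ex) {
            throw new IOException(ex);
        }
    }

    public static void main(String[] args) throws IOException {
        HttpEntity entity = new StringEntity("{\"ok\":true}", ContentType.APPLICATION_JSON);
        // 5.x: getContentType() is already a String; no Header#getValue() hop.
        System.out.println(ContentType.parse(entity.getContentType()).getMimeType());
        System.out.println(bodyMessage(entity));
    }
}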
deleteByQuery().source(sourceIndexNames).filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(), diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java index 0c660e5df9682..24adba16d0bad 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/ReindexBasicTests.java @@ -161,7 +161,7 @@ public void testMultipleSources() throws Exception { int slices = randomSlices(1, 10); int expectedSlices = expectedSliceStatuses(slices, docs.keySet()); - String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); + String[] sourceIndexNames = docs.keySet().toArray(new String[0]); ReindexRequestBuilder request = reindex().source(sourceIndexNames).destination("dest").refresh(true).setSlices(slices); BulkByScrollResponse response = request.get(); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java index 6239946852cf8..edd301603250a 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/RoundTripTests.java @@ -152,25 +152,6 @@ private void randomRequest(AbstractBulkIndexByScrollRequest request) { request.setScript(random().nextBoolean() ? null : randomScript()); } - private void assertRequestEquals(Version version, ReindexRequest request, ReindexRequest tripped) { - assertRequestEquals((AbstractBulkIndexByScrollRequest) request, (AbstractBulkIndexByScrollRequest) tripped); - assertEquals(request.getDestination().version(), tripped.getDestination().version()); - assertEquals(request.getDestination().index(), tripped.getDestination().index()); - if (request.getRemoteInfo() == null) { - assertNull(tripped.getRemoteInfo()); - } else { - assertNotNull(tripped.getRemoteInfo()); - assertEquals(request.getRemoteInfo().getScheme(), tripped.getRemoteInfo().getScheme()); - assertEquals(request.getRemoteInfo().getHost(), tripped.getRemoteInfo().getHost()); - assertEquals(request.getRemoteInfo().getQuery(), tripped.getRemoteInfo().getQuery()); - assertEquals(request.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername()); - assertEquals(request.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword()); - assertEquals(request.getRemoteInfo().getHeaders(), tripped.getRemoteInfo().getHeaders()); - assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout()); - assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout()); - } - } - private void assertRequestEquals(AbstractBulkIndexByScrollRequest request, AbstractBulkIndexByScrollRequest tripped) { assertRequestEquals((AbstractBulkByScrollRequest) request, (AbstractBulkByScrollRequest) tripped); assertEquals(request.getScript(), tripped.getScript()); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java index 4f48b99dccdd4..987fab954a8d0 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/UpdateByQueryBasicTests.java @@ -151,7 +151,7 @@ public 
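On the recurring toArray(new String[0]) swap above and below: the presized form reads the collection's size twice (once for the allocation, once for the copy), which can race on concurrent collections, while the empty-array form lets the collection allocate one exactly-sized array and is well optimized by the JIT. A small, runnable comparison:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public final class ToArrayDemo {
    public static void main(String[] args) {
        List<String> indices = new ArrayList<>(Arrays.asList("idx-1", "idx-2", "idx-3"));

        // Old style: presized array, size() read separately from the copy.
        String[] presized = indices.toArray(new String[indices.size()]);

        // New style: the collection allocates an exactly-sized array itself.
        String[] sized = indices.toArray(new String[0]);

        System.out.println(Arrays.equals(presized, sized)); // true
    }
}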
void testMultipleSources() throws Exception { int slices = randomSlices(1, 10); int expectedSlices = expectedSliceStatuses(slices, docs.keySet()); - String[] sourceIndexNames = docs.keySet().toArray(new String[docs.size()]); + String[] sourceIndexNames = docs.keySet().toArray(new String[0]); BulkByScrollResponse response = updateByQuery().source(sourceIndexNames).refresh(true).setSlices(slices).get(); assertThat(response, matcher().updated(allDocs.size()).slices(hasSize(expectedSlices))); diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java index c349bc54bcbd9..e7af54a0563d3 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -32,8 +32,8 @@ package org.opensearch.index.reindex.remote; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; import org.opensearch.Version; import org.opensearch.action.search.SearchRequest; import org.opensearch.client.Request; @@ -245,7 +245,7 @@ public void testInitialSearchEntity() throws IOException { searchRequest.source(new SearchSourceBuilder()); String query = "{\"match_all\":{}}"; HttpEntity entity = initialSearch(searchRequest, new BytesArray(query), remoteVersion).getEntity(); - assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType()); if (remoteVersion.onOrAfter(Version.fromId(1000099))) { assertEquals( "{\"query\":" + query + ",\"_source\":true}", @@ -261,7 +261,7 @@ public void testInitialSearchEntity() throws IOException { // Source filtering is included if set up searchRequest.source().fetchSource(new String[] { "in1", "in2" }, new String[] { "out" }); entity = initialSearch(searchRequest, new BytesArray(query), remoteVersion).getEntity(); - assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType()); assertEquals( "{\"query\":" + query + ",\"_source\":{\"includes\":[\"in1\",\"in2\"],\"excludes\":[\"out\"]}}", Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)) @@ -287,7 +287,7 @@ public void testScrollParams() { public void testScrollEntity() throws IOException { String scroll = randomAlphaOfLength(30); HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromString("5.0.0")).getEntity(); - assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); + assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType()); assertThat( Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), containsString("\"" + scroll + "\"") @@ -295,14 +295,14 @@ public void testScrollEntity() throws IOException { // Test with version < 2.0.0 entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromId(1070499)).getEntity(); - assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType().getValue()); + assertEquals(ContentType.TEXT_PLAIN.toString(), entity.getContentType()); assertEquals(scroll, Streams.copyToString(new 
InputStreamReader(entity.getContent(), StandardCharsets.UTF_8))); } public void testClearScroll() throws IOException { String scroll = randomAlphaOfLength(30); Request request = clearScroll(scroll, Version.fromString("5.0.0")); - assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType().getValue()); + assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType()); assertThat( Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)), containsString("\"" + scroll + "\"") @@ -311,7 +311,7 @@ public void testClearScroll() throws IOException { // Test with version < 2.0.0 request = clearScroll(scroll, Version.fromId(1070499)); - assertEquals(ContentType.TEXT_PLAIN.toString(), request.getEntity().getContentType().getValue()); + assertEquals(ContentType.TEXT_PLAIN.toString(), request.getEntity().getContentType()); assertEquals(scroll, Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8))); assertThat(request.getParameters().keySet(), empty()); } diff --git a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 337bc67796f8e..c0e2bd14f55bc 100644 --- a/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/opensearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -32,31 +32,14 @@ package org.opensearch.index.reindex.remote; -import org.apache.http.ContentTooLongException; -import org.apache.http.HttpEntity; -import org.apache.http.HttpEntityEnclosingRequest; -import org.apache.http.HttpHost; -import org.apache.http.HttpResponse; -import org.apache.http.ProtocolVersion; -import org.apache.http.StatusLine; -import org.apache.http.client.protocol.HttpClientContext; -import org.apache.http.concurrent.FutureCallback; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.InputStreamEntity; -import org.apache.http.entity.StringEntity; -import org.apache.http.impl.nio.client.CloseableHttpAsyncClient; -import org.apache.http.impl.nio.client.HttpAsyncClientBuilder; -import org.apache.http.message.BasicHttpResponse; -import org.apache.http.message.BasicStatusLine; -import org.apache.http.nio.protocol.HttpAsyncRequestProducer; -import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.opensearch.LegacyESVersion; import org.opensearch.OpenSearchStatusException; import org.opensearch.Version; import org.opensearch.action.bulk.BackoffPolicy; import org.opensearch.action.search.SearchRequest; -import org.opensearch.client.HeapBufferedAsyncResponseConsumer; import org.opensearch.client.RestClient; +import org.opensearch.client.http.HttpUriRequestProducer; +import org.opensearch.client.nio.HeapBufferedAsyncResponseConsumer; import org.opensearch.common.ParsingException; import org.opensearch.common.bytes.BytesArray; import org.opensearch.common.io.FileSystemUtils; @@ -74,13 +57,32 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.apache.hc.client5.http.impl.async.CloseableHttpAsyncClient; +import org.apache.hc.client5.http.impl.async.HttpAsyncClientBuilder; +import org.apache.hc.core5.concurrent.FutureCallback; +import org.apache.hc.core5.function.Supplier; +import 
org.apache.hc.core5.http.ClassicHttpRequest; +import org.apache.hc.core5.http.ClassicHttpResponse; +import org.apache.hc.core5.http.ContentTooLongException; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.HttpHost; +import org.apache.hc.core5.http.io.entity.InputStreamEntity; +import org.apache.hc.core5.http.io.entity.StringEntity; +import org.apache.hc.core5.http.message.BasicClassicHttpResponse; +import org.apache.hc.core5.http.nio.AsyncPushConsumer; +import org.apache.hc.core5.http.nio.AsyncRequestProducer; +import org.apache.hc.core5.http.nio.AsyncResponseConsumer; +import org.apache.hc.core5.http.nio.HandlerFactory; +import org.apache.hc.core5.http.protocol.HttpContext; +import org.apache.hc.core5.io.CloseMode; +import org.apache.hc.core5.reactor.IOReactorStatus; import org.junit.After; import org.junit.Before; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; import java.io.IOException; import java.io.InputStreamReader; +import java.io.UncheckedIOException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.Queue; @@ -97,7 +99,6 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; -import static org.mockito.Mockito.any; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; @@ -444,24 +445,49 @@ public void testWrapExceptionToPreserveStatus() throws IOException { @SuppressWarnings({ "unchecked", "rawtypes" }) public void testTooLargeResponse() throws Exception { ContentTooLongException tooLong = new ContentTooLongException("too long!"); - CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); - when( - httpClient.execute( - any(HttpAsyncRequestProducer.class), - any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), - any(FutureCallback.class) - ) - ).then(new Answer>() { + CloseableHttpAsyncClient httpClient = new CloseableHttpAsyncClient() { + + @Override + public void close() throws IOException {} + + @Override + public void close(CloseMode closeMode) {} + + @Override + public void start() {} + + @Override + public void register(String hostname, String uriPattern, Supplier supplier) {} + + @Override + public void initiateShutdown() {} + @Override - public Future answer(InvocationOnMock invocationOnMock) throws Throwable { - HeapBufferedAsyncResponseConsumer consumer = (HeapBufferedAsyncResponseConsumer) invocationOnMock.getArguments()[1]; - FutureCallback callback = (FutureCallback) invocationOnMock.getArguments()[3]; - assertEquals(new ByteSizeValue(100, ByteSizeUnit.MB).bytesAsInt(), consumer.getBufferLimit()); + public IOReactorStatus getStatus() { + return null; + } + + @Override + protected Future doExecute( + HttpHost target, + AsyncRequestProducer requestProducer, + AsyncResponseConsumer responseConsumer, + HandlerFactory pushHandlerFactory, + HttpContext context, + FutureCallback callback + ) { + assertEquals( + new ByteSizeValue(100, ByteSizeUnit.MB).bytesAsInt(), + ((HeapBufferedAsyncResponseConsumer) responseConsumer).getBufferLimit() + ); callback.failed(tooLong); return null; } - }); + + @Override + public void awaitShutdown(org.apache.hc.core5.util.TimeValue waitTime) throws InterruptedException {} + }; + RemoteScrollableHitSource source = sourceWithMockedClient(true, httpClient); Throwable e = expectThrows(RuntimeException.class, 
source::start); @@ -539,46 +565,68 @@ private RemoteScrollableHitSource sourceWithMockedRemoteCall(boolean mockRemoteV } } - CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); - when( - httpClient.execute( - any(HttpAsyncRequestProducer.class), - any(HttpAsyncResponseConsumer.class), - any(HttpClientContext.class), - any(FutureCallback.class) - ) - ).thenAnswer(new Answer>() { - + final CloseableHttpAsyncClient httpClient = new CloseableHttpAsyncClient() { int responseCount = 0; @Override - public Future answer(InvocationOnMock invocationOnMock) throws Throwable { - // Throw away the current thread context to simulate running async httpclient's thread pool - threadPool.getThreadContext().stashContext(); - HttpAsyncRequestProducer requestProducer = (HttpAsyncRequestProducer) invocationOnMock.getArguments()[0]; - FutureCallback futureCallback = (FutureCallback) invocationOnMock.getArguments()[3]; - HttpEntityEnclosingRequest request = (HttpEntityEnclosingRequest) requestProducer.generateRequest(); - URL resource = resources[responseCount]; - String path = paths[responseCount++]; - ProtocolVersion protocolVersion = new ProtocolVersion("http", 1, 1); - if (path.startsWith("fail:")) { - String body = Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)); - if (path.equals("fail:rejection.json")) { - StatusLine statusLine = new BasicStatusLine(protocolVersion, RestStatus.TOO_MANY_REQUESTS.getStatus(), ""); - BasicHttpResponse httpResponse = new BasicHttpResponse(statusLine); - futureCallback.completed(httpResponse); + public void close(CloseMode closeMode) {} + + @Override + public void close() throws IOException {} + + @Override + public void start() {} + + @Override + public IOReactorStatus getStatus() { + return null; + } + + @Override + public void awaitShutdown(org.apache.hc.core5.util.TimeValue waitTime) throws InterruptedException {} + + @Override + public void initiateShutdown() {} + + @Override + protected Future doExecute( + HttpHost target, + AsyncRequestProducer requestProducer, + AsyncResponseConsumer responseConsumer, + HandlerFactory pushHandlerFactory, + HttpContext context, + FutureCallback callback + ) { + try { + // Throw away the current thread context to simulate running async httpclient's thread pool + threadPool.getThreadContext().stashContext(); + ClassicHttpRequest request = getRequest(requestProducer); + URL resource = resources[responseCount]; + String path = paths[responseCount++]; + if (path.startsWith("fail:")) { + String body = Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)); + if (path.equals("fail:rejection.json")) { + ClassicHttpResponse httpResponse = new BasicClassicHttpResponse(RestStatus.TOO_MANY_REQUESTS.getStatus(), ""); + callback.completed((T) httpResponse); + } else { + callback.failed(new RuntimeException(body)); + } } else { - futureCallback.failed(new RuntimeException(body)); + BasicClassicHttpResponse httpResponse = new BasicClassicHttpResponse(200, ""); + httpResponse.setEntity(new InputStreamEntity(FileSystemUtils.openFileURLStream(resource), contentType)); + callback.completed((T) httpResponse); } - } else { - StatusLine statusLine = new BasicStatusLine(protocolVersion, 200, ""); - HttpResponse httpResponse = new BasicHttpResponse(statusLine); - httpResponse.setEntity(new InputStreamEntity(FileSystemUtils.openFileURLStream(resource), contentType)); - futureCallback.completed(httpResponse); + return null; + } catch 
(IOException ex) { + throw new UncheckedIOException(ex); } - return null; } - }); + + @Override + public void register(String hostname, String uriPattern, Supplier supplier) {} + + }; + return sourceWithMockedClient(mockRemoteVersion, httpClient); } @@ -649,4 +697,9 @@ private T expectListenerFailure(Class expectedExcept assertNotNull(exception.get()); return exception.get(); } + + private static ClassicHttpRequest getRequest(AsyncRequestProducer requestProducer) { + assertThat(requestProducer, instanceOf(HttpUriRequestProducer.class)); + return ((HttpUriRequestProducer) requestProducer).getRequest(); + } } diff --git a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 3d0c09fb2288c..cbadcba5ef6f0 100644 --- a/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/yamlRestTest/java/org/opensearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -34,9 +34,6 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NStringEntity; import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.common.Strings; @@ -49,6 +46,9 @@ import org.opensearch.rest.RestStatus; import org.opensearch.test.rest.yaml.ClientYamlTestCandidate; import org.opensearch.test.rest.yaml.OpenSearchClientYamlSuiteTestCase; +import org.apache.hc.core5.http.ContentType; +import org.apache.hc.core5.http.HttpEntity; +import org.apache.hc.core5.http.io.entity.StringEntity; import org.junit.Before; import java.io.IOException; @@ -144,7 +144,7 @@ private static HttpEntity buildRepositorySettings(final String type, final Setti builder.endObject(); } builder.endObject(); - return new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); + return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); } } } diff --git a/modules/transport-netty4/build.gradle b/modules/transport-netty4/build.gradle index 5d2047d7f18a2..9e0d9955a65a1 100644 --- a/modules/transport-netty4/build.gradle +++ b/modules/transport-netty4/build.gradle @@ -156,6 +156,12 @@ thirdPartyAudit { 'org.bouncycastle.cert.X509v3CertificateBuilder', 'org.bouncycastle.cert.jcajce.JcaX509CertificateConverter', 'org.bouncycastle.operator.jcajce.JcaContentSignerBuilder', + 'org.bouncycastle.openssl.PEMEncryptedKeyPair', + 'org.bouncycastle.openssl.PEMParser', + 'org.bouncycastle.openssl.jcajce.JcaPEMKeyConverter', + 'org.bouncycastle.openssl.jcajce.JceOpenSSLPKCS8DecryptorProviderBuilder', + 'org.bouncycastle.openssl.jcajce.JcePEMDecryptorProviderBuilder', + 'org.bouncycastle.pkcs.PKCS8EncryptedPrivateKeyInfo', // from io.netty.handler.ssl.JettyNpnSslEngine (netty) 'org.eclipse.jetty.npn.NextProtoNego$ClientProvider', diff --git a/modules/transport-netty4/licenses/netty-buffer-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.79.Final.jar.sha1 deleted file mode 100644 index 8e9e4d0b7f754..0000000000000 --- a/modules/transport-netty4/licenses/netty-buffer-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6c014412b599489b1db27c6bc08d8a46da94e397 \ No newline at end of file diff --git 
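The test rewrite above trades a Mockito mock of CloseableHttpAsyncClient for a hand-rolled subclass that overrides the protected doExecute(...) funnel, which sidesteps stubbing generic signatures entirely. A hedged sketch of the same pattern with stand-in types, not the real httpclient5 classes:

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;

public final class TestDoubleDemo {
    // Stand-in for the shape of CloseableHttpAsyncClient: the public
    // execute(...) overloads funnel into one protected template method.
    abstract static class AsyncClient {
        public final Future<String> execute(String request) {
            return doExecute(request);
        }

        protected abstract Future<String> doExecute(String request);
    }

    public static void main(String[] args) {
        // Override the funnel point directly instead of mocking: the double's
        // behavior is plain Java, checked by the compiler.
        AsyncClient failing = new AsyncClient() {
            @Override
            protected Future<String> doExecute(String request) {
                CompletableFuture<String> future = new CompletableFuture<>();
                future.completeExceptionally(new IllegalStateException("too long!"));
                return future;
            }
        };
        System.out.println(failing.execute("GET /").isDone()); // true: completed exceptionally
    }
}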
a/modules/transport-netty4/licenses/netty-buffer-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-buffer-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..25a6f9ecf50b6 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-buffer-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +a6b8cf57cfffc28d8e33f8175788a99401f576d9 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.79.Final.jar.sha1 deleted file mode 100644 index c0920231d79a8..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -18f5b02af7ca611978bc28f2cb58cbb3b9b0f0ef \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..032a8f1ed954e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +4f60f56c4cd17db114f01dc64aa465a2905240f5 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.79.Final.jar.sha1 deleted file mode 100644 index a3f650da5abbd..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -882c70bc0a30a98bf3ce477f043e967ac026044c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..1e985edfce65e --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +78628e78087d3da6c3d2b22aa67798d3f3adcd68 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.79.Final.jar.sha1 deleted file mode 100644 index f2989024cfce1..0000000000000 --- a/modules/transport-netty4/licenses/netty-codec-http2-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0eeffab0cd5efb699d5e4ab9b694d32fef6694b3 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-codec-http2-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-codec-http2-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..5fe8c5420cd74 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-codec-http2-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +5a0178b9689493fd612cd40481034469f4bd14cc \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.79.Final.jar.sha1 deleted file mode 100644 index faa7b099406a3..0000000000000 --- a/modules/transport-netty4/licenses/netty-common-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2814bd465731355323aba0fdd22163bfce638a75 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-common-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-common-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..beaa2cce654c3 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-common-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +90c84ec7f1108ae164810cf46694a5ec7ce738fc \ No newline at end of file diff --git 
a/modules/transport-netty4/licenses/netty-handler-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.79.Final.jar.sha1 deleted file mode 100644 index 8e314f164da69..0000000000000 --- a/modules/transport-netty4/licenses/netty-handler-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2dc22423c8ed19906615fb936a5fcb7db14a4e6c \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-handler-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-handler-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..afd28b451ba12 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-handler-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +69cd93e2c321f04fc0a18eb5af519565373d0873 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.79.Final.jar.sha1 deleted file mode 100644 index af550935bb911..0000000000000 --- a/modules/transport-netty4/licenses/netty-resolver-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -55ecb1ff4464b56564a90824a741c3911264aaa4 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-resolver-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-resolver-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..07aa37fc76524 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-resolver-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +b6f808e331cf843d2a7ff62042cf9b5343e2ff25 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.79.Final.jar.sha1 deleted file mode 100644 index c6e18efb3ad3d..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6cc2b49749b4fbcc39c687027e04e65e857552a9 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..5e12ada3f5c10 --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +acd9947d0a951b1f7021c7adc393df936b1ecbf0 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.79.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.79.Final.jar.sha1 deleted file mode 100644 index 7f984663dfa85..0000000000000 --- a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.79.Final.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -731937caec938b77b39df932a8da8aaca8d5ec05 \ No newline at end of file diff --git a/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.84.Final.jar.sha1 b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.84.Final.jar.sha1 new file mode 100644 index 0000000000000..6273c55f3acbd --- /dev/null +++ b/modules/transport-netty4/licenses/netty-transport-native-unix-common-4.1.84.Final.jar.sha1 @@ -0,0 +1 @@ +e51601ddb88ee646a97ff04db38d45c22c29aee8 \ No newline at end of file diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java index 96e21e0e05ff7..fbac1f1c52e95 100644 --- 
a/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/opensearch/rest/discovery/Zen2RestApiIT.java @@ -32,7 +32,6 @@ package org.opensearch.rest.discovery; -import org.apache.http.HttpHost; import org.opensearch.OpenSearchNetty4IntegTestCase; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.client.Client; @@ -49,9 +48,11 @@ import org.opensearch.http.HttpServerTransport; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.InternalTestCluster; +import org.apache.hc.core5.http.HttpHost; import org.hamcrest.Matchers; import java.io.IOException; +import java.net.URISyntaxException; import java.util.Collections; import java.util.List; @@ -124,6 +125,8 @@ public Settings onNodeStopped(String nodeName) throws IOException { .get(); assertFalse(nodeName, clusterHealthResponse.isTimedOut()); return Settings.EMPTY; + } catch (final URISyntaxException ex) { + throw new IOException(ex); } finally { restClient.setNodes(allNodes); } diff --git a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java index 1e0a4d89f2fd5..fcc9ab295c6c7 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/http/netty4/Netty4HttpServerTransport.java @@ -332,8 +332,10 @@ public ChannelHandler configureServerChannelHandler() { return new HttpChannelHandler(this, handlingSettings); } - static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-channel"); - static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-server-channel"); + protected static final AttributeKey HTTP_CHANNEL_KEY = AttributeKey.newInstance("opensearch-http-channel"); + protected static final AttributeKey HTTP_SERVER_CHANNEL_KEY = AttributeKey.newInstance( + "opensearch-http-server-channel" + ); protected static class HttpChannelHandler extends ChannelInitializer { @@ -411,18 +413,19 @@ protected void channelRead0(ChannelHandlerContext ctx, HttpMessage msg) throws E // If this handler is hit then no upgrade has been attempted and the client is just talking HTTP final ChannelPipeline pipeline = ctx.pipeline(); pipeline.addAfter(ctx.name(), "handler", getRequestHandler()); - pipeline.replace(this, "aggregator", aggregator); + pipeline.replace(this, "decoder_compress", new HttpContentDecompressor()); - ch.pipeline().addLast("decoder_compress", new HttpContentDecompressor()); - ch.pipeline().addLast("encoder", new HttpResponseEncoder()); + pipeline.addAfter("decoder_compress", "aggregator", aggregator); if (handlingSettings.isCompression()) { - ch.pipeline() - .addAfter("aggregator", "encoder_compress", new HttpContentCompressor(handlingSettings.getCompressionLevel())); + pipeline.addAfter( + "aggregator", + "encoder_compress", + new HttpContentCompressor(handlingSettings.getCompressionLevel()) + ); } - ch.pipeline().addBefore("handler", "request_creator", requestCreator); - ch.pipeline().addBefore("handler", "response_creator", responseCreator); - ch.pipeline() - .addBefore("handler", "pipelining", new Netty4HttpPipeliningHandler(logger, transport.pipeliningMaxEvents)); + pipeline.addBefore("handler", "request_creator", requestCreator); + 
pipeline.addBefore("handler", "response_creator", responseCreator); + pipeline.addBefore("handler", "pipelining", new Netty4HttpPipeliningHandler(logger, transport.pipeliningMaxEvents)); ctx.fireChannelRead(ReferenceCountUtil.retain(msg)); } diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4NioServerSocketChannel.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4NioServerSocketChannel.java new file mode 100644 index 0000000000000..8a8b1da6ef5dd --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/Netty4NioServerSocketChannel.java @@ -0,0 +1,62 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +package org.opensearch.transport; + +import io.netty.channel.socket.InternetProtocolFamily; +import io.netty.channel.socket.nio.NioServerSocketChannel; +import io.netty.util.internal.SocketUtils; +import io.netty.util.internal.logging.InternalLogger; +import io.netty.util.internal.logging.InternalLoggerFactory; + +import java.nio.channels.ServerSocketChannel; +import java.nio.channels.SocketChannel; +import java.nio.channels.spi.SelectorProvider; +import java.util.List; + +public class Netty4NioServerSocketChannel extends NioServerSocketChannel { + private static final InternalLogger logger = InternalLoggerFactory.getInstance(Netty4NioServerSocketChannel.class); + + public Netty4NioServerSocketChannel() { + super(); + } + + public Netty4NioServerSocketChannel(SelectorProvider provider) { + super(provider); + } + + public Netty4NioServerSocketChannel(SelectorProvider provider, InternetProtocolFamily family) { + super(provider, family); + } + + public Netty4NioServerSocketChannel(ServerSocketChannel channel) { + super(channel); + } + + @Override + protected int doReadMessages(List buf) throws Exception { + SocketChannel ch = SocketUtils.accept(javaChannel()); + + try { + if (ch != null) { + buf.add(new Netty4NioSocketChannel(this, ch)); + return 1; + } + } catch (Throwable t) { + logger.warn("Failed to create a new channel from an accepted socket.", t); + + try { + ch.close(); + } catch (Throwable t2) { + logger.warn("Failed to close a socket.", t2); + } + } + + return 0; + } +} diff --git a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java index e25853d864813..f2f6538d305d9 100644 --- a/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java +++ b/modules/transport-netty4/src/main/java/org/opensearch/transport/NettyAllocator.java @@ -39,7 +39,6 @@ import io.netty.buffer.UnpooledByteBufAllocator; import io.netty.channel.Channel; import io.netty.channel.ServerChannel; -import io.netty.channel.socket.nio.NioServerSocketChannel; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.common.Booleans; @@ -181,7 +180,7 @@ public static Class getServerChannelType() { if (ALLOCATOR instanceof NoDirectBuffers) { return CopyBytesServerSocketChannel.class; } else { - return NioServerSocketChannel.class; + return Netty4NioServerSocketChannel.class; } } diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 
83c10845cd35a..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2f6cb0fd7387c6e0db3b86eef7d8677cea3e88a0 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analysis-icu-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..a49a0749a9e4a --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analysis-icu-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +a7c38619d8f2cc48f792e007aa25b430f4f25698 \ No newline at end of file diff --git a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java index cd2898c9d64b4..ca29492addcfe 100644 --- a/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java +++ b/plugins/analysis-icu/src/main/java/org/opensearch/index/analysis/IcuCollationTokenFilterFactory.java @@ -37,6 +37,7 @@ import java.nio.file.Files; import java.nio.file.InvalidPathException; +import org.apache.logging.log4j.LogManager; import org.apache.lucene.analysis.TokenStream; import org.opensearch.common.io.Streams; import org.opensearch.common.settings.Settings; @@ -80,9 +81,12 @@ public IcuCollationTokenFilterFactory(IndexSettings indexSettings, Environment e collator = new RuleBasedCollator(rules); } catch (Exception e) { if (failureToResolve != null) { - throw new IllegalArgumentException("Failed to resolve collation rules location", failureToResolve); + LogManager.getLogger(IcuCollationTokenFilterFactory.class) + .error("Failed to resolve collation rules location", failureToResolve); + throw new IllegalArgumentException("Failed to resolve collation rules location"); } else { - throw new IllegalArgumentException("Failed to parse collation rules", e); + LogManager.getLogger(IcuCollationTokenFilterFactory.class).error("Failed to parse collation rules", e); + throw new IllegalArgumentException("Failed to parse collation rules"); } } } else { diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 29387f38bc10c..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6aff23715a2fba88d844ac83c61decce8ed480bd \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..709bcf84faf06 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analysis-kuromoji-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +6243383e5fbcf87551ded4c1b48b69a4276bb748 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiPartOfSpeechFilterFactory.java b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiPartOfSpeechFilterFactory.java index fef8d06c466b9..8e9c209ae421d 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiPartOfSpeechFilterFactory.java +++ 
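The IcuCollationTokenFilterFactory change above switches to a log-and-sanitize pattern: the full cause, which may carry file-system detail, stays in the server log, and the exception surfaced to the caller keeps only a stable message. A small sketch of the pattern, assuming log4j-api on the classpath and a hypothetical cause:

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public final class SanitizedThrowDemo {
    private static final Logger logger = LogManager.getLogger(SanitizedThrowDemo.class);

    static void failResolve(Exception cause) {
        // Full stack trace server-side; clients get only the bare message.
        logger.error("Failed to resolve collation rules location", cause);
        throw new IllegalArgumentException("Failed to resolve collation rules location");
    }

    public static void main(String[] args) {
        try {
            failResolve(new java.io.IOException("rules file not found")); // hypothetical cause
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage());
        }
    }
}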
b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiPartOfSpeechFilterFactory.java @@ -49,7 +49,7 @@ public class KuromojiPartOfSpeechFilterFactory extends AbstractTokenFilterFactor public KuromojiPartOfSpeechFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - List<String> wordList = Analysis.getWordList(env, settings, "stoptags"); + List<String> wordList = Analysis.parseWordList(env, settings, "stoptags", s -> s); if (wordList != null) { stopTags.addAll(wordList); } else { diff --git a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java index b5e718eaa6fa0..2939711f6f7e1 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/opensearch/index/analysis/KuromojiTokenizerFactory.java @@ -32,6 +32,8 @@ package org.opensearch.index.analysis; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; @@ -50,6 +52,7 @@ public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { + private static final Logger LOGGER = LogManager.getLogger(KuromojiTokenizerFactory.class); private static final String USER_DICT_PATH_OPTION = "user_dictionary"; private static final String USER_DICT_RULES_OPTION = "user_dictionary_rules"; private static final String NBEST_COST = "nbest_cost"; @@ -74,6 +77,17 @@ public KuromojiTokenizerFactory(IndexSettings indexSettings, Environment env, St discardCompoundToken = settings.getAsBoolean(DISCARD_COMPOUND_TOKEN, false); } + private static String parse(String rule, Set<String> dup) { + String[] values = CSVUtil.parse(rule); + if (values.length == 0) { + throw new IllegalArgumentException("Malformed csv in user dictionary."); + } + if (dup.add(values[0]) == false) { + throw new IllegalArgumentException("Found duplicate term [" + values[0] + "] in user dictionary."); + } + return rule; + } + public static UserDictionary getUserDictionary(Environment env, Settings settings) { if (settings.get(USER_DICT_PATH_OPTION) != null && settings.get(USER_DICT_RULES_OPTION) != null) { throw new IllegalArgumentException( @@ -81,31 +95,26 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting ); } try { - List<String> ruleList = Analysis.getWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, false); + Set<String> dup = new HashSet<>(); + List<String> ruleList = Analysis.parseWordList( + env, + settings, + USER_DICT_PATH_OPTION, + USER_DICT_RULES_OPTION, + s -> parse(s, dup) + ); if (ruleList == null || ruleList.isEmpty()) { return null; } - Set<String> dup = new HashSet<>(); - int lineNum = 0; - for (String line : ruleList) { - // ignore comments - if (line.startsWith("#") == false) { - String[] values = CSVUtil.parse(line); - if (dup.add(values[0]) == false) { - throw new IllegalArgumentException( - "Found duplicate term [" + values[0] + "] in user dictionary " + "at line [" + lineNum + "]" - ); - } - } - ++lineNum; - } + StringBuilder sb = new StringBuilder(); for (String line : ruleList) { sb.append(line).append(System.lineSeparator()); } return UserDictionary.open(new StringReader(sb.toString())); } catch (IOException e) { - throw new
OpenSearchException("failed to load kuromoji user dictionary", e); + LOGGER.error("Failed to load kuromoji user dictionary", e); + throw new OpenSearchException("Failed to load kuromoji user dictionary"); } } diff --git a/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java b/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java index e17658d83a085..03d9df6ebd6b2 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java +++ b/plugins/analysis-kuromoji/src/test/java/org/opensearch/index/analysis/KuromojiAnalysisTests.java @@ -379,6 +379,15 @@ public void testKuromojiAnalyzerInvalidUserDictOption() throws Exception { ); } + public void testKuromojiAnalyzerEmptyDictRule() throws Exception { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.my_analyzer.type", "kuromoji") + .putList("index.analysis.analyzer.my_analyzer.user_dictionary_rules", "\"") + .build(); + RuntimeException exc = expectThrows(RuntimeException.class, () -> createTestAnalysis(settings)); + assertThat(exc.getMessage(), equalTo("Line [1]: Malformed csv in user dictionary.")); + } + public void testKuromojiAnalyzerDuplicateUserDictRule() throws Exception { Settings settings = Settings.builder() .put("index.analysis.analyzer.my_analyzer.type", "kuromoji") @@ -390,8 +399,8 @@ public void testKuromojiAnalyzerDuplicateUserDictRule() throws Exception { "制限スピード,制限スピード,セイゲンスピード,テスト名詞" ) .build(); - IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> createTestAnalysis(settings)); - assertThat(exc.getMessage(), containsString("[制限スピード] in user dictionary at line [3]")); + RuntimeException exc = expectThrows(RuntimeException.class, () -> createTestAnalysis(settings)); + assertThat(exc.getMessage(), equalTo("Line [4]: Found duplicate term [制限スピード] in user dictionary.")); } public void testDiscardCompoundToken() throws Exception { diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 54b451abf5049..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f82d3eba195134f663865e9de3f511e16fbc7351 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analysis-nori-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..0c4d7b7a2755c --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analysis-nori-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +91d1560bc927f1a431bb92e47fda9395d3b3e551 \ No newline at end of file diff --git a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriAnalyzerProvider.java b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriAnalyzerProvider.java index 3dee606185429..e3b1cef6aee8a 100644 --- a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriAnalyzerProvider.java +++ b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriAnalyzerProvider.java @@ -52,7 +52,7 @@ public NoriAnalyzerProvider(IndexSettings indexSettings, Environment env, String super(indexSettings, name, settings); final KoreanTokenizer.DecompoundMode mode = NoriTokenizerFactory.getMode(settings); final UserDictionary 
userDictionary = NoriTokenizerFactory.getUserDictionary(env, settings); - final List<String> tagList = Analysis.getWordList(env, settings, "stoptags"); + final List<String> tagList = Analysis.parseWordList(env, settings, "stoptags", s -> s); final Set<POS.Tag> stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS; analyzer = new KoreanAnalyzer(userDictionary, mode, stopTags, false); } diff --git a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java index 18cbc3c7c153d..5023db50422fc 100644 --- a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java +++ b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriPartOfSpeechStopFilterFactory.java @@ -48,7 +48,7 @@ public class NoriPartOfSpeechStopFilterFactory extends AbstractTokenFilterFactor public NoriPartOfSpeechStopFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); - List<String> tagList = Analysis.getWordList(env, settings, "stoptags"); + List<String> tagList = Analysis.parseWordList(env, settings, "stoptags", s -> s); this.stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS; } diff --git a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriTokenizerFactory.java index 5136277611e3a..9f3183194cdae 100644 --- a/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/opensearch/index/analysis/NoriTokenizerFactory.java @@ -32,6 +32,8 @@ package org.opensearch.index.analysis; +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.Tokenizer; import org.apache.lucene.analysis.ko.KoreanTokenizer; import org.apache.lucene.analysis.ko.dict.UserDictionary; @@ -47,6 +49,7 @@ import java.util.Locale; public class NoriTokenizerFactory extends AbstractTokenizerFactory { + private static final Logger LOGGER = LogManager.getLogger(NoriTokenizerFactory.class); private static final String USER_DICT_PATH_OPTION = "user_dictionary"; private static final String USER_DICT_RULES_OPTION = "user_dictionary_rules"; @@ -67,7 +70,7 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting "It is not allowed to use [" + USER_DICT_PATH_OPTION + "] in conjunction" + " with [" + USER_DICT_RULES_OPTION + "]" ); } - List<String> ruleList = Analysis.getWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, true); + List<String> ruleList = Analysis.parseWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, s -> s); StringBuilder sb = new StringBuilder(); if (ruleList == null || ruleList.isEmpty()) { return null; @@ -78,7 +81,8 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting try (Reader rulesReader = new StringReader(sb.toString())) { return UserDictionary.open(rulesReader); } catch (IOException e) { - throw new OpenSearchException("failed to load nori user dictionary", e); + LOGGER.error("Failed to load nori user dictionary", e); + throw new OpenSearchException("Failed to load nori user dictionary"); } } diff --git
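The kuromoji tests above expect messages like "Line [4]: Found duplicate term [...] in user dictionary.", which implies that Analysis.parseWordList wraps per-rule failures with the offending line number. Its real signature is not shown in this diff, so the following is an assumption-laden standalone sketch of the two pieces working together: a per-rule duplicate check (String.split standing in for Lucene's CSVUtil.parse) and a wrapper that prefixes failures with the 1-based line number.

```java
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;

// Assumption-laden sketch, not the real Analysis.parseWordList: applies a
// per-line parser and prefixes any failure with the offending line number,
// producing messages like "Line [4]: Found duplicate term ...".
public class WordListParseSketch {
    static <T> List<T> parseWordList(List<String> lines, Function<String, T> parser) {
        List<T> out = new ArrayList<>();
        int lineNum = 0;
        for (String line : lines) {
            lineNum++;
            try {
                out.add(parser.apply(line));
            } catch (RuntimeException e) {
                throw new RuntimeException("Line [" + lineNum + "]: " + e.getMessage(), e);
            }
        }
        return out;
    }

    // Per-rule validation in the spirit of the new parse(rule, dup) hook.
    static String validate(String rule, Set<String> seen) {
        String[] values = rule.split(",");
        if (values.length == 0 || values[0].isEmpty()) {
            throw new IllegalArgumentException("Malformed csv in user dictionary.");
        }
        if (seen.add(values[0]) == false) {
            throw new IllegalArgumentException("Found duplicate term [" + values[0] + "] in user dictionary.");
        }
        return rule;
    }

    public static void main(String[] args) {
        Set<String> seen = new HashSet<>();
        List<String> rules = List.of("制限スピード,制限スピード,セイゲンスピード,テスト名詞", "制限スピード,a,b,c");
        try {
            parseWordList(rules, rule -> validate(rule, seen));
        } catch (RuntimeException e) {
            System.out.println(e.getMessage()); // Line [2]: Found duplicate term [制限スピード] in user dictionary.
        }
    }
}
```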
a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 87474064fbe0f..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2af6e1996e696b1721a2ec7382bac9aa5096eecb \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..82524cbdb4ada --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analysis-phonetic-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +26bbfd1a796d62006dff9c7e32d31a0397a8025e \ No newline at end of file diff --git a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java index 19bc27f6e616d..62fb6e22b8950 100644 --- a/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java +++ b/plugins/analysis-phonetic/src/test/java/org/opensearch/index/analysis/AnalysisPhoneticFactoryTests.java @@ -64,7 +64,7 @@ public void testDisallowedWithSynonyms() throws IOException { AnalysisPhoneticPlugin plugin = new AnalysisPhoneticPlugin(); Settings settings = Settings.builder() - .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_1_0_0, Version.CURRENT)) + .put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_2_0_0, Version.CURRENT)) .put("path.home", createTempDir().toString()) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 6d35832a1a643..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ec01d7f91f711abd75b539bb66a437db7cf1ca67 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..af6b600d22090 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analysis-smartcn-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +a1a26c04e24d9a8573e6bd9a0bacad184821dd33 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index f93d1a153cd26..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7041b3fa92b8687a84c4ce666b5718bbbc315db1 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..ea5680869c187 --- 
/dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analysis-stempel-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +19aa9eff0e0671fd91eb435a2e2fa29dec52cf5c \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.4.0-snapshot-ddf0d0a.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.4.0-snapshot-ddf0d0a.jar.sha1 deleted file mode 100644 index 77589a361badf..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.4.0-snapshot-ddf0d0a.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0a5ec9a237c2539e3cbabfadff707252e33b3599 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.5.0-snapshot-a4ef70f.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.5.0-snapshot-a4ef70f.jar.sha1 new file mode 100644 index 0000000000000..4f81941a1746e --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analysis-morfologik-9.5.0-snapshot-a4ef70f.jar.sha1 @@ -0,0 +1 @@ +05ff979dfe3ded901ccd72d5a5d66349286c44bf \ No newline at end of file diff --git a/plugins/discovery-azure-classic/build.gradle b/plugins/discovery-azure-classic/build.gradle index 5755ff55bfff9..c88d19f0e2806 100644 --- a/plugins/discovery-azure-classic/build.gradle +++ b/plugins/discovery-azure-classic/build.gradle @@ -59,7 +59,7 @@ dependencies { api "com.sun.jersey:jersey-client:${versions.jersey}" api "com.sun.jersey:jersey-core:${versions.jersey}" api "com.sun.jersey:jersey-json:${versions.jersey}" - api 'org.codehaus.jettison:jettison:1.5.0' + api "org.codehaus.jettison:jettison:${versions.jettison}" api 'com.sun.xml.bind:jaxb-impl:2.2.3-1' // HACK: javax.xml.bind was removed from default modules in java 9, so we pull the api in here, diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 deleted file mode 100644 index ec93f83474541..0000000000000 --- a/plugins/discovery-azure-classic/licenses/jettison-1.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -933c7df7a4b78c9a9322f431014ea699b1fc0cc0 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 new file mode 100644 index 0000000000000..29227ed427953 --- /dev/null +++ b/plugins/discovery-azure-classic/licenses/jettison-1.5.1.jar.sha1 @@ -0,0 +1 @@ +d8918f348f234f5046bd39ea1ed9fc91deac402f \ No newline at end of file diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index 1766aa14ea9e9..8a7e48fc671ff 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -46,8 +46,6 @@ dependencies { api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" - api "com.fasterxml.jackson.core:jackson-databind:${versions.jackson_databind}" - api "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" } restResources { diff --git a/plugins/discovery-ec2/licenses/jackson-LICENSE b/plugins/discovery-ec2/licenses/jackson-LICENSE deleted file mode 100644 index f5f45d26a49d6..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-LICENSE +++ /dev/null @@ -1,8 +0,0 @@ -This copy of Jackson JSON processor streaming parser/generator is licensed under the -Apache (Software) License, version 2.0 ("the License"). 
-See the License for details about distribution rights, and the -specific rights regarding derivate works. - -You may obtain a copy of the License at: - -http://www.apache.org/licenses/LICENSE-2.0 diff --git a/plugins/discovery-ec2/licenses/jackson-NOTICE b/plugins/discovery-ec2/licenses/jackson-NOTICE deleted file mode 100644 index 4c976b7b4cc58..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-NOTICE +++ /dev/null @@ -1,20 +0,0 @@ -# Jackson JSON processor - -Jackson is a high-performance, Free/Open Source JSON processing library. -It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has -been in development since 2007. -It is currently developed by a community of developers, as well as supported -commercially by FasterXML.com. - -## Licensing - -Jackson core and extension components may licensed under different licenses. -To find the details that apply to this artifact see the accompanying LICENSE file. -For more information, including possible other licensing options, contact -FasterXML.com (http://fasterxml.com). - -## Credits - -A list of contributors may be found from CREDITS file, which is included -in some artifacts (usually source distributions); but is always available -from the source code management (SCM) system project uses. diff --git a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 deleted file mode 100644 index 7e68b8b99757d..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-annotations-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7198b3aac15285a49e218e08441c5f70af00fc51 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 deleted file mode 100644 index fd75028bd141f..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.13.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -56deb9ea2c93a7a556b3afbedd616d342963464e \ No newline at end of file diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java index 8413a750e2741..cb2e28210faf1 100644 --- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsConfig.java @@ -94,8 +94,13 @@ public class ExampleCustomSettingsConfig { private final List<Integer> list; private final String filtered; + /** + * Instantiate this object based on the specified environment. + * + * @param environment The environment including paths to custom setting configuration files + */ public ExampleCustomSettingsConfig(final Environment environment) { - // Elasticsearch config directory + // OpenSearch config directory final Path configDir = environment.configDir(); // Resolve the plugin's custom settings file @@ -121,22 +126,47 @@ public ExampleCustomSettingsConfig(final Environment environment) { assert secured != null; } + /** + * Gets the value of the custom.simple String setting. + * + * @return the custom.simple value + */ public String getSimple() { return simple; } + /** + * Gets the value of the custom.bool boolean setting.
+ * + * @return the custom.bool value + */ public Boolean getBool() { return bool; } + /** + * Gets the value of the custom.validated String setting. + * + * @return the custom.validated value + */ public String getValidated() { return validated; } + /** + * Gets the value of the custom.filtered String setting. + * + * @return the custom.filtered value + */ public String getFiltered() { return filtered; } + /** + * Gets the value of the custom.list list of integers setting. + * + * @return the custom.list value + */ public List<Integer> getList() { return list; } diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java index aa22938c72a01..0b619102c667f 100644 --- a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/ExampleCustomSettingsPlugin.java @@ -42,10 +42,19 @@ import static java.util.stream.Collectors.toList; +/** + * An example plugin that includes custom settings. + */ public class ExampleCustomSettingsPlugin extends Plugin { private final ExampleCustomSettingsConfig config; + /** + * Instantiate this plugin with the specified settings and config path. + * + * @param settings The settings for this plugin. + * @param configPath The path to this plugin's configuration files. + */ public ExampleCustomSettingsPlugin(final Settings settings, final Path configPath) { this.config = new ExampleCustomSettingsConfig(new Environment(settings, configPath)); @@ -53,9 +62,6 @@ public ExampleCustomSettingsPlugin(final Settings settings, final Path configPat assert "secret".equals(config.getFiltered()); } - /** - * @return the plugin's custom settings - */ @Override public List<Setting<?>> getSettings() { return Arrays.asList( diff --git a/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java new file mode 100644 index 0000000000000..5af8654201da2 --- /dev/null +++ b/plugins/examples/custom-settings/src/main/java/org/opensearch/example/customsettings/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Example classes demonstrating the use of custom settings in a plugin. + */ +package org.opensearch.example.customsettings; diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java index 49098ae36e30f..c646592af63cb 100644 --- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/CustomSignificanceHeuristicPlugin.java @@ -44,6 +44,12 @@ * Plugin declaring a custom {@link SignificanceHeuristic}.
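For context on the getSettings() override above: each custom.* key must be declared as a Setting or the node will reject it at startup. A minimal sketch of such declarations using the stock Setting factory methods follows; the Property choices and defaults here are assumptions, not copied from the example plugin.

```java
import java.util.Arrays;
import java.util.List;

import org.opensearch.common.settings.Setting;
import org.opensearch.common.settings.Setting.Property;

// Illustrative only: the kind of Setting declarations that back the
// custom.* keys read by ExampleCustomSettingsConfig.
public final class ExampleSettingsSketch {
    static final Setting<String> SIMPLE = Setting.simpleString("custom.simple", Property.NodeScope);
    static final Setting<Boolean> BOOL = Setting.boolSetting("custom.bool", false, Property.NodeScope);
    static final Setting<List<Integer>> LIST = Setting.listSetting("custom.list", List.of(), Integer::parseInt, Property.NodeScope);

    // What a Plugin#getSettings() override would return for these declarations.
    public static List<Setting<?>> allSettings() {
        return Arrays.asList(SIMPLE, BOOL, LIST);
    }

    private ExampleSettingsSketch() {}
}
```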
*/ public class CustomSignificanceHeuristicPlugin extends Plugin implements SearchPlugin { + + /** + * Instantiate this plugin. + */ + public CustomSignificanceHeuristicPlugin() {}; + @Override public List<SignificanceHeuristicSpec<?>> getSignificanceHeuristics() { return singletonList(new SignificanceHeuristicSpec<>(SimpleHeuristic.NAME, SimpleHeuristic::new, SimpleHeuristic.PARSER)); diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java index 8365a56bcfe4e..9458bf5b75feb 100644 --- a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/SimpleHeuristic.java @@ -44,13 +44,25 @@ * A simple {@linkplain SignificanceHeuristic} used as an example of declaring a custom heuristic. */ public class SimpleHeuristic extends SignificanceHeuristic { + /** + * The name of this NamedWriteable heuristic. + */ public static final String NAME = "simple"; + + /** + * The parser with which to deserialize this object from XContent. + */ public static final ObjectParser<SimpleHeuristic, Void> PARSER = new ObjectParser<>(NAME, SimpleHeuristic::new); + /** + * Instantiates this object. + */ public SimpleHeuristic() {} /** * Read from a stream. + * + * @param in Input to read the value from */ public SimpleHeuristic(StreamInput in) throws IOException { // Nothing to read diff --git a/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java new file mode 100644 index 0000000000000..20809857273c4 --- /dev/null +++ b/plugins/examples/custom-significance-heuristic/src/main/java/org/opensearch/example/customsigheuristic/package-info.java @@ -0,0 +1,12 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + */ + +/** + * Example classes demonstrating the use of a custom significance heuristic. + */ +package org.opensearch.example.customsigheuristic; diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java index da154609e5f2f..05f26a8e401e1 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggester.java @@ -41,8 +41,16 @@ import java.io.IOException; import java.util.Locale; +/** + * A custom suggester supporting suggestion-based search. + */ public class CustomSuggester extends Suggester<CustomSuggestionContext> { + /** + * Instantiate this object.
+ */ + public CustomSuggester() {} + // This is a pretty dumb implementation which returns the original text + fieldName + custom config option + 12 or 123 @Override public Suggest.Suggestion<? extends Suggest.Suggestion.Entry<? extends Suggest.Suggestion.Entry.Option>> innerExecute( diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java index 5706b654ffbde..b71a90e700d21 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggesterPlugin.java @@ -38,7 +38,16 @@ import java.util.Collections; import java.util.List; +/** + * Plugin demonstrating custom suggestion-based search. + */ public class CustomSuggesterPlugin extends Plugin implements SearchPlugin { + + /** + * Instantiate this class. + */ + public CustomSuggesterPlugin() {} + @Override public List<SuggesterSpec<?>> getSuggesters() { return Collections.singletonList( diff --git a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java index 50ee700c3a253..f35fde03d261f 100644 --- a/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java +++ b/plugins/examples/custom-suggester/src/main/java/org/opensearch/example/customsuggester/CustomSuggestion.java @@ -46,19 +46,43 @@ import static org.opensearch.common.xcontent.ConstructingObjectParser.constructorArg; +/** + * The suggestion responses corresponding with the suggestions in the request. + */ public class CustomSuggestion extends Suggest.Suggestion<CustomSuggestion.Entry> { + /** + * An integer representing the type of the suggestion formerly used for internal serialization over the network. + * + * This class is now serialized as a NamedWriteable and this value only remains for backwards compatibility + */ public static final int TYPE = 999; + /** + * A meaningless value used to test that plugin suggesters can add fields to their Suggestion types. + */ public static final ParseField DUMMY = new ParseField("dummy"); private String dummy; + /** + * Instantiate this object with the specified name, size, and value for the configured field. + * + * @param name The name of the suggestion as is defined in the request. + * @param size The suggested term size specified in request, only used for merging shard responses. + * @param dummy The added custom suggestion type. + */ public CustomSuggestion(String name, int size, String dummy) { super(name, size); this.dummy = dummy; } + /** + * Instantiate this object from a stream. + * + * @param in Input to read the value from + * @throws IOException on failure to read the value. + */ public CustomSuggestion(StreamInput in) throws IOException { super(in); dummy = in.readString(); @@ -85,6 +109,8 @@ public int getWriteableType() { * * This can't be serialized to xcontent because Suggestions appear in xcontent as an array of entries, so there is no place * to add a custom field. But we can still use a custom field internally and use it to define a Suggestion's behavior + * + * @return the value. */ public String getDummy() { return dummy; @@ -95,12 +121,23 @@ protected Entry newEntry(StreamInput in) throws IOException { return new Entry(in); } + /** + * Instantiate a CustomSuggestion from XContent.
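CustomSuggestion(StreamInput in) relies on the usual read-what-you-wrote contract for transport serialization. A plain-java sketch of that contract follows; DataInput/DataOutput stand in for OpenSearch's StreamInput/StreamOutput, and the field names are illustrative.

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

// Sketch of the contract behind CustomSuggestion(StreamInput in):
// fields must be read back in exactly the order the writing side emitted them.
public class RoundTripSketch {
    static byte[] write(String name, int size, String dummy) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (DataOutputStream out = new DataOutputStream(bytes)) {
            out.writeUTF(name);  // written first ...
            out.writeInt(size);
            out.writeUTF(dummy); // ... read last
        }
        return bytes.toByteArray();
    }

    public static void main(String[] args) throws IOException {
        byte[] wire = write("my-suggestion", 5, "some-value");
        try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(wire))) {
            System.out.println(in.readUTF() + " / " + in.readInt() + " / " + in.readUTF());
        }
    }
}
```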
+ * + * @param parser The XContent parser to use + * @param name The name of the suggestion + * @return A new CustomSuggestion instance for the specified name. + * @throws IOException on deserialization error. + */ public static CustomSuggestion fromXContent(XContentParser parser, String name) throws IOException { CustomSuggestion suggestion = new CustomSuggestion(name, -1, null); parseEntries(parser, suggestion, Entry::fromXContent); return suggestion; } + /** + * Represents a part from the suggest text with suggested options. + */ public static class Entry extends Suggest.Suggestion.Entry<CustomSuggestion.Entry.Option> { private static final ObjectParser<Entry, Void> PARSER = new ObjectParser<>("CustomSuggestionEntryParser", true, Entry::new); @@ -117,13 +154,30 @@ public static class Entry extends Suggest.Suggestion.Entry otherEntry) { @@ -150,6 +206,8 @@ protected void merge(Suggest.Suggestion.Entry